diff --git a/nix/.buildkite/pipeline.yml b/nix/.buildkite/pipeline.yml new file mode 100644 index 000000000..3175e8d02 --- /dev/null +++ b/nix/.buildkite/pipeline.yml @@ -0,0 +1,36 @@ +steps: + - label: Build stable Linux + command: nix-build release.nix -A dapphub.linux.stable --no-out-link + agents: + linux: 'true' + - label: Build stable Darwin + command: nix-build release.nix -A dapphub.darwin.stable --no-out-link + agents: + mac: 'true' + - wait + - label: Sign stable Linux + command: nix-build release.nix -A dapphub.linux.stable -o nixout && nix sign-paths -r -v -k /var/lib/buildkite-agent/nix-key.sk ./nixout* + agents: + linux: 'true' + - label: Sign stable Darwin + command: nix-build release.nix -A dapphub.darwin.stable -o nixout && nix sign-paths -r -v -k /Users/administrator/nix-key.sk ./nixout* + agents: + mac: 'true' + - wait + - label: Upload stable Linux + command: nix-build release.nix -A dapphub.linux.stable -o nixout && nix copy -v --to s3://dapphub-nix-cache ./nixout* + agents: + linux: 'true' + - label: Upload stable Darwin + command: nix-build release.nix -A dapphub.darwin.stable -o nixout && nix copy -v --to s3://dapphub-nix-cache ./nixout* + agents: + mac: 'true' + - wait + - label: Update channel + command: + - 'tar --exclude-vcs --transform="s|^|nixpkgs-dapphub/|" -cf /tmp/nixexprs.tar *' + - 'xz -f /tmp/nixexprs.tar' + - 'aws s3 cp --acl public-read /tmp/nixexprs.tar.xz s3://dapphub-nix-cache/pkgs/dapphub/nixexprs.tar.xz' + agents: + linux: 'true' + - wait diff --git a/nix/.gitmodules b/nix/.gitmodules new file mode 100644 index 000000000..bdf5cdbac --- /dev/null +++ b/nix/.gitmodules @@ -0,0 +1,76 @@ +[submodule "nixpkgs"] + path = nixpkgs + url = https://github.com/dapphub/nixpkgs + branch = carnix +[submodule "dappsys/ds-auth"] + path = dappsys/ds-auth + url = https://github.com/dapphub/ds-auth + branch = dappnix +[submodule "dappsys/erc20"] + path = dappsys/erc20 + url = https://github.com/dapphub/erc20 + branch = dappnix +[submodule "dappsys/ds-exec"] + path = dappsys/ds-exec + url = https://github.com/dapphub/ds-exec + branch = dappnix +[submodule "dappsys/ds-math"] + path = dappsys/ds-math + url = https://github.com/dapphub/ds-math + branch = dappnix +[submodule "dappsys/ds-note"] + path = dappsys/ds-note + url = https://github.com/dapphub/ds-note + branch = dappnix +[submodule "dappsys/ds-stop"] + path = dappsys/ds-stop + url = https://github.com/dapphub/ds-stop + branch = dappnix +[submodule "dappsys/ds-test"] + path = dappsys/ds-test + url = https://github.com/dapphub/ds-test + branch = dappnix +[submodule "dappsys/ds-warp"] + path = dappsys/ds-warp + url = https://github.com/dapphub/ds-warp + branch = dappnix +[submodule "dappsys/ds-cache"] + path = dappsys/ds-cache + url = https://github.com/dapphub/ds-cache + branch = dappnix +[submodule "dappsys/ds-chief"] + path = dappsys/ds-chief + url = https://github.com/dapphub/ds-chief + branch = dappnix +[submodule "dappsys/ds-group"] + path = dappsys/ds-group + url = https://github.com/dapphub/ds-group + branch = dappnix +[submodule "dappsys/ds-guard"] + path = dappsys/ds-guard + url = https://github.com/dapphub/ds-guard + branch = dappnix +[submodule "dappsys/ds-proxy"] + path = dappsys/ds-proxy + url = https://github.com/dapphub/ds-proxy + branch = dappnix +[submodule "dappsys/ds-roles"] + path = dappsys/ds-roles + url = https://github.com/dapphub/ds-roles + branch = dappnix +[submodule "dappsys/ds-thing"] + path = dappsys/ds-thing + url = https://github.com/dapphub/ds-thing + branch = dappnix +[submodule 
"dappsys/ds-token"] + path = dappsys/ds-token + url = https://github.com/dapphub/ds-token + branch = dappnix +[submodule "dappsys/ds-value"] + path = dappsys/ds-value + url = https://github.com/dapphub/ds-value + branch = dappnix +[submodule "dappsys/ds-vault"] + path = dappsys/ds-vault + url = https://github.com/dapphub/ds-vault + branch = dappnix diff --git a/nix/LICENSE b/nix/LICENSE new file mode 100644 index 000000000..b912c322c --- /dev/null +++ b/nix/LICENSE @@ -0,0 +1,22 @@ +This license ONLY applies to source code of the `overlay` directory, +and not to any of the upstream `nixpkgs` or any of the packages built +by these build specifications. + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/nix/README.md b/nix/README.md new file mode 100644 index 000000000..330597e2b --- /dev/null +++ b/nix/README.md @@ -0,0 +1,8 @@ +# Dapphub's Nixpkgs channel + +This directory can act as a root ``. We use a submodule to +pin a specific version of the upstream, and our `default.nix` loads +that, configured with our overlay. + +You can also use the `overlay` directory as an overlay on whatever +version of nixpkgs you wish. 
diff --git a/nix/bump b/nix/bump new file mode 100755 index 000000000..0e0fa1e86 --- /dev/null +++ b/nix/bump @@ -0,0 +1,3 @@ +#!/usr/bin/env bash +cd "${0%/*}" +nix-shell -p bash nix-prefetch-scripts curl jshon jq --run ./bump.sh diff --git a/nix/bump-one.sh b/nix/bump-one.sh new file mode 100755 index 000000000..9be0f5908 --- /dev/null +++ b/nix/bump-one.sh @@ -0,0 +1,62 @@ +#!/usr/bin/env bash +set -eu + +exec > >(sed "s/^/${1/\//\\\/}: /") 2> >(sed >&2 "s/^/${1/\//\\\/}: /") + +GET() { + curl -s -H "Authorization: token ${GITHUB_TOKEN?Need OAuth token}" "$@" +} + +echo "bumping" +pkg="$1" +name=$(basename "$pkg") +tag=$(GET https://api.github.com/repos/"$pkg"/tags?per_page=1 | jshon -e 0) +version=$(jshon -e name -u <<<"$tag") +taghash=$(jshon -e commit -e sha -u <<<"$tag") +head=$(GET https://api.github.com/repos/"$pkg"/commits?per_page=1 | jshon -e 0) +headhash=$(jshon -e sha -u <<<"$head") + +echo "stable $version, master $headhash" + +tagsha256=$( + nix-prefetch-url \ + --unpack \ + https://github.com/"$pkg"/archive/"$version".tar.gz 2>/dev/null) + +headsha256=$( + nix-prefetch-url \ + --unpack \ + https://github.com/"$pkg"/archive/"$headhash".tar.gz 2>/dev/null) + +tree=$(GET https://api.github.com/repos/"$pkg"/git/trees/"$taghash") +nix=$(jq -r <<<"$tree" '.tree | .[] | select(.path == "default.nix") | .url') +GET "$nix" | jshon -e content -u | base64 -d > overlay/upstream/stable/$name.nix + +tree=$(GET https://api.github.com/repos/"$pkg"/git/trees/"$headhash") +nix=$(jq -r <<<"$tree" '.tree | .[] | select(.path == "default.nix") | .url') +GET "$nix" | jshon -e content -u | base64 -d > overlay/upstream/master/$name.nix + +( + flock 9 || exit 1 + json=$(cat bump.json) + json=$( + jshon <<<"$json" -n {} \ + -s "${version#v}" -i version \ + -n {} \ + -s "$headhash" -i rev \ + -s "$headsha256" -i sha256 \ + -s "$(dirname "$pkg")" -i owner \ + -s "$name" -i repo \ + -i master \ + -n {} \ + -s "$taghash" -i rev \ + -s "$tagsha256" -i sha256 \ + -s "$(dirname "$pkg")" -i owner \ + -s "$name" -i repo \ + -i stable \ + -i "$name" + ) + echo "$json" >bump.json +) 9>/tmp/dapphub-bump.lock + +echo "done" diff --git a/nix/bump.sh b/nix/bump.sh new file mode 100755 index 000000000..91ca860de --- /dev/null +++ b/nix/bump.sh @@ -0,0 +1,26 @@ +#!/usr/bin/env bash +set -e + +mkdir -p overlay/upstream/{master,stable} +echo '{}' >bump.json + +pkgs=( + dapphub/dapp + mbrock/jays + dapphub/hevm + dapphub/seth + dapphub/ethsign + dapphub/token + lessrest/restless-git + dapphub/libethjet + dapphub/libethjet-haskell + mbrock/symbex + makerdao/setzer + makerdao/dai-cli + mbrock/oasis-orders +) + +printf "%s\n" "${pkgs[@]}" | xargs -L 1 --max-procs=8 ./bump-one.sh + +jq --sort-keys . < ./bump.json > overlay/versions.json +rm bump.json diff --git a/nix/default.nix b/nix/default.nix new file mode 100644 index 000000000..1cb1bb0c3 --- /dev/null +++ b/nix/default.nix @@ -0,0 +1,61 @@ +# This file acts like the main file of an entire <nixpkgs>. +# It imports a full nixpkgs from our pinned submodule, +# and extends it with our overlay. +# +# This is meant to be used as a channel. + +{ pkgsPath ? null +, overlays ? [] +, system ? null +}: + +let + ## Horrible; copied from nixpkgs to get extra overlays to work with our overlay.
+ homeDir = builtins.getEnv "HOME"; + try = x: def: let res = builtins.tryEval x; in if res.success then res.value else def; + extra-overlays = with builtins; let + isDir = path: pathExists (path + "/."); + pathOverlays = try <nixpkgs-overlays> ""; + homeOverlaysFile = homeDir + "/.config/nixpkgs/overlays.nix"; + homeOverlaysDir = homeDir + "/.config/nixpkgs/overlays"; + overlays = path: + # check if the path is a directory or a file + if isDir path then + # it's a directory, so the set of overlays from the directory, ordered lexicographically + let content = readDir path; in + map (n: import (path + ("/" + n))) + (builtins.filter (n: builtins.match ".*\\.nix" n != null || pathExists (path + ("/" + n + "/default.nix"))) + (attrNames content)) + else + # it's a file, so the result is the contents of the file itself + import path; + in + if pathOverlays != "" && pathExists pathOverlays then overlays pathOverlays + else if pathExists homeOverlaysFile && pathExists homeOverlaysDir then + throw '' + Nixpkgs overlays can be specified with ${homeOverlaysFile} or ${homeOverlaysDir}, but not both. + Please remove one of them and try again. + '' + else if pathExists homeOverlaysFile then + if isDir homeOverlaysFile then + throw (homeOverlaysFile + " should be a file") + else overlays homeOverlaysFile + else if pathExists homeOverlaysDir then + if !(isDir homeOverlaysDir) then + throw (homeOverlaysDir + " should be a directory") + else overlays homeOverlaysDir + else []; + +in ( + (import ./nixpkgs) ({ + overlays = [(import ./overlay { flavor = "stable"; })] ++ extra-overlays; + } // ( + if system != null then { inherit system; } else {} + )) +) // { + master = (import ./nixpkgs) ({ + overlays = [(import ./overlay { flavor = "master"; })] ++ extra-overlays; + } // ( + if system != null then { inherit system; } else {} + )); +} diff --git a/nix/ethos-iso-image.nix b/nix/ethos-iso-image.nix new file mode 100644 index 000000000..2c1e158a1 --- /dev/null +++ b/nix/ethos-iso-image.nix @@ -0,0 +1,359 @@ +#### (Stolen from Nixpkgs iso-image.nix and modified!) + +# This module creates a bootable ISO image containing the given NixOS +# configuration. The derivation for the ISO image will be placed in +# config.system.build.isoImage. + +{ config, lib, pkgs, ... }: + +with lib; + +let + # Timeout in syslinux is in units of 1/10 of a second. + # 0 is used to disable timeouts. + syslinuxTimeout = if config.boot.loader.timeout == null then + 0 + else + max (config.boot.loader.timeout * 10) 1; + + + max = x: y: if x > y then x else y; + + # The configuration file for syslinux. + + # Notes on syslinux configuration and UNetbootin compatibility: + # * Do not use '/syslinux/syslinux.cfg' as the path for this + # configuration. UNetbootin will not parse the file and use it as-is. + # This results in a broken configuration if the partition label does + # not match the specified config.isoImage.volumeID. For this reason + # we're using '/isolinux/isolinux.cfg'. + # * Use APPEND instead of adding command-line arguments directly after + # the LINUX entries. + # * COM32 entries (chainload, reboot, poweroff) are not recognized. They + # result in incorrect boot entries. + + baseIsolinuxCfg = '' + SERIAL 0 38400 + DEFAULT boot + + LABEL boot + MENU LABEL Ethos + LINUX /boot/bzImage + APPEND init=${config.system.build.toplevel}/init ${toString config.boot.kernelParams} + INITRD /boot/initrd + ''; + + isolinuxCfg = baseIsolinuxCfg; + + # The EFI boot image.
+ efiDir = pkgs.runCommand "efi-directory" {} '' + mkdir -p $out/EFI/boot + cp -v ${pkgs.systemd}/lib/systemd/boot/efi/systemd-boot${targetArch}.efi $out/EFI/boot/boot${targetArch}.efi + mkdir -p $out/loader/entries + + cat << EOF > $out/loader/entries/nixos-iso.conf + title Ethos + linux /boot/bzImage + initrd /boot/initrd + options init=${config.system.build.toplevel}/init ${toString config.boot.kernelParams} + EOF + + cat << EOF > $out/loader/loader.conf + default nixos-iso + EOF + ''; + + efiImg = pkgs.runCommand "efi-image_eltorito" { buildInputs = [ pkgs.mtools pkgs.libfaketime ]; } + # Be careful about determinism: du --apparent-size, + # dates (cp -p, touch, mcopy -m, faketime for label), IDs (mkfs.vfat -i) + '' + mkdir ./contents && cd ./contents + cp -rp "${efiDir}"/* . + mkdir ./boot + cp -p "${config.boot.kernelPackages.kernel}/bzImage" \ + "${config.system.build.initialRamdisk}/initrd" ./boot/ + touch --date=@0 ./* + + usage_size=$(du -sb --apparent-size . | tr -cd '[:digit:]') + # Make the image 110% as big as the files need to make up for FAT overhead + image_size=$(( ($usage_size * 110) / 100 )) + # Make the image fit blocks of 1M + block_size=$((1024*1024)) + image_size=$(( ($image_size / $block_size + 1) * $block_size )) + echo "Usage size: $usage_size" + echo "Image size: $image_size" + truncate --size=$image_size "$out" + ${pkgs.libfaketime}/bin/faketime "2000-01-01 00:00:00" ${pkgs.dosfstools}/sbin/mkfs.vfat -i 12345678 -n EFIBOOT "$out" + mcopy -bpsvm -i "$out" ./* :: + ''; # */ + + targetArch = if pkgs.stdenv.isi686 then + "ia32" + else if pkgs.stdenv.isx86_64 then + "x64" + else + throw "Unsupported architecture"; + +in + +{ + options = { + + isoImage.isoName = mkOption { + default = "${config.isoImage.isoBaseName}.iso"; + description = '' + Name of the generated ISO image file. + ''; + }; + + isoImage.isoBaseName = mkOption { + default = "nixos"; + description = '' + Prefix of the name of the generated ISO image file. + ''; + }; + + isoImage.compressImage = mkOption { + default = false; + description = '' + Whether the ISO image should be compressed using + bzip2. + ''; + }; + + isoImage.volumeID = mkOption { + default = "NIXOS_BOOT_CD"; + description = '' + Specifies the label or volume ID of the generated ISO image. + Note that the label is used by stage 1 of the boot process to + mount the CD, so it should be reasonably distinctive. + ''; + }; + + isoImage.contents = mkOption { + example = literalExample '' + [ { source = pkgs.memtest86 + "/memtest.bin"; + target = "boot/memtest.bin"; + } + ] + ''; + description = '' + This option lists files to be copied to fixed locations in the + generated ISO image. + ''; + }; + + isoImage.storeContents = mkOption { + example = literalExample "[ pkgs.stdenv ]"; + description = '' + This option lists additional derivations to be included in the + Nix store in the generated ISO image. + ''; + }; + + isoImage.includeSystemBuildDependencies = mkOption { + default = false; + description = '' + Set this option to include all the needed sources etc in the + image. It significantly increases image size. Use that when + you want to be able to keep all the sources needed to build your + system or when you are going to install the system on a computer + with slow or non-existent network connection. + ''; + }; + + isoImage.makeEfiBootable = mkOption { + default = false; + description = '' + Whether the ISO image should be an efi-bootable volume. 
+ ''; + }; + + isoImage.makeUsbBootable = mkOption { + default = false; + description = '' + Whether the ISO image should be bootable from CD as well as USB. + ''; + }; + + isoImage.splashImage = mkOption { + default = pkgs.fetchurl { + url = https://raw.githubusercontent.com/NixOS/nixos-artwork/5729ab16c6a5793c10a2913b5a1b3f59b91c36ee/ideas/grub-splash/grub-nixos-1.png; + sha256 = "43fd8ad5decf6c23c87e9026170a13588c2eba249d9013cb9f888da5e2002217"; + }; + description = '' + The splash image to use in the bootloader. + ''; + }; + + isoImage.appendToMenuLabel = mkOption { + default = " Installer"; + example = " Live System"; + description = '' + The string to append after the menu label for the NixOS system. + This will be directly appended (without whitespace) to the NixOS version + string, like for example if it is set to XXX: + + NixOS 99.99-pre666XXX + ''; + }; + + }; + + config = { + + boot.loader.grub.version = 2; + + # Don't build the GRUB menu builder script, since we don't need it + # here and it causes a cyclic dependency. + boot.loader.grub.enable = false; + + # !!! Hack - attributes expected by other modules. + system.boot.loader.kernelFile = "bzImage"; + environment.systemPackages = [ pkgs.grub2 pkgs.grub2_efi pkgs.syslinux ]; + + # In stage 1 of the boot, mount the CD as the root FS by label so + # that we don't need to know its device. We pass the label of the + # root filesystem on the kernel command line, rather than in + # `fileSystems' below. This allows CD-to-USB converters such as + # UNetbootin to rewrite the kernel command line to pass the label or + # UUID of the USB stick. It would be nicer to write + # `root=/dev/disk/by-label/...' here, but UNetbootin doesn't + # recognise that. + boot.kernelParams = + [ "root=LABEL=${config.isoImage.volumeID}" + "boot.shell_on_fail" + ]; + + fileSystems."/" = + { fsType = "tmpfs"; + options = [ "mode=0755" ]; + }; + + # Note that /dev/root is a symlink to the actual root device + # specified on the kernel command line, created in the stage 1 + # init script. + fileSystems."/iso" = + { device = "/dev/root"; + neededForBoot = true; + noCheck = true; + }; + + # In stage 1, mount a tmpfs on top of /nix/store (the squashfs + # image) to make this a live CD. + fileSystems."/nix/.ro-store" = + { fsType = "squashfs"; + device = "/iso/nix-store.squashfs"; + options = [ "loop" ]; + neededForBoot = true; + }; + + fileSystems."/nix/.rw-store" = + { fsType = "tmpfs"; + options = [ "mode=0755" ]; + neededForBoot = true; + }; + + fileSystems."/nix/store" = + { fsType = "unionfs-fuse"; + device = "unionfs"; + options = [ "allow_other" "cow" "nonempty" "chroot=/mnt-root" "max_files=32768" "hide_meta_files" "dirs=/nix/.rw-store=rw:/nix/.ro-store=ro" ]; + }; + + boot.initrd.availableKernelModules = [ "squashfs" "iso9660" "usb-storage" "uas" ]; + + boot.blacklistedKernelModules = [ "nouveau" ]; + + boot.initrd.kernelModules = [ "loop" ]; + + # Closures to be copied to the Nix store on the CD, namely the init + # script and the top-level system configuration directory. + isoImage.storeContents = + [ config.system.build.toplevel ] ++ + optional config.isoImage.includeSystemBuildDependencies + config.system.build.toplevel.drvPath; + + # Create the squashfs image that contains the Nix store. 
+ system.build.squashfsStore = import ./nixpkgs/nixos/lib/make-squashfs.nix { + inherit (pkgs) stdenv squashfsTools perl pathsFromGraph; + storeContents = config.isoImage.storeContents; + }; + + # Individual files to be included on the CD, outside of the Nix + # store on the CD. + isoImage.contents = + [ { source = pkgs.substituteAll { + name = "isolinux.cfg"; + src = pkgs.writeText "isolinux.cfg-in" isolinuxCfg; + bootRoot = "/boot"; + }; + target = "/isolinux/isolinux.cfg"; + } + { source = config.boot.kernelPackages.kernel + "/bzImage"; + target = "/boot/bzImage"; + } + { source = config.system.build.initialRamdisk + "/initrd"; + target = "/boot/initrd"; + } + { source = config.system.build.squashfsStore; + target = "/nix-store.squashfs"; + } + { source = "${pkgs.syslinux}/share/syslinux"; + target = "/isolinux"; + } + { source = pkgs.writeText "version" config.system.nixosVersion; + target = "/version.txt"; + } + ] ++ optionals config.isoImage.makeEfiBootable [ + { source = efiImg; + target = "/boot/efi.img"; + } + { source = "${efiDir}/EFI"; + target = "/EFI"; + } + { source = "${efiDir}/loader"; + target = "/loader"; + } + ] ++ optionals config.boot.loader.grub.memtest86.enable [ + { source = "${pkgs.memtest86plus}/memtest.bin"; + target = "/boot/memtest.bin"; + } + ]; + + boot.loader.timeout = 10; + + # Create the ISO image. + system.build.isoImage = import ./nixpkgs/nixos/lib/make-iso9660-image.nix ({ + inherit (pkgs) stdenv perl pathsFromGraph xorriso syslinux; + + inherit (config.isoImage) isoName compressImage volumeID contents; + + bootable = true; + bootImage = "/isolinux/isolinux.bin"; + } // optionalAttrs config.isoImage.makeUsbBootable { + usbBootable = true; + isohybridMbrImage = "${pkgs.syslinux}/share/syslinux/isohdpfx.bin"; + } // optionalAttrs config.isoImage.makeEfiBootable { + efiBootable = true; + efiBootImage = "boot/efi.img"; + }); + + boot.postBootCommands = + '' + # After booting, register the contents of the Nix store on the + # CD in the Nix database in the tmpfs. + ${config.nix.package.out}/bin/nix-store --load-db < /nix/store/nix-path-registration + + # nixos-rebuild also requires a "system" profile and an + # /etc/NIXOS tag. + touch /etc/NIXOS + ${config.nix.package.out}/bin/nix-env -p /nix/var/nix/profiles/system --set /run/current-system + ''; + + # Add vfat support to the initrd to enable people to copy the + # contents of the CD to a bootable USB stick. + boot.initrd.supportedFilesystems = [ "vfat" ]; + + }; + +} diff --git a/nix/ethos.nix b/nix/ethos.nix new file mode 100644 index 000000000..7c6843235 --- /dev/null +++ b/nix/ethos.nix @@ -0,0 +1,179 @@ +{ hidpi ? false } : + +{ config, pkgs, ... }: let + + usb.ledger.vendor = "2c97"; + +in { + imports = [ + ./ethos-iso-image.nix + ./nixpkgs/nixos/modules/profiles/all-hardware.nix + ]; + + fonts.fontconfig.dpi = if hidpi then 200 else 96; + + isoImage.isoName = if hidpi then "ethos-hidpi.iso" else "ethos.iso"; + isoImage.volumeID = pkgs.lib.substring 0 11 "NIXOS_ISO"; + isoImage.makeEfiBootable = true; + isoImage.makeUsbBootable = true; + + networking.hostName = "ethos"; + + users.extraUsers.ethos = { + isNormalUser = true; + uid = 1000; + extraGroups = ["wheel"]; + }; + + i18n.consoleFont = "sun12x22"; + + environment.systemPackages = with pkgs; [ + ethsign seth bc ethabi qrtx + xorg.xsetroot + ds-chief.vote + mkbip39 + + (bashScript { + name = "battery"; + deps = [upower gnugrep gnused]; + check = false; + text = '' + exec < <(upower -i `upower -e | grep BAT` | grep :. 
| sed 's/^ *//') + declare -A BAT; while IFS=: read k v; do BAT[$k]=`echo $v`; done + time=(''${BAT['time to full']}''${BAT['time to empty']}) + time=''${time[0]}''${time[1]:0:1} + rate=(''${BAT[energy-rate]}) rate=''${rate[0]%.*}''${rate[1]} + percent=''${BAT[percentage]} + state=''${BAT[state]} + echo $percent $state $rate $time + ''; + }) + ]; + + security.sudo.enable = true; + security.sudo.wheelNeedsPassword = false; + services.upower.enable = true; + + # Enable Ledger Nano S support. + services.udev.extraRules = let + usbHook = pkgs.bashScript { + name = "usb-hook"; + deps = [pkgs.ratpoison pkgs.coreutils pkgs.xorg.xsetroot]; + text = '' + export DISPLAY=:0 + if [ -v ID_VENDOR_ID ]; then + if [ "$ID_VENDOR_ID" = ${usb.ledger.vendor} ]; then + if [ "$ACTION" = add ]; then + text="Ledger device connected." + sudo -u ethos xsetroot -solid gold + else + text="Ledger device disconnected." + sudo -u ethos xsetroot -solid indigo + fi + sudo -u ethos ratpoison -c "echo $text" + fi + fi + ''; + }; + in '' + SUBSYSTEM=="usb", ATTRS{idVendor}=="${usb.ledger.vendor}", ATTRS{idProduct}=="0001", MODE="0600", OWNER="ethos" + SUBSYSTEM=="hidraw", ATTRS{idVendor}=="${usb.ledger.vendor}", KERNEL=="hidraw*", MODE="0600", OWNER="ethos" + SUBSYSTEM=="usb", RUN+="${usbHook}/bin/usb-hook" + ''; + + fonts.fontconfig.hinting.enable = false; + fonts.fontconfig.subpixel.rgba = "none"; + fonts.fonts = [pkgs.iosevka-term]; + + services.xserver.enable = true; + services.xserver.libinput.enable = true; + services.xserver.xkbOptions = "ctrl:nocaps"; + services.xserver.displayManager.slim = { + enable = true; + defaultUser = "ethos"; + autoLogin = true; + }; + + services.xserver.desktopManager = { + default = "ethos"; + session = [{ + name = "ethos"; + start = '' + if ${pkgs.usbutils}/bin/lsusb -d ${usb.ledger.vendor}: ; then + xsetroot -solid gold + echo + fi + ${pkgs.ratpoison}/bin/ratpoison -f /etc/ratpoisonrc & + ${pkgs.sxhkd}/bin/sxhkd -c /etc/sxhkdrc & + if ${pkgs.usbutils}/bin/lsusb -d ${usb.ledger.vendor}: ; then + xsetroot -solid gold + else + xsetroot -solid indigo + fi + wait + ''; + }]; + }; + + environment.etc.sxhkdrc.text = '' + XF86MonBrightnessUp + ${pkgs.xlibs.xbacklight}/bin/xbacklight +20 + XF86MonBrightnessDown + ${pkgs.xlibs.xbacklight}/bin/xbacklight =1 + Print + ${pkgs.ratpoison}/bin/ratpoison -c "echo `battery`" + ''; + + environment.etc."ratpoisonrc".text = '' + set font "Iosevka Term-16" + set startupmessage 0 + set bargravity c + set wingravity center + set padding 30 30 30 30 + set border 6 + set fwcolor black + set barpadding 8 4 + bind d exec setxkbmap dvorak + echo Welcome to Ethos. 
+ bind c exec xterm -fa "Iosevka Term" -fs 16 + exec xterm -fa "Iosevka Term" -fs 16 + ''; + + environment.etc."bashrc.local".text = '' + HISTCONTROL=erasedups + HISTSIZE=99999 + [[ $PS1 ]] || return + PS1=$'\[\e[1m\]\h\[\e[0m\]:\$ ' + spaces=$(head -c 16 < /dev/zero | tr '\0' ' ') + cat /etc/ethos-help | sed "s/^/$spaces/" + + if ${pkgs.usbutils}/bin/lsusb -d ${usb.ledger.vendor}: ; then + echo + fi + + set -u # fail on missing variables + ''; + + environment.variables = { + DAI_MULTISIG_ADDRESS = "0x7Bb0b08587b8a6B8945e09F1Baca426558B0f06a"; + MKR_REDEEMER_ADDRESS = "0x642AE78FAfBB8032Da552D619aD43F1D81E4DD7C"; + MKR_TOKEN_ADDRESS = "0x9f8F72aA9304c8B593d555F12eF6589cC3A579A2"; + OLD_MKR_TOKEN_ADDRESS = "0xC66eA802717bFb9833400264Dd12c2bCeAa34a6d"; + }; + + environment.etc."ethos-help".text = '' + + ████████ ██████████ ██ ██ ███████ ████████ + ░██░░░░░ ░░░░░██░░░ ░██ ░██ ██░░░░░██ ██░░░░░░ + ░██ ░██ ░██ ░██ ██ ░░██░██ + ░███████ ░██ ░██████████░██ ░██░█████████ + ░██░░░░ ░██ ░██░░░░░░██░██ ░██░░░░░░░░██ + ░██ ░██ ░██ ░██░░██ ██ ░██ + ░████████ ░██ ░██ ░██ ░░███████ ████████ + ░░░░░░░░ ░░ ░░ ░░ ░░░░░░░ ░░░░░░░░ + + Version 1 ("At Least It's An ETHOS"; signature edition) + Ethereum distribution of GNU/Linux/NixOS by DappHub + + ''; +} diff --git a/nix/known-contracts.nix b/nix/known-contracts.nix new file mode 100644 index 000000000..75b16ee31 --- /dev/null +++ b/nix/known-contracts.nix @@ -0,0 +1,27 @@ +{ + mkr-2016-03 = "0xC66eA802717bFb9833400264Dd12c2bCeAa34a6d"; + mkr-2017-11 = "0x9f8F72aA9304c8B593d555F12eF6589cC3A579A2"; + weth-2016-06 = "0xECF8F87f810EcF450940c9f60066b4a7a501d6A7"; + weth-2017-12 = "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2"; + mkr-redeemer-2017-12 = "0x642AE78FAfBB8032Da552D619aD43F1D81E4DD7C"; + oasis-2017-09 = "0x3Aa927a97594c3ab7d7bf0d47C71c3877D1DE4A1"; + oasis-2017-12 = "0x14FBCA95be7e99C15Cc2996c6C9d841e54B79425"; + sai-2017-07 = "0x59aDCF176ED2f6788A41B8eA4c4904518e62B6A4"; + dai-2017-12 = "0x89d24A6b4CcB1B6fAA2625fE562bDD9a23260359"; + + dai-gem-2017-12 = "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"; + dai-gov-2017-12 = "0x9f8F72aA9304c8B593d555F12eF6589cC3A579A2"; + dai-pip-2017-12 = "0x729D19f657BD0614b4985Cf1D82531c67569197B"; + dai-pep-2017-12 = "0x99041F808D598B782D5a3e498681C2452A31da08"; + dai-pit-2017-12 = "0x69076e44a9C70a67D5b79d95795Aba299083c275"; + dai-adm-2017-12 = "0x8E2a84D6adE1E7ffFEe039A35EF5F19F13057152"; + dai-sai-2017-12 = "0x89d24A6b4CcB1B6fAA2625fE562bDD9a23260359"; + dai-sin-2017-12 = "0x79F6D0f646706E1261aCF0b93DCB864f357d4680"; + dai-skr-2017-12 = "0xf53AD2c6851052A81B42133467480961B2321C09"; + dai-dad-2017-12 = "0x315cBb88168396D12e1a255f9Cb935408fe80710"; + dai-mom-2017-12 = "0xF2C5369cFFb8Ea6284452b0326e326DbFdCb867C"; + dai-vox-2017-12 = "0x9B0F70Df76165442ca6092939132bBAEA77f2d7A"; + dai-tub-2017-12 = "0x448a5065aeBB8E423F0896E6c5D525C040f59af3"; + dai-tap-2017-12 = "0xBda109309f9FafA6Dd6A9CB9f1Df4085B27Ee8eF"; + dai-top-2017-12 = "0x9b0ccf7C8994E19F39b2B4CF708e0A7DF65fA8a3"; +} diff --git a/nix/nixpkgs b/nix/nixpkgs new file mode 160000 index 000000000..27fa09827 --- /dev/null +++ b/nix/nixpkgs @@ -0,0 +1 @@ +Subproject commit 27fa098270a16ad28799dc1229d6f56a046b564a diff --git a/nix/overlay/build-dapp.nix b/nix/overlay/build-dapp.nix new file mode 100644 index 000000000..05bc370d1 --- /dev/null +++ b/nix/overlay/build-dapp.nix @@ -0,0 +1,53 @@ +{ pkgs }: rec { + + remappings = xs: + builtins.foldl' pkgs.lib.mergeAttrs {} + (builtins.map + (x: { + "${x.name}/" = "${x}/src/"; + "${x.name}" = "${x}/src/index.sol"; + } // 
x.remappings) + xs); + + libPaths = xs: + builtins.foldl' pkgs.lib.mergeAttrs {} + (builtins.map + (x: { + "${x.name}" = "${x}/src"; + } // x.libPaths) + xs); + + dappPackage = attrs @ { dependencies ? [], ... }: + pkgs.stdenv.mkDerivation (rec { + buildInputs = [pkgs.dapp pkgs.solc]; + passthru = { + remappings = remappings dependencies; + libPaths = libPaths dependencies; + }; + + REMAPPINGS = + pkgs.lib.mapAttrsToList + (k: v: k + "=" + v) + passthru.remappings; + + LIBSCRIPT = + pkgs.lib.mapAttrsToList + (k: v: '' + ln -s ${v} lib/${k} + '') + passthru.libPaths; + + builder = ./build-dapp.sh; + } // attrs); + + dappsysPackage = { name, deps ? [], rev, sha256 }: + dappPackage { + inherit name; + src = pkgs.fetchFromGitHub { + inherit rev sha256; + owner = "dapphub"; + repo = name; + }; + dependencies = deps; + }; +} diff --git a/nix/overlay/build-dapp.sh b/nix/overlay/build-dapp.sh new file mode 100644 index 000000000..b505351f8 --- /dev/null +++ b/nix/overlay/build-dapp.sh @@ -0,0 +1,26 @@ +source $stdenv/setup +unpackPhase + +jsonopts=--combined-json=abi,bin,bin-runtime,srcmap,srcmap-runtime,ast + +export DAPP_SRC=$src/src +export DAPP_OUT=out + +find "$DAPP_SRC" -name '*.sol' | while read -r x; do + dir=${x%\/*} + dir=${dir#$DAPP_SRC} + dir=${dir#/} + mkdir -p "$DAPP_OUT/$dir" + (set -x; solc --overwrite $REMAPPINGS --abi --bin --bin-runtime = -o "$DAPP_OUT/$dir" "$x") + json_file=$DAPP_OUT/$dir/${x##*/}.json + (set -x; solc $REMAPPINGS $jsonopts = "$x" >"$json_file") +done + +mkdir lib +echo "$LIBSCRIPT" > setup.sh +source setup.sh +dapp test-hevm + +mkdir -p $out/{src,lib,out} +cp -r $src/src $out +cp -r out $out diff --git a/nix/overlay/celf.nix b/nix/overlay/celf.nix new file mode 100644 index 000000000..e7c349de3 --- /dev/null +++ b/nix/overlay/celf.nix @@ -0,0 +1,17 @@ +{ stdenv, fetchFromGitHub, mlton }: + +stdenv.mkDerivation rec { + name = "celf-${version}"; + version = "unstable-2013-07-25"; + src = fetchFromGitHub { + owner = "clf"; + repo = "celf"; + rev = "d61d95900ab316468ae850fa34a2fe9488bc5b59"; + sha256 = "0slrwcxglp0sdbp6wr65cdkl5wcap2i0fqxbwqfi1q3cpb6ph6hq"; + }; + buildInputs = [mlton]; + buildPhase = "make mlton"; + installPhase = '' + mkdir -p $out/bin && cp celf $out/bin + ''; +} diff --git a/nix/overlay/dapp-which.nix b/nix/overlay/dapp-which.nix new file mode 100644 index 000000000..66d35f16b --- /dev/null +++ b/nix/overlay/dapp-which.nix @@ -0,0 +1,35 @@ +{ lib, bashScript, coreutils, gawk }: + +let contracts = import ../known-contracts.nix; + +in bashScript { + name = "dapp-which"; + version = "0"; + deps = [coreutils gawk]; + text = '' + declare -A table + + ${builtins.concatStringsSep "\n" + (lib.mapAttrsToList (k: v: "table[${k}]=\"${v}\"") contracts)} + + if [[ $# = 0 ]]; then + for k in "''${!table[@]}"; do + echo "$k" "''${table[$k]}" + done | sort | awk '{ printf("%32s %s\n", $1, $2) }' + elif [[ $# = 1 ]]; then + if [[ ''${table[$1]+_} ]]; then + echo "''${table[$1]}" + else + for k in "''${!table[@]}"; do + if [[ ''${table[$k],,} = "''${1,,}" ]]; then + echo "$k" + exit + fi + done + echo >&2 "dapp-which: don't know \`$1'" + fi + else + echo >&2 "usage: dapp-which [CONTRACT-ID | ADDRESS]" + fi + ''; +} diff --git a/nix/overlay/dapp/dapp-test-hevm.nix b/nix/overlay/dapp/dapp-test-hevm.nix new file mode 100644 index 000000000..2082e0382 --- /dev/null +++ b/nix/overlay/dapp/dapp-test-hevm.nix @@ -0,0 +1,8 @@ +{ pkgs }: pkgs.bashScript { + name = "dapp2-test-hevm"; + deps = with pkgs; [findutils hevm]; + text = '' + find "''${DAPP_OUT?}" -type f -name 
'*.sol.json' -print0 | + xargs -0 -n1 -I{} hevm dapp-test --json-file={} --dapp-root=. + ''; +} diff --git a/nix/overlay/default.nix b/nix/overlay/default.nix new file mode 100644 index 000000000..2d4fe43b0 --- /dev/null +++ b/nix/overlay/default.nix @@ -0,0 +1,2 @@ +{ flavor }: self: super: +import ./overlay.nix { inherit self super flavor; } diff --git a/nix/overlay/ethabi.nix b/nix/overlay/ethabi.nix new file mode 100644 index 000000000..d6e225775 --- /dev/null +++ b/nix/overlay/ethabi.nix @@ -0,0 +1,370 @@ +{ pkgs }: with pkgs; +let release = true; + verbose = true; + aho_corasick_0_6_3_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "aho-corasick"; + version = "0.6.3"; + sha256 = "1cpqzf6acj8lm06z3f1cg41wn6c2n9l3v49nh0dvimv4055qib6k"; + libName = "aho_corasick"; + crateBin = [ { name = "aho-corasick-dot"; } ]; + inherit dependencies buildDependencies features release verbose; + }; + backtrace_0_3_3_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "backtrace"; + version = "0.3.3"; + sha256 = "0invfdxkj85v8zyrjs3amfxjdk2a36x8irq7wq7kny6q49hh8y0z"; + inherit dependencies buildDependencies features release verbose; + }; + backtrace_sys_0_1_16_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "backtrace-sys"; + version = "0.1.16"; + sha256 = "1cn2c8q3dn06crmnk0p62czkngam4l8nf57wy33nz1y5g25pszwy"; + build = "build.rs"; + inherit dependencies buildDependencies features release verbose; + }; + cc_1_0_1_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "cc"; + version = "1.0.1"; + sha256 = "1nyml8lw1vfjk4ajbcfdpw02fxahxs9m9gpkwiqm4lyka26za0ag"; + inherit dependencies buildDependencies features release verbose; + }; + cfg_if_0_1_2_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "cfg-if"; + version = "0.1.2"; + sha256 = "0x06hvrrqy96m97593823vvxcgvjaxckghwyy2jcyc8qc7c6cyhi"; + inherit dependencies buildDependencies features release verbose; + }; + dbghelp_sys_0_2_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "dbghelp-sys"; + version = "0.2.0"; + sha256 = "0ylpi3bbiy233m57hnisn1df1v0lbl7nsxn34b0anzsgg440hqpq"; + libName = "dbghelp"; + build = "build.rs"; + inherit dependencies buildDependencies features release verbose; + }; + docopt_0_8_1_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "docopt"; + version = "0.8.1"; + sha256 = "0kmqy534qgcc2hh81nd248jmnvdjb5y4wclddd7y2jjm27rzibss"; + crateBin = [ { name = "docopt-wordlist"; path = "src/wordlist.rs"; } ]; + inherit dependencies buildDependencies features release verbose; + }; + dtoa_0_4_2_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "dtoa"; + version = "0.4.2"; + sha256 = "1bxsh6fags7nr36vlz07ik2a1rzyipc8x1y30kjk832hf2pzadmw"; + inherit dependencies buildDependencies features release verbose; + }; + error_chain_0_11_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "error-chain"; + version = "0.11.0"; + sha256 = "19nz17q6dzp0mx2jhh9qbj45gkvvgcl7zq9z2ai5a8ihbisfj6d7"; + inherit dependencies buildDependencies features release verbose; + }; + ethabi_4_1_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "ethabi"; + version = "4.1.0"; + src = ./ethabi; + inherit dependencies buildDependencies features release verbose; + }; + 
ethabi_cli_4_0_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "ethabi-cli"; + version = "4.0.0"; + src = ./cli; + crateBin = [ { name = "ethabi"; path = "src/main.rs"; } ]; + inherit dependencies buildDependencies features release verbose; + }; + ethabi_contract_4_1_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "ethabi-contract"; + version = "4.1.0"; + src = ./contract; + inherit dependencies buildDependencies features release verbose; + }; + ethabi_derive_4_1_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "ethabi-derive"; + version = "4.1.0"; + src = ./derive; + procMacro = true; + inherit dependencies buildDependencies features release verbose; + }; + ethabi_tests_0_1_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "ethabi-tests"; + version = "0.1.0"; + src = ./.; + inherit dependencies buildDependencies features release verbose; + }; + heck_0_2_1_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "heck"; + version = "0.2.1"; + sha256 = "16156shpigdbz1kkykiv0ddsigg4x0571h4psgrfrfbci5h4dcba"; + inherit dependencies buildDependencies features release verbose; + }; + itoa_0_3_4_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "itoa"; + version = "0.3.4"; + sha256 = "1nfkzz6vrgj0d9l3yzjkkkqzdgs68y294fjdbl7jq118qi8xc9d9"; + inherit dependencies buildDependencies features release verbose; + }; + kernel32_sys_0_2_2_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "kernel32-sys"; + version = "0.2.2"; + sha256 = "1lrw1hbinyvr6cp28g60z97w32w8vsk6pahk64pmrv2fmby8srfj"; + libName = "kernel32"; + build = "build.rs"; + inherit dependencies buildDependencies features release verbose; + }; + lazy_static_0_2_9_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "lazy_static"; + version = "0.2.9"; + sha256 = "08ldzr5292y3hvi6l6v8l4i6v95lm1aysmnfln65h10sqrfh6iw7"; + inherit dependencies buildDependencies features release verbose; + }; + libc_0_2_32_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "libc"; + version = "0.2.32"; + sha256 = "1i8njlar6v9qvmkyfvwzhxrvkqw6ijp8fqdnya5csqixxz18a532"; + inherit dependencies buildDependencies features release verbose; + }; + memchr_1_0_1_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "memchr"; + version = "1.0.1"; + sha256 = "071m5y0zm9p1k7pzqm20f44ixvmycf71xsrpayqaypxrjwchnkxm"; + inherit dependencies buildDependencies features release verbose; + }; + num_traits_0_1_40_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "num-traits"; + version = "0.1.40"; + sha256 = "1fr8ghp4i97q3agki54i0hpmqxv3s65i2mqd1pinc7w7arc3fplw"; + inherit dependencies buildDependencies features release verbose; + }; + quote_0_3_15_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "quote"; + version = "0.3.15"; + sha256 = "09il61jv4kd1360spaj46qwyl21fv1qz18fsv2jra8wdnlgl5jsg"; + inherit dependencies buildDependencies features release verbose; + }; + regex_0_2_2_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "regex"; + version = "0.2.2"; + sha256 = "1f1zrrynfylg0vcfyfp60bybq4rp5g1yk2k7lc7fyz7mmc7k2qr7"; + inherit dependencies buildDependencies features 
release verbose; + }; + regex_syntax_0_4_1_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "regex-syntax"; + version = "0.4.1"; + sha256 = "01yrsm68lj86ad1whgg1z95c2pfsvv58fz8qjcgw7mlszc0c08ls"; + inherit dependencies buildDependencies features release verbose; + }; + rustc_demangle_0_1_5_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "rustc-demangle"; + version = "0.1.5"; + sha256 = "096kkcx9j747700fhxj1s4rlwkj21pqjmvj64psdj6bakb2q13nc"; + inherit dependencies buildDependencies features release verbose; + }; + rustc_hex_1_0_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "rustc-hex"; + version = "1.0.0"; + sha256 = "1rvrll1vmsdi09bq4j03vvc44kh92174kq1gkxdiwpc3d41l1r9i"; + inherit dependencies buildDependencies features release verbose; + }; + serde_1_0_15_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "serde"; + version = "1.0.15"; + sha256 = "0pj4qq0is7abcd1jw0q66lw1q583rxljmjrriic7v1i2m5fardq2"; + inherit dependencies buildDependencies features release verbose; + }; + serde_derive_1_0_15_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "serde_derive"; + version = "1.0.15"; + sha256 = "15zp8gp0h8962z40xdzay83p5kd55s24nwhdcp6ab10963lb9blk"; + procMacro = true; + inherit dependencies buildDependencies features release verbose; + }; + serde_derive_internals_0_16_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "serde_derive_internals"; + version = "0.16.0"; + sha256 = "1k96ypwlhnvmaksimkx1pd5rwvjaanfcdzpgndhy994hx03xplhs"; + inherit dependencies buildDependencies features release verbose; + }; + serde_json_1_0_4_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "serde_json"; + version = "1.0.4"; + sha256 = "174cn8v7x42phmd789wsqvw9b1idmpfcpxcbp00pwhnb8l2i6lin"; + inherit dependencies buildDependencies features release verbose; + }; + strsim_0_6_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "strsim"; + version = "0.6.0"; + sha256 = "1lz85l6y68hr62lv4baww29yy7g8pg20dlr0lbaswxmmcb0wl7gd"; + inherit dependencies buildDependencies features release verbose; + }; + syn_0_11_11_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "syn"; + version = "0.11.11"; + sha256 = "0yw8ng7x1dn5a6ykg0ib49y7r9nhzgpiq2989rqdp7rdz3n85502"; + inherit dependencies buildDependencies features release verbose; + }; + synom_0_11_3_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "synom"; + version = "0.11.3"; + sha256 = "1l6d1s9qjfp6ng2s2z8219igvlv7gyk8gby97sdykqc1r93d8rhc"; + inherit dependencies buildDependencies features release verbose; + }; + thread_local_0_3_4_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "thread_local"; + version = "0.3.4"; + sha256 = "1y6cwyhhx2nkz4b3dziwhqdvgq830z8wjp32b40pjd8r0hxqv2jr"; + inherit dependencies buildDependencies features release verbose; + }; + tiny_keccak_1_3_1_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "tiny-keccak"; + version = "1.3.1"; + sha256 = "0sf70d2yq2nb8rxlvjh779lv4xkfb0zwmgmvkqd3ala7grxn6dbh"; + inherit dependencies buildDependencies features release verbose; + }; + unicode_segmentation_1_2_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate 
{ + crateName = "unicode-segmentation"; + version = "1.2.0"; + sha256 = "0yz43x7wrhr3n7a2zsinx3r60yxsdqicg8a5kycyyhdaq1zmiz1y"; + inherit dependencies buildDependencies features release verbose; + }; + unicode_xid_0_0_4_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "unicode-xid"; + version = "0.0.4"; + sha256 = "1dc8wkkcd3s6534s5aw4lbjn8m67flkkbnajp5bl8408wdg8rh9v"; + inherit dependencies buildDependencies features release verbose; + }; + unreachable_1_0_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "unreachable"; + version = "1.0.0"; + sha256 = "1am8czbk5wwr25gbp2zr007744fxjshhdqjz9liz7wl4pnv3whcf"; + inherit dependencies buildDependencies features release verbose; + }; + utf8_ranges_1_0_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "utf8-ranges"; + version = "1.0.0"; + sha256 = "0rzmqprwjv9yp1n0qqgahgm24872x6c0xddfym5pfndy7a36vkn0"; + inherit dependencies buildDependencies features release verbose; + }; + void_1_0_2_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "void"; + version = "1.0.2"; + sha256 = "0h1dm0dx8dhf56a83k68mijyxigqhizpskwxfdrs1drwv2cdclv3"; + inherit dependencies buildDependencies features release verbose; + }; + winapi_0_2_8_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "winapi"; + version = "0.2.8"; + sha256 = "0a45b58ywf12vb7gvj6h3j264nydynmzyqz8d8rqxsj6icqv82as"; + inherit dependencies buildDependencies features release verbose; + }; + winapi_build_0_1_1_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "winapi-build"; + version = "0.1.1"; + sha256 = "1lxlpi87rkhxcwp2ykf1ldw3p108hwm24nywf3jfrvmff4rjhqga"; + libName = "build"; + inherit dependencies buildDependencies features release verbose; + }; + +in +rec { + aho_corasick_0_6_3 = aho_corasick_0_6_3_ { + dependencies = [ memchr_1_0_1 ]; + }; + backtrace_0_3_3 = backtrace_0_3_3_ { + dependencies = [ cfg_if_0_1_2 rustc_demangle_0_1_5 ] + ++ (if (buildPlatform.parsed.kernel.name == "linux" || buildPlatform.parsed.kernel.name == "darwin") && !(buildPlatform.parsed.kernel.name == "emscripten") && !(buildPlatform.parsed.kernel.name == "darwin") && !(buildPlatform.parsed.kernel.name == "ios") then [ backtrace_sys_0_1_16 ] else []) + ++ (if (buildPlatform.parsed.kernel.name == "linux" || buildPlatform.parsed.kernel.name == "darwin") then [ libc_0_2_32 ] else []) + ++ (if buildPlatform.parsed.kernel.name == "windows" then [ dbghelp_sys_0_2_0 kernel32_sys_0_2_2 winapi_0_2_8 ] else []); + features = [ "backtrace-sys" "coresymbolication" "dbghelp" "dbghelp-sys" "dladdr" "kernel32-sys" "libbacktrace" "libunwind" "winapi" ]; + }; + backtrace_sys_0_1_16 = backtrace_sys_0_1_16_ { + dependencies = [ libc_0_2_32 ]; + buildDependencies = [ cc_1_0_1 ]; + }; + cc_1_0_1 = cc_1_0_1_ { + dependencies = []; + }; + cfg_if_0_1_2 = cfg_if_0_1_2_ {}; + dbghelp_sys_0_2_0 = dbghelp_sys_0_2_0_ { + dependencies = [ winapi_0_2_8 ]; + buildDependencies = [ winapi_build_0_1_1 ]; + }; + docopt_0_8_1 = docopt_0_8_1_ { + dependencies = [ lazy_static_0_2_9 regex_0_2_2 serde_1_0_15 serde_derive_1_0_15 strsim_0_6_0 ]; + }; + dtoa_0_4_2 = dtoa_0_4_2_ {}; + error_chain_0_11_0 = error_chain_0_11_0_ { + dependencies = [ backtrace_0_3_3 ]; + features = [ "backtrace" "example_generated" ]; + }; + ethabi_4_1_0 = ethabi_4_1_0_ { + dependencies = [ error_chain_0_11_0 rustc_hex_1_0_0 serde_1_0_15 
serde_derive_1_0_15 serde_json_1_0_4 tiny_keccak_1_3_1 ]; + }; + ethabi_cli_4_0_0 = ethabi_cli_4_0_0_ { + dependencies = [ docopt_0_8_1 error_chain_0_11_0 ethabi_4_1_0 rustc_hex_1_0_0 serde_1_0_15 serde_derive_1_0_15 ]; + }; + ethabi_contract_4_1_0 = ethabi_contract_4_1_0_ {}; + ethabi_derive_4_1_0 = ethabi_derive_4_1_0_ { + dependencies = [ ethabi_4_1_0 heck_0_2_1 quote_0_3_15 syn_0_11_11 ]; + }; + ethabi_tests_0_1_0 = ethabi_tests_0_1_0_ { + dependencies = [ ethabi_4_1_0 ethabi_contract_4_1_0 ethabi_derive_4_1_0 rustc_hex_1_0_0 ]; + }; + heck_0_2_1 = heck_0_2_1_ { + dependencies = [ unicode_segmentation_1_2_0 ]; + }; + itoa_0_3_4 = itoa_0_3_4_ {}; + kernel32_sys_0_2_2 = kernel32_sys_0_2_2_ { + dependencies = [ winapi_0_2_8 ]; + buildDependencies = [ winapi_build_0_1_1 ]; + }; + lazy_static_0_2_9 = lazy_static_0_2_9_ { + dependencies = []; + }; + libc_0_2_32 = libc_0_2_32_ {}; + memchr_1_0_1 = memchr_1_0_1_ { + dependencies = [ libc_0_2_32 ]; + }; + num_traits_0_1_40 = num_traits_0_1_40_ {}; + quote_0_3_15 = quote_0_3_15_ {}; + regex_0_2_2 = regex_0_2_2_ { + dependencies = [ aho_corasick_0_6_3 memchr_1_0_1 regex_syntax_0_4_1 thread_local_0_3_4 utf8_ranges_1_0_0 ]; + }; + regex_syntax_0_4_1 = regex_syntax_0_4_1_ {}; + rustc_demangle_0_1_5 = rustc_demangle_0_1_5_ {}; + rustc_hex_1_0_0 = rustc_hex_1_0_0_ {}; + serde_1_0_15 = serde_1_0_15_ { + features = [ "std" ]; + }; + serde_derive_1_0_15 = serde_derive_1_0_15_ { + dependencies = [ quote_0_3_15 serde_derive_internals_0_16_0 syn_0_11_11 ]; + }; + serde_derive_internals_0_16_0 = serde_derive_internals_0_16_0_ { + dependencies = [ syn_0_11_11 synom_0_11_3 ]; + }; + serde_json_1_0_4 = serde_json_1_0_4_ { + dependencies = [ dtoa_0_4_2 itoa_0_3_4 num_traits_0_1_40 serde_1_0_15 ]; + }; + strsim_0_6_0 = strsim_0_6_0_ {}; + syn_0_11_11 = syn_0_11_11_ { + dependencies = [ quote_0_3_15 synom_0_11_3 unicode_xid_0_0_4 ]; + features = [ "parsing" "printing" "quote" "synom" "unicode-xid" "visit" ]; + }; + synom_0_11_3 = synom_0_11_3_ { + dependencies = [ unicode_xid_0_0_4 ]; + }; + thread_local_0_3_4 = thread_local_0_3_4_ { + dependencies = [ lazy_static_0_2_9 unreachable_1_0_0 ]; + }; + tiny_keccak_1_3_1 = tiny_keccak_1_3_1_ {}; + unicode_segmentation_1_2_0 = unicode_segmentation_1_2_0_ {}; + unicode_xid_0_0_4 = unicode_xid_0_0_4_ {}; + unreachable_1_0_0 = unreachable_1_0_0_ { + dependencies = [ void_1_0_2 ]; + }; + utf8_ranges_1_0_0 = utf8_ranges_1_0_0_ {}; + void_1_0_2 = void_1_0_2_ {}; + winapi_0_2_8 = winapi_0_2_8_ {}; + winapi_build_0_1_1 = winapi_build_0_1_1_ {}; +} diff --git a/nix/overlay/ethabi/.editorconfig b/nix/overlay/ethabi/.editorconfig new file mode 100644 index 000000000..3acd97f33 --- /dev/null +++ b/nix/overlay/ethabi/.editorconfig @@ -0,0 +1,15 @@ +root = true +[*] +indent_style=tab +indent_size=tab +tab_width=4 +end_of_line=lf +charset=utf-8 +trim_trailing_whitespace=true +max_line_length=120 +insert_final_newline=true + +[*.json] +indent_style=space +indent_size=4 + diff --git a/nix/overlay/ethabi/.gitignore b/nix/overlay/ethabi/.gitignore new file mode 100644 index 000000000..6e55ad0a4 --- /dev/null +++ b/nix/overlay/ethabi/.gitignore @@ -0,0 +1,3 @@ +target +*.swp +*.swo diff --git a/nix/overlay/ethabi/.travis.yml b/nix/overlay/ethabi/.travis.yml new file mode 100644 index 000000000..fa2498c10 --- /dev/null +++ b/nix/overlay/ethabi/.travis.yml @@ -0,0 +1,37 @@ +sudo: required + +language: rust + +cache: + - apt + - cargo + +addons: + apt: + packages: + - libcurl4-openssl-dev + - libelf-dev + - libdw-dev + - binutils-dev + - cmake 
+ sources: + - kalakris-cmake + +branches: + only: + - master + +matrix: + fast_finish: false + include: + - rust: stable + +before_script: + - export PATH=$HOME/.cargo/bin:$PATH + - cargo install cargo-travis || echo "cargo-travis already installed" + +script: + - cargo test + +after_success: + - cargo coveralls --exclude-pattern cli/,res/,snap/,target/,tests/,tools/,derive diff --git a/nix/overlay/ethabi/Cargo.lock b/nix/overlay/ethabi/Cargo.lock new file mode 100644 index 000000000..b5e6928a3 --- /dev/null +++ b/nix/overlay/ethabi/Cargo.lock @@ -0,0 +1,348 @@ +[root] +name = "ethabi-tests" +version = "0.1.0" +dependencies = [ + "ethabi 4.1.0", + "ethabi-contract 4.1.0", + "ethabi-derive 4.1.0", + "rustc-hex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "aho-corasick" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "backtrace" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "backtrace-sys 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "dbghelp-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-demangle 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "backtrace-sys" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cc 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "cc" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "cfg-if" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "dbghelp-sys" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "docopt" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "lazy_static 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)", + "strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "dtoa" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "error-chain" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "backtrace 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "ethabi" +version = "4.1.0" +dependencies = [ + "error-chain 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-hex 1.0.0 
(registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "tiny-keccak 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "ethabi-cli" +version = "4.0.0" +dependencies = [ + "docopt 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", + "error-chain 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", + "ethabi 4.1.0", + "rustc-hex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "ethabi-contract" +version = "4.1.0" + +[[package]] +name = "ethabi-derive" +version = "4.1.0" +dependencies = [ + "ethabi 4.1.0", + "heck 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "heck" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "unicode-segmentation 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "itoa" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "kernel32-sys" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "lazy_static" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "libc" +version = "0.2.32" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "memchr" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "num-traits" +version = "0.1.40" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "quote" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "regex" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "aho-corasick 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)", + "memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "regex-syntax 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", + "thread_local 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "regex-syntax" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "rustc-demangle" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "rustc-hex" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "serde" +version = "1.0.15" +source = 
"registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "serde_derive" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive_internals 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "serde_derive_internals" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)", + "synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "serde_json" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "dtoa 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "itoa 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "strsim" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "syn" +version = "0.11.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "synom" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "thread_local" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "lazy_static 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", + "unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "tiny-keccak" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "unicode-segmentation" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "unicode-xid" +version = "0.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "unreachable" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "utf8-ranges" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "void" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "winapi" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "winapi-build" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[metadata] +"checksum aho-corasick 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "500909c4f87a9e52355b26626d890833e9e1d53ac566db76c36faa984b889699" +"checksum backtrace 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = 
"99f2ce94e22b8e664d95c57fff45b98a966c2252b60691d0b7aeeccd88d70983" +"checksum backtrace-sys 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "44585761d6161b0f57afc49482ab6bd067e4edef48c12a152c237eb0203f7661" +"checksum cc 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2c674f0870e3dbd4105184ea035acb1c32c8ae69939c9e228d2b11bbfe29efad" +"checksum cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d4c819a1287eb618df47cc647173c5c4c66ba19d888a6e50d605672aed3140de" +"checksum dbghelp-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "97590ba53bcb8ac28279161ca943a924d1fd4a8fb3fa63302591647c4fc5b850" +"checksum docopt 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3b5b93718f8b3e5544fcc914c43de828ca6c6ace23e0332c6080a2977b49787a" +"checksum dtoa 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "09c3753c3db574d215cba4ea76018483895d7bff25a31b49ba45db21c48e50ab" +"checksum error-chain 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ff511d5dc435d703f4971bc399647c9bc38e20cb41452e3b9feb4765419ed3f3" +"checksum heck 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e0db42a2924a5d7d628685e7a8cf9a2edd628650a9d01efc3dde35d3cdd22451" +"checksum itoa 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8324a32baf01e2ae060e9de58ed0bc2320c9a2833491ee36cd3b4c414de4db8c" +"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" +"checksum lazy_static 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "c9e5e58fa1a4c3b915a561a78a22ee0cac6ab97dca2504428bc1cb074375f8d5" +"checksum libc 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)" = "56cce3130fd040c28df6f495c8492e5ec5808fb4c9093c310df02b0c8f030148" +"checksum memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1dbccc0e46f1ea47b9f17e6d67c5a96bd27030519c519c9c91327e31275a47b4" +"checksum num-traits 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)" = "99843c856d68d8b4313b03a17e33c4bb42ae8f6610ea81b28abe076ac721b9b0" +"checksum quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a" +"checksum regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1731164734096285ec2a5ec7fea5248ae2f5485b3feeb0115af4fda2183b2d1b" +"checksum regex-syntax 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ad890a5eef7953f55427c50575c680c42841653abd2b028b68cd223d157f62db" +"checksum rustc-demangle 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "aee45432acc62f7b9a108cc054142dac51f979e69e71ddce7d6fc7adf29e817e" +"checksum rustc-hex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0ceb8ce7a5e520de349e1fa172baeba4a9e8d5ef06c47471863530bc4972ee1e" +"checksum serde 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)" = "6a7046c9d4c6c522d10b2d098f9bebe2bef227e0e74044d8c1bfcf6b476af799" +"checksum serde_derive 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)" = "1afcaae083fd1c46952a315062326bc9957f182358eb7da03b57ef1c688f7aa9" +"checksum serde_derive_internals 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bd381f6d01a6616cdba8530492d453b7761b456ba974e98768a18cad2cd76f58" +"checksum serde_json 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = 
"ee28c1d94a7745259b767ca9e5b95d55bafbd3205ca3acb978cad84a6ed6bc62" +"checksum strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b4d15c810519a91cf877e7e36e63fe068815c678181439f2f29e2562147c3694" +"checksum syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d3b891b9015c88c576343b9b3e41c2c11a51c219ef067b264bd9c8aa9b441dad" +"checksum synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a393066ed9010ebaed60b9eafa373d4b1baac186dd7e008555b0f702b51945b6" +"checksum thread_local 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "1697c4b57aeeb7a536b647165a2825faddffb1d3bad386d507709bd51a90bb14" +"checksum tiny-keccak 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d52d12ad79e4063e0cb0ca5efa202ed7244b6ce4d25f4d3abe410b2a66128292" +"checksum unicode-segmentation 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a8083c594e02b8ae1654ae26f0ade5158b119bd88ad0e8227a5d8fcd72407946" +"checksum unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f860d7d29cf02cb2f3f359fd35991af3d30bac52c57d265a3c461074cb4dc" +"checksum unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56" +"checksum utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "662fab6525a98beff2921d7f61a39e7d59e0b425ebc7d0d9e66d316e55124122" +"checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" +"checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a" +"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc" diff --git a/nix/overlay/ethabi/Cargo.toml b/nix/overlay/ethabi/Cargo.toml new file mode 100644 index 000000000..61b1df496 --- /dev/null +++ b/nix/overlay/ethabi/Cargo.toml @@ -0,0 +1,2 @@ +[workspace] +members = ["ethabi", "cli", "derive", "contract", "tests"] diff --git a/nix/overlay/ethabi/README.md b/nix/overlay/ethabi/README.md new file mode 100644 index 000000000..33c934e4c --- /dev/null +++ b/nix/overlay/ethabi/README.md @@ -0,0 +1,173 @@ +# ethabi + +[![Build Status][travis-image]][travis-url][![Build coverage][coveralls-image]][coveralls-url] + +[travis-image]: https://travis-ci.org/paritytech/ethabi.svg?branch=master +[travis-url]: https://travis-ci.org/paritytech/ethabi +[coveralls-image]: https://coveralls.io/repos/github/paritytech/ethabi/badge.svg?branch=master +[coveralls-url]: http://coveralls.io/github/paritytech/ethabi?branch=master + +The ABI, Application Binary Interface, is basically how you call functions in a contract and get data back. + +> An ABI determines such details as how functions are called and in which binary format information should be passed from one program component to the next... + +An Ethereum smart contract is bytecode, EVM, on the Ethereum blockchain. Among the EVM, there could be several functions in a contract. An ABI is necessary so that you can specify which function in the contract to invoke, as well as get a guarantee that the function will return data in the format you are expecting. [read more](http://ethereum.stackexchange.com/a/1171/394) + +This library encodes function calls and decodes their output. 
+ +[Documentation](https://docs.rs/ethabi) + +### Installation + +- via cargo + + ``` + cargo install ethabi-cli + ``` + +- via homebrew + + ``` + brew tap paritytech/paritytech + brew install ethabi + ``` + +### Usage + +``` +Ethereum ABI coder. + Copyright 2016-2017 Parity Technologies (UK) Limited + +Usage: + ethabi encode function [-p ]... [-l | --lenient] + ethabi encode params [-v ]... [-l | --lenient] + ethabi decode function + ethabi decode params [-t ]... + ethabi decode log [-l ]... + ethabi -h | --help + +Options: + -h, --help Display this message and exit. + -l, --lenient Allow short representation of input params. + +Commands: + encode Encode ABI call. + decode Decode ABI call result. + function Load function from json ABI file. + params Specify types of input params inline. + log Decode event log. +``` + +### Examples + +``` +ethabi encode params -v bool 1 +``` + +> 0000000000000000000000000000000000000000000000000000000000000001 + +-- + +``` +ethabi encode params -v bool 1 -v string gavofyork -v bool 0 +``` + +> 00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000096761766f66796f726b0000000000000000000000000000000000000000000000 + +-- + +``` +ethabi encode params -v bool[] [1,0,false] +``` + +> 00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 + +-- + +``` +ethabi encode function examples/test.json foo -p 1 +``` + +```json +[{ + "type":"function", + "inputs": [{ + "name":"a", + "type":"bool" + }], + "name":"foo", + "outputs": [] +}] +``` + +> 455575780000000000000000000000000000000000000000000000000000000000000001 + +-- + +``` +ethabi decode params -t bool 0000000000000000000000000000000000000000000000000000000000000001 +``` + +> bool true + +-- + +``` +ethabi decode params -t bool -t string -t bool 00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000096761766f66796f726b0000000000000000000000000000000000000000000000 +``` + +> bool true
+> string gavofyork
+> bool false + +-- + +``` +ethabi decode params -t bool[] 00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 +``` + +> bool[] [true,false,false] + +-- + +``` +ethabi decode function ./examples/foo.json bar 0000000000000000000000000000000000000000000000000000000000000001 +``` + +```json +[{ + "constant":false, + "inputs":[{ + "name":"hello", + "type":"address" + }], + "name":"bar", + "outputs":[{ + "name":"", + "type":"bool" + }], + "type":"function" +}] +``` + +> bool true + +-- + +``` +ethabi decode log ./examples/event.json Event -l 0000000000000000000000000000000000000000000000000000000000000001 0000000000000000000000004444444444444444444444444444444444444444 +``` + +> a bool true
+> b address 4444444444444444444444444444444444444444 + +### Alternative tools + +- [ethabi-js](https://github.com/jacogr/ethabi-js) - javascript port of this library created by [@jacogr](https://github.com/jacogr) + +# Parity toolchain +*this project is a part of the parity toolchain* + +- [**ethkey**](https://github.com/paritytech/ethkey) - Ethereum keys generator and signer. +- [**ethstore**](https://github.com/paritytech/ethstore) - Ethereum key management. +- [**ethabi**](https://github.com/paritytech/ethabi) - Ethereum function calls encoding. diff --git a/nix/overlay/ethabi/cli/Cargo.toml b/nix/overlay/ethabi/cli/Cargo.toml new file mode 100644 index 000000000..86a97091d --- /dev/null +++ b/nix/overlay/ethabi/cli/Cargo.toml @@ -0,0 +1,23 @@ +[package] +name = "ethabi-cli" +version = "4.0.0" +authors = ["Parity Technologies "] +keywords = ["ethereum", "eth", "abi", "solidity", "cli"] +description = "Easy to use cli for conversion of ethereum contract calls to bytecode." +homepage = "https://github.com/paritytech/ethabi" +license = "MIT" + +[dependencies] +rustc-hex = "1.0" +serde = "1.0" +serde_derive = "1.0" +docopt = "0.8.1" +ethabi = { version = "4.1", path = "../ethabi" } +error-chain = { version = "0.11.0", default-features = false } + +[features] +backtrace = ["error-chain/backtrace"] + +[[bin]] +name = "ethabi" +path = "src/main.rs" diff --git a/nix/overlay/ethabi/cli/src/error.rs b/nix/overlay/ethabi/cli/src/error.rs new file mode 100644 index 000000000..432f85830 --- /dev/null +++ b/nix/overlay/ethabi/cli/src/error.rs @@ -0,0 +1,16 @@ +#![allow(unknown_lints)] + +use std::io; +use {ethabi, docopt, hex}; + +error_chain! { + links { + Ethabi(ethabi::Error, ethabi::ErrorKind); + } + + foreign_links { + Io(io::Error); + Docopt(docopt::Error); + Hex(hex::FromHexError); + } +} diff --git a/nix/overlay/ethabi/cli/src/main.rs b/nix/overlay/ethabi/cli/src/main.rs new file mode 100644 index 000000000..02ceeb0d3 --- /dev/null +++ b/nix/overlay/ethabi/cli/src/main.rs @@ -0,0 +1,293 @@ +extern crate docopt; +extern crate rustc_hex as hex; +extern crate serde; +#[macro_use] +extern crate serde_derive; +#[macro_use] +extern crate error_chain; +extern crate ethabi; + +mod error; + +use std::fs::File; +use std::env; +use docopt::Docopt; +use hex::{ToHex, FromHex}; +use ethabi::param_type::{ParamType, Reader}; +use ethabi::token::{Token, Tokenizer, StrictTokenizer, LenientTokenizer, TokenFromHex}; +use ethabi::{encode, decode, Contract, Function, Event}; +use error::{Error, ResultExt}; + +pub const ETHABI: &'static str = r#" +Ethereum ABI coder. + Copyright 2016-2017 Parity Technologies (UK) Limited + +Usage: + ethabi encode function [-p ]... [-l | --lenient] + ethabi encode params [-v ]... [-l | --lenient] + ethabi decode function + ethabi decode params [-t ]... + ethabi decode log [-l ]... + ethabi -h | --help + +Options: + -h, --help Display this message and exit. + -l, --lenient Allow short representation of input params. + +Commands: + encode Encode ABI call. + decode Decode ABI call result. + function Load function from json ABI file. + params Specify types of input params inline. + log Decode event log. 
+"#; + +#[derive(Debug, Deserialize)] +struct Args { + cmd_encode: bool, + cmd_decode: bool, + cmd_function: bool, + cmd_params: bool, + cmd_log: bool, + arg_abi_path: String, + arg_function_name: String, + arg_event_name: String, + arg_param: Vec, + arg_type: Vec, + arg_data: String, + arg_topic: Vec, + flag_lenient: bool, +} + +fn main() { + let result = execute(env::args()); + + match result { + Ok(s) => println!("{}", s), + Err(error) => print_err(error), + } +} + +fn print_err(err: Error) { + let message = err.iter() + .map(|e| e.to_string()) + .filter(|e| !e.is_empty()) + .collect::>().join("\n\nCaused by:\n "); + println!("{}", message); +} + +fn execute(command: I) -> Result where I: IntoIterator, S: AsRef { + let args: Args = Docopt::new(ETHABI) + .and_then(|d| d.argv(command).deserialize())?; + + if args.cmd_encode && args.cmd_function { + encode_input(&args.arg_abi_path, &args.arg_function_name, &args.arg_param, args.flag_lenient) + } else if args.cmd_encode && args.cmd_params { + encode_params(&args.arg_type, &args.arg_param, args.flag_lenient) + } else if args.cmd_decode && args.cmd_function { + decode_call_output(&args.arg_abi_path, &args.arg_function_name, &args.arg_data) + } else if args.cmd_decode && args.cmd_params { + decode_params(&args.arg_type, &args.arg_data) + } else if args.cmd_decode && args.cmd_log { + decode_log(&args.arg_abi_path, &args.arg_event_name, &args.arg_topic, &args.arg_data) + } else { + unreachable!() + } +} + +fn load_function(path: &str, function: &str) -> Result { + let file = File::open(path)?; + let contract = Contract::load(file)?; + let function = contract.function(function)?.clone(); + Ok(function) +} + +fn load_event(path: &str, event: &str) -> Result { + let file = File::open(path)?; + let contract = Contract::load(file)?; + let event = contract.event(event)?.clone(); + Ok(event) +} + +fn parse_tokens(params: &[(ParamType, &str)], lenient: bool) -> Result, Error> { + params.iter() + .map(|&(ref param, value)| match lenient { + true => LenientTokenizer::tokenize(param, value), + false => StrictTokenizer::tokenize(param, value) + }) + .collect::>() + .map_err(From::from) +} + +fn encode_input(path: &str, function: &str, values: &[String], lenient: bool) -> Result { + let function = load_function(path, function)?; + + let params: Vec<_> = function.inputs.iter() + .map(|param| param.kind.clone()) + .zip(values.iter().map(|v| v as &str)) + .collect(); + + let tokens = parse_tokens(¶ms, lenient)?; + let result = function.encode_input(&tokens)?; + + Ok(result.to_hex()) +} + +fn encode_params(types: &[String], values: &[String], lenient: bool) -> Result { + assert_eq!(types.len(), values.len()); + + let types: Vec = types.iter() + .map(|s| Reader::read(s)) + .collect::>()?; + + let params: Vec<_> = types.into_iter() + .zip(values.iter().map(|v| v as &str)) + .collect(); + + let tokens = parse_tokens(¶ms, lenient)?; + let result = encode(&tokens); + + Ok(result.to_hex()) +} + +fn decode_call_output(path: &str, function: &str, data: &str) -> Result { + let function = load_function(path, function)?; + let data = data.from_hex().chain_err(|| "Expected to be hex")?; + let tokens = function.decode_output(&data)?; + let types = function.outputs; + + assert_eq!(types.len(), tokens.len()); + + let result = types.iter() + .zip(tokens.iter()) + .map(|(ty, to)| format!("{} {}", ty.kind, to)) + .collect::>() + .join("\n"); + + Ok(result) +} + +fn decode_params(types: &[String], data: &str) -> Result { + let types: Vec = types.iter() + .map(|s| Reader::read(s)) 
+ .collect::>()?; + + let data = data.from_hex().chain_err(|| "Expected to be hex")?; + + let tokens = decode(&types, &data)?; + + assert_eq!(types.len(), tokens.len()); + + let result = types.iter() + .zip(tokens.iter()) + .map(|(ty, to)| format!("{} {}", ty, to)) + .collect::>() + .join("\n"); + + Ok(result) +} + +fn decode_log(path: &str, event: &str, topics: &[String], data: &str) -> Result { + let event = load_event(path, event)?; + let topics: Vec<[u8; 32]> = topics.into_iter() + .map(|t| t.token_from_hex().map_err(From::from)) + .collect::>()?; + let data = data.from_hex().chain_err(|| "Expected to be hex")?; + let decoded = event.parse_log((topics, data).into())?; + + let result = decoded.params.into_iter() + .map(|log_param| format!("{} {}", log_param.name, log_param.value)) + .collect::>() + .join("\n"); + + Ok(result) +} + +#[cfg(test)] +mod tests { + use super::execute; + + #[test] + fn simple_encode() { + let command = "ethabi encode params -v bool 1".split(" "); + let expected = "0000000000000000000000000000000000000000000000000000000000000001"; + assert_eq!(execute(command).unwrap(), expected); + } + + // TODO: parsing negative values is not working + #[test] + #[ignore] + fn int_encode() { + let command = "ethabi encode paramas -v int256 -2 --lenient".split(" "); + let expected = "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe"; + assert_eq!(execute(command).unwrap(), expected); + } + + #[test] + fn multi_encode() { + let command = "ethabi encode params -v bool 1 -v string gavofyork -v bool 0".split(" "); + let expected = "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000096761766f66796f726b0000000000000000000000000000000000000000000000"; + assert_eq!(execute(command).unwrap(), expected); + } + + #[test] + fn array_encode() { + let command = "ethabi encode params -v bool[] [1,0,false]".split(" "); + let expected = "00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"; + assert_eq!(execute(command).unwrap(), expected); + } + + #[test] + fn abi_encode() { + let command = "ethabi encode function ../res/test.abi foo -p 1".split(" "); + let expected = "455575780000000000000000000000000000000000000000000000000000000000000001"; + assert_eq!(execute(command).unwrap(), expected); + } + + #[test] + fn simple_decode() { + let command = "ethabi decode params -t bool 0000000000000000000000000000000000000000000000000000000000000001".split(" "); + let expected = "bool true"; + assert_eq!(execute(command).unwrap(), expected); + } + + #[test] + fn int_decode() { + let command = "ethabi decode params -t int256 fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe".split(" "); + let expected = "int256 fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe"; + assert_eq!(execute(command).unwrap(), expected); + } + + #[test] + fn multi_decode() { + let command = "ethabi decode params -t bool -t string -t bool 
00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000096761766f66796f726b0000000000000000000000000000000000000000000000".split(" "); + let expected = +"bool true +string gavofyork +bool false"; + assert_eq!(execute(command).unwrap(), expected); + } + + #[test] + fn array_decode() { + let command = "ethabi decode params -t bool[] 00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000".split(" "); + let expected = "bool[] [true,false,false]"; + assert_eq!(execute(command).unwrap(), expected); + } + + #[test] + fn abi_decode() { + let command = "ethabi decode function ../res/foo.abi bar 0000000000000000000000000000000000000000000000000000000000000001".split(" "); + let expected = "bool true"; + assert_eq!(execute(command).unwrap(), expected); + } + + #[test] + fn log_decode() { + let command = "ethabi decode log ../res/event.abi Event -l 0000000000000000000000000000000000000000000000000000000000000001 0000000000000000000000004444444444444444444444444444444444444444".split(" "); + let expected = +"a true +b 4444444444444444444444444444444444444444"; + assert_eq!(execute(command).unwrap(), expected); + } +} diff --git a/nix/overlay/ethabi/contract/Cargo.toml b/nix/overlay/ethabi/contract/Cargo.toml new file mode 100644 index 000000000..8a8424725 --- /dev/null +++ b/nix/overlay/ethabi/contract/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "ethabi-contract" +version = "4.1.0" +authors = ["Parity Technologies "] +homepage = "https://github.com/paritytech/ethabi" +license = "MIT" +keywords = ["ethereum", "eth", "abi", "solidity", "derive"] +description = "Easy to use conversion of ethereum contract calls to bytecode." diff --git a/nix/overlay/ethabi/contract/src/lib.rs b/nix/overlay/ethabi/contract/src/lib.rs new file mode 100644 index 000000000..047a76f22 --- /dev/null +++ b/nix/overlay/ethabi/contract/src/lib.rs @@ -0,0 +1,11 @@ +#[macro_export] +macro_rules! 
use_contract { + ($module: ident, $name: expr, $path: expr) => { + #[allow(dead_code)] + pub mod $module { + #[derive(EthabiContract)] + #[ethabi_contract_options(name = $name, path = $path)] + struct _Dummy; + } + } +} diff --git a/nix/overlay/ethabi/default.nix b/nix/overlay/ethabi/default.nix new file mode 100644 index 000000000..ec56765fb --- /dev/null +++ b/nix/overlay/ethabi/default.nix @@ -0,0 +1,371 @@ +{ pkgs }: +with pkgs; +let release = true; + verbose = true; + aho_corasick_0_6_3_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "aho-corasick"; + version = "0.6.3"; + sha256 = "1cpqzf6acj8lm06z3f1cg41wn6c2n9l3v49nh0dvimv4055qib6k"; + libName = "aho_corasick"; + crateBin = [ { name = "aho-corasick-dot"; } ]; + inherit dependencies buildDependencies features release verbose; + }; + backtrace_0_3_3_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "backtrace"; + version = "0.3.3"; + sha256 = "0invfdxkj85v8zyrjs3amfxjdk2a36x8irq7wq7kny6q49hh8y0z"; + inherit dependencies buildDependencies features release verbose; + }; + backtrace_sys_0_1_16_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "backtrace-sys"; + version = "0.1.16"; + sha256 = "1cn2c8q3dn06crmnk0p62czkngam4l8nf57wy33nz1y5g25pszwy"; + build = "build.rs"; + inherit dependencies buildDependencies features release verbose; + }; + cc_1_0_1_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "cc"; + version = "1.0.1"; + sha256 = "1nyml8lw1vfjk4ajbcfdpw02fxahxs9m9gpkwiqm4lyka26za0ag"; + inherit dependencies buildDependencies features release verbose; + }; + cfg_if_0_1_2_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "cfg-if"; + version = "0.1.2"; + sha256 = "0x06hvrrqy96m97593823vvxcgvjaxckghwyy2jcyc8qc7c6cyhi"; + inherit dependencies buildDependencies features release verbose; + }; + dbghelp_sys_0_2_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "dbghelp-sys"; + version = "0.2.0"; + sha256 = "0ylpi3bbiy233m57hnisn1df1v0lbl7nsxn34b0anzsgg440hqpq"; + libName = "dbghelp"; + build = "build.rs"; + inherit dependencies buildDependencies features release verbose; + }; + docopt_0_8_1_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "docopt"; + version = "0.8.1"; + sha256 = "0kmqy534qgcc2hh81nd248jmnvdjb5y4wclddd7y2jjm27rzibss"; + crateBin = [ { name = "docopt-wordlist"; path = "src/wordlist.rs"; } ]; + inherit dependencies buildDependencies features release verbose; + }; + dtoa_0_4_2_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "dtoa"; + version = "0.4.2"; + sha256 = "1bxsh6fags7nr36vlz07ik2a1rzyipc8x1y30kjk832hf2pzadmw"; + inherit dependencies buildDependencies features release verbose; + }; + error_chain_0_11_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "error-chain"; + version = "0.11.0"; + sha256 = "19nz17q6dzp0mx2jhh9qbj45gkvvgcl7zq9z2ai5a8ihbisfj6d7"; + inherit dependencies buildDependencies features release verbose; + }; + ethabi_4_1_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "ethabi"; + version = "4.1.0"; + src = ./ethabi; + inherit dependencies buildDependencies features release verbose; + }; + ethabi_cli_4_0_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = 
"ethabi-cli"; + version = "4.0.0"; + src = ./cli; + crateBin = [ { name = "ethabi"; path = "src/main.rs"; } ]; + inherit dependencies buildDependencies features release verbose; + }; + ethabi_contract_4_1_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "ethabi-contract"; + version = "4.1.0"; + src = ./contract; + inherit dependencies buildDependencies features release verbose; + }; + ethabi_derive_4_1_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "ethabi-derive"; + version = "4.1.0"; + src = ./derive; + procMacro = true; + inherit dependencies buildDependencies features release verbose; + }; + ethabi_tests_0_1_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "ethabi-tests"; + version = "0.1.0"; + src = ./.; + inherit dependencies buildDependencies features release verbose; + }; + heck_0_2_1_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "heck"; + version = "0.2.1"; + sha256 = "16156shpigdbz1kkykiv0ddsigg4x0571h4psgrfrfbci5h4dcba"; + inherit dependencies buildDependencies features release verbose; + }; + itoa_0_3_4_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "itoa"; + version = "0.3.4"; + sha256 = "1nfkzz6vrgj0d9l3yzjkkkqzdgs68y294fjdbl7jq118qi8xc9d9"; + inherit dependencies buildDependencies features release verbose; + }; + kernel32_sys_0_2_2_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "kernel32-sys"; + version = "0.2.2"; + sha256 = "1lrw1hbinyvr6cp28g60z97w32w8vsk6pahk64pmrv2fmby8srfj"; + libName = "kernel32"; + build = "build.rs"; + inherit dependencies buildDependencies features release verbose; + }; + lazy_static_0_2_9_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "lazy_static"; + version = "0.2.9"; + sha256 = "08ldzr5292y3hvi6l6v8l4i6v95lm1aysmnfln65h10sqrfh6iw7"; + inherit dependencies buildDependencies features release verbose; + }; + libc_0_2_32_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "libc"; + version = "0.2.32"; + sha256 = "1i8njlar6v9qvmkyfvwzhxrvkqw6ijp8fqdnya5csqixxz18a532"; + inherit dependencies buildDependencies features release verbose; + }; + memchr_1_0_1_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "memchr"; + version = "1.0.1"; + sha256 = "071m5y0zm9p1k7pzqm20f44ixvmycf71xsrpayqaypxrjwchnkxm"; + inherit dependencies buildDependencies features release verbose; + }; + num_traits_0_1_40_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "num-traits"; + version = "0.1.40"; + sha256 = "1fr8ghp4i97q3agki54i0hpmqxv3s65i2mqd1pinc7w7arc3fplw"; + inherit dependencies buildDependencies features release verbose; + }; + quote_0_3_15_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "quote"; + version = "0.3.15"; + sha256 = "09il61jv4kd1360spaj46qwyl21fv1qz18fsv2jra8wdnlgl5jsg"; + inherit dependencies buildDependencies features release verbose; + }; + regex_0_2_2_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "regex"; + version = "0.2.2"; + sha256 = "1f1zrrynfylg0vcfyfp60bybq4rp5g1yk2k7lc7fyz7mmc7k2qr7"; + inherit dependencies buildDependencies features release verbose; + }; + regex_syntax_0_4_1_ = { dependencies?[], buildDependencies?[], features?[] }: 
buildRustCrate { + crateName = "regex-syntax"; + version = "0.4.1"; + sha256 = "01yrsm68lj86ad1whgg1z95c2pfsvv58fz8qjcgw7mlszc0c08ls"; + inherit dependencies buildDependencies features release verbose; + }; + rustc_demangle_0_1_5_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "rustc-demangle"; + version = "0.1.5"; + sha256 = "096kkcx9j747700fhxj1s4rlwkj21pqjmvj64psdj6bakb2q13nc"; + inherit dependencies buildDependencies features release verbose; + }; + rustc_hex_1_0_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "rustc-hex"; + version = "1.0.0"; + sha256 = "1rvrll1vmsdi09bq4j03vvc44kh92174kq1gkxdiwpc3d41l1r9i"; + inherit dependencies buildDependencies features release verbose; + }; + serde_1_0_15_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "serde"; + version = "1.0.15"; + sha256 = "0pj4qq0is7abcd1jw0q66lw1q583rxljmjrriic7v1i2m5fardq2"; + inherit dependencies buildDependencies features release verbose; + }; + serde_derive_1_0_15_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "serde_derive"; + version = "1.0.15"; + sha256 = "15zp8gp0h8962z40xdzay83p5kd55s24nwhdcp6ab10963lb9blk"; + procMacro = true; + inherit dependencies buildDependencies features release verbose; + }; + serde_derive_internals_0_16_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "serde_derive_internals"; + version = "0.16.0"; + sha256 = "1k96ypwlhnvmaksimkx1pd5rwvjaanfcdzpgndhy994hx03xplhs"; + inherit dependencies buildDependencies features release verbose; + }; + serde_json_1_0_4_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "serde_json"; + version = "1.0.4"; + sha256 = "174cn8v7x42phmd789wsqvw9b1idmpfcpxcbp00pwhnb8l2i6lin"; + inherit dependencies buildDependencies features release verbose; + }; + strsim_0_6_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "strsim"; + version = "0.6.0"; + sha256 = "1lz85l6y68hr62lv4baww29yy7g8pg20dlr0lbaswxmmcb0wl7gd"; + inherit dependencies buildDependencies features release verbose; + }; + syn_0_11_11_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "syn"; + version = "0.11.11"; + sha256 = "0yw8ng7x1dn5a6ykg0ib49y7r9nhzgpiq2989rqdp7rdz3n85502"; + inherit dependencies buildDependencies features release verbose; + }; + synom_0_11_3_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "synom"; + version = "0.11.3"; + sha256 = "1l6d1s9qjfp6ng2s2z8219igvlv7gyk8gby97sdykqc1r93d8rhc"; + inherit dependencies buildDependencies features release verbose; + }; + thread_local_0_3_4_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "thread_local"; + version = "0.3.4"; + sha256 = "1y6cwyhhx2nkz4b3dziwhqdvgq830z8wjp32b40pjd8r0hxqv2jr"; + inherit dependencies buildDependencies features release verbose; + }; + tiny_keccak_1_3_1_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "tiny-keccak"; + version = "1.3.1"; + sha256 = "0sf70d2yq2nb8rxlvjh779lv4xkfb0zwmgmvkqd3ala7grxn6dbh"; + inherit dependencies buildDependencies features release verbose; + }; + unicode_segmentation_1_2_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "unicode-segmentation"; + version = "1.2.0"; + sha256 = 
"0yz43x7wrhr3n7a2zsinx3r60yxsdqicg8a5kycyyhdaq1zmiz1y"; + inherit dependencies buildDependencies features release verbose; + }; + unicode_xid_0_0_4_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "unicode-xid"; + version = "0.0.4"; + sha256 = "1dc8wkkcd3s6534s5aw4lbjn8m67flkkbnajp5bl8408wdg8rh9v"; + inherit dependencies buildDependencies features release verbose; + }; + unreachable_1_0_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "unreachable"; + version = "1.0.0"; + sha256 = "1am8czbk5wwr25gbp2zr007744fxjshhdqjz9liz7wl4pnv3whcf"; + inherit dependencies buildDependencies features release verbose; + }; + utf8_ranges_1_0_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "utf8-ranges"; + version = "1.0.0"; + sha256 = "0rzmqprwjv9yp1n0qqgahgm24872x6c0xddfym5pfndy7a36vkn0"; + inherit dependencies buildDependencies features release verbose; + }; + void_1_0_2_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "void"; + version = "1.0.2"; + sha256 = "0h1dm0dx8dhf56a83k68mijyxigqhizpskwxfdrs1drwv2cdclv3"; + inherit dependencies buildDependencies features release verbose; + }; + winapi_0_2_8_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "winapi"; + version = "0.2.8"; + sha256 = "0a45b58ywf12vb7gvj6h3j264nydynmzyqz8d8rqxsj6icqv82as"; + inherit dependencies buildDependencies features release verbose; + }; + winapi_build_0_1_1_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "winapi-build"; + version = "0.1.1"; + sha256 = "1lxlpi87rkhxcwp2ykf1ldw3p108hwm24nywf3jfrvmff4rjhqga"; + libName = "build"; + inherit dependencies buildDependencies features release verbose; + }; + +in +rec { + aho_corasick_0_6_3 = aho_corasick_0_6_3_ { + dependencies = [ memchr_1_0_1 ]; + }; + backtrace_0_3_3 = backtrace_0_3_3_ { + dependencies = [ cfg_if_0_1_2 rustc_demangle_0_1_5 ] + ++ (if (buildPlatform.parsed.kernel.name == "linux" || buildPlatform.parsed.kernel.name == "darwin") && !(buildPlatform.parsed.kernel.name == "emscripten") && !(buildPlatform.parsed.kernel.name == "darwin") && !(buildPlatform.parsed.kernel.name == "ios") then [ backtrace_sys_0_1_16 ] else []) + ++ (if (buildPlatform.parsed.kernel.name == "linux" || buildPlatform.parsed.kernel.name == "darwin") then [ libc_0_2_32 ] else []) + ++ (if buildPlatform.parsed.kernel.name == "windows" then [ dbghelp_sys_0_2_0 kernel32_sys_0_2_2 winapi_0_2_8 ] else []); + features = [ "backtrace-sys" "coresymbolication" "dbghelp" "dbghelp-sys" "dladdr" "kernel32-sys" "libbacktrace" "libunwind" "winapi" ]; + }; + backtrace_sys_0_1_16 = backtrace_sys_0_1_16_ { + dependencies = [ libc_0_2_32 ]; + buildDependencies = [ cc_1_0_1 ]; + }; + cc_1_0_1 = cc_1_0_1_ { + dependencies = []; + }; + cfg_if_0_1_2 = cfg_if_0_1_2_ {}; + dbghelp_sys_0_2_0 = dbghelp_sys_0_2_0_ { + dependencies = [ winapi_0_2_8 ]; + buildDependencies = [ winapi_build_0_1_1 ]; + }; + docopt_0_8_1 = docopt_0_8_1_ { + dependencies = [ lazy_static_0_2_9 regex_0_2_2 serde_1_0_15 serde_derive_1_0_15 strsim_0_6_0 ]; + }; + dtoa_0_4_2 = dtoa_0_4_2_ {}; + error_chain_0_11_0 = error_chain_0_11_0_ { + dependencies = [ backtrace_0_3_3 ]; + features = [ "backtrace" "example_generated" ]; + }; + ethabi_4_1_0 = ethabi_4_1_0_ { + dependencies = [ error_chain_0_11_0 rustc_hex_1_0_0 serde_1_0_15 serde_derive_1_0_15 serde_json_1_0_4 tiny_keccak_1_3_1 ]; + }; + ethabi_cli_4_0_0 = 
ethabi_cli_4_0_0_ { + dependencies = [ docopt_0_8_1 error_chain_0_11_0 ethabi_4_1_0 rustc_hex_1_0_0 serde_1_0_15 serde_derive_1_0_15 ]; + }; + ethabi_contract_4_1_0 = ethabi_contract_4_1_0_ {}; + ethabi_derive_4_1_0 = ethabi_derive_4_1_0_ { + dependencies = [ ethabi_4_1_0 heck_0_2_1 quote_0_3_15 syn_0_11_11 ]; + }; + ethabi_tests_0_1_0 = ethabi_tests_0_1_0_ { + dependencies = [ ethabi_4_1_0 ethabi_contract_4_1_0 ethabi_derive_4_1_0 rustc_hex_1_0_0 ]; + }; + heck_0_2_1 = heck_0_2_1_ { + dependencies = [ unicode_segmentation_1_2_0 ]; + }; + itoa_0_3_4 = itoa_0_3_4_ {}; + kernel32_sys_0_2_2 = kernel32_sys_0_2_2_ { + dependencies = [ winapi_0_2_8 ]; + buildDependencies = [ winapi_build_0_1_1 ]; + }; + lazy_static_0_2_9 = lazy_static_0_2_9_ { + dependencies = []; + }; + libc_0_2_32 = libc_0_2_32_ {}; + memchr_1_0_1 = memchr_1_0_1_ { + dependencies = [ libc_0_2_32 ]; + }; + num_traits_0_1_40 = num_traits_0_1_40_ {}; + quote_0_3_15 = quote_0_3_15_ {}; + regex_0_2_2 = regex_0_2_2_ { + dependencies = [ aho_corasick_0_6_3 memchr_1_0_1 regex_syntax_0_4_1 thread_local_0_3_4 utf8_ranges_1_0_0 ]; + }; + regex_syntax_0_4_1 = regex_syntax_0_4_1_ {}; + rustc_demangle_0_1_5 = rustc_demangle_0_1_5_ {}; + rustc_hex_1_0_0 = rustc_hex_1_0_0_ {}; + serde_1_0_15 = serde_1_0_15_ { + features = [ "std" ]; + }; + serde_derive_1_0_15 = serde_derive_1_0_15_ { + dependencies = [ quote_0_3_15 serde_derive_internals_0_16_0 syn_0_11_11 ]; + }; + serde_derive_internals_0_16_0 = serde_derive_internals_0_16_0_ { + dependencies = [ syn_0_11_11 synom_0_11_3 ]; + }; + serde_json_1_0_4 = serde_json_1_0_4_ { + dependencies = [ dtoa_0_4_2 itoa_0_3_4 num_traits_0_1_40 serde_1_0_15 ]; + }; + strsim_0_6_0 = strsim_0_6_0_ {}; + syn_0_11_11 = syn_0_11_11_ { + dependencies = [ quote_0_3_15 synom_0_11_3 unicode_xid_0_0_4 ]; + features = [ "parsing" "printing" "quote" "synom" "unicode-xid" "visit" ]; + }; + synom_0_11_3 = synom_0_11_3_ { + dependencies = [ unicode_xid_0_0_4 ]; + }; + thread_local_0_3_4 = thread_local_0_3_4_ { + dependencies = [ lazy_static_0_2_9 unreachable_1_0_0 ]; + }; + tiny_keccak_1_3_1 = tiny_keccak_1_3_1_ {}; + unicode_segmentation_1_2_0 = unicode_segmentation_1_2_0_ {}; + unicode_xid_0_0_4 = unicode_xid_0_0_4_ {}; + unreachable_1_0_0 = unreachable_1_0_0_ { + dependencies = [ void_1_0_2 ]; + }; + utf8_ranges_1_0_0 = utf8_ranges_1_0_0_ {}; + void_1_0_2 = void_1_0_2_ {}; + winapi_0_2_8 = winapi_0_2_8_ {}; + winapi_build_0_1_1 = winapi_build_0_1_1_ {}; +} diff --git a/nix/overlay/ethabi/derive/Cargo.toml b/nix/overlay/ethabi/derive/Cargo.toml new file mode 100644 index 000000000..87cb3b2b1 --- /dev/null +++ b/nix/overlay/ethabi/derive/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "ethabi-derive" +version = "4.1.0" +authors = ["Parity Technologies "] +homepage = "https://github.com/paritytech/ethabi" +license = "MIT" +keywords = ["ethereum", "eth", "abi", "solidity", "derive"] +description = "Easy to use conversion of ethereum contract calls to bytecode." 
+ +[lib] +proc-macro = true + +[dependencies] +ethabi = { path = "../ethabi", version = "4.1" } +heck = "0.2" +syn = "0.11.11" +quote = "0.3.15" diff --git a/nix/overlay/ethabi/derive/src/lib.rs b/nix/overlay/ethabi/derive/src/lib.rs new file mode 100644 index 000000000..d45d619e2 --- /dev/null +++ b/nix/overlay/ethabi/derive/src/lib.rs @@ -0,0 +1,673 @@ +#![recursion_limit="256"] + +extern crate proc_macro; +extern crate syn; +#[macro_use] +extern crate quote; +extern crate heck; +extern crate ethabi; + +use std::{env, fs}; +use std::path::PathBuf; +use proc_macro::TokenStream; +use heck::{SnakeCase, CamelCase}; +use ethabi::{Result, ResultExt, Contract, Event, Function, ParamType, Constructor}; + +const ERROR_MSG: &'static str = "`derive(EthabiContract)` failed"; + +#[proc_macro_derive(EthabiContract, attributes(ethabi_contract_options))] +pub fn ethabi_derive(input: TokenStream) -> TokenStream { + let s = input.to_string(); + let ast = syn::parse_derive_input(&s).expect(ERROR_MSG); + let gen = impl_ethabi_derive(&ast).expect(ERROR_MSG); + gen.parse().expect(ERROR_MSG) +} + +fn impl_ethabi_derive(ast: &syn::DeriveInput) -> Result { + let options = get_options(&ast.attrs, "ethabi_contract_options")?; + let path = get_option(&options, "path")?; + let normalized_path = normalize_path(path)?; + let source_file = fs::File::open(&normalized_path) + .chain_err(|| format!("Cannot load contract abi from `{}`", normalized_path.display()))?; + let contract = Contract::load(source_file)?; + + let functions: Vec<_> = contract.functions().map(impl_contract_function).collect(); + let events_impl: Vec<_> = contract.events().map(impl_contract_event).collect(); + let constructor_impl = contract.constructor.as_ref().map(impl_contract_constructor); + let logs_structs: Vec<_> = contract.events().map(declare_logs).collect(); + let events_structs: Vec<_> = contract.events().map(declare_events).collect(); + let func_structs: Vec<_> = contract.functions().map(declare_functions).collect(); + + let name = get_option(&options, "name")?; + let name = syn::Ident::new(name); + let functions_name = syn::Ident::new(format!("{}Functions", name)); + let events_name = syn::Ident::new(format!("{}Events", name)); + + let events_and_logs_quote = if events_structs.is_empty() { + quote! {} + } else { + quote! { + pub mod events { + use ethabi; + + #(#events_structs)* + } + + pub mod logs { + use ethabi; + + #(#logs_structs)* + } + + pub struct #events_name { + } + + impl #events_name { + #(#events_impl)* + } + + impl #name { + pub fn events(&self) -> #events_name { + #events_name { + } + } + } + } + }; + + let functions_quote = if func_structs.is_empty() { + quote! {} + } else { + quote! { + pub mod functions { + use ethabi; + + #(#func_structs)* + } + + pub struct #functions_name { + } + impl #functions_name { + #(#functions)* + } + impl #name { + pub fn functions(&self) -> #functions_name { + #functions_name {} + } + } + + } + }; + + let result = quote! 
{ + #[allow(unused_imports)] + use ethabi; // Not used if no constructor + + #[allow(dead_code)] + // Not used in Constructor contract for example + const INTERNAL_ERR: &'static str = "`ethabi_derive` internal error"; + + /// Contract + pub struct #name { + } + + impl Default for #name { + fn default() -> Self { + #name { + } + } + } + + impl #name { + #constructor_impl + } + + #events_and_logs_quote + + #functions_quote + }; + + Ok(result) +} + +fn get_options(attrs: &[syn::Attribute], name: &str) -> Result> { + let options = attrs.iter().find(|a| a.name() == name).map(|a| &a.value); + match options { + Some(&syn::MetaItem::List(_, ref options)) => { + options.iter().map(|o| match *o { + syn::NestedMetaItem::MetaItem(ref m) => Ok(m.clone()), + syn::NestedMetaItem::Literal(ref lit) => Err(format!("Unexpected meta item {:?}", lit).into()) + }).collect::>>() + }, + Some(e) => Err(format!("Unexpected meta item {:?}", e).into()), + None => Ok(vec![]), + } +} + +fn get_option<'a>(options: &'a [syn::MetaItem], name: &str) -> Result<&'a str> { + let item = options.iter().find(|a| a.name() == name).chain_err(|| format!("Expected to find option {}", name))?; + str_value_of_meta_item(item, name) +} + +fn str_value_of_meta_item<'a>(item: &'a syn::MetaItem, name: &str) -> Result<&'a str> { + match *item { + syn::MetaItem::NameValue(_, syn::Lit::Str(ref value, _)) => Ok(&*value), + _ => Err(format!(r#"`{}` must be in the form `#[{}="something"]`"#, name, name).into()), + } +} + +fn normalize_path(relative_path: &str) -> Result { + // workaround for https://github.com/rust-lang/rust/issues/43860 + let cargo_toml_directory = env::var("CARGO_MANIFEST_DIR").chain_err(|| "Cannot find manifest file")?; + let mut path: PathBuf = cargo_toml_directory.into(); + path.push(relative_path); + Ok(path) +} + +fn impl_contract_function(function: &Function) -> quote::Tokens { + let name = syn::Ident::new(function.name.to_snake_case()); + let function_name = syn::Ident::new(function.name.to_camel_case()); + + quote! { + pub fn #name(&self) -> functions::#function_name { + functions::#function_name::default() + } + } +} + +fn to_syntax_string(param_type : ðabi::ParamType) -> quote::Tokens { + match *param_type { + ParamType::Address => quote! { ethabi::ParamType::Address }, + ParamType::Bytes => quote! { ethabi::ParamType::Address }, + ParamType::Int(x) => quote! { ethabi::ParamType::Int(#x) }, + ParamType::Uint(x) => quote! { ethabi::ParamType::Uint(#x) }, + ParamType::Bool => quote! { ethabi::ParamType::Bool }, + ParamType::String => quote! { ethabi::ParamType::String }, + ParamType::Array(ref param_type) => { + let param_type_quote = to_syntax_string(param_type); + quote! { ethabi::ParamType::Array(Box::new(#param_type_quote)) } + }, + ParamType::FixedBytes(x) => quote! { ethabi::ParamType::FixedBytes(#x) }, + ParamType::FixedArray(ref param_type, ref x) => { + let param_type_quote = to_syntax_string(param_type); + quote! 
{ ethabi::ParamType::FixedArray(Box::new(#param_type_quote), #x) } + } + } +} + +fn rust_type(input: &ParamType) -> syn::Ident { + match *input { + ParamType::Address => "ethabi::Address".into(), + ParamType::Bytes => "ethabi::Bytes".into(), + ParamType::FixedBytes(32) => "ethabi::Hash".into(), + ParamType::FixedBytes(size) => format!("[u8; {}]", size).into(), + ParamType::Int(_) => "ethabi::Int".into(), + ParamType::Uint(_) => "ethabi::Uint".into(), + ParamType::Bool => "bool".into(), + ParamType::String => "String".into(), + ParamType::Array(ref kind) => format!("Vec<{}>", rust_type(&*kind)).into(), + ParamType::FixedArray(ref kind, size) => format!("[{}; {}]", rust_type(&*kind), size).into(), + } +} + +fn template_param_type(input: &ParamType, index: usize) -> syn::Ident { + match *input { + ParamType::Address => format!("T{}: Into", index).into(), + ParamType::Bytes => format!("T{}: Into", index).into(), + ParamType::FixedBytes(32) => format!("T{}: Into", index).into(), + ParamType::FixedBytes(size) => format!("T{}: Into<[u8; {}]>", index, size).into(), + ParamType::Int(_) => format!("T{}: Into", index).into(), + ParamType::Uint(_) => format!("T{}: Into", index).into(), + ParamType::Bool => format!("T{}: Into", index).into(), + ParamType::String => format!("T{}: Into", index).into(), + ParamType::Array(ref kind) => format!("T{}: IntoIterator, U{}: Into<{}>", index, index, index, rust_type(&*kind)).into(), + ParamType::FixedArray(ref kind, size) => format!("T{}: Into<[U{}; {}]>, U{}: Into<{}>", index, index, size, index, rust_type(&*kind)).into(), + } +} + +fn from_template_param(input: &ParamType, name: &syn::Ident) -> syn::Ident { + match *input { + ParamType::Array(_) => format!("{}.into_iter().map(Into::into).collect::>()", name).into(), + ParamType::FixedArray(_, _) => format!("(Box::new({}.into()) as Box<[_]>).into_vec().into_iter().map(Into::into).collect::>()", name).into(), + _ => format!("{}.into()", name).into(), + } +} + +fn to_token(name: &syn::Ident, kind: &ParamType) -> quote::Tokens { + match *kind { + ParamType::Address => quote! { ethabi::Token::Address(#name) }, + ParamType::Bytes => quote! { ethabi::Token::Bytes(#name) }, + ParamType::FixedBytes(_) => quote! { ethabi::Token::FixedBytes(#name.to_vec()) }, + ParamType::Int(_) => quote! { ethabi::Token::Int(#name) }, + ParamType::Uint(_) => quote! { ethabi::Token::Uint(#name) }, + ParamType::Bool => quote! { ethabi::Token::Bool(#name) }, + ParamType::String => quote! { ethabi::Token::String(#name) }, + ParamType::Array(ref kind) => { + let inner_name: syn::Ident = "inner".into(); + let inner_loop = to_token(&inner_name, kind); + quote! { + // note the double {{ + { + let v = #name.into_iter().map(|#inner_name| #inner_loop).collect(); + ethabi::Token::Array(v) + } + } + } + ParamType::FixedArray(ref kind, _) => { + let inner_name: syn::Ident = "inner".into(); + let inner_loop = to_token(&inner_name, kind); + quote! { + // note the double {{ + { + let v = #name.into_iter().map(|#inner_name| #inner_loop).collect(); + ethabi::Token::FixedArray(v) + } + } + }, + } +} + +fn from_token(kind: &ParamType, token: &syn::Ident) -> quote::Tokens { + match *kind { + ParamType::Address => quote! { #token.to_address().expect(super::INTERNAL_ERR) }, + ParamType::Bytes => quote! { #token.to_bytes().expect(super::INTERNAL_ERR) }, + ParamType::FixedBytes(size) => { + let size: syn::Ident = format!("{}", size).into(); + quote! 
{ + { + let mut result = [0u8; #size]; + let v = #token.to_fixed_bytes().expect(super::INTERNAL_ERR); + result.copy_from_slice(&v); + result + } + } + }, + ParamType::Int(_) => quote! { #token.to_int().expect(super::INTERNAL_ERR) }, + ParamType::Uint(_) => quote! { #token.to_uint().expect(super::INTERNAL_ERR) }, + ParamType::Bool => quote! { #token.to_bool().expect(super::INTERNAL_ERR) }, + ParamType::String => quote! { #token.to_string().expect(super::INTERNAL_ERR) }, + ParamType::Array(ref kind) => { + let inner: syn::Ident = "inner".into(); + let inner_loop = from_token(kind, &inner); + quote! { + #token.to_array().expect(super::INTERNAL_ERR).into_iter() + .map(|#inner| #inner_loop) + .collect() + } + }, + ParamType::FixedArray(ref kind, size) => { + let inner: syn::Ident = "inner".into(); + let inner_loop = from_token(kind, &inner); + let to_array = vec![quote! { iter.next() }; size]; + quote! { + { + let iter = #token.to_array().expect(super::INTERNAL_ERR).into_iter() + .map(|#inner| #inner_loop); + [#(#to_array),*] + } + } + }, + } +} + +fn impl_contract_event(event: &Event) -> quote::Tokens { + let name = syn::Ident::new(event.name.to_snake_case()); + let event_name = syn::Ident::new(event.name.to_camel_case()); + quote! { + pub fn #name(&self) -> events::#event_name { + events::#event_name::default() + } + } +} + +fn impl_contract_constructor(constructor: &Constructor) -> quote::Tokens { + // [param0, hello_world, param2] + let names: Vec<_> = constructor.inputs + .iter() + .enumerate() + .map(|(index, param)| if param.name.is_empty() { + syn::Ident::new(format!("param{}", index)) + } else { + param.name.to_snake_case().into() + }).collect(); + + // [Uint, Bytes, Vec] + let kinds: Vec<_> = constructor.inputs + .iter() + .map(|param| rust_type(¶m.kind)) + .collect(); + + // [T0, T1, T2] + let template_names: Vec<_> = kinds.iter().enumerate() + .map(|(index, _)| syn::Ident::new(format!("T{}", index))) + .collect(); + + // [T0: Into, T1: Into, T2: IntoIterator, U2 = Into] + let template_params: Vec<_> = constructor.inputs.iter().enumerate() + .map(|(index, param)| template_param_type(¶m.kind, index)) + .collect(); + + // [param0: T0, hello_world: T1, param2: T2] + let params: Vec<_> = names.iter().zip(template_names.iter()) + .map(|(param_name, template_name)| quote! { #param_name: #template_name }) + .collect(); + + // [Token::Uint(param0.into()), Token::Bytes(hello_world.into()), Token::Array(param2.into())] + let usage: Vec<_> = names.iter().zip(constructor.inputs.iter()) + .map(|(param_name, param)| to_token(&from_template_param(¶m.kind, param_name), ¶m.kind)) + .collect(); + + let constructor_inputs = &constructor.inputs.iter().map(|x| { + let name = &x.name; + let kind = to_syntax_string(&x.kind); + format!(r##"ethabi::Param {{ name: "{}".to_owned(), kind: {} }}"##, name, kind).into() + }).collect::>(); + let constructor_inputs = quote! { vec![ #(#constructor_inputs),* ] }; + + quote! 
{ + pub fn constructor<#(#template_params),*>(&self, code: ethabi::Bytes, #(#params),* ) -> ethabi::Bytes { + let v: Vec = vec![#(#usage),*]; + + ethabi::Constructor { + inputs: #constructor_inputs + } + .encode_input(code, &v) + .expect(INTERNAL_ERR) + } + } +} + +fn declare_logs(event: &Event) -> quote::Tokens { + let name = syn::Ident::new(event.name.to_camel_case()); + let names: Vec<_> = event.inputs + .iter() + .enumerate() + .map(|(index, param)| if param.name.is_empty() { + syn::Ident::new(format!("param{}", index)) + } else { + param.name.to_snake_case().into() + }).collect(); + let kinds: Vec<_> = event.inputs + .iter() + .map(|param| rust_type(¶m.kind)) + .collect(); + let params: Vec<_> = names.iter().zip(kinds.iter()) + .map(|(param_name, kind)| quote! { pub #param_name: #kind, }) + .collect(); + + quote! { + pub struct #name { + #(#params)* + } + } +} + +fn declare_events(event: &Event) -> quote::Tokens { + let name = syn::Ident::new(event.name.to_camel_case()); + + // parse log + + let names: Vec<_> = event.inputs + .iter() + .enumerate() + .map(|(index, param)| if param.name.is_empty() { + if param.indexed { + syn::Ident::new(format!("topic{}", index)) + } else { + syn::Ident::new(format!("param{}", index)) + } + } else { + param.name.to_snake_case().into() + }).collect(); + + let log_iter = syn::Ident::new("log.next().expect(super::INTERNAL_ERR).value"); + + let to_log: Vec<_> = event.inputs + .iter() + .map(|param| from_token(¶m.kind, &log_iter)) + .collect(); + + let log_params: Vec<_> = names.iter().zip(to_log.iter()) + .map(|(param_name, convert)| quote! { #param_name: #convert }) + .collect(); + + // create filter + + let topic_names: Vec<_> = event.inputs + .iter() + .enumerate() + .filter(|&(_, param)| param.indexed) + .map(|(index, param)| if param.name.is_empty() { + syn::Ident::new(format!("topic{}", index)) + } else { + param.name.to_snake_case().into() + }) + .collect(); + + let topic_kinds: Vec<_> = event.inputs + .iter() + .filter(|param| param.indexed) + .map(|param| rust_type(¶m.kind)) + .collect(); + + // [T0, T1, T2] + let template_names: Vec<_> = topic_kinds.iter().enumerate() + .map(|(index, _)| syn::Ident::new(format!("T{}", index))) + .collect(); + + let params: Vec<_> = topic_names.iter().zip(template_names.iter()) + .map(|(param_name, template_name)| quote! { #param_name: #template_name }) + .collect(); + + let template_params: Vec<_> = topic_kinds.iter().zip(template_names.iter()) + .map(|(kind, template_name)| quote! { #template_name: Into> }) + .collect(); + + let to_filter: Vec<_> = topic_names.iter().zip(event.inputs.iter().filter(|p| p.indexed)) + .enumerate() + .take(3) + .map(|(index, (param_name, param))| { + let topic = syn::Ident::new(format!("topic{}", index)); + let i = "i".into(); + let to_token = to_token(&i, ¶m.kind); + quote! { #topic: #param_name.into().map(|#i| #to_token), } + }) + .collect(); + + let event_name = &event.name; + + let event_inputs = &event.inputs.iter().map(|x| { + let name = &x.name; + let kind = to_syntax_string(&x.kind); + let indexed = x.indexed; + format!(r##"ethabi::EventParam {{ name: "{}".to_owned(), kind: {}, indexed: {} }}"##, name, kind, indexed.to_string()).into() + }).collect::>(); + let event_inputs = quote! { vec![ #(#event_inputs),* ] }; + + let event_anonymous = &event.anonymous; + + + quote! 
{ + pub struct #name { + event: ethabi::Event, + } + + impl Default for #name { + fn default() -> Self { + #name { + event: ethabi::Event { + name: #event_name.to_owned(), + inputs: #event_inputs, + anonymous: #event_anonymous + } + } + } + } + + impl #name { + /// Parses log. + pub fn parse_log(&self, log: ethabi::RawLog) -> ethabi::Result { + let mut log = self.event.parse_log(log)?.params.into_iter(); + let result = super::logs::#name { + #(#log_params),* + }; + Ok(result) + } + + /// Creates topic filter. + pub fn create_filter<#(#template_params),*>(&self, #(#params),*) -> ethabi::TopicFilter { + let raw = ethabi::RawTopicFilter { + #(#to_filter)* + ..Default::default() + }; + + self.event.create_filter(raw).expect(super::INTERNAL_ERR) + } + } + } +} + +fn declare_functions(function: &Function) -> quote::Tokens { + let name = syn::Ident::new(function.name.to_camel_case()); + + // [param0, hello_world, param2] + let ref names: Vec<_> = function.inputs + .iter() + .enumerate() + .map(|(index, param)| if param.name.is_empty() { + syn::Ident::new(format!("param{}", index)) + } else { + param.name.to_snake_case().into() + }).collect(); + + // [Uint, Bytes, Vec] + let kinds: Vec<_> = function.inputs + .iter() + .map(|param| rust_type(¶m.kind)) + .collect(); + + // [T0, T1, T2] + let template_names: Vec<_> = kinds.iter().enumerate() + .map(|(index, _)| syn::Ident::new(format!("T{}", index))) + .collect(); + + // [T0: Into, T1: Into, T2: IntoIterator, U2 = Into] + let ref template_params: Vec<_> = function.inputs.iter().enumerate() + .map(|(index, param)| template_param_type(¶m.kind, index)) + .collect(); + + // [param0: T0, hello_world: T1, param2: T2] + let ref params: Vec<_> = names.iter().zip(template_names.iter()) + .map(|(param_name, template_name)| quote! { #param_name: #template_name }) + .collect(); + + // [Token::Uint(param0.into()), Token::Bytes(hello_world.into()), Token::Array(param2.into_iter().map(Into::into).collect())] + let usage: Vec<_> = names.iter().zip(function.inputs.iter()) + .map(|(param_name, param)| to_token(&from_template_param(¶m.kind, param_name), ¶m.kind)) + .collect(); + + let output_call_impl = if !function.constant { + quote! {} + } else { + let output_kinds = match function.outputs.len() { + 0 => quote! {()}, + 1 => { + let t = rust_type(&function.outputs[0].kind); + quote! { #t } + }, + _ => { + let outs: Vec<_> = function.outputs + .iter() + .map(|param| rust_type(¶m.kind)) + .collect(); + quote! { (#(#outs),*) } + } + }; + + let o_impl = match function.outputs.len() { + 0 => quote! { Ok(()) }, + 1 => { + let o = "out".into(); + let from_first = from_token(&function.outputs[0].kind, &o); + quote! { + let out = self.function.decode_output(output)?.into_iter().next().expect(super::INTERNAL_ERR); + Ok(#from_first) + } + }, + _ => { + let o = "out.next().expect(super::INTERNAL_ERR)".into(); + let outs: Vec<_> = function.outputs + .iter() + .map(|param| from_token(¶m.kind, &o)) + .collect(); + + quote! { + let mut out = self.function.decode_output(output)?.into_iter(); + Ok(( #(#outs),* )) + } + }, + }; + + quote! 
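+		// Generated only for constant functions: `output` decodes the raw
+		// return bytes, and `call` encodes the input, hands it to the
+		// caller-supplied `do_call` closure, then decodes the result.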
{ + pub fn output(&self, output: &[u8]) -> ethabi::Result<#output_kinds> { + #o_impl + } + + pub fn call<#(#template_params),*>(&self, #(#params ,)* do_call: &Fn(ethabi::Bytes) -> Result) -> ethabi::Result<#output_kinds> + { + let encoded_input = self.input(#(#names),*); + + do_call(encoded_input) + .map_err(|x| ethabi::Error::with_chain(ethabi::Error::from(x), ethabi::ErrorKind::CallError)) + .and_then(|encoded_output| self.output(&encoded_output)) + } + } + }; + + let function_name = &function.name; + + let function_inputs = &function.inputs.iter().map(|x| { + let name = &x.name; + let kind = to_syntax_string(&x.kind); + format!(r##"ethabi::Param {{ name: "{}".to_owned(), kind: {} }}"##, name, kind).into() + }).collect::>(); + let function_inputs = quote! { vec![ #(#function_inputs),* ] }; + + let function_outputs = &function.outputs.iter().map(|x| { + let name = &x.name; + let kind = to_syntax_string(&x.kind); + format!(r##"ethabi::Param {{ name: "{}".to_owned(), kind: {} }}"##, name, kind).into() + }).collect::>(); + let function_outputs = quote! { vec![ #(#function_outputs),* ] }; + + let function_constant = &function.constant; + + quote! { + pub struct #name { + function: ethabi::Function, + } + + impl Default for #name { + fn default() -> Self { + #name { + function: ethabi::Function { + name: #function_name.to_owned(), + inputs: #function_inputs, + outputs: #function_outputs, + constant: #function_constant + } + } + } + } + + impl #name { + + pub fn input<#(#template_params),*>(&self, #(#params),*) -> ethabi::Bytes { + let v: Vec = vec![#(#usage),*]; + self.function.encode_input(&v).expect(super::INTERNAL_ERR) + } + + #output_call_impl + } + } +} diff --git a/nix/overlay/ethabi/ethabi/Cargo.toml b/nix/overlay/ethabi/ethabi/Cargo.toml new file mode 100644 index 000000000..e39a92aed --- /dev/null +++ b/nix/overlay/ethabi/ethabi/Cargo.toml @@ -0,0 +1,23 @@ +[package] +name = "ethabi" +version = "4.1.0" +authors = ["Parity Technologies "] +homepage = "https://github.com/paritytech/ethabi" +license = "MIT" +keywords = ["ethereum", "eth", "abi", "solidity", "cli"] +description = "Easy to use conversion of ethereum contract calls to bytecode." + +[dependencies] +rustc-hex = "1.0" +serde = "1.0" +serde_json = "1.0" +serde_derive = "1.0" +tiny-keccak = "1.3" +error-chain = { version = "0.11", default-features = false } + +[features] +backtrace = ["error-chain/backtrace"] + +[badges] +travis-ci = { repository = "paritytech/ethabi", branch = "master" } +coveralls = { repository = "paritytech/ethabi", branch = "master" } diff --git a/nix/overlay/ethabi/ethabi/src/constructor.rs b/nix/overlay/ethabi/ethabi/src/constructor.rs new file mode 100644 index 000000000..9cb419763 --- /dev/null +++ b/nix/overlay/ethabi/ethabi/src/constructor.rs @@ -0,0 +1,29 @@ +//! Contract constructor call builder. +use {Param, Result, ErrorKind, Token, ParamType, encode, Bytes}; + +/// Contract constructor specification. +#[derive(Debug, Clone, PartialEq, Deserialize)] +pub struct Constructor { + /// Constructor input. + pub inputs: Vec, +} + +impl Constructor { + /// Returns all input params of given constructor. + fn param_types(&self) -> Vec { + self.inputs.iter() + .map(|p| p.kind.clone()) + .collect() + } + + /// Prepares ABI constructor call with given input params. 
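+	///
+	/// The encoding is simply the creation `code` followed by the ABI-encoded
+	/// `tokens`; `ErrorKind::InvalidData` is returned when the tokens do not
+	/// match the constructor's declared input types.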
+ pub fn encode_input(&self, code: Bytes, tokens: &[Token]) -> Result { + let params = self.param_types(); + + if Token::types_check(tokens, ¶ms) { + Ok(code.into_iter().chain(encode(tokens)).collect()) + } else { + Err(ErrorKind::InvalidData.into()) + } + } +} diff --git a/nix/overlay/ethabi/ethabi/src/contract.rs b/nix/overlay/ethabi/ethabi/src/contract.rs new file mode 100644 index 000000000..253f025a2 --- /dev/null +++ b/nix/overlay/ethabi/ethabi/src/contract.rs @@ -0,0 +1,124 @@ +use std::{io, fmt}; +use std::collections::HashMap; +use std::collections::hash_map::Values; +use serde::{Deserialize, Deserializer}; +use serde::de::{Visitor, SeqAccess}; +use serde_json; +use operation::Operation; +use {errors, ErrorKind, Event, Constructor, Function}; + +/// API building calls to contracts ABI. +#[derive(Clone, Debug, PartialEq)] +pub struct Contract { + /// Contract constructor. + pub constructor: Option, + /// Contract functions. + pub functions: HashMap, + /// Contract events. + pub events: HashMap, + /// Contract has fallback function. + pub fallback: bool, +} + +impl<'a> Deserialize<'a> for Contract { + fn deserialize(deserializer: D) -> Result where D: Deserializer<'a> { + deserializer.deserialize_any(ContractVisitor) + } +} + +struct ContractVisitor; + +impl<'a> Visitor<'a> for ContractVisitor { + type Value = Contract; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("valid abi spec file") + } + + fn visit_seq(self, mut seq: A) -> Result where A: SeqAccess<'a> { + let mut result = Contract { + constructor: None, + functions: HashMap::default(), + events: HashMap::default(), + fallback: false, + }; + + while let Some(operation) = seq.next_element()? { + match operation { + Operation::Constructor(constructor) => { + result.constructor = Some(constructor); + }, + Operation::Function(func) => { + result.functions.insert(func.name.clone(), func); + }, + Operation::Event(event) => { + result.events.insert(event.name.clone(), event); + }, + Operation::Fallback => { + result.fallback = true; + }, + } + } + + Ok(result) + } +} + +impl Contract { + /// Loads contract from json. + pub fn load(reader: T) -> errors::Result { + serde_json::from_reader(reader).map_err(From::from) + } + + /// Creates constructor call builder. + pub fn constructor(&self) -> Option<&Constructor> { + self.constructor.as_ref() + } + + /// Creates function call builder. + pub fn function(&self, name: &str) -> errors::Result<&Function> { + self.functions.get(name).ok_or_else(|| ErrorKind::InvalidName(name.to_owned()).into()) + } + + /// Creates event decoder. + pub fn event(&self, name: &str) -> errors::Result<&Event> { + self.events.get(name).ok_or_else(|| ErrorKind::InvalidName(name.to_owned()).into()) + } + + /// Iterate over all functions of the contract in arbitrary order. + pub fn functions(&self) -> Functions { + Functions(self.functions.values()) + } + + /// Iterate over all events of the contract in arbitrary order. + pub fn events(&self) -> Events { + Events(self.events.values()) + } + + /// Returns true if contract has fallback + pub fn fallback(&self) -> bool { + self.fallback + } +} + +/// Contract functions interator. +pub struct Functions<'a>(Values<'a, String, Function>); + +impl<'a> Iterator for Functions<'a> { + type Item = &'a Function; + + fn next(&mut self) -> Option { + self.0.next() + } +} + +/// Contract events interator. 
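+/// Yields every `Event` of the contract in arbitrary (`HashMap`) order,
+/// mirroring the `Functions` iterator above.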
+pub struct Events<'a>(Values<'a, String, Event>); + +impl<'a> Iterator for Events<'a> { + type Item = &'a Event; + + fn next(&mut self) -> Option { + self.0.next() + } +} diff --git a/nix/overlay/ethabi/ethabi/src/decoder.rs b/nix/overlay/ethabi/ethabi/src/decoder.rs new file mode 100644 index 000000000..d6f5cb164 --- /dev/null +++ b/nix/overlay/ethabi/ethabi/src/decoder.rs @@ -0,0 +1,479 @@ +//! ABI decoder. + +use util::slice_data; +use {Token, ErrorKind, Error, ResultExt, ParamType, Hash}; + +struct DecodeResult { + token: Token, + new_offset: usize, +} + +struct BytesTaken { + bytes: Vec, + new_offset: usize, +} + +fn as_u32(slice: &Hash) -> Result { + if !slice[..28].iter().all(|x| *x == 0) { + return Err(ErrorKind::InvalidData.into()); + } + + let result = ((slice[28] as u32) << 24) + + ((slice[29] as u32) << 16) + + ((slice[30] as u32) << 8) + + (slice[31] as u32); + + Ok(result) +} + +fn as_bool(slice: &Hash) -> Result { + if !slice[..31].iter().all(|x| *x == 0) { + return Err(ErrorKind::InvalidData.into()); + } + + Ok(slice[31] == 1) +} + +/// Decodes ABI compliant vector of bytes into vector of tokens described by types param. +pub fn decode(types: &[ParamType], data: &[u8]) -> Result, Error> { + let slices = slice_data(data)?; + let mut tokens = vec![]; + let mut offset = 0; + for param in types { + let res = decode_param(param, &slices, offset).chain_err(|| format!("Cannot decode {}", param))?; + offset = res.new_offset; + tokens.push(res.token); + } + Ok(tokens) +} + +fn peek(slices: &[Hash], position: usize) -> Result<&Hash, Error> { + slices.get(position).ok_or_else(|| ErrorKind::InvalidData.into()) +} + +fn take_bytes(slices: &[Hash], position: usize, len: usize) -> Result { + let slices_len = (len + 31) / 32; + + let mut bytes_slices = vec![]; + for i in 0..slices_len { + let slice = try!(peek(slices, position + i)).clone(); + bytes_slices.push(slice); + } + + let bytes = bytes_slices.into_iter() + .flat_map(|slice| slice.to_vec()) + .take(len) + .collect(); + + let taken = BytesTaken { + bytes: bytes, + new_offset: position + slices_len, + }; + + Ok(taken) +} + +fn decode_param(param: &ParamType, slices: &[Hash], offset: usize) -> Result { + match *param { + ParamType::Address => { + let slice = try!(peek(slices, offset)); + let mut address = [0u8; 20]; + address.copy_from_slice(&slice[12..]); + + let result = DecodeResult { + token: Token::Address(address), + new_offset: offset + 1, + }; + + Ok(result) + }, + ParamType::Int(_) => { + let slice = try!(peek(slices, offset)); + + let result = DecodeResult { + token: Token::Int(slice.clone()), + new_offset: offset + 1, + }; + + Ok(result) + }, + ParamType::Uint(_) => { + let slice = try!(peek(slices, offset)); + + let result = DecodeResult { + token: Token::Uint(slice.clone()), + new_offset: offset + 1, + }; + + Ok(result) + }, + ParamType::Bool => { + let slice = try!(peek(slices, offset)); + + let b = try!(as_bool(slice)); + + let result = DecodeResult { + token: Token::Bool(b), + new_offset: offset + 1, + }; + + Ok(result) + }, + ParamType::FixedBytes(len) => { + let taken = try!(take_bytes(slices, offset, len)); + + let result = DecodeResult { + token: Token::FixedBytes(taken.bytes), + new_offset: taken.new_offset, + }; + + Ok(result) + }, + ParamType::Bytes => { + let offset_slice = try!(peek(slices, offset)); + let len_offset = (try!(as_u32(offset_slice)) / 32) as usize; + + let len_slice = try!(peek(slices, len_offset)); + let len = try!(as_u32(len_slice)) as usize; + + let taken = try!(take_bytes(slices, len_offset 
+ 1, len)); + + let result = DecodeResult { + token: Token::Bytes(taken.bytes), + new_offset: offset + 1, + }; + + Ok(result) + }, + ParamType::String => { + let offset_slice = try!(peek(slices, offset)); + let len_offset = (try!(as_u32(offset_slice)) / 32) as usize; + + let len_slice = try!(peek(slices, len_offset)); + let len = try!(as_u32(len_slice)) as usize; + + let taken = try!(take_bytes(slices, len_offset + 1, len)); + + let result = DecodeResult { + token: Token::String(try!(String::from_utf8(taken.bytes))), + new_offset: offset + 1, + }; + + Ok(result) + }, + ParamType::Array(ref t) => { + let offset_slice = try!(peek(slices, offset)); + let len_offset = (try!(as_u32(offset_slice)) / 32) as usize; + + let len_slice = try!(peek(slices, len_offset)); + let len = try!(as_u32(len_slice)) as usize; + + let mut tokens = vec![]; + let mut new_offset = len_offset + 1; + + for _ in 0..len { + let res = try!(decode_param(t, &slices, new_offset)); + new_offset = res.new_offset; + tokens.push(res.token); + } + + let result = DecodeResult { + token: Token::Array(tokens), + new_offset: offset + 1, + }; + + Ok(result) + }, + ParamType::FixedArray(ref t, len) => { + let mut tokens = vec![]; + let mut new_offset = offset; + for _ in 0..len { + let res = try!(decode_param(t, &slices, new_offset)); + new_offset = res.new_offset; + tokens.push(res.token); + } + + let result = DecodeResult { + token: Token::FixedArray(tokens), + new_offset: new_offset, + }; + + Ok(result) + } + } +} + +#[cfg(test)] +mod tests { + use hex::FromHex; + use {decode, Token, ParamType}; + + #[test] + fn decode_address() { + let encoded = "0000000000000000000000001111111111111111111111111111111111111111".from_hex().unwrap(); + let address = Token::Address([0x11u8; 20]); + let expected = vec![address]; + let decoded = decode(&[ParamType::Address], &encoded).unwrap(); + assert_eq!(decoded, expected); + } + + #[test] + fn decode_two_address() { + let encoded = ("".to_owned() + + "0000000000000000000000001111111111111111111111111111111111111111" + + "0000000000000000000000002222222222222222222222222222222222222222").from_hex().unwrap(); + let address1 = Token::Address([0x11u8; 20]); + let address2 = Token::Address([0x22u8; 20]); + let expected = vec![address1, address2]; + let decoded = decode(&[ParamType::Address, ParamType::Address], &encoded).unwrap(); + assert_eq!(decoded, expected); + } + + #[test] + fn decode_fixed_array_of_addresses() { + let encoded = ("".to_owned() + + "0000000000000000000000001111111111111111111111111111111111111111" + + "0000000000000000000000002222222222222222222222222222222222222222").from_hex().unwrap(); + let address1 = Token::Address([0x11u8; 20]); + let address2 = Token::Address([0x22u8; 20]); + let expected = vec![Token::FixedArray(vec![address1, address2])]; + let decoded = decode(&[ParamType::FixedArray(Box::new(ParamType::Address), 2)], &encoded).unwrap(); + assert_eq!(decoded, expected); + } + + #[test] + fn decode_uint() { + let encoded = "1111111111111111111111111111111111111111111111111111111111111111".from_hex().unwrap(); + let uint = Token::Uint([0x11u8; 32]); + let expected = vec![uint]; + let decoded = decode(&[ParamType::Uint(32)], &encoded).unwrap(); + assert_eq!(decoded, expected); + } + + #[test] + fn decode_int() { + let encoded = "1111111111111111111111111111111111111111111111111111111111111111".from_hex().unwrap(); + let int = Token::Int([0x11u8; 32]); + let expected = vec![int]; + let decoded = decode(&[ParamType::Int(32)], &encoded).unwrap(); + assert_eq!(decoded, 
expected); + } + + #[test] + fn decode_dynamic_array_of_addresses() { + let encoded = ("".to_owned() + + "0000000000000000000000000000000000000000000000000000000000000020" + + "0000000000000000000000000000000000000000000000000000000000000002" + + "0000000000000000000000001111111111111111111111111111111111111111" + + "0000000000000000000000002222222222222222222222222222222222222222").from_hex().unwrap(); + let address1 = Token::Address([0x11u8; 20]); + let address2 = Token::Address([0x22u8; 20]); + let addresses = Token::Array(vec![address1, address2]); + let expected = vec![addresses]; + let decoded = decode(&[ParamType::Array(Box::new(ParamType::Address))], &encoded).unwrap(); + assert_eq!(decoded, expected); + } + + #[test] + fn decode_dynamic_array_of_fixed_arrays() { + let encoded = ("".to_owned() + + "0000000000000000000000000000000000000000000000000000000000000020" + + "0000000000000000000000000000000000000000000000000000000000000002" + + "0000000000000000000000001111111111111111111111111111111111111111" + + "0000000000000000000000002222222222222222222222222222222222222222" + + "0000000000000000000000003333333333333333333333333333333333333333" + + "0000000000000000000000004444444444444444444444444444444444444444").from_hex().unwrap(); + let address1 = Token::Address([0x11u8; 20]); + let address2 = Token::Address([0x22u8; 20]); + let address3 = Token::Address([0x33u8; 20]); + let address4 = Token::Address([0x44u8; 20]); + let array0 = Token::FixedArray(vec![address1, address2]); + let array1 = Token::FixedArray(vec![address3, address4]); + let dynamic = Token::Array(vec![array0, array1]); + let expected = vec![dynamic]; + let decoded = decode(&[ + ParamType::Array(Box::new( + ParamType::FixedArray(Box::new(ParamType::Address), 2) + )) + ], &encoded).unwrap(); + assert_eq!(decoded, expected); + } + + #[test] + fn decode_dynamic_array_of_dynamic_arrays() { + let encoded = ("".to_owned() + + "0000000000000000000000000000000000000000000000000000000000000020" + + "0000000000000000000000000000000000000000000000000000000000000002" + + "0000000000000000000000000000000000000000000000000000000000000080" + + "00000000000000000000000000000000000000000000000000000000000000c0" + + "0000000000000000000000000000000000000000000000000000000000000001" + + "0000000000000000000000001111111111111111111111111111111111111111" + + "0000000000000000000000000000000000000000000000000000000000000001" + + "0000000000000000000000002222222222222222222222222222222222222222").from_hex().unwrap(); + + let address1 = Token::Address([0x11u8; 20]); + let address2 = Token::Address([0x22u8; 20]); + let array0 = Token::Array(vec![address1]); + let array1 = Token::Array(vec![address2]); + let dynamic = Token::Array(vec![array0, array1]); + let expected = vec![dynamic]; + let decoded = decode(&[ + ParamType::Array(Box::new( + ParamType::Array(Box::new(ParamType::Address)) + )) + ], &encoded).unwrap(); + assert_eq!(decoded, expected); + } + + #[test] + fn decode_dynamic_array_of_dynamic_arrays2() { + let encoded = ("".to_owned() + + "0000000000000000000000000000000000000000000000000000000000000020" + + "0000000000000000000000000000000000000000000000000000000000000002" + + "0000000000000000000000000000000000000000000000000000000000000080" + + "00000000000000000000000000000000000000000000000000000000000000e0" + + "0000000000000000000000000000000000000000000000000000000000000002" + + "0000000000000000000000001111111111111111111111111111111111111111" + + "0000000000000000000000002222222222222222222222222222222222222222" + + 
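+			// next word: the length (2) of the second inner array, followed by its two addresses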
"0000000000000000000000000000000000000000000000000000000000000002" + + "0000000000000000000000003333333333333333333333333333333333333333" + + "0000000000000000000000004444444444444444444444444444444444444444").from_hex().unwrap(); + + let address1 = Token::Address([0x11u8; 20]); + let address2 = Token::Address([0x22u8; 20]); + let address3 = Token::Address([0x33u8; 20]); + let address4 = Token::Address([0x44u8; 20]); + let array0 = Token::Array(vec![address1, address2]); + let array1 = Token::Array(vec![address3, address4]); + let dynamic = Token::Array(vec![array0, array1]); + let expected = vec![dynamic]; + let decoded = decode(&[ + ParamType::Array(Box::new( + ParamType::Array(Box::new(ParamType::Address)) + )) + ], &encoded).unwrap(); + assert_eq!(decoded, expected); + } + + #[test] + fn decode_fixed_array_fixed_arrays() { + let encoded = ("".to_owned() + + "0000000000000000000000001111111111111111111111111111111111111111" + + "0000000000000000000000002222222222222222222222222222222222222222" + + "0000000000000000000000003333333333333333333333333333333333333333" + + "0000000000000000000000004444444444444444444444444444444444444444").from_hex().unwrap(); + let address1 = Token::Address([0x11u8; 20]); + let address2 = Token::Address([0x22u8; 20]); + let address3 = Token::Address([0x33u8; 20]); + let address4 = Token::Address([0x44u8; 20]); + let array0 = Token::FixedArray(vec![address1, address2]); + let array1 = Token::FixedArray(vec![address3, address4]); + let fixed = Token::FixedArray(vec![array0, array1]); + let expected = vec![fixed]; + + let decoded = decode(&[ + ParamType::FixedArray( + Box::new(ParamType::FixedArray(Box::new(ParamType::Address), 2)), + 2 + ) + ], &encoded).unwrap(); + + assert_eq!(decoded, expected); + } + + #[test] + fn decode_fixed_array_of_dynamic_array_of_addresses() { + let encoded = ("".to_owned() + + "0000000000000000000000000000000000000000000000000000000000000040" + + "00000000000000000000000000000000000000000000000000000000000000a0" + + "0000000000000000000000000000000000000000000000000000000000000002" + + "0000000000000000000000001111111111111111111111111111111111111111" + + "0000000000000000000000002222222222222222222222222222222222222222" + + "0000000000000000000000000000000000000000000000000000000000000002" + + "0000000000000000000000003333333333333333333333333333333333333333" + + "0000000000000000000000004444444444444444444444444444444444444444").from_hex().unwrap(); + let address1 = Token::Address([0x11u8; 20]); + let address2 = Token::Address([0x22u8; 20]); + let address3 = Token::Address([0x33u8; 20]); + let address4 = Token::Address([0x44u8; 20]); + let array0 = Token::Array(vec![address1, address2]); + let array1 = Token::Array(vec![address3, address4]); + let fixed = Token::FixedArray(vec![array0, array1]); + let expected = vec![fixed]; + + let decoded = decode(&[ + ParamType::FixedArray( + Box::new(ParamType::Array(Box::new(ParamType::Address))), + 2 + ) + ], &encoded).unwrap(); + + assert_eq!(decoded, expected); + } + + #[test] + fn decode_fixed_bytes() { + let encoded = ("".to_owned() + + "1234000000000000000000000000000000000000000000000000000000000000").from_hex().unwrap(); + let bytes = Token::FixedBytes(vec![0x12, 0x34]); + let expected = vec![bytes]; + let decoded = decode(&[ParamType::FixedBytes(2)], &encoded).unwrap(); + assert_eq!(decoded, expected); + } + + #[test] + fn decode_bytes() { + let encoded = ("".to_owned() + + "0000000000000000000000000000000000000000000000000000000000000020" + + 
"0000000000000000000000000000000000000000000000000000000000000002" + + "1234000000000000000000000000000000000000000000000000000000000000").from_hex().unwrap(); + let bytes = Token::Bytes(vec![0x12, 0x34]); + let expected = vec![bytes]; + let decoded = decode(&[ParamType::Bytes], &encoded).unwrap(); + assert_eq!(decoded, expected); + } + + #[test] + fn decode_bytes2() { + let encoded = ("".to_owned() + + "0000000000000000000000000000000000000000000000000000000000000020" + + "0000000000000000000000000000000000000000000000000000000000000040" + + "1000000000000000000000000000000000000000000000000000000000000000" + + "1000000000000000000000000000000000000000000000000000000000000000").from_hex().unwrap(); + let bytes = Token::Bytes(("".to_owned() + + "1000000000000000000000000000000000000000000000000000000000000000" + + "1000000000000000000000000000000000000000000000000000000000000000").from_hex().unwrap()); + let expected = vec![bytes]; + let decoded = decode(&[ParamType::Bytes], &encoded).unwrap(); + assert_eq!(decoded, expected); + } + + #[test] + fn decode_two_bytes() { + let encoded = ("".to_owned() + + "0000000000000000000000000000000000000000000000000000000000000040" + + "0000000000000000000000000000000000000000000000000000000000000080" + + "000000000000000000000000000000000000000000000000000000000000001f" + + "1000000000000000000000000000000000000000000000000000000000000200" + + "0000000000000000000000000000000000000000000000000000000000000020" + + "0010000000000000000000000000000000000000000000000000000000000002").from_hex().unwrap(); + let bytes1 = Token::Bytes("10000000000000000000000000000000000000000000000000000000000002".from_hex().unwrap()); + let bytes2 = Token::Bytes("0010000000000000000000000000000000000000000000000000000000000002".from_hex().unwrap()); + let expected = vec![bytes1, bytes2]; + let decoded = decode(&[ParamType::Bytes, ParamType::Bytes], &encoded).unwrap(); + assert_eq!(decoded, expected); + } + + #[test] + fn decode_string() { + let encoded = ("".to_owned() + + "0000000000000000000000000000000000000000000000000000000000000020" + + "0000000000000000000000000000000000000000000000000000000000000009" + + "6761766f66796f726b0000000000000000000000000000000000000000000000").from_hex().unwrap(); + let s = Token::String("gavofyork".to_owned()); + let expected = vec![s]; + let decoded = decode(&[ParamType::String], &encoded).unwrap(); + assert_eq!(decoded, expected); + } +} + diff --git a/nix/overlay/ethabi/ethabi/src/encoder.rs b/nix/overlay/ethabi/ethabi/src/encoder.rs new file mode 100644 index 000000000..10dc0da9b --- /dev/null +++ b/nix/overlay/ethabi/ethabi/src/encoder.rs @@ -0,0 +1,522 @@ +//! ABI encoder. 
+ +use util::pad_u32; +use {Token, Hash, Bytes}; + +fn pad_bytes(bytes: &[u8]) -> Vec { + let mut result = vec![pad_u32(bytes.len() as u32)]; + result.extend(pad_fixed_bytes(bytes)); + result +} + +fn pad_fixed_bytes(bytes: &[u8]) -> Vec { + let mut result = vec![]; + let len = (bytes.len() + 31) / 32; + for i in 0..len { + let mut padded = [0u8; 32]; + + let to_copy = match i == len - 1 { + false => 32, + true => match bytes.len() % 32 { + 0 => 32, + x => x, + }, + }; + + let offset = 32 * i; + padded[..to_copy].copy_from_slice(&bytes[offset..offset + to_copy]); + result.push(padded); + } + + result +} + +#[derive(Debug)] +enum Mediate { + Raw(Vec), + Prefixed(Vec), + FixedArray(Vec), + Array(Vec), +} + +impl Mediate { + fn init_len(&self) -> u32 { + match *self { + Mediate::Raw(ref raw) => 32 * raw.len() as u32, + Mediate::Prefixed(_) => 32, + Mediate::FixedArray(ref nes) => nes.iter().fold(0, |acc, m| acc + m.init_len()), + Mediate::Array(_) => 32, + } + } + + fn closing_len(&self) -> u32 { + match *self { + Mediate::Raw(_) => 0, + Mediate::Prefixed(ref pre) => pre.len() as u32 * 32, + Mediate::FixedArray(ref nes) => nes.iter().fold(0, |acc, m| acc + m.closing_len()), + Mediate::Array(ref nes) => nes.iter().fold(32, |acc, m| acc + m.init_len() + m.closing_len()), + } + } + + fn offset_for(mediates: &[Mediate], position: usize) -> u32 { + assert!(position < mediates.len()); + + let init_len = mediates.iter().fold(0, |acc, m| acc + m.init_len()); + mediates[0..position].iter().fold(init_len, |acc, m| acc + m.closing_len()) + } + + fn init(&self, suffix_offset: u32) -> Vec { + match *self { + Mediate::Raw(ref raw) => raw.clone(), + Mediate::FixedArray(ref nes) => { + nes.iter() + .enumerate() + .flat_map(|(i, m)| m.init(Mediate::offset_for(nes, i))) + .collect() + }, + Mediate::Prefixed(_) | Mediate::Array(_) => { + vec![pad_u32(suffix_offset)] + } + } + } + + fn closing(&self, offset: u32) -> Vec { + match *self { + Mediate::Raw(_) => vec![], + Mediate::Prefixed(ref pre) => pre.clone(), + Mediate::FixedArray(ref nes) => { + // offset is not taken into account, cause it would be counted twice + // fixed array is just raw representations of similar consecutive items + nes.iter() + .enumerate() + .flat_map(|(i, m)| m.closing(Mediate::offset_for(nes, i))) + .collect() + }, + Mediate::Array(ref nes) => { + // + 32 added to offset represents len of the array prepanded to closing + let prefix = vec![pad_u32(nes.len() as u32)].into_iter(); + + let inits = nes.iter() + .enumerate() + .flat_map(|(i, m)| m.init(offset + Mediate::offset_for(nes, i) + 32)); + + let closings = nes.iter() + .enumerate() + .flat_map(|(i, m)| m.closing(offset + Mediate::offset_for(nes, i))); + + prefix.chain(inits).chain(closings).collect() + }, + } + } +} + +/// Encodes vector of tokens into ABI compliant vector of bytes. 
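+///
+/// A minimal sketch of the expected shape: a single `bool` encodes to one
+/// 32-byte word.
+///
+/// ```
+/// use ethabi::{encode, Token};
+///
+/// // `true` becomes a zero-padded word ending in 0x01
+/// let word = encode(&[Token::Bool(true)]);
+/// assert_eq!(word.len(), 32);
+/// assert_eq!(word[31], 1);
+/// ```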
+pub fn encode(tokens: &[Token]) -> Bytes { + let mediates: Vec = tokens.iter() + .map(encode_token) + .collect(); + + let inits = mediates.iter() + .enumerate() + .flat_map(|(i, m)| m.init(Mediate::offset_for(&mediates, i))); + + let closings = mediates.iter() + .enumerate() + .flat_map(|(i, m)| m.closing(Mediate::offset_for(&mediates, i))); + + inits.chain(closings) + .flat_map(|item| item.to_vec()) + .collect() +} + +fn encode_token(token: &Token) -> Mediate { + match *token { + Token::Address(ref address) => { + let mut padded = [0u8; 32]; + padded[12..].copy_from_slice(address); + Mediate::Raw(vec![padded]) + }, + Token::Bytes(ref bytes) => Mediate::Prefixed(pad_bytes(bytes)), + Token::String(ref s) => Mediate::Prefixed(pad_bytes(s.as_bytes())), + Token::FixedBytes(ref bytes) => Mediate::Raw(pad_fixed_bytes(bytes)), + Token::Int(ref int) => Mediate::Raw(vec![int.clone()]), + Token::Uint(ref uint) => Mediate::Raw(vec![uint.clone()]), + Token::Bool(b) => { + let value = if b { 1 } else { 0 }; + Mediate::Raw(vec![pad_u32(value)]) + }, + Token::Array(ref tokens) => { + let mediates = tokens.iter() + .map(encode_token) + .collect(); + + Mediate::Array(mediates) + }, + Token::FixedArray(ref tokens) => { + let mediates = tokens.iter() + .map(encode_token) + .collect(); + + Mediate::FixedArray(mediates) + }, + } +} + +#[cfg(test)] +mod tests { + use hex::FromHex; + use util::pad_u32; + use {Token, encode}; + + #[test] + fn encode_address() { + let address = Token::Address([0x11u8; 20]); + let encoded = encode(&vec![address]); + let expected = "0000000000000000000000001111111111111111111111111111111111111111".from_hex().unwrap(); + assert_eq!(encoded, expected); + } + + #[test] + fn encode_dynamic_array_of_addresses() { + let address1 = Token::Address([0x11u8; 20]); + let address2 = Token::Address([0x22u8; 20]); + let addresses = Token::Array(vec![address1, address2]); + let encoded = encode(&vec![addresses]); + let expected = ("".to_owned() + + "0000000000000000000000000000000000000000000000000000000000000020" + + "0000000000000000000000000000000000000000000000000000000000000002" + + "0000000000000000000000001111111111111111111111111111111111111111" + + "0000000000000000000000002222222222222222222222222222222222222222").from_hex().unwrap(); + assert_eq!(encoded, expected); + } + + #[test] + fn encode_fixed_array_of_addresses() { + let address1 = Token::Address([0x11u8; 20]); + let address2 = Token::Address([0x22u8; 20]); + let addresses = Token::FixedArray(vec![address1, address2]); + let encoded = encode(&vec![addresses]); + let expected = ("".to_owned() + + "0000000000000000000000001111111111111111111111111111111111111111" + + "0000000000000000000000002222222222222222222222222222222222222222").from_hex().unwrap(); + assert_eq!(encoded, expected); + } + + #[test] + fn encode_two_addresses() { + let address1 = Token::Address([0x11u8; 20]); + let address2 = Token::Address([0x22u8; 20]); + let encoded = encode(&vec![address1, address2]); + let expected = ("".to_owned() + + "0000000000000000000000001111111111111111111111111111111111111111" + + "0000000000000000000000002222222222222222222222222222222222222222").from_hex().unwrap(); + assert_eq!(encoded, expected); + } + + #[test] + fn encode_fixed_array_of_dynamic_array_of_addresses() { + let address1 = Token::Address([0x11u8; 20]); + let address2 = Token::Address([0x22u8; 20]); + let address3 = Token::Address([0x33u8; 20]); + let address4 = Token::Address([0x44u8; 20]); + let array0 = Token::Array(vec![address1, address2]); + let array1 = 
Token::Array(vec![address3, address4]); + let fixed = Token::FixedArray(vec![array0, array1]); + let encoded = encode(&vec![fixed]); + let expected = ("".to_owned() + + "0000000000000000000000000000000000000000000000000000000000000040" + + "00000000000000000000000000000000000000000000000000000000000000a0" + + "0000000000000000000000000000000000000000000000000000000000000002" + + "0000000000000000000000001111111111111111111111111111111111111111" + + "0000000000000000000000002222222222222222222222222222222222222222" + + "0000000000000000000000000000000000000000000000000000000000000002" + + "0000000000000000000000003333333333333333333333333333333333333333" + + "0000000000000000000000004444444444444444444444444444444444444444").from_hex().unwrap(); + assert_eq!(encoded, expected); + } + + #[test] + fn encode_dynamic_array_of_fixed_array_of_addresses() { + let address1 = Token::Address([0x11u8; 20]); + let address2 = Token::Address([0x22u8; 20]); + let address3 = Token::Address([0x33u8; 20]); + let address4 = Token::Address([0x44u8; 20]); + let array0 = Token::FixedArray(vec![address1, address2]); + let array1 = Token::FixedArray(vec![address3, address4]); + let dynamic = Token::Array(vec![array0, array1]); + let encoded = encode(&vec![dynamic]); + let expected = ("".to_owned() + + "0000000000000000000000000000000000000000000000000000000000000020" + + "0000000000000000000000000000000000000000000000000000000000000002" + + "0000000000000000000000001111111111111111111111111111111111111111" + + "0000000000000000000000002222222222222222222222222222222222222222" + + "0000000000000000000000003333333333333333333333333333333333333333" + + "0000000000000000000000004444444444444444444444444444444444444444").from_hex().unwrap(); + assert_eq!(encoded, expected); + } + + #[test] + fn encode_dynamic_array_of_dynamic_arrays() { + let address1 = Token::Address([0x11u8; 20]); + let address2 = Token::Address([0x22u8; 20]); + let array0 = Token::Array(vec![address1]); + let array1 = Token::Array(vec![address2]); + let dynamic = Token::Array(vec![array0, array1]); + let encoded = encode(&vec![dynamic]); + let expected = ("".to_owned() + + "0000000000000000000000000000000000000000000000000000000000000020" + + "0000000000000000000000000000000000000000000000000000000000000002" + + "0000000000000000000000000000000000000000000000000000000000000080" + + "00000000000000000000000000000000000000000000000000000000000000c0" + + "0000000000000000000000000000000000000000000000000000000000000001" + + "0000000000000000000000001111111111111111111111111111111111111111" + + "0000000000000000000000000000000000000000000000000000000000000001" + + "0000000000000000000000002222222222222222222222222222222222222222").from_hex().unwrap(); + assert_eq!(encoded, expected); + } + + #[test] + fn encode_dynamic_array_of_dynamic_arrays2() { + let address1 = Token::Address([0x11u8; 20]); + let address2 = Token::Address([0x22u8; 20]); + let address3 = Token::Address([0x33u8; 20]); + let address4 = Token::Address([0x44u8; 20]); + let array0 = Token::Array(vec![address1, address2]); + let array1 = Token::Array(vec![address3, address4]); + let dynamic = Token::Array(vec![array0, array1]); + let encoded = encode(&vec![dynamic]); + let expected = ("".to_owned() + + "0000000000000000000000000000000000000000000000000000000000000020" + + "0000000000000000000000000000000000000000000000000000000000000002" + + "0000000000000000000000000000000000000000000000000000000000000080" + + "00000000000000000000000000000000000000000000000000000000000000e0" + + 
"0000000000000000000000000000000000000000000000000000000000000002" + + "0000000000000000000000001111111111111111111111111111111111111111" + + "0000000000000000000000002222222222222222222222222222222222222222" + + "0000000000000000000000000000000000000000000000000000000000000002" + + "0000000000000000000000003333333333333333333333333333333333333333" + + "0000000000000000000000004444444444444444444444444444444444444444").from_hex().unwrap(); + assert_eq!(encoded, expected); + } + + #[test] + fn encode_fixed_array_of_fixed_arrays() { + let address1 = Token::Address([0x11u8; 20]); + let address2 = Token::Address([0x22u8; 20]); + let address3 = Token::Address([0x33u8; 20]); + let address4 = Token::Address([0x44u8; 20]); + let array0 = Token::FixedArray(vec![address1, address2]); + let array1 = Token::FixedArray(vec![address3, address4]); + let fixed = Token::FixedArray(vec![array0, array1]); + let encoded = encode(&vec![fixed]); + let expected = ("".to_owned() + + "0000000000000000000000001111111111111111111111111111111111111111" + + "0000000000000000000000002222222222222222222222222222222222222222" + + "0000000000000000000000003333333333333333333333333333333333333333" + + "0000000000000000000000004444444444444444444444444444444444444444").from_hex().unwrap(); + assert_eq!(encoded, expected); + } + + #[test] + fn encode_empty_array() { + // Empty arrays + let encoded = encode(&vec![ + Token::Array(vec![]), + Token::Array(vec![])] + ); + let expected = ("".to_owned() + + "0000000000000000000000000000000000000000000000000000000000000040" + + "0000000000000000000000000000000000000000000000000000000000000060" + + "0000000000000000000000000000000000000000000000000000000000000000" + + "0000000000000000000000000000000000000000000000000000000000000000").from_hex().unwrap(); + assert_eq!(encoded, expected); + + // Nested empty arrays + let encoded = encode(&vec![ + Token::Array(vec![Token::Array(vec![])]), + Token::Array(vec![Token::Array(vec![])]), + ]); + let expected = ("".to_owned() + + "0000000000000000000000000000000000000000000000000000000000000040" + + "00000000000000000000000000000000000000000000000000000000000000a0" + + "0000000000000000000000000000000000000000000000000000000000000001" + + "0000000000000000000000000000000000000000000000000000000000000080" + + "0000000000000000000000000000000000000000000000000000000000000000" + + "0000000000000000000000000000000000000000000000000000000000000001" + + "00000000000000000000000000000000000000000000000000000000000000e0" + + "0000000000000000000000000000000000000000000000000000000000000000").from_hex().unwrap(); + assert_eq!(encoded, expected); + } + + #[test] + fn encode_bytes() { + let bytes = Token::Bytes(vec![0x12, 0x34]); + let encoded = encode(&vec![bytes]); + let expected = ("".to_owned() + + "0000000000000000000000000000000000000000000000000000000000000020" + + "0000000000000000000000000000000000000000000000000000000000000002" + + "1234000000000000000000000000000000000000000000000000000000000000").from_hex().unwrap(); + assert_eq!(encoded, expected); + } + + #[test] + fn encode_fixed_bytes() { + let bytes = Token::FixedBytes(vec![0x12, 0x34]); + let encoded = encode(&vec![bytes]); + let expected = ("".to_owned() + + "1234000000000000000000000000000000000000000000000000000000000000").from_hex().unwrap(); + assert_eq!(encoded, expected); + } + + #[test] + fn encode_string() { + let s = Token::String("gavofyork".to_owned()); + let encoded = encode(&vec![s]); + let expected = ("".to_owned() + + 
"0000000000000000000000000000000000000000000000000000000000000020" + + "0000000000000000000000000000000000000000000000000000000000000009" + + "6761766f66796f726b0000000000000000000000000000000000000000000000").from_hex().unwrap(); + assert_eq!(encoded, expected); + } + + #[test] + fn encode_bytes2() { + let bytes = Token::Bytes("10000000000000000000000000000000000000000000000000000000000002".from_hex().unwrap()); + let encoded = encode(&vec![bytes]); + let expected = ("".to_owned() + + "0000000000000000000000000000000000000000000000000000000000000020" + + "000000000000000000000000000000000000000000000000000000000000001f" + + "1000000000000000000000000000000000000000000000000000000000000200").from_hex().unwrap(); + assert_eq!(encoded, expected); + } + + #[test] + fn encode_bytes3() { + let bytes = Token::Bytes(("".to_owned() + + "1000000000000000000000000000000000000000000000000000000000000000" + + "1000000000000000000000000000000000000000000000000000000000000000").from_hex().unwrap()); + let encoded = encode(&vec![bytes]); + let expected = ("".to_owned() + + "0000000000000000000000000000000000000000000000000000000000000020" + + "0000000000000000000000000000000000000000000000000000000000000040" + + "1000000000000000000000000000000000000000000000000000000000000000" + + "1000000000000000000000000000000000000000000000000000000000000000").from_hex().unwrap(); + assert_eq!(encoded, expected); + } + + #[test] + fn encode_two_bytes() { + let bytes1 = Token::Bytes("10000000000000000000000000000000000000000000000000000000000002".from_hex().unwrap()); + let bytes2 = Token::Bytes("0010000000000000000000000000000000000000000000000000000000000002".from_hex().unwrap()); + let encoded = encode(&vec![bytes1, bytes2]); + let expected = ("".to_owned() + + "0000000000000000000000000000000000000000000000000000000000000040" + + "0000000000000000000000000000000000000000000000000000000000000080" + + "000000000000000000000000000000000000000000000000000000000000001f" + + "1000000000000000000000000000000000000000000000000000000000000200" + + "0000000000000000000000000000000000000000000000000000000000000020" + + "0010000000000000000000000000000000000000000000000000000000000002").from_hex().unwrap(); + assert_eq!(encoded, expected); + } + + #[test] + fn encode_uint() { + let mut uint = [0u8; 32]; + uint[31] = 4; + let encoded = encode(&vec![Token::Uint(uint)]); + let expected = ("".to_owned() + + "0000000000000000000000000000000000000000000000000000000000000004").from_hex().unwrap(); + assert_eq!(encoded, expected); + } + + #[test] + fn encode_int() { + let mut int = [0u8; 32]; + int[31] = 4; + let encoded = encode(&vec![Token::Int(int)]); + let expected = ("".to_owned() + + "0000000000000000000000000000000000000000000000000000000000000004").from_hex().unwrap(); + assert_eq!(encoded, expected); + } + + #[test] + fn encode_bool() { + let encoded = encode(&vec![Token::Bool(true)]); + let expected = ("".to_owned() + + "0000000000000000000000000000000000000000000000000000000000000001").from_hex().unwrap(); + assert_eq!(encoded, expected); + } + + #[test] + fn encode_bool2() { + let encoded = encode(&vec![Token::Bool(false)]); + let expected = ("".to_owned() + + "0000000000000000000000000000000000000000000000000000000000000000").from_hex().unwrap(); + assert_eq!(encoded, expected); + } + + #[test] + fn comprehensive_test() { + let bytes = ("".to_owned() + + "131a3afc00d1b1e3461b955e53fc866dcf303b3eb9f4c16f89e388930f48134b" + + "131a3afc00d1b1e3461b955e53fc866dcf303b3eb9f4c16f89e388930f48134b").from_hex().unwrap(); + let 
encoded = encode(&vec![ + Token::Int(pad_u32(5)), + Token::Bytes(bytes.clone()), + Token::Int(pad_u32(3)), + Token::Bytes(bytes) + ]); + + let expected = ("".to_owned() + + "0000000000000000000000000000000000000000000000000000000000000005" + + "0000000000000000000000000000000000000000000000000000000000000080" + + "0000000000000000000000000000000000000000000000000000000000000003" + + "00000000000000000000000000000000000000000000000000000000000000e0" + + "0000000000000000000000000000000000000000000000000000000000000040" + + "131a3afc00d1b1e3461b955e53fc866dcf303b3eb9f4c16f89e388930f48134b" + + "131a3afc00d1b1e3461b955e53fc866dcf303b3eb9f4c16f89e388930f48134b" + + "0000000000000000000000000000000000000000000000000000000000000040" + + "131a3afc00d1b1e3461b955e53fc866dcf303b3eb9f4c16f89e388930f48134b" + + "131a3afc00d1b1e3461b955e53fc866dcf303b3eb9f4c16f89e388930f48134b").from_hex().unwrap(); + assert_eq!(encoded, expected); + } + + #[test] + fn test_pad_u32() { + // this will fail if endianess is not supported + assert_eq!(pad_u32(0x1)[31], 1); + assert_eq!(pad_u32(0x100)[30], 1); + } + + #[test] + fn comprehensive_test2() { + let encoded = encode(&vec![ + Token::Int(pad_u32(1)), + Token::String("gavofyork".to_owned()), + Token::Int(pad_u32(2)), + Token::Int(pad_u32(3)), + Token::Int(pad_u32(4)), + Token::Array(vec![ + Token::Int(pad_u32(5)), + Token::Int(pad_u32(6)), + Token::Int(pad_u32(7)) + ]) + ]); + + let expected = ("".to_owned() + + "0000000000000000000000000000000000000000000000000000000000000001" + + "00000000000000000000000000000000000000000000000000000000000000c0" + + "0000000000000000000000000000000000000000000000000000000000000002" + + "0000000000000000000000000000000000000000000000000000000000000003" + + "0000000000000000000000000000000000000000000000000000000000000004" + + "0000000000000000000000000000000000000000000000000000000000000100" + + "0000000000000000000000000000000000000000000000000000000000000009" + + "6761766f66796f726b0000000000000000000000000000000000000000000000" + + "0000000000000000000000000000000000000000000000000000000000000003" + + "0000000000000000000000000000000000000000000000000000000000000005" + + "0000000000000000000000000000000000000000000000000000000000000006" + + "0000000000000000000000000000000000000000000000000000000000000007").from_hex().unwrap(); + assert_eq!(encoded, expected); + } +} + diff --git a/nix/overlay/ethabi/ethabi/src/errors.rs b/nix/overlay/ethabi/ethabi/src/errors.rs new file mode 100644 index 000000000..6ea767b08 --- /dev/null +++ b/nix/overlay/ethabi/ethabi/src/errors.rs @@ -0,0 +1,31 @@ +#![allow(unknown_lints)] +#![allow(missing_docs)] + +use std::{num, string}; +use {serde_json, hex}; + +error_chain! { + foreign_links { + SerdeJson(serde_json::Error); + ParseInt(num::ParseIntError); + Utf8(string::FromUtf8Error); + Hex(hex::FromHexError); + } + + errors { + InvalidName(name: String) { + description("Invalid name"), + display("Invalid name `{}`", name), + } + + InvalidData { + description("Invalid data"), + display("Invalid data"), + } + + CallError { + description("Call error"), + display("Call error"), + } + } +} diff --git a/nix/overlay/ethabi/ethabi/src/event.rs b/nix/overlay/ethabi/ethabi/src/event.rs new file mode 100644 index 000000000..2c5825054 --- /dev/null +++ b/nix/overlay/ethabi/ethabi/src/event.rs @@ -0,0 +1,228 @@ +//! Contract event. 
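+//!
+//! An `Event` describes a single ABI event; it can build topic filters for
+//! subscriptions (`create_filter`) and decode a matching `RawLog` back into
+//! named parameters (`parse_log`).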
+ +use std::collections::HashMap; +use tiny_keccak::keccak256; +use signature::long_signature; +use { + Log, Hash, RawLog, LogParam, RawTopicFilter, TopicFilter, + Topic, ParamType, EventParam, encode, decode, Token, + Result, ErrorKind +}; + +/// Contract event. +#[derive(Clone, Debug, PartialEq, Deserialize)] +pub struct Event { + /// Event name. + pub name: String, + /// Event input. + pub inputs: Vec, + /// If anonymous, event cannot be found using `from` filter. + pub anonymous: bool, +} + +impl Event { + /// Returns names of all params. + fn params_names(&self) -> Vec { + self.inputs.iter() + .map(|p| p.name.clone()) + .collect() + } + + /// Returns types of all params. + fn param_types(&self) -> Vec { + self.inputs.iter() + .map(|p| p.kind.clone()) + .collect() + } + + /// Returns all params of the event. + fn indexed_params(&self, indexed: bool) -> Vec { + self.inputs.iter() + .filter(|p| p.indexed == indexed) + .cloned() + .collect() + } + + /// Event signature + pub fn signature(&self) -> Hash { + long_signature(&self.name, &self.param_types()) + } + + /// Creates topic filter + pub fn create_filter(&self, raw: RawTopicFilter) -> Result { + fn convert_token(token: Token, kind: &ParamType) -> Result { + if !token.type_check(kind) { + return Err(ErrorKind::InvalidData.into()); + } + let encoded = encode(&[token]); + if encoded.len() == 32 { + let mut data = [0u8; 32]; + data.copy_from_slice(&encoded); + Ok(data) + } else { + Ok(keccak256(&encoded)) + } + } + + fn convert_topic(topic: Topic, kind: Option<&ParamType>) -> Result> { + match topic { + Topic::Any => Ok(Topic::Any), + Topic::OneOf(tokens) => match kind { + None => Err(ErrorKind::InvalidData.into()), + Some(kind) => { + let topics = tokens.into_iter() + .map(|token| convert_token(token, kind)) + .collect::>>()?; + Ok(Topic::OneOf(topics)) + } + }, + Topic::This(token) => match kind { + None => Err(ErrorKind::InvalidData.into()), + Some(kind) => Ok(Topic::This(convert_token(token, kind)?)), + } + } + } + + let kinds: Vec<_> = self.indexed_params(true).into_iter().map(|param| param.kind).collect(); + let result = if self.anonymous { + TopicFilter { + topic0: convert_topic(raw.topic0, kinds.get(0))?, + topic1: convert_topic(raw.topic1, kinds.get(1))?, + topic2: convert_topic(raw.topic2, kinds.get(2))?, + topic3: Topic::Any, + } + } else { + TopicFilter { + topic0: Topic::This(self.signature()), + topic1: convert_topic(raw.topic0, kinds.get(0))?, + topic2: convert_topic(raw.topic1, kinds.get(1))?, + topic3: convert_topic(raw.topic2, kinds.get(2))?, + } + }; + + Ok(result) + } + + /// Parses `RawLog` and retrieves all log params from it. 
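+	///
+	/// For non-anonymous events the first topic must equal the event
+	/// signature; indexed params are decoded from the remaining topics,
+	/// the rest from the log data, and the result is reassembled in
+	/// declaration order.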
+ pub fn parse_log(&self, log: RawLog) -> Result { + let topics = log.topics; + let data = log.data; + let topics_len = topics.len(); + // obtains all params info + let topic_params = self.indexed_params(true); + let data_params = self.indexed_params(false); + // then take first topic if event is not anonymous + let to_skip = if self.anonymous { + 0 + } else { + // verify + let event_signature = topics.get(0).ok_or(ErrorKind::InvalidData)?; + if event_signature != &self.signature() { + return Err(ErrorKind::InvalidData.into()); + } + 1 + }; + + let topic_types = topic_params.iter() + .map(|p| p.kind.clone()) + .collect::>(); + + let flat_topics = topics.into_iter() + .skip(to_skip) + .flat_map(|t| t.to_vec()) + .collect::>(); + + let topic_tokens = try!(decode(&topic_types, &flat_topics)); + + // topic may be only a 32 bytes encoded token + if topic_tokens.len() != topics_len - to_skip { + return Err(ErrorKind::InvalidData.into()); + } + + let topics_named_tokens = topic_params.into_iter() + .map(|p| p.name) + .zip(topic_tokens.into_iter()); + + let data_types = data_params.iter() + .map(|p| p.kind.clone()) + .collect::>(); + + let data_tokens = try!(decode(&data_types, &data)); + + let data_named_tokens = data_params.into_iter() + .map(|p| p.name) + .zip(data_tokens.into_iter()); + + let named_tokens = topics_named_tokens + .chain(data_named_tokens) + .collect::>(); + + let decoded_params = self.params_names() + .into_iter() + .map(|name| LogParam { + name: name.clone(), + value: named_tokens.get(&name).unwrap().clone() + }) + .collect(); + + let result = Log { + params: decoded_params, + }; + + Ok(result) + } +} + +#[cfg(test)] +mod tests { + use hex::FromHex; + use token::{Token, TokenFromHex}; + use signature::long_signature; + use log::{RawLog, Log}; + use {EventParam, ParamType, Event, LogParam}; + + #[test] + fn test_decoding_event() { + let event = Event { + name: "foo".to_owned(), + inputs: vec![EventParam { + name: "a".to_owned(), + kind: ParamType::Int(256), + indexed: false, + }, EventParam { + name: "b".to_owned(), + kind: ParamType::Int(256), + indexed: true, + }, EventParam { + name: "c".to_owned(), + kind: ParamType::Address, + indexed: false, + }, EventParam { + name: "d".to_owned(), + kind: ParamType::Address, + indexed: true, + }], + anonymous: false, + }; + + let log = RawLog { + topics: vec![ + long_signature("foo", &[ParamType::Int(256), ParamType::Int(256), ParamType::Address, ParamType::Address]), + "0000000000000000000000000000000000000000000000000000000000000002".token_from_hex().unwrap(), + "0000000000000000000000001111111111111111111111111111111111111111".token_from_hex().unwrap(), + ], + data: + ("".to_owned() + + "0000000000000000000000000000000000000000000000000000000000000003" + + "0000000000000000000000002222222222222222222222222222222222222222").from_hex().unwrap() + }; + let result = event.parse_log(log).unwrap(); + + assert_eq!(result, Log { params: vec![ + ("a".to_owned(), Token::Int("0000000000000000000000000000000000000000000000000000000000000003".token_from_hex().unwrap())), + ("b".to_owned(), Token::Int("0000000000000000000000000000000000000000000000000000000000000002".token_from_hex().unwrap())), + ("c".to_owned(), Token::Address("2222222222222222222222222222222222222222".token_from_hex().unwrap())), + ("d".to_owned(), Token::Address("1111111111111111111111111111111111111111".token_from_hex().unwrap())), + ].into_iter().map(|(name, value)| LogParam { name, value }).collect::>()}); + } +} diff --git a/nix/overlay/ethabi/ethabi/src/event_param.rs 
b/nix/overlay/ethabi/ethabi/src/event_param.rs new file mode 100644 index 000000000..b3db27a6d --- /dev/null +++ b/nix/overlay/ethabi/ethabi/src/event_param.rs @@ -0,0 +1,38 @@ +//! Event param specification. + +use {ParamType}; + +/// Event param specification. +#[derive(Debug, Clone, PartialEq, Deserialize)] +pub struct EventParam { + /// Param name. + pub name: String, + /// Param type. + #[serde(rename="type")] + pub kind: ParamType, + /// Indexed flag. If true, param is used to build block bloom. + pub indexed: bool, +} + +#[cfg(test)] +mod tests { + use serde_json; + use {EventParam, ParamType}; + + #[test] + fn event_param_deserialization() { + let s = r#"{ + "name": "foo", + "type": "address", + "indexed": true + }"#; + + let deserialized: EventParam = serde_json::from_str(s).unwrap(); + + assert_eq!(deserialized, EventParam { + name: "foo".to_owned(), + kind: ParamType::Address, + indexed: true, + }); + } +} diff --git a/nix/overlay/ethabi/ethabi/src/filter.rs b/nix/overlay/ethabi/ethabi/src/filter.rs new file mode 100644 index 000000000..7af01c4db --- /dev/null +++ b/nix/overlay/ethabi/ethabi/src/filter.rs @@ -0,0 +1,163 @@ +use serde::{Serialize, Serializer}; +use serde_json::Value; +use hex::ToHex; +use {Hash, Token}; + +/// Raw topic filter. +#[derive(Debug, PartialEq, Default)] +pub struct RawTopicFilter { + /// Topic. + pub topic0: Topic, + /// Topic. + pub topic1: Topic, + /// Topic. + pub topic2: Topic, +} + +/// Topic filter. +#[derive(Debug, PartialEq, Default)] +pub struct TopicFilter { + /// Usually (for not-anonymous transactions) the first topic is event signature. + pub topic0: Topic, + /// Second topic. + pub topic1: Topic, + /// Third topic. + pub topic2: Topic, + /// Fourth topic. + pub topic3: Topic, +} + +impl Serialize for TopicFilter { + fn serialize(&self, serializer: S) -> Result + where S: Serializer { + vec![&self.topic0, &self.topic1, &self.topic2, &self.topic3].serialize(serializer) + } +} + +/// Acceptable topic possibilities. +#[derive(Debug, PartialEq)] +pub enum Topic { + /// Match any. + Any, + /// Match any of the hashes. + OneOf(Vec), + /// Match only this hash. 
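+	/// (In a `RawTopicFilter` this holds a `Token`; in a `TopicFilter` it is
+	/// already the encoded 32-byte topic hash.)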
+ This(T), +} + +impl Topic { + /// Map + pub fn map(self, f: F) -> Topic where F: Fn(T) -> O { + match self { + Topic::Any => Topic::Any, + Topic::OneOf(topics) => Topic::OneOf(topics.into_iter().map(f).collect()), + Topic::This(topic) => Topic::This(f(topic)), + } + } +} + +impl Default for Topic { + fn default() -> Self { + Topic::Any + } +} + +impl From> for Topic { + fn from(o: Option) -> Self { + match o { + Some(topic) => Topic::This(topic), + None => Topic::Any, + } + } +} + +impl From for Topic { + fn from(topic: T) -> Self { + Topic::This(topic) + } +} + +impl From> for Topic { + fn from(topics: Vec) -> Self { + Topic::OneOf(topics) + } +} + +impl Into> for Topic { + fn into(self: Self) -> Vec { + match self { + Topic::Any => vec![], + Topic::This(topic) => vec![topic], + Topic::OneOf(topics) => topics, + } + } +} + +impl Serialize for Topic { + fn serialize(&self, serializer: S) -> Result + where S: Serializer { + let value = match *self { + Topic::Any => Value::Null, + Topic::OneOf(ref vec) => { + let v = vec.iter() + .map(|h| format!("0x{}", h.to_hex())) + .map(Value::String) + .collect(); + Value::Array(v) + }, + Topic::This(ref hash) => Value::String(format!("0x{}", hash.to_hex())), + }; + value.serialize(serializer) + } +} + +#[cfg(test)] +mod tests { + use serde_json; + use hex::FromHex; + use super::{Topic, TopicFilter}; + use Hash; + + fn hash(s: &str) -> Hash { + let v = s.from_hex().unwrap(); + let mut result = [0u8; 32]; + result.copy_from_slice(&v); + result + } + + #[test] + fn test_topic_filter_serialization() { + let expected = +r#"["0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b",null,["0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b","0x0000000000000000000000000aff3454fce5edbc8cca8697c15331677e6ebccc"],null]"#; + + let topic = TopicFilter { + topic0: Topic::This(hash("000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b")), + topic1: Topic::Any, + topic2: Topic::OneOf(vec![hash("000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b"), hash("0000000000000000000000000aff3454fce5edbc8cca8697c15331677e6ebccc")]), + topic3: Topic::Any, + }; + + let topic_str = serde_json::to_string(&topic).unwrap(); + assert_eq!(expected, &topic_str); + } + + #[test] + fn test_topic_from() { + assert_eq!(Topic::Any as Topic, None.into()); + assert_eq!(Topic::This(10u64), 10u64.into()); + assert_eq!(Topic::OneOf(vec![10u64, 20]), vec![10u64, 20].into()); + } + + #[test] + fn test_topic_into_vec() { + let expected: Vec = vec![]; + let is: Vec = (Topic::Any as Topic).into(); + assert_eq!(expected, is); + let expected: Vec = vec![10]; + let is: Vec = Topic::This(10u64).into(); + assert_eq!(expected, is); + let expected: Vec = vec![10, 20]; + let is: Vec = Topic::OneOf(vec![10u64, 20]).into(); + assert_eq!(expected, is); + } +} diff --git a/nix/overlay/ethabi/ethabi/src/function.rs b/nix/overlay/ethabi/ethabi/src/function.rs new file mode 100644 index 000000000..2bcec76ad --- /dev/null +++ b/nix/overlay/ethabi/ethabi/src/function.rs @@ -0,0 +1,81 @@ +//! Contract function call builder. + +use signature::short_signature; +use {Param, Token, Result, ErrorKind, Bytes, decode, ParamType, encode}; + +/// Contract function specification. +#[derive(Debug, Clone, PartialEq, Deserialize)] +pub struct Function { + /// Function name. + pub name: String, + /// Function input. + pub inputs: Vec, + /// Function output. + pub outputs: Vec, + /// Constant function. 
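+	/// Defaults to `false` when the ABI JSON omits the field.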
+ #[serde(default)] + pub constant: bool, +} + +impl Function { + /// Returns all input params of given function. + fn input_param_types(&self) -> Vec { + self.inputs.iter() + .map(|p| p.kind.clone()) + .collect() + } + + /// Returns all output params of given function. + fn output_param_types(&self) -> Vec { + self.outputs.iter() + .map(|p| p.kind.clone()) + .collect() + } + + /// Prepares ABI function call with given input params. + pub fn encode_input(&self, tokens: &[Token]) -> Result { + let params = self.input_param_types(); + + if !Token::types_check(tokens, ¶ms) { + return Err(ErrorKind::InvalidData.into()); + } + + let signed = short_signature(&self.name, ¶ms).to_vec(); + let encoded = encode(tokens); + Ok(signed.into_iter().chain(encoded.into_iter()).collect()) + } + + /// Parses the ABI function output to list of tokens. + pub fn decode_output(&self, data: &[u8]) -> Result> { + decode(&self.output_param_types(), &data) + } +} + +#[cfg(test)] +mod tests { + use hex::FromHex; + use {Token, Param, Function, ParamType}; + + #[test] + fn test_function_encode_call() { + let interface = Function { + name: "baz".to_owned(), + inputs: vec![Param { + name: "a".to_owned(), + kind: ParamType::Uint(32), + }, Param { + name: "b".to_owned(), + kind: ParamType::Bool, + }], + outputs: vec![], + constant: false, + }; + + let func = Function::from(interface); + let mut uint = [0u8; 32]; + uint[31] = 69; + let encoded = func.encode_input(&[Token::Uint(uint), Token::Bool(true)]).unwrap(); + let expected = "cdcd77c000000000000000000000000000000000000000000000000000000000000000450000000000000000000000000000000000000000000000000000000000000001".from_hex().unwrap(); + assert_eq!(encoded, expected); + } +} diff --git a/nix/overlay/ethabi/ethabi/src/lib.rs b/nix/overlay/ethabi/ethabi/src/lib.rs new file mode 100644 index 000000000..ee4d8604e --- /dev/null +++ b/nix/overlay/ethabi/ethabi/src/lib.rs @@ -0,0 +1,63 @@ +//! Ethereum ABI encoding decoding library. + +#![warn(missing_docs)] + +extern crate rustc_hex as hex; +extern crate serde; +extern crate serde_json; +extern crate tiny_keccak; + +#[macro_use] +extern crate serde_derive; + +#[macro_use] +extern crate error_chain; + +pub mod param_type; +pub mod token; +mod constructor; +mod contract; +mod decoder; +mod encoder; +mod errors; +mod event; +mod event_param; +mod filter; +mod function; +mod log; +mod operation; +mod param; +mod signature; +pub mod util; + +pub use param_type::ParamType; +pub use constructor::Constructor; +pub use contract::{Contract, Functions, Events}; +pub use token::Token; +pub use errors::{Error, ErrorKind, Result, ResultExt}; +pub use encoder::encode; +pub use decoder::decode; +pub use filter::{Topic, TopicFilter, RawTopicFilter}; +pub use function::Function; +pub use param::Param; +pub use log::{Log, RawLog, LogParam}; +pub use event::Event; +pub use event_param::EventParam; + +/// ABI address. +pub type Address = [u8; 20]; + +/// ABI fixed bytes. +pub type FixedBytes = Vec; + +/// ABI bytes. +pub type Bytes = Vec; + +/// ABI signed integer. +pub type Int = [u8; 32]; + +/// ABI unsigned integer. +pub type Uint = [u8; 32]; + +/// Commonly used FixedBytes of size 32 +pub type Hash = [u8; 32]; diff --git a/nix/overlay/ethabi/ethabi/src/log.rs b/nix/overlay/ethabi/ethabi/src/log.rs new file mode 100644 index 000000000..5425c9451 --- /dev/null +++ b/nix/overlay/ethabi/ethabi/src/log.rs @@ -0,0 +1,35 @@ +use {Hash, Token, Bytes}; + +/// Ethereum log. 
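+/// A raw log is just the indexed topics plus the unindexed data blob; the
+/// `From` impl below builds one straight from a `(topics, data)` tuple,
+/// e.g. `RawLog::from((topics, data))`.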
+#[derive(Debug, PartialEq)] +pub struct RawLog { + /// Indexed event params are represented as log topics. + pub topics: Vec, + /// Others are just plain data. + pub data: Bytes, +} + +impl From<(Vec, Bytes)> for RawLog { + fn from(raw: (Vec, Bytes)) -> Self { + RawLog { + topics: raw.0, + data: raw.1, + } + } +} + +/// Decoded log param. +#[derive(Debug, PartialEq)] +pub struct LogParam { + /// Decoded log name. + pub name: String, + /// Decoded log value. + pub value: Token, +} + +/// Decoded log. +#[derive(Debug, PartialEq)] +pub struct Log { + /// Log params. + pub params: Vec, +} diff --git a/nix/overlay/ethabi/ethabi/src/operation.rs b/nix/overlay/ethabi/ethabi/src/operation.rs new file mode 100644 index 000000000..a73d72b23 --- /dev/null +++ b/nix/overlay/ethabi/ethabi/src/operation.rs @@ -0,0 +1,144 @@ +//! Operation type. + +use serde::{Deserialize, Deserializer}; +use serde::de::{Error as SerdeError}; +use serde_json::Value; +use serde_json::value::from_value; +use {Function, Constructor, Event}; + +/// Operation type. +#[derive(Clone, Debug, PartialEq)] +pub enum Operation { + /// Contract constructor. + Constructor(Constructor), + /// Contract function. + Function(Function), + /// Contract event. + Event(Event), + /// Fallback, ignored. + Fallback, +} + +impl<'a> Deserialize<'a> for Operation { + fn deserialize(deserializer: D) -> Result where D: Deserializer<'a> { + let v: Value = try!(Deserialize::deserialize(deserializer)); + let cloned = v.clone(); + let map = try!(cloned.as_object().ok_or_else(|| SerdeError::custom("Invalid operation"))); + let s = try!(map.get("type").and_then(Value::as_str).ok_or_else(|| SerdeError::custom("Invalid operation type"))); + + // This is a workaround to support non-spec compliant function and event names, + // see: https://github.com/paritytech/parity/issues/4122 + fn sanitize_name(name: &mut String) { + if let Some(i) = name.find('(') { + name.truncate(i); + } + } + + let result = match s { + "constructor" => from_value(v).map(Operation::Constructor), + "function" => from_value(v).map(|mut f: Function| { + sanitize_name(&mut f.name); + Operation::Function(f) + }), + "event" => from_value(v).map(|mut e: Event| { + sanitize_name(&mut e.name); + Operation::Event(e) + }), + "fallback" => Ok(Operation::Fallback), + _ => Err(SerdeError::custom("Invalid operation type.")), + }; + result.map_err(|e| D::Error::custom(e.to_string())) + } +} + +#[cfg(test)] +mod tests { + use serde_json; + use super::Operation; + use {Function, Param, ParamType}; + + #[test] + fn deserialize_operation() { + let s = r#"{ + "type":"function", + "inputs": [{ + "name":"a", + "type":"address" + }], + "name":"foo", + "outputs": [] + }"#; + + let deserialized: Operation = serde_json::from_str(s).unwrap(); + + assert_eq!(deserialized, Operation::Function(Function { + name: "foo".to_owned(), + inputs: vec![ + Param { + name: "a".to_owned(), + kind: ParamType::Address, + } + ], + outputs: vec![], + constant: false, + })); + } + + #[test] + fn deserialize_sanitize_function_name() { + fn test_sanitize_function_name(name: &str, expected: &str) { + let s = format!(r#"{{ + "type":"function", + "inputs": [{{ + "name":"a", + "type":"address" + }}], + "name":"{}", + "outputs": [] + }}"#, name); + + let deserialized: Operation = serde_json::from_str(&s).unwrap(); + let function = match deserialized { + Operation::Function(f) => f, + _ => panic!("expected funciton"), + }; + + assert_eq!(function.name, expected); + } + + test_sanitize_function_name("foo", "foo"); + 
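+  // `sanitize_name` above truncates at the first '(', so a name that arrives
+  // with its full signature collapses to the bare identifier, as the
+  // remaining cases check: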
test_sanitize_function_name("foo()", "foo"); + test_sanitize_function_name("()", ""); + test_sanitize_function_name("", ""); + } + + #[test] + fn deserialize_sanitize_event_name() { + fn test_sanitize_event_name(name: &str, expected: &str) { + let s = format!(r#"{{ + "type":"event", + "inputs": [{{ + "name":"a", + "type":"address", + "indexed":true + }}], + "name":"{}", + "outputs": [], + "anonymous": false + }}"#, name); + + let deserialized: Operation = serde_json::from_str(&s).unwrap(); + let event = match deserialized { + Operation::Event(e) => e, + _ => panic!("expected event!"), + }; + + assert_eq!(event.name, expected); + } + + test_sanitize_event_name("foo", "foo"); + test_sanitize_event_name("foo()", "foo"); + test_sanitize_event_name("()", ""); + test_sanitize_event_name("", ""); + } +} diff --git a/nix/overlay/ethabi/ethabi/src/param.rs b/nix/overlay/ethabi/ethabi/src/param.rs new file mode 100644 index 000000000..6f86096b9 --- /dev/null +++ b/nix/overlay/ethabi/ethabi/src/param.rs @@ -0,0 +1,33 @@ +//! Function param. +use ParamType; + +/// Function param. +#[derive(Debug, Clone, PartialEq, Deserialize)] +pub struct Param { + /// Param name. + pub name: String, + /// Param type. + #[serde(rename="type")] + pub kind: ParamType, +} + +#[cfg(test)] +mod tests { + use serde_json; + use {Param, ParamType}; + + #[test] + fn param_deserialization() { + let s = r#"{ + "name": "foo", + "type": "address" + }"#; + + let deserialized: Param = serde_json::from_str(s).unwrap(); + + assert_eq!(deserialized, Param { + name: "foo".to_owned(), + kind: ParamType::Address, + }); + } +} diff --git a/nix/overlay/ethabi/ethabi/src/param_type/deserialize.rs b/nix/overlay/ethabi/ethabi/src/param_type/deserialize.rs new file mode 100644 index 000000000..8e8aae82c --- /dev/null +++ b/nix/overlay/ethabi/ethabi/src/param_type/deserialize.rs @@ -0,0 +1,52 @@ +use std::fmt; +use serde::{Deserialize, Deserializer}; +use serde::de::{Error as SerdeError, Visitor}; +use super::{ParamType, Reader}; + +impl<'a> Deserialize<'a> for ParamType { + fn deserialize(deserializer: D) -> Result where D: Deserializer<'a> { + deserializer.deserialize_identifier(ParamTypeVisitor) + } +} + +struct ParamTypeVisitor; + +impl<'a> Visitor<'a> for ParamTypeVisitor { + type Value = ParamType; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + write!(formatter, "a correct name of abi-encodable parameter type") + } + + fn visit_str(self, value: &str) -> Result where E: SerdeError { + Reader::read(value).map_err(|e| SerdeError::custom(format!("{:?}", e).as_str())) + } + + fn visit_string(self, value: String) -> Result where E: SerdeError { + self.visit_str(value.as_str()) + } +} + +#[cfg(test)] +mod tests { + use serde_json; + use ParamType; + + #[test] + fn param_type_deserialization() { + let s = r#"["address", "bytes", "bytes32", "bool", "string", "int", "uint", "address[]", "uint[3]", "bool[][5]"]"#; + let deserialized: Vec = serde_json::from_str(s).unwrap(); + assert_eq!(deserialized, vec![ + ParamType::Address, + ParamType::Bytes, + ParamType::FixedBytes(32), + ParamType::Bool, + ParamType::String, + ParamType::Int(256), + ParamType::Uint(256), + ParamType::Array(Box::new(ParamType::Address)), + ParamType::FixedArray(Box::new(ParamType::Uint(256)), 3), + ParamType::FixedArray(Box::new(ParamType::Array(Box::new(ParamType::Bool))), 5) + ]); + } +} diff --git a/nix/overlay/ethabi/ethabi/src/param_type/mod.rs b/nix/overlay/ethabi/ethabi/src/param_type/mod.rs new file mode 100644 index 000000000..9f13ada79 --- 
/dev/null +++ b/nix/overlay/ethabi/ethabi/src/param_type/mod.rs @@ -0,0 +1,10 @@ +//! Function and event param types. + +mod deserialize; +mod param_type; +mod reader; +mod writer; + +pub use self::param_type::ParamType; +pub use self::writer::Writer; +pub use self::reader::Reader; diff --git a/nix/overlay/ethabi/ethabi/src/param_type/param_type.rs b/nix/overlay/ethabi/ethabi/src/param_type/param_type.rs new file mode 100644 index 000000000..be3769ea2 --- /dev/null +++ b/nix/overlay/ethabi/ethabi/src/param_type/param_type.rs @@ -0,0 +1,52 @@ +//! Function and event param types. + +use std::fmt; +use super::Writer; + +/// Function and event param types. +#[derive(Debug, Clone, PartialEq)] +pub enum ParamType { + /// Address. + Address, + /// Bytes. + Bytes, + /// Signed integer. + Int(usize), + /// Unisgned integer. + Uint(usize), + /// Boolean. + Bool, + /// String. + String, + /// Array of unknown size. + Array(Box), + /// Vector of bytes with fixed size. + FixedBytes(usize), + /// Array with fixed size. + FixedArray(Box, usize), +} + +impl fmt::Display for ParamType { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", Writer::write(self)) + } +} + +#[cfg(test)] +mod tests { + use ParamType; + + #[test] + fn test_param_type_display() { + assert_eq!(format!("{}", ParamType::Address), "address".to_owned()); + assert_eq!(format!("{}", ParamType::Bytes), "bytes".to_owned()); + assert_eq!(format!("{}", ParamType::FixedBytes(32)), "bytes32".to_owned()); + assert_eq!(format!("{}", ParamType::Uint(256)), "uint256".to_owned()); + assert_eq!(format!("{}", ParamType::Int(64)), "int64".to_owned()); + assert_eq!(format!("{}", ParamType::Bool), "bool".to_owned()); + assert_eq!(format!("{}", ParamType::String), "string".to_owned()); + assert_eq!(format!("{}", ParamType::Array(Box::new(ParamType::Bool))), "bool[]".to_owned()); + assert_eq!(format!("{}", ParamType::FixedArray(Box::new(ParamType::String), 2)), "string[2]".to_owned()); + assert_eq!(format!("{}", ParamType::FixedArray(Box::new(ParamType::Array(Box::new(ParamType::Bool))), 2)), "bool[][2]".to_owned()); + } +} diff --git a/nix/overlay/ethabi/ethabi/src/param_type/reader.rs b/nix/overlay/ethabi/ethabi/src/param_type/reader.rs new file mode 100644 index 000000000..28f985fd4 --- /dev/null +++ b/nix/overlay/ethabi/ethabi/src/param_type/reader.rs @@ -0,0 +1,100 @@ +use {ParamType, Error, ErrorKind}; + +/// Used to convert param type represented as a string to rust structure. +pub struct Reader; + +impl Reader { + /// Converts string to param type. + pub fn read(name: &str) -> Result { + // check if it is a fixed or dynamic array. + if let Some(']') = name.chars().last() { + // take number part + let num: String = name.chars() + .rev() + .skip(1) + .take_while(|c| *c != '[') + .collect::() + .chars() + .rev() + .collect(); + + let count = name.chars().count(); + if num.len() == 0 { + // we already know it's a dynamic array! + let subtype = try!(Reader::read(&name[..count - 2])); + return Ok(ParamType::Array(Box::new(subtype))); + } else { + // it's a fixed array. 
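+ // e.g. for "bool[3]", `num` is "3" and the subtype is read recursively
+ // from "bool"; nested forms take the same path, so (per the tests below)
+ // Reader::read("bool[][3]").unwrap() yields
+ // ParamType::FixedArray(Box::new(ParamType::Array(Box::new(ParamType::Bool))), 3).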
+ let len = try!(usize::from_str_radix(&num, 10)); + let subtype = try!(Reader::read(&name[..count - num.len() - 2])); + return Ok(ParamType::FixedArray(Box::new(subtype), len)); + } + } + + let result = match name { + "address" => ParamType::Address, + "bytes" => ParamType::Bytes, + "bool" => ParamType::Bool, + "string" => ParamType::String, + "int" => ParamType::Int(256), + "uint" => ParamType::Uint(256), + s if s.starts_with("int") => { + let len = try!(usize::from_str_radix(&s[3..], 10)); + ParamType::Int(len) + }, + s if s.starts_with("uint") => { + let len = try!(usize::from_str_radix(&s[4..], 10)); + ParamType::Uint(len) + }, + s if s.starts_with("bytes") => { + let len = try!(usize::from_str_radix(&s[5..], 10)); + ParamType::FixedBytes(len) + }, + _ => { + return Err(ErrorKind::InvalidName(name.to_owned()).into()); + } + }; + + Ok(result) + } +} + +#[cfg(test)] +mod tests { + use ParamType; + use super::Reader; + + #[test] + fn test_read_param() { + assert_eq!(Reader::read("address").unwrap(), ParamType::Address); + assert_eq!(Reader::read("bytes").unwrap(), ParamType::Bytes); + assert_eq!(Reader::read("bytes32").unwrap(), ParamType::FixedBytes(32)); + assert_eq!(Reader::read("bool").unwrap(), ParamType::Bool); + assert_eq!(Reader::read("string").unwrap(), ParamType::String); + assert_eq!(Reader::read("int").unwrap(), ParamType::Int(256)); + assert_eq!(Reader::read("uint").unwrap(), ParamType::Uint(256)); + assert_eq!(Reader::read("int32").unwrap(), ParamType::Int(32)); + assert_eq!(Reader::read("uint32").unwrap(), ParamType::Uint(32)); + } + + #[test] + fn test_read_array_param() { + assert_eq!(Reader::read("address[]").unwrap(), ParamType::Array(Box::new(ParamType::Address))); + assert_eq!(Reader::read("uint[]").unwrap(), ParamType::Array(Box::new(ParamType::Uint(256)))); + assert_eq!(Reader::read("bytes[]").unwrap(), ParamType::Array(Box::new(ParamType::Bytes))); + assert_eq!(Reader::read("bool[][]").unwrap(), ParamType::Array(Box::new(ParamType::Array(Box::new(ParamType::Bool))))); + } + + #[test] + fn test_read_fixed_array_param() { + assert_eq!(Reader::read("address[2]").unwrap(), ParamType::FixedArray(Box::new(ParamType::Address), 2)); + assert_eq!(Reader::read("bool[17]").unwrap(), ParamType::FixedArray(Box::new(ParamType::Bool), 17)); + assert_eq!(Reader::read("bytes[45][3]").unwrap(), ParamType::FixedArray(Box::new(ParamType::FixedArray(Box::new(ParamType::Bytes), 45)), 3)); + } + + #[test] + fn test_read_mixed_arrays() { + assert_eq!(Reader::read("bool[][3]").unwrap(), ParamType::FixedArray(Box::new(ParamType::Array(Box::new(ParamType::Bool))), 3)); + assert_eq!(Reader::read("bool[3][]").unwrap(), ParamType::Array(Box::new(ParamType::FixedArray(Box::new(ParamType::Bool), 3)))); + } +} diff --git a/nix/overlay/ethabi/ethabi/src/param_type/writer.rs b/nix/overlay/ethabi/ethabi/src/param_type/writer.rs new file mode 100644 index 000000000..353d32c20 --- /dev/null +++ b/nix/overlay/ethabi/ethabi/src/param_type/writer.rs @@ -0,0 +1,41 @@ +use ParamType; + +/// Output formatter for param type. +pub struct Writer; + +impl Writer { + /// Returns string which is a formatted represenation of param. 
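+ ///
+ /// For instance (mirroring the tests below),
+ /// `Writer::write(&ParamType::Uint(256))` is "uint256", and
+ /// `Writer::write(&ParamType::FixedArray(Box::new(ParamType::Array(Box::new(ParamType::Bool))), 2))` is "bool[][2]".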
+ pub fn write(param: &ParamType) -> String { + match *param { + ParamType::Address => "address".to_owned(), + ParamType::Bytes => "bytes".to_owned(), + ParamType::FixedBytes(len) => format!("bytes{}", len), + ParamType::Int(len) => format!("int{}", len), + ParamType::Uint(len) => format!("uint{}", len), + ParamType::Bool => "bool".to_owned(), + ParamType::String => "string".to_owned(), + ParamType::FixedArray(ref param, len) => format!("{}[{}]", Writer::write(param), len), + ParamType::Array(ref param) => format!("{}[]", Writer::write(param)), + } + } +} + +#[cfg(test)] +mod tests { + use ParamType; + use super::Writer; + + #[test] + fn test_write_param() { + assert_eq!(Writer::write(&ParamType::Address), "address".to_owned()); + assert_eq!(Writer::write(&ParamType::Bytes), "bytes".to_owned()); + assert_eq!(Writer::write(&ParamType::FixedBytes(32)), "bytes32".to_owned()); + assert_eq!(Writer::write(&ParamType::Uint(256)), "uint256".to_owned()); + assert_eq!(Writer::write(&ParamType::Int(64)), "int64".to_owned()); + assert_eq!(Writer::write(&ParamType::Bool), "bool".to_owned()); + assert_eq!(Writer::write(&ParamType::String), "string".to_owned()); + assert_eq!(Writer::write(&ParamType::Array(Box::new(ParamType::Bool))), "bool[]".to_owned()); + assert_eq!(Writer::write(&ParamType::FixedArray(Box::new(ParamType::String), 2)), "string[2]".to_owned()); + assert_eq!(Writer::write(&ParamType::FixedArray(Box::new(ParamType::Array(Box::new(ParamType::Bool))), 2)), "bool[][2]".to_owned()); + } +} diff --git a/nix/overlay/ethabi/ethabi/src/signature.rs b/nix/overlay/ethabi/ethabi/src/signature.rs new file mode 100644 index 000000000..351b48704 --- /dev/null +++ b/nix/overlay/ethabi/ethabi/src/signature.rs @@ -0,0 +1,39 @@ +use tiny_keccak::Keccak; +use param_type::{Writer, ParamType}; + +pub fn short_signature(name: &str, params: &[ParamType]) -> [u8; 4] { + let mut result = [0u8; 4]; + fill_signature(name, params, &mut result); + result +} + +pub fn long_signature(name: &str, params: &[ParamType]) -> [u8; 32] { + let mut result = [0u8; 32]; + fill_signature(name, params, &mut result); + result +} + +fn fill_signature(name: &str, params: &[ParamType], result: &mut [u8]) { + let types = params.iter() + .map(Writer::write) + .collect::>() + .join(","); + + let data: Vec = From::from(format!("{}({})", name, types).as_str()); + + let mut sponge = Keccak::new_keccak256(); + sponge.update(&data); + sponge.finalize(result); +} + +#[cfg(test)] +mod tests { + use hex::FromHex; + use super::short_signature; + use {ParamType}; + + #[test] + fn test_signature() { + assert_eq!("cdcd77c0".from_hex().unwrap(), short_signature("baz", &[ParamType::Uint(32), ParamType::Bool])); + } +} diff --git a/nix/overlay/ethabi/ethabi/src/token/from_hex.rs b/nix/overlay/ethabi/ethabi/src/token/from_hex.rs new file mode 100644 index 000000000..6eff62070 --- /dev/null +++ b/nix/overlay/ethabi/ethabi/src/token/from_hex.rs @@ -0,0 +1,31 @@ +//! Creates fixed size token from bytes. + +use hex::FromHex; +use errors::{Error, ErrorKind}; + +/// Creates fixed size token from bytes. +pub trait TokenFromHex { + /// Converts bytes to token. + fn token_from_hex(&self) -> Result; +} + +macro_rules! 
impl_token_from_hex { + ($size: expr) => { + impl TokenFromHex<[u8; $size]> for str { + fn token_from_hex(&self) -> Result<[u8; $size], Error> { + let mut result = [0u8; $size]; + let bytes = self.from_hex()?; + + if bytes.len() != $size { + return Err(ErrorKind::InvalidData.into()); + } + + result.copy_from_slice(&bytes); + Ok(result) + } + } + } +} + +impl_token_from_hex!(20); +impl_token_from_hex!(32); diff --git a/nix/overlay/ethabi/ethabi/src/token/lenient.rs b/nix/overlay/ethabi/ethabi/src/token/lenient.rs new file mode 100644 index 000000000..8571b6e2f --- /dev/null +++ b/nix/overlay/ethabi/ethabi/src/token/lenient.rs @@ -0,0 +1,48 @@ +use token::{Tokenizer, StrictTokenizer}; +use util::{pad_u32, pad_i32}; +use errors::Error; + +/// Tries to parse string as a token. Does not require string to clearly represent the value. +pub struct LenientTokenizer; + +impl Tokenizer for LenientTokenizer { + fn tokenize_address(value: &str) -> Result<[u8; 20], Error> { + StrictTokenizer::tokenize_address(value) + } + + fn tokenize_string(value: &str) -> Result { + StrictTokenizer::tokenize_string(value) + } + + fn tokenize_bool(value: &str) -> Result { + StrictTokenizer::tokenize_bool(value) + } + + fn tokenize_bytes(value: &str) -> Result, Error> { + StrictTokenizer::tokenize_bytes(value) + } + + fn tokenize_fixed_bytes(value: &str, len: usize) -> Result, Error> { + StrictTokenizer::tokenize_fixed_bytes(value, len) + } + + fn tokenize_uint(value: &str) -> Result<[u8; 32], Error> { + let result = StrictTokenizer::tokenize_uint(value); + if result.is_ok() { + return result; + } + + let uint = try!(u32::from_str_radix(value, 10)); + Ok(pad_u32(uint)) + } + + fn tokenize_int(value: &str) -> Result<[u8; 32], Error> { + let result = StrictTokenizer::tokenize_int(value); + if result.is_ok() { + return result; + } + + let int = try!(i32::from_str_radix(value, 10)); + Ok(pad_i32(int)) + } +} diff --git a/nix/overlay/ethabi/ethabi/src/token/mod.rs b/nix/overlay/ethabi/ethabi/src/token/mod.rs new file mode 100644 index 000000000..93b1a9d61 --- /dev/null +++ b/nix/overlay/ethabi/ethabi/src/token/mod.rs @@ -0,0 +1,106 @@ +//! ABI param and parsing for it. + +mod lenient; +mod strict; +mod token; +mod from_hex; + +use {ParamType, Error, ErrorKind, ResultExt}; +pub use self::lenient::LenientTokenizer; +pub use self::strict::StrictTokenizer; +pub use self::token::Token; +pub use self::from_hex::TokenFromHex; + +/// This trait should be used to parse string values as tokens. +pub trait Tokenizer { + /// Tries to parse a string as a token of given type. + fn tokenize(param: &ParamType, value: &str) -> Result { + match *param { + ParamType::Address => Self::tokenize_address(value).map(Token::Address), + ParamType::String => Self::tokenize_string(value).map(Token::String), + ParamType::Bool => Self::tokenize_bool(value).map(Token::Bool), + ParamType::Bytes => Self::tokenize_bytes(value).map(Token::Bytes), + ParamType::FixedBytes(len) => Self::tokenize_fixed_bytes(value, len).map(Token::FixedBytes), + ParamType::Uint(_) => Self::tokenize_uint(value).map(Token::Uint), + ParamType::Int(_) => Self::tokenize_int(value).map(Token::Int), + ParamType::Array(ref p) => Self::tokenize_array(value, p).map(Token::Array), + ParamType::FixedArray(ref p, len) => Self::tokenize_fixed_array(value, p, len).map(Token::FixedArray), + }.chain_err(|| format!("Cannot parse {}", param)) + } + + /// Tries to parse a value as a vector of tokens of fixed size. 
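+ ///
+ /// The default implementation delegates to `tokenize_array` and then
+ /// rejects results whose length differs from `len`, so "[true,false]"
+ /// tokenizes against `ParamType::FixedArray(Box::new(ParamType::Bool), 2)`
+ /// but not against a fixed array of length 3.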
+ fn tokenize_fixed_array(value: &str, param: &ParamType, len: usize) -> Result, Error> { + let result = try!(Self::tokenize_array(value, param)); + match result.len() == len { + true => Ok(result), + false => Err(ErrorKind::InvalidData.into()), + } + } + + /// Tries to parse a value as a vector of tokens. + fn tokenize_array(value: &str, param: &ParamType) -> Result, Error> { + if Some('[') != value.chars().next() || Some(']') != value.chars().last() { + return Err(ErrorKind::InvalidData.into()); + } + + if value.chars().count() == 2 { + return Ok(vec![]); + } + + let mut result = vec![]; + let mut nested = 0isize; + let mut ignore = false; + let mut last_item = 1; + for (i, ch) in value.chars().enumerate() { + match ch { + '[' if ignore == false => { + nested += 1; + }, + ']' if ignore == false => { + nested -= 1; + if nested < 0 { + return Err(ErrorKind::InvalidData.into()); + } else if nested == 0 { + let sub = &value[last_item..i]; + let token = try!(Self::tokenize(param, sub)); + result.push(token); + last_item = i + 1; + } + }, + '"' => { + ignore = !ignore; + }, + ',' if nested == 1 && ignore == false => { + let sub = &value[last_item..i]; + let token = try!(Self::tokenize(param, sub)); + result.push(token); + last_item = i + 1; + }, + _ => () + } + } + + Ok(result) + } + + /// Tries to parse a value as an address. + fn tokenize_address(value: &str) -> Result<[u8; 20], Error>; + + /// Tries to parse a value as a string. + fn tokenize_string(value: &str) -> Result; + + /// Tries to parse a value as a bool. + fn tokenize_bool(value: &str) -> Result; + + /// Tries to parse a value as bytes. + fn tokenize_bytes(value: &str) -> Result, Error>; + + /// Tries to parse a value as bytes. + fn tokenize_fixed_bytes(value: &str, len: usize) -> Result, Error>; + + /// Tries to parse a value as unsigned integer. + fn tokenize_uint(value: &str) -> Result<[u8; 32], Error>; + + /// Tries to parse a value as signed integer. + fn tokenize_int(value: &str) -> Result<[u8; 32], Error>; +} diff --git a/nix/overlay/ethabi/ethabi/src/token/strict.rs b/nix/overlay/ethabi/ethabi/src/token/strict.rs new file mode 100644 index 000000000..be48a33a0 --- /dev/null +++ b/nix/overlay/ethabi/ethabi/src/token/strict.rs @@ -0,0 +1,160 @@ +use hex::FromHex; +use token::Tokenizer; +use errors::{Error, ErrorKind}; + +/// Tries to parse string as a token. Require string to clearly represent the value. 
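+/// "Clearly represent" means exact, un-prefixed hex of the right width:
+/// `tokenize_address` only accepts 40 hex characters (20 bytes) and
+/// `tokenize_uint`/`tokenize_int` only accept 64 (32 bytes), as exercised by
+/// the tests at the bottom of this file.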
+pub struct StrictTokenizer; + +impl Tokenizer for StrictTokenizer { + fn tokenize_address(value: &str) -> Result<[u8; 20], Error> { + let hex = try!(value.from_hex()); + match hex.len() == 20 { + false => Err(ErrorKind::InvalidData.into()), + true => { + let mut address = [0u8; 20]; + address.copy_from_slice(&hex); + Ok(address) + } + } + } + + fn tokenize_string(value: &str) -> Result { + Ok(value.to_owned()) + } + + fn tokenize_bool(value: &str) -> Result { + match value { + "true" | "1" => Ok(true), + "false" | "0" => Ok(false), + _ => Err(ErrorKind::InvalidData.into()), + } + } + + fn tokenize_bytes(value: &str) -> Result, Error> { + let hex = try!(value.from_hex()); + Ok(hex) + } + + fn tokenize_fixed_bytes(value: &str, len: usize) -> Result, Error> { + let hex = try!(value.from_hex()); + match hex.len() == len { + true => Ok(hex), + false => Err(ErrorKind::InvalidData.into()), + } + } + + fn tokenize_uint(value: &str) -> Result<[u8; 32], Error> { + let hex = try!(value.from_hex()); + match hex.len() == 32 { + true => { + let mut uint = [0u8; 32]; + uint.copy_from_slice(&hex); + Ok(uint) + }, + false => Err(ErrorKind::InvalidData.into()) + } + } + + fn tokenize_int(value: &str) -> Result<[u8; 32], Error> { + let hex = try!(value.from_hex()); + match hex.len() == 32 { + true => { + let mut int = [0u8; 32]; + int.copy_from_slice(&hex); + Ok(int) + }, + false => Err(ErrorKind::InvalidData.into()) + } + } +} + +#[cfg(test)] +mod tests { + use ParamType; + use token::{Token, Tokenizer, StrictTokenizer}; + + #[test] + fn tokenize_address() { + assert_eq!(StrictTokenizer::tokenize(&ParamType::Address, "1111111111111111111111111111111111111111").unwrap(), Token::Address([0x11u8; 20])); + assert_eq!(StrictTokenizer::tokenize(&ParamType::Address, "2222222222222222222222222222222222222222").unwrap(), Token::Address([0x22u8; 20])); + } + + #[test] + fn tokenize_string() { + assert_eq!(StrictTokenizer::tokenize(&ParamType::String, "gavofyork").unwrap(), Token::String("gavofyork".to_owned())); + assert_eq!(StrictTokenizer::tokenize(&ParamType::String, "hello").unwrap(), Token::String("hello".to_owned())); + } + + #[test] + fn tokenize_bool() { + assert_eq!(StrictTokenizer::tokenize(&ParamType::Bool, "true").unwrap(), Token::Bool(true)); + assert_eq!(StrictTokenizer::tokenize(&ParamType::Bool, "1").unwrap(), Token::Bool(true)); + assert_eq!(StrictTokenizer::tokenize(&ParamType::Bool, "false").unwrap(), Token::Bool(false)); + assert_eq!(StrictTokenizer::tokenize(&ParamType::Bool, "0").unwrap(), Token::Bool(false)); + } + + #[test] + fn tokenize_bytes() { + assert_eq!(StrictTokenizer::tokenize(&ParamType::Bytes, "123456").unwrap(), Token::Bytes(vec![0x12, 0x34, 0x56])); + assert_eq!(StrictTokenizer::tokenize(&ParamType::Bytes, "0017").unwrap(), Token::Bytes(vec![0x00, 0x17])); + } + + #[test] + fn tokenize_fixed_bytes() { + assert_eq!(StrictTokenizer::tokenize(&ParamType::FixedBytes(3), "123456").unwrap(), Token::FixedBytes(vec![0x12, 0x34, 0x56])); + assert_eq!(StrictTokenizer::tokenize(&ParamType::FixedBytes(2), "0017").unwrap(), Token::FixedBytes(vec![0x00, 0x17])); + } + + #[test] + fn tokenize_uint() { + assert_eq!( + StrictTokenizer::tokenize(&ParamType::Uint(256), "1111111111111111111111111111111111111111111111111111111111111111").unwrap(), + Token::Uint([0x11u8; 32]) + ); + + assert_eq!( + StrictTokenizer::tokenize(&ParamType::Uint(256), "2222222222222222222222222222222222222222222222222222222222222222").unwrap(), + Token::Uint([0x22u8; 32]) + ); + } + + #[test] + fn tokenize_int() { + 
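+ // Same 32-byte hex convention as `tokenize_uint` above; only the token
+ // variant differs.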
assert_eq!( + StrictTokenizer::tokenize(&ParamType::Int(256), "1111111111111111111111111111111111111111111111111111111111111111").unwrap(), + Token::Int([0x11u8; 32]) + ); + + assert_eq!( + StrictTokenizer::tokenize(&ParamType::Int(256), "2222222222222222222222222222222222222222222222222222222222222222").unwrap(), + Token::Int([0x22u8; 32]) + ); + } + + #[test] + fn tokenize_empty_array() { + assert_eq!( + StrictTokenizer::tokenize(&ParamType::Array(Box::new(ParamType::Bool)), "[]").unwrap(), + Token::Array(vec![]) + ); + } + + #[test] + fn tokenize_bool_array() { + assert_eq!( + StrictTokenizer::tokenize(&ParamType::Array(Box::new(ParamType::Bool)), "[true,1,0,false]").unwrap(), + Token::Array(vec![Token::Bool(true), Token::Bool(true), Token::Bool(false), Token::Bool(false)]) + ); + } + + #[test] + fn tokenize_bool_array_of_arrays() { + assert_eq!( + StrictTokenizer::tokenize(&ParamType::Array(Box::new(ParamType::Array(Box::new(ParamType::Bool)))), "[[true,1,0],[false]]").unwrap(), + Token::Array(vec![ + Token::Array(vec![Token::Bool(true), Token::Bool(true), Token::Bool(false)]), + Token::Array(vec![Token::Bool(false)]) + ]) + ); + } +} diff --git a/nix/overlay/ethabi/ethabi/src/token/token.rs b/nix/overlay/ethabi/ethabi/src/token/token.rs new file mode 100644 index 000000000..6ea7d743d --- /dev/null +++ b/nix/overlay/ethabi/ethabi/src/token/token.rs @@ -0,0 +1,236 @@ +//! Ethereum ABI params. +use std::fmt; +use hex::ToHex; +use {ParamType, Hash, Address, FixedBytes, Bytes}; + +/// Ethereum ABI params. +#[derive(Debug, PartialEq, Clone)] +pub enum Token { + /// Address. + /// + /// solidity name: address + /// Encoded to left padded [0u8; 32]. + Address(Address), + /// Vector of bytes with known size. + /// + /// solidity name eg.: bytes8, bytes32, bytes64, bytes1024 + /// Encoded to right padded [0u8; ((N + 31) / 32) * 32]. + FixedBytes(FixedBytes), + /// Vector of bytes of unknown size. + /// + /// solidity name: bytes + /// Encoded in two parts. + /// Init part: offset of 'closing part`. + /// Closing part: encoded length followed by encoded right padded bytes. + Bytes(Bytes), + /// Signed integer. + /// + /// solidity name: int + Int(Hash), + /// Unisnged integer. + /// + /// solidity name: uint + Uint(Hash), + /// Boolean value. + /// + /// solidity name: bool + /// Encoded as left padded [0u8; 32], where last bit represents boolean value. + Bool(bool), + /// String. + /// + /// solidity name: string + /// Encoded in the same way as bytes. Must be utf8 compliant. + String(String), + /// Array with known size. + /// + /// solidity name eg.: int[3], bool[3], address[][8] + /// Encoding of array is equal to encoding of consecutive elements of array. + FixedArray(Vec), + /// Array of params with unknown size. + /// + /// solidity name eg. int[], bool[], address[5][] + Array(Vec), +} + +impl fmt::Display for Token { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + Token::Bool(b) => write!(f, "{}", b), + Token::String(ref s) => write!(f, "{}", s), + Token::Address(ref a) => write!(f, "{}", a.to_hex()), + Token::Bytes(ref bytes) | Token::FixedBytes(ref bytes) => write!(f, "{}", bytes.to_hex()), + Token::Uint(ref i) | Token::Int(ref i) => write!(f, "{}", i.to_hex()), + Token::Array(ref arr) | Token::FixedArray(ref arr) => { + let s = arr.iter() + .map(|ref t| format!("{}", t)) + .collect::>() + .join(","); + + write!(f, "[{}]", s) + } + } + } +} + +impl Token { + /// Check whether the type of the token matches the given parameter type. 
+ /// + /// Numeric types (`Int` and `Uint`) type check if the size of the token + /// type is of greater or equal size than the provided parameter type. + pub fn type_check(&self, param_type: &ParamType) -> bool { + match *self { + Token::Address(_) => *param_type == ParamType::Address, + Token::Bytes(_) => *param_type == ParamType::Bytes, + Token::Int(_) => + if let ParamType::Int(_) = *param_type { + true + } else { + false + }, + Token::Uint(_) => + if let ParamType::Uint(_) = *param_type { + true + } else { + false + }, + Token::Bool(_) => *param_type == ParamType::Bool, + Token::String(_) => *param_type == ParamType::String, + Token::FixedBytes(ref bytes) => + if let ParamType::FixedBytes(size) = *param_type { + size >= bytes.len() + } else { + false + }, + Token::Array(ref tokens) => + if let ParamType::Array(ref param_type) = *param_type { + tokens.iter().all(|t| t.type_check(param_type)) + } else { + false + }, + Token::FixedArray(ref tokens) => + if let ParamType::FixedArray(ref param_type, size) = *param_type { + size == tokens.len() && tokens.iter().all(|t| t.type_check(param_type)) + } else { + false + }, + } + } + + /// Converts token to... + pub fn to_address(self) -> Option<[u8; 20]> { + match self { + Token::Address(address) => Some(address), + _ => None, + } + } + + /// Converts token to... + pub fn to_fixed_bytes(self) -> Option> { + match self { + Token::FixedBytes(bytes) => Some(bytes), + _ => None, + } + } + + /// Converts token to... + pub fn to_bytes(self) -> Option> { + match self { + Token::Bytes(bytes) => Some(bytes), + _ => None, + } + } + + /// Converts token to... + pub fn to_int(self) -> Option<[u8; 32]> { + match self { + Token::Int(int) => Some(int), + _ => None, + } + } + + /// Converts token to... + pub fn to_uint(self) -> Option<[u8; 32]> { + match self { + Token::Uint(uint) => Some(uint), + _ => None, + } + } + + /// Converts token to... + pub fn to_bool(self) -> Option { + match self { + Token::Bool(b) => Some(b), + _ => None, + } + } + + /// Converts token to... + pub fn to_string(self) -> Option { + match self { + Token::String(s) => Some(s), + _ => None, + } + } + + /// Converts token to... + pub fn to_fixed_array(self) -> Option> { + match self { + Token::FixedArray(arr) => Some(arr), + _ => None, + } + } + + /// Converts token to... + pub fn to_array(self) -> Option> { + match self { + Token::Array(arr) => Some(arr), + _ => None, + } + } + + /// Check if all the types of the tokens match the given parameter types. 
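+ ///
+ /// For example (see the tests below), `[Uint, Bool]` tokens check against
+ /// `[ParamType::Uint(32), ParamType::Bool]`, while a length mismatch or a
+ /// swapped order does not.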
+ pub fn types_check(tokens: &[Token], param_types: &[ParamType]) -> bool { + param_types.len() == tokens.len() && { + param_types.iter().zip(tokens).all(|(param_type, token)| { + token.type_check(param_type) + }) + } + } +} + + +#[cfg(test)] +mod tests { + use {Token, ParamType}; + + #[test] + fn test_type_check() { + fn assert_type_check(tokens: Vec, param_types: Vec) { + assert!(Token::types_check(&tokens, ¶m_types)) + } + + fn assert_not_type_check(tokens: Vec, param_types: Vec) { + assert!(!Token::types_check(&tokens, ¶m_types)) + } + + assert_type_check(vec![Token::Uint([0u8; 32]), Token::Bool(false)], vec![ParamType::Uint(256), ParamType::Bool]); + assert_type_check(vec![Token::Uint([0u8; 32]), Token::Bool(false)], vec![ParamType::Uint(32), ParamType::Bool]); + + assert_not_type_check(vec![Token::Uint([0u8; 32])], vec![ParamType::Uint(32), ParamType::Bool]); + assert_not_type_check(vec![Token::Uint([0u8; 32]), Token::Bool(false)], vec![ParamType::Uint(32)]); + assert_not_type_check(vec![Token::Bool(false), Token::Uint([0u8; 32])], vec![ParamType::Uint(32), ParamType::Bool]); + + assert_type_check(vec![Token::FixedBytes(vec![0, 0, 0, 0])], vec![ParamType::FixedBytes(4)]); + assert_type_check(vec![Token::FixedBytes(vec![0, 0, 0])], vec![ParamType::FixedBytes(4)]); + assert_not_type_check(vec![Token::FixedBytes(vec![0, 0, 0, 0])], vec![ParamType::FixedBytes(3)]); + + assert_type_check(vec![Token::Array(vec![Token::Bool(false), Token::Bool(true)])], vec![ParamType::Array(Box::new(ParamType::Bool))]); + assert_not_type_check(vec![Token::Array(vec![Token::Bool(false), Token::Uint([0u8; 32])])], vec![ParamType::Array(Box::new(ParamType::Bool))]); + assert_not_type_check(vec![Token::Array(vec![Token::Bool(false), Token::Bool(true)])], vec![ParamType::Array(Box::new(ParamType::Address))]); + + assert_type_check(vec![Token::FixedArray(vec![Token::Bool(false), Token::Bool(true)])], vec![ParamType::FixedArray(Box::new(ParamType::Bool), 2)]); + assert_not_type_check(vec![Token::FixedArray(vec![Token::Bool(false), Token::Bool(true)])], vec![ParamType::FixedArray(Box::new(ParamType::Bool), 3)]); + assert_not_type_check(vec![Token::FixedArray(vec![Token::Bool(false), Token::Uint([0u8; 32])])], vec![ParamType::FixedArray(Box::new(ParamType::Bool), 2)]); + assert_not_type_check(vec![Token::FixedArray(vec![Token::Bool(false), Token::Bool(true)])], vec![ParamType::FixedArray(Box::new(ParamType::Address), 2)]); + } +} diff --git a/nix/overlay/ethabi/ethabi/src/util.rs b/nix/overlay/ethabi/ethabi/src/util.rs new file mode 100644 index 000000000..c231f0dab --- /dev/null +++ b/nix/overlay/ethabi/ethabi/src/util.rs @@ -0,0 +1,58 @@ +//! Utils used by different modules. + +use {Error, ErrorKind, Hash}; + +/// Convers vector of bytes with len equal n * 32, to a vector of slices. +pub fn slice_data(data: &[u8]) -> Result, Error> { + if data.len() % 32 != 0 { + return Err(ErrorKind::InvalidData.into()); + } + + let times = data.len() / 32; + let mut result = vec![]; + for i in 0..times { + let mut slice = [0u8; 32]; + let offset = 32 * i; + slice.copy_from_slice(&data[offset..offset + 32]); + result.push(slice); + } + Ok(result) +} + +/// Converts u32 to right aligned array of 32 bytes. +pub fn pad_u32(value: u32) -> Hash { + let mut padded = [0u8; 32]; + padded[28] = (value >> 24) as u8; + padded[29] = (value >> 16) as u8; + padded[30] = (value >> 8) as u8; + padded[31] = value as u8; + padded +} + +/// Converts i32 to right aligned array of 32 bytes. 
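+/// Negative inputs are sign-extended (two's complement), so per the tests
+/// below `pad_i32(-1)` is 32 bytes of 0xff and `pad_i32(-256)` is all 0xff
+/// except a trailing 0x00 byte.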
+pub fn pad_i32(value: i32) -> Hash { + if value >= 0 { + return pad_u32(value as u32); + } + + let mut padded = [0xffu8; 32]; + padded[28] = (value >> 24) as u8; + padded[29] = (value >> 16) as u8; + padded[30] = (value >> 8) as u8; + padded[31] = value as u8; + padded +} + +#[cfg(test)] +mod tests { + use hex::FromHex; + use super::pad_i32; + + #[test] + fn test_i32() { + assert_eq!("0000000000000000000000000000000000000000000000000000000000000000".from_hex().unwrap(), pad_i32(0).to_vec()); + assert_eq!("ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff".from_hex().unwrap(), pad_i32(-1).to_vec()); + assert_eq!("fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe".from_hex().unwrap(), pad_i32(-2).to_vec()); + assert_eq!("ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff00".from_hex().unwrap(), pad_i32(-256).to_vec()); + } +} diff --git a/nix/overlay/ethabi/res/Validators.abi b/nix/overlay/ethabi/res/Validators.abi new file mode 100644 index 000000000..3e3a85e3c --- /dev/null +++ b/nix/overlay/ethabi/res/Validators.abi @@ -0,0 +1,72 @@ +[ + { + "constant": true, + "inputs": [ + { + "name": "", + "type": "uint256" + } + ], + "name": "validators", + "outputs": [ + { + "name": "", + "type": "address" + } + ], + "payable": false, + "stateMutability": "view", + "type": "function" + }, + { + "constant": false, + "inputs": [ + { + "name": "newValidators", + "type": "address[2]" + } + ], + "name": "addTwoValidators", + "outputs": [], + "payable": false, + "stateMutability": "nonpayable", + "type": "function" + }, + { + "constant": false, + "inputs": [ + { + "name": "newValidators", + "type": "address[]" + } + ], + "name": "setValidators", + "outputs": [], + "payable": false, + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "name": "newValidators", + "type": "address[]" + } + ], + "payable": false, + "stateMutability": "nonpayable", + "type": "constructor" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "name": "validators", + "type": "address[]" + } + ], + "name": "Changed", + "type": "event" + } +] diff --git a/nix/overlay/ethabi/res/con.abi b/nix/overlay/ethabi/res/con.abi new file mode 100644 index 000000000..ee41141e3 --- /dev/null +++ b/nix/overlay/ethabi/res/con.abi @@ -0,0 +1,11 @@ +[ + { + "inputs": [ + { + "name": "a", + "type": "address" + } + ], + "type": "constructor" + } +] diff --git a/nix/overlay/ethabi/res/eip20.abi b/nix/overlay/ethabi/res/eip20.abi new file mode 100644 index 000000000..57d762393 --- /dev/null +++ b/nix/overlay/ethabi/res/eip20.abi @@ -0,0 +1,163 @@ +[ + { + "constant": false, + "inputs": [ + { + "name": "_spender", + "type": "address" + }, + { + "name": "_value", + "type": "uint256" + } + ], + "name": "approve", + "outputs": [ + { + "name": "success", + "type": "bool" + } + ], + "type": "function" + }, + { + "constant": true, + "inputs": [], + "name": "totalSupply", + "outputs": [ + { + "name": "total", + "type": "uint256" + } + ], + "type": "function" + }, + { + "constant": false, + "inputs": [ + { + "name": "_from", + "type": "address" + }, + { + "name": "_to", + "type": "address" + }, + { + "name": "_value", + "type": "uint256" + } + ], + "name": "transferFrom", + "outputs": [ + { + "name": "success", + "type": "bool" + } + ], + "type": "function" + }, + { + "constant": true, + "inputs": [ + { + "name": "_owner", + "type": "address" + } + ], + "name": "balanceOf", + "outputs": [ + { + "name": "balance", + "type": "uint256" + } + ], + "type": "function" + }, + { + 
"constant": false, + "inputs": [ + { + "name": "_to", + "type": "address" + }, + { + "name": "_value", + "type": "uint256" + } + ], + "name": "transfer", + "outputs": [ + { + "name": "success", + "type": "bool" + } + ], + "type": "function" + }, + { + "constant": true, + "inputs": [ + { + "name": "_owner", + "type": "address" + }, + { + "name": "_spender", + "type": "address" + } + ], + "name": "allowance", + "outputs": [ + { + "name": "remaining", + "type": "uint256" + } + ], + "type": "function" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "name": "from", + "type": "address" + }, + { + "indexed": true, + "name": "to", + "type": "address" + }, + { + "indexed": false, + "name": "value", + "type": "uint256" + } + ], + "name": "Transfer", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "name": "owner", + "type": "address" + }, + { + "indexed": true, + "name": "spender", + "type": "address" + }, + { + "indexed": false, + "name": "value", + "type": "uint256" + } + ], + "name": "Approval", + "type": "event" + } +] diff --git a/nix/overlay/ethabi/res/event.abi b/nix/overlay/ethabi/res/event.abi new file mode 100644 index 000000000..a321a399c --- /dev/null +++ b/nix/overlay/ethabi/res/event.abi @@ -0,0 +1,19 @@ +[ + { + "anonymous": true, + "inputs": [ + { + "indexed": true, + "name": "a", + "type": "bool" + }, + { + "indexed": false, + "name": "b", + "type": "address" + } + ], + "name": "Event", + "type": "event" + } +] diff --git a/nix/overlay/ethabi/res/foo.abi b/nix/overlay/ethabi/res/foo.abi new file mode 100644 index 000000000..e71140dca --- /dev/null +++ b/nix/overlay/ethabi/res/foo.abi @@ -0,0 +1,19 @@ +[ + { + "constant": false, + "inputs": [ + { + "name": "hello", + "type": "address" + } + ], + "name": "bar", + "outputs": [ + { + "name": "", + "type": "bool" + } + ], + "type": "function" + } +] diff --git a/nix/overlay/ethabi/res/test.abi b/nix/overlay/ethabi/res/test.abi new file mode 100644 index 000000000..0b3e0cb83 --- /dev/null +++ b/nix/overlay/ethabi/res/test.abi @@ -0,0 +1,13 @@ +[ + { + "inputs": [ + { + "name": "a", + "type": "bool" + } + ], + "name": "foo", + "outputs": [], + "type": "function" + } +] diff --git a/nix/overlay/ethabi/res/validators.sol b/nix/overlay/ethabi/res/validators.sol new file mode 100644 index 000000000..86cb071e4 --- /dev/null +++ b/nix/overlay/ethabi/res/validators.sol @@ -0,0 +1,20 @@ +pragma solidity ^0.4.17; + +contract Validators { + event Changed (address[] indexed validators); + + address[] public validators; + + function Validators (address[] newValidators) public { + validators = newValidators; + } + + function setValidators (address[] newValidators) public { + validators = newValidators; + } + + function addTwoValidators (address[2] newValidators) public { + validators.push(newValidators[0]); + validators.push(newValidators[1]); + } +} diff --git a/nix/overlay/ethabi/snap/snapcraft.yaml b/nix/overlay/ethabi/snap/snapcraft.yaml new file mode 100644 index 000000000..1c578d146 --- /dev/null +++ b/nix/overlay/ethabi/snap/snapcraft.yaml @@ -0,0 +1,23 @@ +name: ethabi +version: master +summary: encodes smart contracts function calls and decodes their output +description: | + An Ethereum smart contract is bytecode, EVM, on the Ethereum blockchain. + Among the EVM, there could be several functions in a contract. 
An ABI is + necessary so that you can specify which function in the contract to invoke, + as well as get a guarantee that the function will return data in the format + you are expecting. + +grade: devel # must be 'stable' to release into candidate/stable channels +confinement: strict + +apps: + ethabi: + command: ethabi + plugs: [home] + +parts: + ethabi: + source: . + source-subdir: cli + plugin: rust diff --git a/nix/overlay/ethabi/tests/Cargo.toml b/nix/overlay/ethabi/tests/Cargo.toml new file mode 100644 index 000000000..c5b0ffa33 --- /dev/null +++ b/nix/overlay/ethabi/tests/Cargo.toml @@ -0,0 +1,10 @@ +[package] +name = "ethabi-tests" +version = "0.1.0" +authors = ["debris "] + +[dependencies] +ethabi = { path = "../ethabi" } +ethabi-derive = { path = "../derive" } +ethabi-contract = { path = "../contract" } +rustc-hex = "1.0" diff --git a/nix/overlay/ethabi/tests/src/lib.rs b/nix/overlay/ethabi/tests/src/lib.rs new file mode 100644 index 000000000..78da4bf54 --- /dev/null +++ b/nix/overlay/ethabi/tests/src/lib.rs @@ -0,0 +1,131 @@ +#![deny(warnings)] + +extern crate rustc_hex; +extern crate ethabi; +#[macro_use] +extern crate ethabi_derive; +#[macro_use] +extern crate ethabi_contract; + +use_contract!(eip20, "Eip20", "../res/eip20.abi"); +use_contract!(constructor, "Constructor", "../res/con.abi"); +use_contract!(validators, "Validators", "../res/Validators.abi"); + +#[cfg(test)] +mod tests { + use rustc_hex::{ToHex, FromHex}; + + struct Wrapper([u8; 20]); + + impl Into<[u8; 20]> for Wrapper { + fn into(self) -> [u8; 20] { + self.0 + } + } + + #[test] + fn test_encoding_function_input_as_array() { + use validators::Validators; + + let contract = Validators::default(); + let first = [0x11u8; 20]; + let second = [0x22u8; 20]; + + let functions = contract.functions(); + let set_validators = functions.set_validators(); + + let encoded_from_vec = set_validators.input(vec![first.clone(), second.clone()]); + let encoded_from_vec_iter = set_validators.input(vec![first.clone(), second.clone()].into_iter()); + let encoded_from_vec_wrapped = set_validators.input(vec![Wrapper(first), Wrapper(second)]); + + let expected = "9300c9260000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000011111111111111111111111111111111111111110000000000000000000000002222222222222222222222222222222222222222".to_owned(); + assert_eq!(expected, encoded_from_vec.to_hex()); + assert_eq!(expected, encoded_from_vec_iter.to_hex()); + assert_eq!(expected, encoded_from_vec_wrapped.to_hex()); + } + + #[test] + fn test_decoding_function_output() { + // Make sure that the output param type of the derived contract is correct + + use eip20::Eip20; + + let contract = Eip20::default(); + let output = "000000000000000000000000000000000000000000000000000000000036455B".from_hex().unwrap(); + let decoded_output = contract.functions().total_supply().output(&output).unwrap(); + let expected_output = output.clone(); + assert_eq!(expected_output, decoded_output); + } + + #[test] + fn test_encoding_constructor_as_array() { + use validators::Validators; + + let contract = Validators::default(); + let code = Vec::new(); + let first = [0x11u8; 20]; + let second = [0x22u8; 20]; + + let encoded_from_vec = contract.constructor(code.clone(), vec![first.clone(), second.clone()]); + let encoded_from_vec_iter = contract.constructor(code.clone(), vec![first.clone(), second.clone()].into_iter()); + let encoded_from_vec_wrapped = 
contract.constructor(code.clone(), vec![Wrapper(first), Wrapper(second)]); + + let expected = "0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000011111111111111111111111111111111111111110000000000000000000000002222222222222222222222222222222222222222".to_owned(); + assert_eq!(expected, encoded_from_vec.to_hex()); + assert_eq!(expected, encoded_from_vec_iter.to_hex()); + assert_eq!(expected, encoded_from_vec_wrapped.to_hex()); + } + + #[test] + fn test_encoding_function_input_as_fixed_array() { + use validators::Validators; + + let contract = Validators::default(); + let first = [0x11u8; 20]; + let second = [0x22u8; 20]; + + let functions = contract.functions(); + let add_validators = functions.add_two_validators(); + + let encoded_from_array = add_validators.input([first.clone(), second.clone()]); + let encoded_from_array_wrapped = add_validators.input([Wrapper(first), Wrapper(second)]); + + let expected = "7de33d2000000000000000000000000011111111111111111111111111111111111111110000000000000000000000002222222222222222222222222222222222222222".to_owned(); + assert_eq!(expected, encoded_from_array.to_hex()); + assert_eq!(expected, encoded_from_array_wrapped.to_hex()); + } + + #[test] + fn encoding_input_works() { + use eip20::Eip20; + + let expected = "dd62ed3e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000101010101010101010101010101010101010101".to_owned(); + let contract = Eip20::default(); + let owner = [0u8; 20]; + let spender = [1u8; 20]; + let encoded = contract.functions().allowance().input(owner, spender); + // 4 bytes signature + 2 * 32 bytes for params + assert_eq!(encoded.to_hex(), expected); + + let from = [2u8; 20]; + let to = [3u8; 20]; + let to2 = [4u8; 20]; + let _filter = contract.events().transfer().create_filter(from, vec![to, to2]); + let _filter = contract.events().transfer().create_filter(None, None); + } + + #[test] + fn test_calling_function() { + use eip20::Eip20; + + let contract = Eip20::default(); + let address_param = [0u8; 20]; + let result = contract.functions().balance_of().call(address_param, &|data| { + assert_eq!(data, "70a082310000000000000000000000000000000000000000000000000000000000000000".from_hex().unwrap()); + Ok("000000000000000000000000000000000000000000000000000000000036455b".from_hex().unwrap()) + }); + assert_eq!(result.unwrap().to_hex(), "000000000000000000000000000000000000000000000000000000000036455b"); + } + +} + diff --git a/nix/overlay/ethabi/tools/solc_compile.sh b/nix/overlay/ethabi/tools/solc_compile.sh new file mode 100755 index 000000000..fa434fb02 --- /dev/null +++ b/nix/overlay/ethabi/tools/solc_compile.sh @@ -0,0 +1,14 @@ +#!/bin/bash + +cd res + +for sol in *.sol; do + solc --abi -o . 
--overwrite "$sol" +done + +for abi in *.abi; do + python -m json.tool "$abi" > tmp + cat tmp > "$abi" +done + +rm tmp diff --git a/nix/overlay/evmdis.nix b/nix/overlay/evmdis.nix new file mode 100644 index 000000000..cd1c5d7a3 --- /dev/null +++ b/nix/overlay/evmdis.nix @@ -0,0 +1,21 @@ +{ stdenv, lib, buildGoPackage, fetchFromGitHub }: + +buildGoPackage rec { + name = "evmdis-${version}"; + version = "0.20170616"; + goPackagePath = "github.com/Arachnid/evmdis"; + + src = fetchFromGitHub { + owner = "Arachnid"; + repo = "evmdis"; + rev = "1abeda0a402b0aa4f755d571565d7044b3e56c77"; + sha256 = "0mjh9rcpdwd96m68pps1hhrsn9004g052hq8y2gc579pq50izl0a"; + }; + + meta = with stdenv.lib; { + homepage = https://github.com/Arachnid/evmdis; + description = "Ethereum EVM disassembler"; + license = [ licenses.asl20 ]; + maintainers = [ maintainers.dbrock ]; + }; +} diff --git a/nix/overlay/keeper.nix b/nix/overlay/keeper.nix new file mode 100644 index 000000000..33e00fcde --- /dev/null +++ b/nix/overlay/keeper.nix @@ -0,0 +1,43 @@ +{ stdenv, fetchFromGitHub, python3Packages, openssl }: + +with python3Packages; + +buildPythonApplication rec { + version = "unstable-2017-09-19"; + name = "keeper"; + + src = fetchFromGitHub { + rev = "a316db1dc13288e417a6582eeb10b714863af8d5"; + owner = "makerdao"; + repo = "keeper"; + sha256 = "069q85a3icsnmzhy9ifh9f0x996sagbgwzgam4wnh7avh6z10qmj"; + }; + + propagatedBuildInputs = [ + pytest web3 eth-testrpc sortedcontainers networkx tinydb + ]; + + doCheck = false; + + installPhase = '' + mkdir -p $out/bin + mkdir -p $out/${python.sitePackages} + cp -r keeper $out/${python.sitePackages} + cp bin/* $out/bin + ''; + + fixupPhase = '' + for x in $out/bin/*; do wrapProgram "$x" \ + --set PYTHONPATH "$PYTHONPATH:$out/${python.sitePackages}" \ + --set PATH ${python}/bin:$PATH + done + ''; + + meta = with stdenv.lib; { + description = "Maker Keeper framework"; + homepage = https://github.com/makerdao/keeper; + license = licenses.agpl3; + platforms = platforms.unix; + maintainers = with maintainers; [ dbrock ]; + }; +} diff --git a/nix/overlay/mkbip39/mkbip39 b/nix/overlay/mkbip39/mkbip39 new file mode 100755 index 000000000..a1e0475ee --- /dev/null +++ b/nix/overlay/mkbip39/mkbip39 @@ -0,0 +1,5 @@ +#!/usr/bin/env python +import secrets +import mnemonic +m = mnemonic.Mnemonic("english") +print(m.to_mnemonic(secrets.token_bytes(32))) diff --git a/nix/overlay/mkbip39/setup.py b/nix/overlay/mkbip39/setup.py new file mode 100755 index 000000000..0ca0ab734 --- /dev/null +++ b/nix/overlay/mkbip39/setup.py @@ -0,0 +1,13 @@ +#!/usr/bin/env python + +from setuptools import setup + +setup( + name="mkbip39", + version="0.5", + description="Make a seed phrase", + author="mbrock", + url="https://github.com/dapphub/mkbip39", + install_requires=["mnemonic"], + scripts=["mkbip39"] +) diff --git a/nix/overlay/overlay.nix b/nix/overlay/overlay.nix new file mode 100644 index 000000000..4bfd7de4e --- /dev/null +++ b/nix/overlay/overlay.nix @@ -0,0 +1,621 @@ +{ flavor ? 
"stable", self, super }: + +let + + versions = super.lib.importJSON ./versions.json; + versioned = pkg: caller: (caller (import (./upstream + "/${flavor}/${pkg}.nix"))).overrideAttrs (x: { + src = self.pkgs.fetchFromGitHub versions.${pkg}.${flavor}; + } // (if flavor == "stable" then rec { + name = "${pkg}-${version}"; + version = versions.${pkg}.version; + } else { + name = "${pkg}-${flavor}"; + version = flavor; + })); + + local = src: pkg: pkg.overrideAttrs (_: { + inherit src; + }); + + # This is a specific revision of Nixpkgs that we use to avoid + # rebuilding all the versions of solc when we bump our submodule, or + # to allow a package to succeed when something breaks in nixpkgs. + past = import (super.fetchFromGitHub { + owner = "NixOS"; + repo = "nixpkgs"; + rev = "0bb2d3112b259940df18ec6c0203bb01234f4e48"; + sha256 = "110jcn1k0kc9jmcbg97av10m36i4mqyxa057hwl6lpzjhrq40f3k"; + }) { config = {}; }; + + callPackage = self.pkgs.callPackage; + pastPackage = past.pkgs.callPackage; + + lib = self.pkgs.lib; + stdenv = self.pkgs.stdenv; + + makeIso = { module, config }: + self.pkgs.lib.hydraJob ( + (import ../nixpkgs/nixos/lib/eval-config.nix { + inherit (self) pkgs; + system = "x86_64-linux"; + modules = [module config]; + }).config.system.build.isoImage + ); + + ourPerlPackages = import ./perl.nix { + inherit (self) buildPerlPackage perlPackages; + }; + +in { + haskellPackages = super.haskellPackages.extend ( + self-hs: super-hs: + let + dontCheck = x: + self.haskell.lib.dontCheck + (self-hs.callPackage x {}); + in { + restless-git = versioned "restless-git" dontCheck; + symbex = versioned "symbex" dontCheck; + ethjet = versioned "libethjet-haskell" + (x: self-hs.callPackage x { + # Haskell libs with the same names as C libs... + # Depend on the C libs, not the Haskell libs. + # These are system deps, not Cabal deps. + inherit (self.pkgs) secp256k1 ethjet; + }); + + # We don't want Megaparsec 5! + megaparsec = self-hs.megaparsec_6_2_0; + + hevm = ( + versioned "hevm" + (x: self-hs.callPackage x {}) + ).overrideAttrs (attrs: { + postInstall = '' + wrapProgram $out/bin/hevm \ + --suffix PATH : "${lib.makeBinPath (with self.pkgs; [bash coreutils git])}" + ''; + + enableSeparateDataOutput = true; + buildInputs = attrs.buildInputs ++ [self.pkgs.solc]; + nativeBuildInputs = attrs.nativeBuildInputs ++ [self.pkgs.makeWrapper]; + }); + } + ); + + profilingHaskellPackages = self.haskellPackages.extend ( + self: super-hs: { + mkDerivation = args: super-hs.mkDerivation + (args // { enableLibraryProfiling = true; }); + } + ); + + callSolidityPackage = self.lib.callPackageWith { + inherit (self) solidityPackage dappsys; + }; + + dappsys = self.callPackage ( + self.pkgs.fetchFromGitHub { + owner = "dapphub"; + repo = "dappsys"; + rev = "73dea5a7d1e265dd2921ba420efbfcca3e8cdcc8"; + sha256 = "16hnlim0da8sh7l3rhd6lxdxhhaskbkabr8zf9mx6s5vahyc39gl"; + fetchSubmodules = true; + } + ) {}; + + solidityPackage = import ./solidity-package.nix { + inherit (self) pkgs; + }; + + dapps = { + maker-otc = import (self.pkgs.fetchFromGitHub { + owner = "makerdao"; + repo = "maker-otc"; + rev = "513f102ad20129ea76e5c9b79afaa18693f63b88"; + sha256 = "0jpdanhihv94yw3ay8dfcbv7l1dg30rfbdxq9lshm0hg94mblb6l"; + }) self.pkgs; + }; + + known-contracts = import ../known-contracts.nix; + dapp-which = callPackage ./dapp-which.nix {}; + + bashScript = { name, version ? "0", deps ? [], text, check ? 
true } : + self.pkgs.writeTextFile { + name = "${name}-${version}"; + executable = true; + destination = "/bin/${name}"; + text = '' + #!${self.pkgs.bash}/bin/bash + set -euo pipefail + shopt -s lastpipe + export PATH="${lib.makeBinPath deps}:/run/wrappers/bin" + ${text} + ''; + checkPhase = '' + ${self.pkgs.bash}/bin/bash -n $out/bin/${name} + '' + (if check then '' + ${self.pkgs.shellcheck}/bin/shellcheck -s bash $out/bin/${name} + '' else ""); + }; + + dapp2 = { + test-hevm = import ./dapp/dapp-test-hevm.nix { pkgs = self.pkgs; }; + }; + + solc = callPackage ((import ./solc-versions.nix).solc_0_4_23) {}; + solc-versions = + super.lib.mapAttrs + (_: value: pastPackage value {}) + (import ./solc-versions.nix); + + python3 = self.python36; + python36 = super.python36.override { + packageOverrides = (import ./python.nix { pkgs = super.pkgs; }); + }; + + symbex-mueval = let + env = self.haskellPackages.ghcWithPackages (x: with x; [ + symbex QuickCheck show simple-reflect + ]); + in (self.haskellPackages.mueval.override { + hint = self.haskellPackages.hint.override { + ghc = env; + }; + }).overrideAttrs (attrs: { + preConfigure = '' + substituteInPlace Mueval/ArgsParse.hs \ + --replace 'Just defaultModules' 'Just []' + ''; + postInstall = '' + wrapProgram $out/bin/mueval \ + --set NIX_GHC ${env}/bin/ghc \ + --set NIX_GHCPKG ${env}/bin/ghc-pkg \ + --set NIX_GHC_LIBDIR $(${env}/bin/ghc --print-libdir) + ''; + nativeBuildInputs = attrs.nativeBuildInputs ++ [self.pkgs.makeWrapper]; + }); + + hevmas = self.pkgs.bashScript { + name = "hevmas"; + version = "0"; + deps = with self.pkgs; [symbex-mueval gnused]; + text = '' + mueval -XRecursiveDo -m EVM.Assembly \ + -e "$(echo "bytecode $ mdo"; sed 's/^/ /')" \ + | sed -e 's/^"//' -e 's/"$//' + ''; + }; + + hevml = self.pkgs.bashScript { + name = "hevml"; + version = "0"; + deps = with self.pkgs; [ + coreutils + (haskellPackages.ghcWithPackages (x: [x.symbex])) + ]; + text = '' + { echo "import qualified Prelude" + cat + echo + echo "main :: Prelude.IO ()" + echo "main = Prelude.putStrLn (bytecode contract)" + } | runghc --ghc-arg=-XNoImplicitPrelude --ghc-arg=-XRecursiveDo + ''; + }; + + hevmls = self.pkgs.bashScript { + name = "hevmls"; + version = "0"; + deps = with self.pkgs; [ + coreutils + (haskellPackages.ghcWithPackages (x: [x.symbex])) + ]; + text = '' + { echo "import qualified Prelude" + echo "import qualified EVM.Symbex.Main as Symbex" + cat + echo + echo "main :: Prelude.IO ()" + echo "main = Symbex.showPaths (Symbex.run (assemble contract))" + } | runghc --ghc-arg=-XNoImplicitPrelude --ghc-arg=-XRecursiveDo + ''; + }; + + hevmlsj = self.pkgs.bashScript { + name = "hevmlsj"; + version = "0"; + deps = with self.pkgs; [ + coreutils + (haskellPackages.ghcWithPackages (x: [x.symbex])) + ]; + text = '' + { echo "import qualified Prelude" + echo "import qualified EVM.Symbex.Main as Symbex" + echo "import qualified EVM.Symbex as Symbex" + echo "import qualified Data.ByteString.Lazy.Char8 as B8" + echo "import qualified Data.Aeson as Aeson" + echo "import Prelude ((.), ($))" + cat + echo + echo "main :: Prelude.IO ()" + echo "main = B8.putStrLn . 
Aeson.encode $" + echo " Symbex.step' (assemble contract) Symbex.emptyState" + } | runghc --ghc-arg=-XNoImplicitPrelude --ghc-arg=-XRecursiveDo + ''; + }; + + symbex = + self.pkgs.haskell.lib.justStaticExecutables + (versioned "symbex" (x: self.haskellPackages.callPackage x {})); + + hevm = self.pkgs.haskell.lib.justStaticExecutables self.haskellPackages.hevm; + + jays = ( + versioned "jays" (x: + self.pkgs.haskell.lib.justStaticExecutables + (self.haskellPackages.callPackage x {}) + ) + ).overrideAttrs (_: { postInstall = "cp $out/bin/{jays,jshon}"; }); + + # Override buggy jshon program with Haskell-based replacement. + jshon = self.jays; + + seth = versioned "seth" (x: callPackage x {}); + dapp = versioned "dapp" (x: callPackage x {}); + + ethsign = versioned "ethsign" (x: (callPackage x {}).bin); + + setzer = versioned "setzer" (x: callPackage x {}); + + keeper = callPackage ./keeper.nix {}; + evmdis = callPackage ./evmdis.nix {}; + + token = versioned "token" (x: callPackage x {}); + dai = versioned "dai-cli" (x: callPackage x {}); + + go-ethereum = super.go-ethereum.overrideDerivation (_: rec { + name = "go-ethereum-${version}"; + version = "1.8.10"; + src = self.pkgs.fetchFromGitHub { + owner = "ethereum"; + repo = "go-ethereum"; + rev = "v${version}"; + sha256 = "1n36pz4y3xa4d46mynym98bra79qx5n9lb29chyxfpvi5fmprdg1"; + }; + # (mbrock backported) fix for usb-related segmentation faults on darwin + propagatedBuildInputs = + stdenv.lib.optionals stdenv.isDarwin + (with self.pkgs; [ darwin.libobjc darwin.apple_sdk.frameworks.IOKit ]); + }); + + # We use this to run private testnets without + # the pesky transaction size limit. + go-ethereum-unlimited = self.go-ethereum.overrideDerivation (this: rec { + name = "go-ethereum-unlimited-${this.version}"; + preConfigure = '' + # Huge transaction calldata + substituteInPlace core/tx_pool.go --replace 'return ErrOversizedData' "" + + # Huge contracts + substituteInPlace params/protocol_params.go --replace \ + 'MaxCodeSize = 24576' \ + 'MaxCodeSize = 1000000' + + # Huge block gas limit in --dev mode + substituteInPlace core/genesis.go --replace \ + 'GasLimit: 6283185,' \ + 'GasLimit: 0xffffffffffffffff,' + ''; + }); + + # Use unreleased ethabi that fixes empty array encoding. 
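+  # (The resulting binary is wrapped onto seth's PATH; see upstream/master/seth.nix below.)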
+ ethabi = ((import ./ethabi { pkgs = super; }).ethabi_cli_4_0_0); + + iosevka-term = let version = "1.13.3"; in self.pkgs.fetchzip rec { + name = "iosevka-ss02-term-${version}"; + url = "https://github.com/be5invis/Iosevka/releases/download/v${version}/iosevka-term-ss02-${version}.zip"; + sha256 = "03hcmqjbm4rvy1ydar07p7a7vfr2mpfjrr8sbrs5y6z07vdml3xd"; + postFetch = '' + mkdir -p $out/share/fonts + unzip -j $downloadedFile \*.ttf -d $out/share/fonts/truetype + ''; + meta = with stdenv.lib; { + homepage = https://github.com/be5invis/iosevka; + description = "Iosevka font (fixed width, Anonymous Pro style)"; + license = licenses.ofl; + platforms = platforms.all; + }; + }; + + qrtx = self.bashScript { + name = "qrtx"; + version = "0"; + deps = with self.pkgs; [qrencode feh vim gnused coreutils]; + text = '' + sed 's/^0x//' | tr -d '[:space:]' | xxd -r -p | base64 -w0 | + qrencode -s 1 -o - | feh -ZB white --force-aliasing - + ''; + }; + + qrtx-term = self.bashScript { + name = "qrtx-term"; + version = "0"; + deps = with self.pkgs; [qrencode vim gnused coreutils]; + text = '' + sed 's/^0x//' | tr -d '[:space:]' | xxd -r -p | base64 -w0 | + qrencode -t ANSIUTF8 + ''; + }; + + secp256k1 = super.secp256k1.overrideDerivation (_: { + dontDisableStatic = true; + }); + + ethjet = versioned "libethjet" (x: callPackage x {}); + + dialog-to-file = self.bashScript { + name = "dialog-to-file"; + version = "0"; + deps = with self.pkgs; [dialog ncurses]; + text = '' + set +e + file=$1; shift + exec 3>&1; x=$(dialog "$@" 2>&1 1>&3) + status=$?; exec 3>&-; clear + if [ "$status" -eq 0 ]; then echo >"$file" "$x"; else exit 1; fi + ''; + }; + + eth-pick-account = self.bashScript { + name = "eth-pick-account"; + version = "0"; + deps = with self.pkgs; [ethsign dialog-to-file]; + text = '' + accts=() + while read -r x y; do accts+=("$x" "$y"); done < <(ethsign ls 2>/dev/null) + dialog-to-file "$1" --ok-label "Use account" \ + --menu "Pick an Ethereum account:" \ + 20 80 10 "''${accts[@]}" + ''; + }; + + ds-chief = { + vote = self.bashScript { + name = "ds-chief-vote"; + version = "0"; + deps = with self.pkgs; [ + seth eth-pick-account ethsign dialog-to-file ncurses coreutils qrtx readline + ]; + text = '' + acctfile=$(mktemp) + txfile=$(mktemp) + eth-pick-account "$acctfile" + acct=$(cat "$acctfile") + dialog-to-file "$txfile" --ok-label "Sign" \ + --title "Make a signed transaction without publishing" \ + --form "ds-chief vote -- set your approval to an existing slate" 10 60 4 \ + "Slate ID" 1 1 "" 1 12 32 0 \ + "Nonce" 2 1 "1" 2 12 10 0 \ + "Gas price" 3 1 50 3 12 10 0 \ + "Gas limit" 4 1 10000 4 12 10 0 \ + "Account" 5 1 "$acct" 5 12 0 0 + + { + read -r slate + read -r nonce + read -r gasprice + read -r gaslimit + } < "$txfile" + + slate=$(seth --to-bytes32 "$(seth --from-ascii "$slate")") + calldata=$(seth calldata 'vote(bytes32)' "$slate") + + echo "Offline ds-chief vote transaction creation" + echo + echo "Account: $acct" + echo "Slate ID: $slate" + echo "Nonce: $nonce" + echo "Gas price: $gasprice" + echo "Gas limit: $gaslimit" + echo "Call data: $calldata" + echo + echo "After authentication, signed transaction will be shown as QR code." 
+ + ethsign tx --from "$acct" --to 0x0 --nonce "$nonce" --gas-price "$gasprice" \ + --gas-limit "$gaslimit" --data "$calldata" \ + --value 0 --chain-id 1 | qrtx + echo + ''; + }; + }; + + ethos-iso = makeIso { + module = import ../ethos.nix { hidpi = false; }; + config.isoImage.appendToMenuLabel = " (Ethos by DappHub)"; + }; + + ethos-iso-hidpi = makeIso { + module = import ../ethos.nix { hidpi = true; }; + config.isoImage.appendToMenuLabel = " (Ethos by DappHub, HiDPI)"; + }; + + mkbip39 = with self.pkgs.python3Packages; buildPythonApplication rec { + version = "0.5"; + name = "mkbip39"; + src = ./mkbip39; + propagatedBuildInputs = [mnemonic]; + }; + + oasis-orders = ( + versioned "oasis-orders" + (x: self.pkgs.haskell.lib.justStaticExecutables + (self.haskellPackages.callPackage x {})) + ).overrideAttrs (attrs: { + postInstall = '' + wrapProgram $out/bin/oasis-orders \ + --set OASIS_DAPP_PATH ${self.dapps.maker-otc}/dapp/maker-otc + ''; + nativeBuildInputs = attrs.nativeBuildInputs ++ [self.pkgs.makeWrapper]; + }); + + dapphub-emacs-experiment = let + version = "1"; + dapphub-elisp = self.pkgs.writeTextFile { + name = "dapphub-emacs-${version}"; + destination = "/dapphub.el"; + text = '' + (package-initialize) + (set-face-attribute 'default (selected-frame) :height 180 :family "Courier") + (menu-bar-mode -1) + (tool-bar-mode -1) + (setq initial-buffer-choice + (lambda () + (with-current-buffer (get-buffer-create "*DappHub*") + (insert ";; Hello, and welcome to DappHub!") + (current-buffer)))) + (require 'agda2-mode) + (setq auto-mode-alist '(("\\.agda" . agda2-mode))) + (setq agda2-program-args (list (concat "--include-path=" (expand-file-name "~/src/agda-stdlib/src")))) + ''; + }; + in self.bashScript { + inherit version; + name = "dapphub-emacs-experiment"; + deps = with self.pkgs; [ + coreutils + (emacsWithPackages (e: with e; [ + zenburn-theme agda2-mode + ])) + ethsign + seth + haskellPackages.Agda + ]; + text = '' + emacs -q --no-splash --load=${dapphub-elisp}/dapphub.el + ''; + }; + + celf = callPackage ./celf.nix {}; + + # shellcheck = super.shellcheck.overrideDerivation (_: rec { + # name = "shellcheck-${version}"; + # version = "0.4.7"; + # src = self.pkgs.fetchFromGitHub { + # owner = "koalaman"; + # repo = "shellcheck"; + # rev = "v${version}"; + # sha256 = "08j33ipk7w56vj315smq9hxz512gbi5w283d7mvcyjvpddr001gc"; + # }; + # }); + + # Use an older version of PolyML, for TLA+. + # I'm not familiar with these tools, but this seemed to be necessary. + # The specific commit is the parent of a change that is incompatible + # with some stage of the TLA+ build. 
+ polyml = self.polyml56.overrideDerivation (_: rec { + name = "polyml-${version}"; + version = "unstable-2015-10-15"; + src = self.pkgs.fetchFromGitHub { + owner = "polyml"; + repo = "polyml"; + rev = "257ef837c10e685170909878a64339b1144ff960"; + sha256 = "00migdfxzj2m28ikbnpbri8aysf5w836qdmflmrpxk7mddncimvw"; + }; + }); + + tla-plus = + let + core = callPackage ./tla/core.nix {}; + toolbox = callPackage ./tla/toolbox.nix { gtk = self.gtk2; }; + isabelle2011-1 = + callPackage ./tla/isabelle2011-1 { + proofgeneral = self.emacsPackages.proofgeneral; + }; + tlaps = callPackage ./tla/tlaps.nix { + inherit isabelle2011-1; + }; + tla-smt = with self; [z3 yices cvc3]; + in super.buildEnv { + name = "tla-plus-full"; + paths = [toolbox] ++ core.all ++ tlaps.all ++ tla-smt; + }; + + git-stitch-repo = ourPerlPackages.GitFastExport; + + tomono = stdenv.mkDerivation rec { + name = "tomono-${version}"; + version = "unstable-2018-01-03"; + src = self.fetchFromGitHub { + owner = "unravelin"; + repo = "tomono"; + rev = "ec59cb019a181f461e769feb22d43e09cf907566"; + sha256 = "1s2bl8iwwalslh46gp37zrg19jvbzb65sajrqkhwb3bkbbx4s9pd"; + }; + installPhase = '' + mkdir -p $out/bin + cp tomono.sh $out/bin/tomono + ''; + postInstall = '' + wrapProgram $out/bin/tomono \ + --suffix PATH : "${lib.makeBinPath (with self.pkgs; [bash coreutils git])}" + ''; + nativeBuildInputs = [self.pkgs.makeWrapper]; + }; + + myetherwallet = stdenv.mkDerivation rec { + name = "myetherwallet-${version}"; + version = "3.11.3.1"; + src = self.fetchFromGitHub { + owner = "kvhnuke"; + repo = "etherwallet"; + rev = "v${version}"; + sha256 = "1985zhy8lwnyg5hc436gcma0z9azm1qzsl3rj2vqq080s5czm4d2"; + }; + installPhase = '' + mkdir -p $out/myetherwallet + cp -R dist/* $out/myetherwallet + ''; + }; + + ocaml-getopt = stdenv.mkDerivation rec { + name = "ocaml-getopt-${version}"; + version = "unstable-2012-06-15"; + src = self.fetchurl { + url = "https://forge.ocamlcore.org/frs/download.php/896/ocaml-getopt-20120615.tar.gz"; + sha256 = "1rz2mi3gddwpd249bvz6h897swiajk4d6cczrsscibwpkmdvrfwa"; + }; + buildInputs = with self.ocamlPackages; [ocaml ocamlbuild findlib]; + createFindlibDestdir = true; + }; + + dry-analyzer = stdenv.mkDerivation rec { + name = "dry-analyzer-${version}"; + version = "unstable-2017-07-10"; + src = ~/src/dry-analyzer; + buildInputs = with self.ocamlPackages; [ + ocaml cohttp cohttp-lwt cohttp-lwt-unix batteries ocamlnet ocamlbuild findlib ocaml-getopt + ] ++ [self.pkgs.coq]; + patchPhase = '' + patchShebangs ./compile.sh + ''; + }; + + dafny = super.dafny.overrideAttrs (_: rec { + name = "Dafny-${version}"; + version = "2.1.0"; + + src = self.fetchurl { + url = "https://github.com/Microsoft/dafny/archive/v${version}.tar.gz"; + sha256 = "1iyhy0zpi6wvqif7826anzgdipgsy5bk775ds9qqwfw27j7x6fy5"; + }; + + postPatch = '' + sed -i \ + -e 's/ Visible="False"//' \ + -e "s/Exists(\$(CodeContractsInstallDir))/Exists('\$(CodeContractsInstallDir)')/" \ + Source/*/*.csproj + ''; + }); + +} diff --git a/nix/overlay/perl.nix b/nix/overlay/perl.nix new file mode 100644 index 000000000..43e3d1583 --- /dev/null +++ b/nix/overlay/perl.nix @@ -0,0 +1,71 @@ +# These entries were generated using nix-generate-from-cpan. 
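+# A new entry can be regenerated with the nixpkgs helper script, roughly:
+#   nix-generate-from-cpan Git::FastExport
+# (exact invocation may differ; the script lives under maintainers/scripts in nixpkgs).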
+ +{ buildPerlPackage, perlPackages }: with perlPackages; rec { + + GitFastExport = buildPerlPackage rec { + name = "Git-FastExport-0.107"; + src = fetchurl { + url = "mirror://cpan/authors/id/B/BO/BOOK/${name}.tar.gz"; + sha256 = "8607a8d7ea2c46f8cb78199b2f584cc1cf1d97dbf2cef4e74e23076c9095f89a"; + }; + buildInputs = [ TestRequiresGit TestScript ]; + propagatedBuildInputs = [ GitRepository ]; + meta = { + description = "A parser for git fast-export streams"; + license = with stdenv.lib.licenses; [ artistic1 gpl1Plus ]; + }; + }; + + SystemCommand = buildPerlPackage rec { + name = "System-Command-1.118"; + src = fetchurl { + url = "mirror://cpan/authors/id/B/BO/BOOK/${name}.tar.gz"; + sha256 = "00c4dcc606aeea4d5d58ca027bfb484a7f59edfca882c84d01924c2af32f1ea7"; + }; + propagatedBuildInputs = [ IPCRun ]; + meta = { + description = "Object for running system commands"; + license = with stdenv.lib.licenses; [ artistic1 gpl1Plus ]; + }; + }; + + TestRequiresGit = buildPerlPackage rec { + name = "Test-Requires-Git-1.008"; + src = fetchurl { + url = "mirror://cpan/authors/id/B/BO/BOOK/${name}.tar.gz"; + sha256 = "70916210970d84d7491451159ab8b67e15251c8c0dae7c3df6c8d88542ea42a6"; + }; + propagatedBuildInputs = [ GitVersionCompare ]; + meta = { + description = "Check your test requirements against the available version of Git"; + license = with stdenv.lib.licenses; [ artistic1 gpl1Plus ]; + }; + }; + + GitVersionCompare = buildPerlPackage rec { + name = "Git-Version-Compare-1.004"; + src = fetchurl { + url = "mirror://cpan/authors/id/B/BO/BOOK/${name}.tar.gz"; + sha256 = "63e8264ed351cb2371b47852a72366214164b5f3fad9dbd68309c7fc63d06491"; + }; + buildInputs = [ TestNoWarnings ]; + meta = { + description = "Functions to compare Git versions"; + license = with stdenv.lib.licenses; [ artistic1 gpl1Plus ]; + }; + }; + + GitRepository = buildPerlPackage rec { + name = "Git-Repository-1.321"; + src = fetchurl { + url = "mirror://cpan/authors/id/B/BO/BOOK/${name}.tar.gz"; + sha256 = "67848f2dfc09adb208e21fc6fcaf55fc4e3b0f6e687b8e866d4c87520701880c"; + }; + buildInputs = [ TestRequiresGit ]; + propagatedBuildInputs = [ GitVersionCompare SystemCommand namespaceclean ]; + meta = { + description = "Perl interface to Git repositories"; + license = with stdenv.lib.licenses; [ artistic1 gpl1Plus ]; + }; + }; +} diff --git a/nix/overlay/python.nix b/nix/overlay/python.nix new file mode 100644 index 000000000..7255d1a4c --- /dev/null +++ b/nix/overlay/python.nix @@ -0,0 +1,239 @@ +{ pkgs }: self: super: with super; + +let + stdenv = pkgs.stdenv; + fetchPypi = pkgs.pythonPackages.fetchPypi; + +in rec { + + web3 = buildPythonPackage rec { + pname = "web3"; + version = "3.13.5"; + name = "${pname}-${version}"; + src = fetchPypi { + inherit pname version; + sha256 = "0q27bqgy4a847spx4zarg6qahvc0a38dqr1pgwlj8dnh20qp2pf3"; + }; + + propagatedBuildInputs = with self; [ethereum-abi-utils requests]; + doCheck = false; + + meta = with stdenv.lib; { + homepage = https://github.com/pipermerriam/web3.py; + description = "A Python interface for the Ethereum blockchain"; + license = licenses.mit; + maintainers = with maintainers; [ dbrock ]; + }; + }; + + ethereum = buildPythonPackage rec { + pname = "ethereum"; + version = "1.6.1"; + name = "${pname}-${version}"; + src = fetchPypi { + inherit pname version; + sha256 = "0k93zs01ff7ki835c3f7gp2sskhr3xaiqm1mkn60i60kyipslfrc"; + }; + preConfigure = '' + substituteInPlace setup.py --replace pytest-runner==2.7 pytest-runner==2.6.2 + ''; + propagatedBuildInputs = with self; [ + pyyaml 
rlp pysha3 pyethash bitcoin pbkdf2 repoze_lru scrypt + pycryptodome pytestrunner secp256k1 + ]; + doCheck = false; + meta = with stdenv.lib; { + homepage = https://github.com/ethereum/pyethereum; + description = "Python core library of the Ethereum project"; + license = licenses.mit; + maintainers = with maintainers; [ dbrock ]; + }; + }; + + scrypt = buildPythonPackage rec { + pname = "scrypt"; + version = "0.8.0"; + name = "${pname}-${version}"; + src = fetchPypi { + inherit pname version; + sha256 = "0830r3q8f8mc4738ngcvwhv9kih5c6zf87mzkdifzf2h6kss99fl"; + }; + buildInputs = [pkgs.openssl]; + doCheck = false; + meta = with stdenv.lib; { + homepage = https://bitbucket.org/mhallin/py-scrypt/src; + description = "Bindings for the scrypt key derivation function"; + license = licenses.mit; + maintainers = with maintainers; [ dbrock ]; + }; + }; + + bitcoin = buildPythonPackage rec { + pname = "bitcoin"; + version = "1.1.42"; + name = "${pname}-${version}"; + src = fetchPypi { + inherit pname version; + sha256 = "0dkjifd8d60an0jl6k9zqx9r0p5xprzlrgf4n9mlyxhwksyp1fhi"; + }; + propagatedBuildInputs = with self; []; + meta = with stdenv.lib; { + homepage = https://github.com/vbuterin/pybitcointools; + description = "Bitcoin-themed Python ECC library"; + license = licenses.mit; + maintainers = with maintainers; [ dbrock ]; + }; + }; + + pyethash = buildPythonPackage rec { + pname = "pyethash"; + version = "0.1.27"; + name = "${pname}-${version}"; + src = fetchPypi { + inherit pname version; + sha256 = "0618kkn2sb0a3h2pphpj2vi455xc9mil42b13zgpg7bbwaf32rpz"; + }; + propagatedBuildInputs = with self; []; + meta = with stdenv.lib; { + homepage = https://github.com/ethereum/ethash; + description = "Python wrappers for the Ethereum PoW hash function"; + license = licenses.mit; + maintainers = with maintainers; [ dbrock ]; + }; + }; + + ethereum-abi-utils = buildPythonPackage rec { + pname = "ethereum-abi-utils"; + version = "0.4.0"; + name = "${pname}-${version}"; + src = fetchPypi { + inherit pname version; + sha256 = "1170dxm8vffpj4gc2mmpndj61mrwmsp3y2xnfhda1rj5w33bm7js"; + }; + + checkInputs = with self; [pytest]; + propagatedBuildInputs = with self; [ethereum-utils]; + + meta = with stdenv.lib; { + homepage = https://github.com/pipermerriam/ethereum-abi-utils; + description = "Ethereum ABI utilities for Python"; + license = licenses.mit; + maintainers = with maintainers; [ dbrock ]; + }; + }; + + ethereum-utils = buildPythonPackage rec { + pname = "ethereum-utils"; + version = "0.4.0"; + name = "${pname}-${version}"; + src = fetchPypi { + inherit pname version; + sha256 = "1z8bpkwmfrn6rrigippydkpl4077r5g81h3vrmdy4ri8iy5519rz"; + }; + + checkInputs = with self; [pytest]; + propagatedBuildInputs = with self; [pysha3 cytoolz pylru rlp]; + + meta = with stdenv.lib; { + homepage = https://github.com/pipermerriam/ethereum-utils; + description = "Ethereum utilities for Python"; + license = licenses.mit; + maintainers = with maintainers; [ dbrock ]; + }; + }; + + pysha3 = buildPythonPackage rec { + pname = "pysha3"; + version = "1.0.2"; + name = "${pname}-${version}"; + src = fetchPypi { + inherit pname version; + sha256 = "17kkjapv6sr906ib0r5wpldmzw7scza08kv241r98vffy9rqx67y"; + }; + + checkInputs = with self; [pytest]; + + meta = with stdenv.lib; { + homepage = https://github.com/tiran/pysha3; + description = "SHA-3 wrapper (Keccak) for Python"; + license = licenses.psfl; + maintainers = with maintainers; [ dbrock ]; + }; + }; + + rlp = buildPythonPackage rec { + pname = "rlp"; + version = "0.6.0"; + name 
= "${pname}-${version}"; + src = fetchPypi { + inherit pname version; + sha256 = "0d3gx4mp8q4z369s5yk1n9c55sgfw9fidbwqxq67d6s7l45rm1w7"; + }; + + checkInputs = [self.pytest]; + propagatedBuildInputs = with self; [wheel]; + + meta = with stdenv.lib; { + homepage = https://github.com/ethereum/pyrlp; + description = "Recursive length prefix notation for Ethereum"; + license = licenses.mit; + maintainers = with maintainers; [ dbrock ]; + }; + }; + + eth-testrpc = buildPythonPackage rec { + pname = "eth-testrpc"; + version = "1.3.0"; + name = "${pname}-${version}"; + src = fetchPypi { + inherit pname version; + sha256 = "1bmds2shkxxvhdksji1dxiadm95rf6f8cp7bzs0iirjrx1fyjni0"; + }; + + doCheck = false; + propagatedBuildInputs = with self; [werkzeug click rlp json-rpc ethereum]; + + meta = with stdenv.lib; { + homepage = https://github.com/pipermerriam/eth-testrpc; + description = "Used for testing Ethereum JSON-RPC interactions"; + license = licenses.mit; + maintainers = with maintainers; [ dbrock ]; + }; + }; + + json-rpc = buildPythonPackage rec { + pname = "json-rpc"; + version = "1.10.3"; + name = "${pname}-${version}"; + src = fetchPypi { + inherit pname version; + sha256 = "1195767r25mclnkz1pxr74wm0j21qqyq75pkw85fsxf9d8wj8gni"; + }; + doCheck = false; # Installs too many web servers + meta = with stdenv.lib; { + homepage = https://github.com/pavlov99/json-rpc; + description = "JSON-RPC2.0 and JSON-RPC1.0 transport specification implementation"; + license = licenses.mit; + maintainers = with maintainers; [ dbrock ]; + }; + }; + + tinydb = buildPythonPackage rec { + pname = "tinydb"; + version = "3.5.0"; + name = "${pname}-${version}"; + src = fetchPypi { + inherit pname version; + sha256 = "18rwlyqbsp7g5pjq792w2vc4gwbb281lgfjvfrg4idlhir53klh0"; + }; + + checkInputs = [self.pytest]; + + meta = with stdenv.lib; { + homepage = https://github.com/msiemens/tinydb; + description = "Pure Python lightweight document oriented database"; + license = licenses.mit; + maintainers = with maintainers; [ dbrock ]; + }; + }; +} diff --git a/nix/overlay/solc-post-0.4.17.nix b/nix/overlay/solc-post-0.4.17.nix new file mode 100644 index 000000000..045ef3fcf --- /dev/null +++ b/nix/overlay/solc-post-0.4.17.nix @@ -0,0 +1,53 @@ +version: rev: sha256: +{ stdenv, fetchzip, fetchgit, boost, cmake, z3 }: + +let + jsoncppURL = https://github.com/open-source-parsers/jsoncpp/archive/1.7.7.tar.gz; + jsoncpp = fetchzip { + url = jsoncppURL; + sha256 = "0jz93zv17ir7lbxb3dv8ph2n916rajs8i96immwx9vb45pqid3n0"; + }; +in + +stdenv.mkDerivation { + name = "solc-${version}"; + + # Cannot use `fetchFromGitHub' because of submodules + src = fetchgit { + url = "https://github.com/ethereum/solidity"; + inherit rev sha256; + }; + + patchPhase = '' + echo >commit_hash.txt '${rev}' + echo >prerelease.txt + substituteInPlace cmake/jsoncpp.cmake \ + --replace '${jsoncppURL}' ${jsoncpp} + substituteInPlace cmake/EthCompilerSettings.cmake \ + --replace 'add_compile_options(-Werror)' "" + '' + stdenv.lib.optionalString stdenv.isDarwin '' + substituteInPlace cmake/EthDependencies.cmake \ + --replace 'Boost_USE_STATIC_LIBS ON' 'Boost_USE_STATIC_LIBS OFF' + ''; + + # The Darwin flag for patch phase is a hack to avoid some + # recompilation. Actually the cmakeFlags way works fine, except not + # in older versions. I want to build those older version on Mac, + # but not rebuild my Linux versions, so I do it this silly way. 
+ + cmakeFlags = [ + "-DBoost_USE_STATIC_LIBS=OFF" + ]; + + buildInputs = [ boost cmake z3 ]; + + meta = { + description = "Compiler for Ethereum smart contract language Solidity"; + longDescription = "This package also includes `lllc', the LLL compiler."; + homepage = https://github.com/ethereum/solidity; + license = stdenv.lib.licenses.gpl3; + platforms = with stdenv.lib.platforms; linux ++ darwin; + maintainers = [ stdenv.lib.maintainers.dbrock ]; + inherit version; + }; +} diff --git a/nix/overlay/solc-post-0.4.24.nix b/nix/overlay/solc-post-0.4.24.nix new file mode 100644 index 000000000..5001b3d11 --- /dev/null +++ b/nix/overlay/solc-post-0.4.24.nix @@ -0,0 +1,53 @@ +version: rev: sha256: +{ stdenv, fetchzip, fetchgit, boost, cmake, z3 }: + +let + jsoncppURL = https://github.com/open-source-parsers/jsoncpp/archive/1.8.4.tar.gz; + jsoncpp = fetchzip { + url = jsoncppURL; + sha256 = "1z0gj7a6jypkijmpknis04qybs1hkd04d1arr3gy89lnxmp6qzlm"; + }; +in + +stdenv.mkDerivation { + name = "solc-${version}"; + + # Cannot use `fetchFromGitHub' because of submodules + src = fetchgit { + url = "https://github.com/ethereum/solidity"; + inherit rev sha256; + }; + + patchPhase = '' + echo >commit_hash.txt '${rev}' + echo >prerelease.txt + substituteInPlace cmake/jsoncpp.cmake \ + --replace '${jsoncppURL}' ${jsoncpp} + substituteInPlace cmake/EthCompilerSettings.cmake \ + --replace 'add_compile_options(-Werror)' "" + '' + stdenv.lib.optionalString stdenv.isDarwin '' + substituteInPlace cmake/EthDependencies.cmake \ + --replace 'Boost_USE_STATIC_LIBS ON' 'Boost_USE_STATIC_LIBS OFF' + ''; + + # The Darwin flag for patch phase is a hack to avoid some + # recompilation. Actually the cmakeFlags way works fine, except not + # in older versions. I want to build those older version on Mac, + # but not rebuild my Linux versions, so I do it this silly way. + + cmakeFlags = [ + "-DBoost_USE_STATIC_LIBS=OFF" + ]; + + buildInputs = [ boost cmake z3 ]; + + meta = { + description = "Compiler for Ethereum smart contract language Solidity"; + longDescription = "This package also includes `lllc', the LLL compiler."; + homepage = https://github.com/ethereum/solidity; + license = stdenv.lib.licenses.gpl3; + platforms = with stdenv.lib.platforms; linux ++ darwin; + maintainers = [ stdenv.lib.maintainers.dbrock ]; + inherit version; + }; +} diff --git a/nix/overlay/solc-versions.nix b/nix/overlay/solc-versions.nix new file mode 100644 index 000000000..4481fc583 --- /dev/null +++ b/nix/overlay/solc-versions.nix @@ -0,0 +1,62 @@ +let + mk1 = import ./solc.nix; + mk2 = import ./solc-post-0.4.17.nix; + mk3 = import ./solc-post-0.4.24.nix; +in { + + ## I looked up the hashes for all the older versions, but prior to + ## 0.3.6 the Solc repository was not standalone and I haven't even + ## tried getting them to build. + + ## In fact 0.4.5 is the first version that compiles with our build + ## script. Prior versions could probably be made to work, but for + ## now I disable them. + + ## Also note that in v0.4.9 the compiler started prefixing things + ## with "filename:", so that's the oldest compiler some things will + ## work with. 
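+  ## With the overlay applied, these are exposed as the `solc-versions' attribute
+  ## set (e.g. `solc-versions.solc_0_4_19'), while plain `solc' is pinned to
+  ## 0.4.23 in overlay.nix.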
+ + # solc_0_1_2 = mk "0.1.2" "0906042ce05f01c4d371aa98d0fd9dddfb93a196" "12498k4skfv3a6jlvm97p1v9c77nfj5910305a19pl9s7w38gbcy"; + # solc_0_1_3 = mk "0.1.3" "028f561dac6aec0692daa8217b0ee1e3a0c5aa72" "1ax3hj5varljk4rl6by63v3ymigm2r8sjx4wl39ia118xcvcl7mp"; + # solc_0_1_4 = mk "0.1.4" "63a9c4f8960664af056f9b41c24e6b5d74edcb14" "1pga8xg8klxnilhy9kx0vf4989xhykc4p0avfzgl8n7r2h6vqmw5"; + # solc_0_1_5 = mk "0.1.5" "23865e39295dd9199769727b037c1d126807b20e" "16dr9isr8dfhb7rps76kb406ac1z5ibaxr0w0w1mjw8j47qbx8sw"; + # solc_0_1_6 = mk "0.1.6" "d41f8b7ce702c3b25c48d27e2e895ccdcd04e4e0" "173s5f04ii7px85qil7wqypx8z8vx9x6f031ak42ysjf82dqv4y9"; + # solc_0_1_7 = mk "0.1.7" "b4e666ccf4fe39b0a1fc909b61daf6dc39e77fc4" "0h2i6x9yjqjmhzjcni0nw8jqdqqfvnbxdnc6dn3hl8nx0778ri80"; + # solc_0_2_0 = mk "0.2.0" "4dc2445edd05d9d6bdc9fc4673ad91ead11f1632" "1npcfn68ykhwmq907g3qq72z4zd4j1nm81ahvw32c6m4pivq0idl"; + # solc_0_2_1 = mk "0.2.1" "91a6b35f18c9e3ae38b08460ba3b0d3590e573dc" "0d0m3nc6sh1wwngz1sxfan9g7nf9jsdqfxclvpw3vsiygj1y9v3c"; + # solc_0_2_2 = mk "0.2.2" "ef92f5661b06f7a277aec4b76f3eb62772239e69" "0s5mkkvyr19f5zw4xhpxccgf9a5q9fbp0qnd2zzkfvzsl6c9xy1s"; + # solc_0_3_0 = mk "0.3.0" "1f9578cea3f7ea1982ba2288cd3238bfe791b348" "0ikix08y6l4izcvkjrydqwprxwiz1102gl4s80dm63r8crd15c3v"; + # solc_0_3_1 = mk "0.3.1" "c67926cf2be95e9275eb2b7fe904131e7e8842ad" "1v3s9h57c6w8lcdcy49g2p5msip026sm4zsv8pncwfqw0fchdric"; + # solc_0_3_2 = mk "0.3.2" "81ae2a78321fddcd2d32efc51568ebeca28866a8" "0k5b15ywx324lajg2hmp8p527qp6ai1ak4wr7s4rdw4s0ljyjqz8"; + # solc_0_3_3 = mk "0.3.3" "4dc1cb149c7e3677119b052ae0f7dc7a019fa952" "0flp7i58hx56rf431n9lxq39r65n7f4p7y9v7bpii2sqrz1af9a7"; + # solc_0_3_4 = mk "0.3.4" "7dab8902789b15190e39d4fed0bc46418f1ace5e" "11w51phzaac96x9gz7r2ac6wkpry9yhav6a0rahcr76rvyxmhj2b"; + # solc_0_3_5 = mk "0.3.5" "5f97274a6cc2d9346357d69b2c4a9348cdffa7f9" "147sq3zh1cqhkinxcwv5i1la444y8bgn92fcac7xd84qddn3yd6n"; + + # solc_0_3_6 = mk "0.3.6" "988fe5e5aa271d89e0700d36ef0f210ca50051e3" "1lg7knx9a9488yhl2gavaq8g9ywvskp73sd31fc8ql15qf4ay83r"; + # solc_0_4_0 = mk "0.4.0" "acd334c9d289a7cc4674dde2534d8728450cedde" "0i63bcclcs8i2j90p8l3hvmvjmad9rh8vkfz1l4w1c2501ani971"; + # solc_0_4_1 = mk "0.4.1" "4fc6fc2ca59579fae2472df319c2d8d31fe5bde5" "0ixrkp2y49lf4av52ik6ywngmg53vigj8pk10jwd94ld1aw2bcpd"; + # solc_0_4_2 = mk "0.4.2" "af6afb0415761b53721f89c7f65064807f41cbd3" "1086w2hkjlqh73p62nnrkcwxsiiy2q2mz3r4zgppbrn717cq68v5"; + # solc_0_4_3 = mk "0.4.3" "2353da71c77dd235b35d16e7e024fa62408df610" "06m0zhidnd8vc56247yw7zqg6bk6girgr0cv9fh7avkfcwjpn42g"; + # solc_0_4_4 = mk "0.4.4" "4633f3def897db0f91237f98cf46e5d84fb05e61" "1z32a9vb6rjxmg5cqpkm45xbjq6nyx9p31m7cjppqw6zljw6zjzs"; + + solc_0_4_5 = mk1 "0.4.5" "b318366e6f16ed6a4274247d09badac4affff8d5" "1lnmbqv4qqdc6077i27471mg9sr34d4wd9505w8wj94scjq3mpxm"; + solc_0_4_6 = mk1 "0.4.6" "2dabbdf06f414750ef0425c664f861aeb3e470b8" "0q1dvizx60f7l97w8241wra7vpghimc9x7gzb18vn34sxv4bqy9g"; + solc_0_4_7 = mk1 "0.4.7" "822622cf5bf23e79a6e2292cb837d1a39ca1c419" "1x2s9gi9y8h03j7nv6wyjjbw74vnzzhr206nik0qasyajgrb0sk2"; + solc_0_4_8 = mk1 "0.4.8" "60cc1668517f56ce6ca8225555472e7a27eab8b0" "09mwah7c5ca1bgnqp5qgghsi6mbsi7p16z8yxm0aylsn2cjk23na"; + solc_0_4_9 = mk1 "0.4.9" "364da425d3116a4b85863df39a1864340861d71e" "1qlc3isbdvg4llvqxcdfhqx1m4z80jd7iw89sivz2nga4nx06wwz"; + solc_0_4_10 = mk1 "0.4.10" "f0d539ae05739e35336cc9cc8f44bd9798a95c28" "04kmibc4q0sr5qyawbp855iix79di6ynix7vpsfx9gjwdddjk6j8"; + solc_0_4_11 = mk1 "0.4.11" "68ef5810593e7c8092ed41d5f474dd43141624eb" 
"13zycybf23yvf3hkf9zgw9gbc1y4ifzxaf7sll69bsn24fcyq961"; + solc_0_4_12 = mk1 "0.4.12" "194ff033ae44944ac59aa7bd3da89ba94ec5893c" "0gkg3nay0625qmhxxxax1d1c4dl554ri3pkwd12qfg6g1w6j04w7"; + solc_0_4_13 = mk1 "0.4.13" "0fb4cb1ab9bb4b6cc72e28cc5a1753ad14781f14" "0rhrm0bmk5s2358j40yx7dzr1938q17dchzflrxw6y7yvkhscxrm"; + solc_0_4_14 = mk1 "0.4.14" "c2215d4605d1fbcef1366d6b822ec610fc031b3c" "0pfn0b8nmdp61ig2g1jnhy4sdlxvkrhv4pw237zyvs97sjnll377"; + solc_0_4_15 = mk1 "0.4.15" "8b45bddb559d17250c8a5619efa1a21f296d4e03" "0a1gy4j3yximb7ja7q0ldwg34h6759dmdkfjb87nqfizj05cg5q3"; + solc_0_4_16 = mk1 "0.4.16" "d7661dd97460250b4e1127b9e7ea91e116143780" "1fd69pdhkkkvbkrxipkck1icpqkpdskjzar48a1yzdsx3l8s4lil"; + solc_0_4_17 = mk2 "0.4.17" "bdeb9e52a2211510644fb53df93fb98258b40a65" "1x6q2rlq6gxggidgsy6li7m4phwr1hcfi65pq9yimz64ddqfiira"; + solc_0_4_18 = mk2 "0.4.18" "9cf6e910bd2b90d0c9415d9c257f85fe0c518de8" "0ij7qbn3ci6v4jf4gqcdphwy8lnc1l4ycw9pvq6c80kd1fayf2s6"; + solc_0_4_19 = mk2 "0.4.19" "c4cbbb054b5ed3b8ceaa21ee5b47b0704762ff40" "1h2ziwdswghj4aa3vd3k3y2ckfiwjk6x38w2kp4m324k2ydxd15c"; + solc_0_4_20 = mk2 "0.4.20" "3155dd8058672ce8f04bc2c0f2536cb549067d0a" "0jgqi6rnyr8d3plbag1p0yp1s1fzvjjsk4yrv06v46bsvyx4lgcn"; + solc_0_4_21 = mk2 "0.4.21" "dfe3193c7382c80f1814247a162663a97c3f5e67" "0gbf3r6waqsp76aaql779jw9817sgvw4vdlrrpq0l1r1nm82lxq5"; + solc_0_4_22 = mk2 "0.4.22" "4cb486ee993cadde5564fb6c611d2bcf4fc44414" "05zrqf1pq4khjhvlk3nbg86s8f8dnzikd20ln1sy83pl9fchc0h3"; + solc_0_4_23 = mk2 "0.4.23" "124ca40dc525a987a88176c6e5170978e82fa290" "07l8rfqh95yrdmbxc4pfb77s06k5v65dk3rgdqscqmwchkndrmm0"; +# solc_0_4_24 = mk3 "0.4.24" "e67f0147998a9e3835ed3ce8bf6a0a0c634216c5" "1gy2miv6ia1z98zy6w4y03balwfr964bnvwzyg8v7pn2mayqnaap"; +} diff --git a/nix/overlay/solc.nix b/nix/overlay/solc.nix new file mode 100644 index 000000000..53af2f99f --- /dev/null +++ b/nix/overlay/solc.nix @@ -0,0 +1,52 @@ +version: rev: sha256: +{ stdenv, fetchzip, fetchgit, boost, cmake, z3 }: + +let + jsoncppURL = https://github.com/open-source-parsers/jsoncpp/archive/1.7.7.tar.gz; + jsoncpp = fetchzip { + url = jsoncppURL; + sha256 = "0jz93zv17ir7lbxb3dv8ph2n916rajs8i96immwx9vb45pqid3n0"; + }; +in + +stdenv.mkDerivation { + name = "solc-${version}"; + + # Cannot use `fetchFromGitHub' because of submodules + src = fetchgit { + url = "https://github.com/ethereum/solidity"; + inherit rev sha256; + }; + + patchPhase = '' + echo >commit_hash.txt '${rev}' + echo >prerelease.txt + substituteInPlace deps/jsoncpp.cmake \ + --replace '${jsoncppURL}' ${jsoncpp} + substituteInPlace cmake/EthCompilerSettings.cmake \ + --replace 'add_compile_options(-Werror)' "" + '' + stdenv.lib.optionalString stdenv.isDarwin '' + substituteInPlace cmake/EthDependencies.cmake \ + --replace 'Boost_USE_STATIC_LIBS ON' 'Boost_USE_STATIC_LIBS OFF' + ''; + + # The Darwin flag for patch phase is a hack to avoid some + # recompilation. Actually the cmakeFlags way works fine, except not + # in older versions. I want to build those older version on Mac, + # but not rebuild my Linux versions, so I do it this silly way. 
+ + cmakeFlags = [ + "-DBoost_USE_STATIC_LIBS=OFF" + ]; + + buildInputs = [ boost cmake z3 ]; + + meta = { + description = "Compiler for Ethereum smart contract language Solidity"; + longDescription = "This package also includes `lllc', the LLL compiler."; + homepage = https://github.com/ethereum/solidity; + license = stdenv.lib.licenses.gpl3; + platforms = with stdenv.lib.platforms; linux ++ darwin; + maintainers = [ stdenv.lib.maintainers.dbrock ]; + }; +} diff --git a/nix/overlay/solidity-package.nix b/nix/overlay/solidity-package.nix new file mode 100644 index 000000000..05329891c --- /dev/null +++ b/nix/overlay/solidity-package.nix @@ -0,0 +1,42 @@ +{ pkgs }: let + remappings = xs: + builtins.foldl' pkgs.lib.mergeAttrs {} + (builtins.map + (x: { + "${x.name}/" = "${x}/dapp/${x.name}/src/"; + } // x.remappings) + xs); + libPaths = xs: + builtins.foldl' pkgs.lib.mergeAttrs {} + (builtins.map + (x: { + "${x.name}" = "${x}/dapp/${x.name}/src"; + } // x.libPaths) + xs); +in + pkgs.lib.makeOverridable ( + attrs @ { test ? true, deps ? [], ... }: + pkgs.stdenv.mkDerivation (rec { + buildInputs = [pkgs.dapp2.test-hevm pkgs.solc]; + passthru = { + remappings = remappings deps; + libPaths = libPaths deps; + }; + + TEST = test; + + REMAPPINGS = + pkgs.lib.mapAttrsToList + (k: v: k + "=" + v) + passthru.remappings; + + LIBSCRIPT = + pkgs.lib.mapAttrsToList + (k: v: '' + ln -s ${v} lib/${k} + '') + passthru.libPaths; + + builder = ./solidity-package.sh; + } // attrs) + ) diff --git a/nix/overlay/solidity-package.sh b/nix/overlay/solidity-package.sh new file mode 100644 index 000000000..601eac512 --- /dev/null +++ b/nix/overlay/solidity-package.sh @@ -0,0 +1,28 @@ +source $stdenv/setup +unpackPhase + +jsonopts=--combined-json=abi,bin,bin-runtime,srcmap,srcmap-runtime,ast + +export DAPP_SRC=$src +export DAPP_OUT=out + +find "$DAPP_SRC" -name '*.sol' | while read -r x; do + dir=${x%\/*} + dir=${dir#$DAPP_SRC} + dir=${dir#/} + mkdir -p "$DAPP_OUT/$dir" + (set -x; solc --overwrite $REMAPPINGS --abi --bin --bin-runtime = -o "$DAPP_OUT/$dir" "$x") + json_file=$DAPP_OUT/$dir/${x##*/}.json + (set -x; solc $REMAPPINGS $jsonopts = "$x" >"$json_file") +done + +mkdir lib +echo "$LIBSCRIPT" > setup.sh +source setup.sh +export DAPP_LIB=lib +dapp2-test-hevm + +mkdir -p $out/dapp/$name +cp -r $src $out/dapp/$name/src +cp -r lib $out/dapp/$name/lib +cp -r out $out/dapp/$name/out diff --git a/nix/overlay/tla/core.nix b/nix/overlay/tla/core.nix new file mode 100644 index 000000000..4725be1b9 --- /dev/null +++ b/nix/overlay/tla/core.nix @@ -0,0 +1,89 @@ +# http://research.microsoft.com/en-us/um/people/lamport/tla/tools.html + +{ lib, fetchzip, makeWrapper, stdenv, jre, ... 
}: + +let + mkModule = { name, version, java-main, meta }: stdenv.mkDerivation { + name = "tla-plus-${name}-${version}"; + src = fetchzip { + url = "https://github.com/tlaplus/tlaplus/releases/download/v1.5.5/tla.zip"; + sha256 = "1bagl7zqmjaxss8kv74x2myf9mf6bn64avmmz1vwlhxkaj1rj7l5"; + }; + buildInputs = [ makeWrapper ]; + phases = [ "installPhase" ]; + installPhase = '' + mkdir -pv "$out/bin" + echo -e "${jre}/bin/java ${java-main} \"\$@\"" > "$out/bin/${name}" + chmod +x "$out/bin/${name}" + wrapProgram "$out/bin/${name}" --set CLASSPATH "$src" + ''; + + meta = { + # http://research.microsoft.com/en-us/um/people/lamport/tla/license.html + license = with lib.licenses; [ mit ]; + } // meta; + }; + + modules = { + tlc = mkModule { + name = "tlc"; + version = "2.08"; + java-main = "tlc2.TLC"; + meta = { + homepage = "http://research.microsoft.com/en-us/um/people/lamport/tla/tlc.html"; + description = "The TLA+ Model Checker"; + longDescription = '' + Model checker for specifications written in TLA+. TLA+ is a specification + language based on TLA, the Temporal Logic of Actions. + ''; + }; + }; + + sany = mkModule { + name = "sany"; + version = "2.1"; + java-main = "tla2sany.SANY"; + meta = { + homepage = "http://research.microsoft.com/en-us/um/people/lamport/tla/sany.html"; + description = "The TLA+ Syntactic Analyzer"; + longDescription = '' + Parser and semantic analyzer for the TLA+ specification language. + ''; + }; + }; + + pluscal = mkModule { + name = "pluscal"; + version = "1.8"; + java-main = "pcal.trans"; + meta = { + homepage = "http://research.microsoft.com/en-us/um/people/lamport/tla/pluscal.html"; + description = "The PlusCal Algorithm Language"; + longDescription = '' + Algorithm language based on TLA+. A PlusCal algorithm is translated to a TLA+ + specification, which can be checked with the TLC model checker. An algorithm + language is for writing algorithms, just as a programming language is for writing + programs. Formerly called +CAL. + ''; + }; + }; + + tlatex = mkModule { + name = "tlatex"; + version = "1.0"; + java-main = "tla2tex.TLA"; + meta = { + homepage = "http://research.microsoft.com/en-us/um/people/lamport/tla/tlatex.html"; + description = "A Typesetter for TLA+ Specifications"; + longDescription = '' + Uses the LaTeX document production system to typeset TLA+ specifications. + TLA+ is a specification language based on TLA, the Temporal Logic of Actions. + ''; + }; + }; + + }; + + all = with modules; [ tlc sany pluscal tlatex ]; + +in modules // { inherit all; } diff --git a/nix/overlay/tla/default.nix b/nix/overlay/tla/default.nix new file mode 100644 index 000000000..cecd100df --- /dev/null +++ b/nix/overlay/tla/default.nix @@ -0,0 +1,16 @@ +{ callPackage, buildEnv, ... 
}: + +let + + core = callPackage ./core.nix {}; + + tlaps = callPackage ./tlaps.nix {}; + + toolbox = callPackage ./toolbox.nix {}; + + full = buildEnv { + name = "tla-plus-full"; + paths = core.all ++ tlaps.all ++ [ toolbox ]; + }; + +in core // tlaps // { inherit toolbox full; } diff --git a/nix/overlay/tla/isabelle2011-1/default.nix b/nix/overlay/tla/isabelle2011-1/default.nix new file mode 100644 index 000000000..b6d83db5a --- /dev/null +++ b/nix/overlay/tla/isabelle2011-1/default.nix @@ -0,0 +1,61 @@ +{ stdenv, fetchurl, perl, nettools, polyml, proofgeneral }: +# nettools needed for hostname + +let + pname = "Isabelle"; + version = "2011-1"; + name = "${pname}${version}"; + theories = ["HOL" "FOL" "ZF"]; +in + +stdenv.mkDerivation { + inherit name theories; + + src = fetchurl { + url = "http://isabelle.in.tum.de/website-Isabelle${version}/dist/Isabelle${version}.tar.gz"; + sha256 = "027wxm9w163vvhq7d130kvngrn6s1fk56yda2mh4zd0n3bipzms8"; + }; + + buildInputs = [ perl polyml nettools ]; + + sourceRoot = name; + + patches = [ ./settings.patch ]; + + postPatch = '' + ENV=$(type -p env) + patchShebangs "." + substituteInPlace lib/Tools/env \ + --replace /usr/bin/env $ENV + substituteInPlace lib/Tools/install \ + --replace /usr/bin/env $ENV + substituteInPlace src/Pure/IsaMakefile \ + --replace /bin/bash /bin/sh + substituteInPlace etc/settings \ + --subst-var-by ML_HOME "${polyml}/bin" \ + --subst-var-by PROOFGENERAL_HOME "${proofgeneral}/share/emacs/site-lisp/ProofGeneral" + ''; + + buildPhase = '' + ./build $theories + ''; + + installPhase = '' + mkdir -p $out/bin + mv $TMP/$name $out + cd $out/$name + bin/isabelle install -p $out/bin + ''; + + meta = { + description = "A generic proof assistant"; + + longDescription = '' + Isabelle is a generic proof assistant. It allows mathematical formulas + to be expressed in a formal language and provides tools for proving those + formulas in a logical calculus. 
+ ''; + homepage = http://isabelle.in.tum.de/; + license = "LGPL"; + }; +} diff --git a/nix/overlay/tla/isabelle2011-1/settings.patch b/nix/overlay/tla/isabelle2011-1/settings.patch new file mode 100644 index 000000000..bf509a4ba --- /dev/null +++ b/nix/overlay/tla/isabelle2011-1/settings.patch @@ -0,0 +1,33 @@ +diff -Nuar Isabelle2011/etc/settings Isabelle2011-fix/etc/settings +--- Isabelle2011/etc/settings 2011-01-30 13:02:18.000000000 +0100 ++++ Isabelle2011-fix/etc/settings 2011-05-14 22:56:04.000000000 +0200 +@@ -17,13 +17,7 @@ + + # Poly/ML 5.x (automated settings) + ML_PLATFORM="$ISABELLE_PLATFORM" +-ML_HOME="$(choosefrom \ +- "$ISABELLE_HOME/contrib/polyml/$ML_PLATFORM" \ +- "$ISABELLE_HOME/../polyml/$ML_PLATFORM" \ +- "/usr/local/polyml/$ML_PLATFORM" \ +- "/usr/share/polyml/$ML_PLATFORM" \ +- "/opt/polyml/$ML_PLATFORM" \ +- "")" ++ML_HOME=@ML_HOME@ + ML_SYSTEM=$("$ISABELLE_HOME/lib/scripts/polyml-version") + ML_OPTIONS="-H 200" + ML_SOURCES="$ML_HOME/../src" +@@ -175,13 +169,7 @@ + ### + + # Proof General home, look in a variety of places +-PROOFGENERAL_HOME="$(choosefrom \ +- "$ISABELLE_HOME/contrib/ProofGeneral" \ +- "$ISABELLE_HOME/../ProofGeneral" \ +- "/usr/local/ProofGeneral" \ +- "/usr/share/ProofGeneral" \ +- "/opt/ProofGeneral" \ +- "")" ++PROOFGENERAL_HOME=@PROOFGENERAL_HOME@ + + PROOFGENERAL_OPTIONS="" + #PROOFGENERAL_OPTIONS="-m no_brackets -m no_type_brackets" diff --git a/nix/overlay/tla/tla-toolbox.desktop b/nix/overlay/tla/tla-toolbox.desktop new file mode 100644 index 000000000..675354a86 --- /dev/null +++ b/nix/overlay/tla/tla-toolbox.desktop @@ -0,0 +1,9 @@ +[Desktop Entry] +Type=Application +Exec=tla-toolbox +Icon=tla-toolbox +Comment=IDE for TLA+ +Terminal=false +Name=TLA Toolbox +Categories=Application;Development; +StartupWMClass=TLA+ Toolbox diff --git a/nix/overlay/tla/tlaps.nix b/nix/overlay/tla/tlaps.nix new file mode 100644 index 000000000..8690bf5b3 --- /dev/null +++ b/nix/overlay/tla/tlaps.nix @@ -0,0 +1,97 @@ +{ lib, fetchurl, makeWrapper, stdenv, ocaml, gawk, isabelle2011-1, cvc3, perl +, wget, ... 
}: + +let + + version = "1.4.3"; + src = fetchurl { + + # Originally from "https://tla.msr-inria.inria.fr/tlaps/dist/${version}/tlaps-${version}.tar.gz"; + url = "https://github.com/chris-martin/tla-plus/raw/5c9786746f6a2ba74e031279eb858bd9a1c59613/tlaps-${version}.tar.gz"; + sha256 = "1w5z3ns5xxmhmp8r4x2kjmy3clqam935gmvx82imyxrr1bamx6gf"; + }; + + mkModule = { name, meta }: args: + stdenv.mkDerivation (args // { + inherit src; + name = "tlaps-${name}-${version}"; + preConfigure = "cd ${name}"; + meta = { + homepage = "http://tla.msr-inria.inria.fr/tlaps/content/Home.html"; + # https://tla.msr-inria.inria.fr/tlaps/content/Download/License.html + license = with lib.licenses; [ bsd2 ]; + } // meta; + }); + + modules = { + isabelle = mkModule { + name = "isabelle"; + meta = {}; + } { + buildInputs = [ ocaml isabelle2011-1 cvc3 perl ]; + buildPhase = "#"; + installPhase = '' + runHook preBuild + + mkdir -pv "$out" + export HOME="$out" + + pushd "${isabelle2011-1}/Isabelle2011-1/src/Pure" + isabelle make + popd + + # Use a modified version of the command in the Makefile + # that avoids needing LaTeX dependencies + isabelle usedir -b -i true Pure TLA+ + + runHook postBuild + ''; + }; + + zenon = mkModule { + name = "zenon"; + meta = {}; + } { + buildInputs = [ ocaml ]; + configurePhase = '' + runHook preConfigure + ./configure --prefix "$out" + runHook postConfigure + ''; + }; + + tlapm = mkModule { + name = "tlapm"; + meta = { + description = "The TLA+ Proof System (TLAPS)"; + longDescription = '' + Mechanically checks TLA+ proofs. TLA+ is a general-purpose formal specification + language that is particularly useful for describing concurrent and distributed + systems. The TLA+ proof language is declarative, hierarchical, and scalable to + large system specifications. It provides a consistent abstraction over the + various "backend" verifiers. The current release of TLAPS does not perform + temporal reasoning, and it does not handle some features of TLA+. + ''; + }; + } { + + buildInputs = [ makeWrapper ocaml gawk wget ]; + + configurePhase = '' + runHook preConfigure + ./configure --prefix $out + runHook postConfigure + ''; + + postInstall = with modules; '' + wrapProgram "$out/bin/tlapm" \ + --prefix PATH : "${isabelle2011-1}/bin:${zenon}/bin" \ + --prefix ISABELLE_PATH : "${modules.isabelle}/.isabelle/Isabelle2011-1/heaps/polyml-5.5.2_x86-linux" + ''; + }; + + }; + + all = with modules; [ tlapm isabelle zenon ]; + +in modules // { inherit all; } diff --git a/nix/overlay/tla/toolbox.nix b/nix/overlay/tla/toolbox.nix new file mode 100644 index 000000000..b705e32c4 --- /dev/null +++ b/nix/overlay/tla/toolbox.nix @@ -0,0 +1,57 @@ +{ lib, fetchzip, makeWrapper, stdenv, jre, swt, gtk, libXtst, glib, ... }: + +let + version = "1.5.5"; + +in stdenv.mkDerivation { + name = "tla-toolbox-${version}"; + meta = { + homepage = "http://research.microsoft.com/en-us/um/people/lamport/tla/toolbox.html"; + description = "IDE for the TLA+ tools"; + longDescription = '' + Integrated development environment for the TLA+ tools, based on Eclipse. You can use it + to create and edit your specs, run the PlusCal translator, view the pretty-printed + versions of your modules, run the TLC model checker, and run TLAPS, the TLA+ proof system. 
+ ''; + # http://research.microsoft.com/en-us/um/people/lamport/tla/license.html + license = with lib.licenses; [ mit ]; + }; + + src = fetchzip { + url = "https://github.com/tlaplus/tlaplus/releases/download/v${version}/TLAToolbox-${version}-linux.gtk.x86_64.zip"; + sha256 = "077vm1d81phg5dpaayh177dpb56zkgm472fs1yfbgl6gc0wswzb3"; + }; + + buildInputs = [ makeWrapper ]; + phases = [ "installPhase" ]; + installPhase = '' + mkdir -pv "$out/bin" + cp -rv "$src" "$out/toolbox" + chmod +w "$out/toolbox/toolbox" + patchelf \ + --set-interpreter $(cat $NIX_CC/nix-support/dynamic-linker) \ + "$out/toolbox/toolbox" + echo "cd $(echo $out/toolbox); ./toolbox -data ~/.tla-toolbox \"$@\"" \ + > "$out/bin/tla-toolbox" + chmod +x $out/bin/tla-toolbox + wrapProgram "$out/bin/tla-toolbox" \ + --prefix PATH : "${jre}/bin" \ + --prefix LD_LIBRARY_PATH : "${swt}/lib:${gtk}/lib:${libXtst}/lib:${glib}/lib" + echo -e "\nCreating TLA Toolbox icons..." + pushd "$src" + for icon_in in $(find . -path "./plugins/*/icons/full/etool16/tla_launch_check_wiz_*.png") + do + icon_size=$(echo $icon_in | grep -Po "wiz_\K[0-9]+") + icon_out="$out/share/icons/hicolor/$icon_size""x$icon_size/apps/tla-toolbox.png" + mkdir -pv "$(dirname $icon_out)" + cp -v "$icon_in" "$icon_out" + done + popd + echo -e "\nCreating TLA Toolbox desktop entry..." + desktop_dir="$out/share/applications" + mkdir -pv "$desktop_dir" + cp -v ${./tla-toolbox.desktop} "$desktop_dir" + echo + ''; + +} diff --git a/nix/overlay/upstream/master/dai-cli.nix b/nix/overlay/upstream/master/dai-cli.nix new file mode 100644 index 000000000..1a969ad97 --- /dev/null +++ b/nix/overlay/upstream/master/dai-cli.nix @@ -0,0 +1,23 @@ +{ lib, stdenv, makeWrapper, coreutils, perl, gnugrep, nodejs, seth, jays, token }: + +stdenv.mkDerivation rec { + name = "dai-${version}"; + version = "0.6"; + src = ./.; + + nativeBuildInputs = [makeWrapper]; + buildPhase = "true"; + makeFlags = ["prefix=$(out)"]; + postInstall = let path = lib.makeBinPath [ + coreutils perl gnugrep nodejs seth jays token + ]; in '' + wrapProgram "$out/bin/dai" --prefix PATH : "${path}" + ''; + + meta = { + description = "Command-line tool for the Dai stablecoin system"; + homepage = https://github.com/makerdao/sai; + license = lib.licenses.gpl3; + inherit version; + }; +} diff --git a/nix/overlay/upstream/master/dapp.nix b/nix/overlay/upstream/master/dapp.nix new file mode 100644 index 000000000..659688969 --- /dev/null +++ b/nix/overlay/upstream/master/dapp.nix @@ -0,0 +1,27 @@ +{ lib, stdenv, fetchFromGitHub, makeWrapper +, seth, git, solc, shellcheck, nodejs, hevm, jshon, nix, coreutils }: + +stdenv.mkDerivation rec { + name = "dapp-${version}"; + version = "0.8.2"; + src = ./.; + + nativeBuildInputs = [makeWrapper shellcheck coreutils]; + buildPhase = "true"; + doCheck = true; + checkPhase = "make test"; + makeFlags = ["prefix=$(out)"]; + postInstall = let path = lib.makeBinPath [ + nodejs solc git seth hevm jshon nix coreutils + ]; in '' + wrapProgram "$out/bin/dapp" --prefix PATH : "${path}" + ''; + + meta = { + description = "Simple tool for creating Ethereum-based dapps"; + homepage = https://github.com/dapphub/dapp/; + maintainers = [stdenv.lib.maintainers.dbrock]; + license = lib.licenses.gpl3; + inherit version; + }; +} diff --git a/nix/overlay/upstream/master/ethsign.nix b/nix/overlay/upstream/master/ethsign.nix new file mode 100644 index 000000000..b49d44891 --- /dev/null +++ b/nix/overlay/upstream/master/ethsign.nix @@ -0,0 +1,53 @@ +{ stdenv, buildGoPackage, fetchFromGitHub, fetchgit, clang 
}: + +buildGoPackage rec { + name = "ethsign-${version}"; + version = "0.10"; + + goPackagePath = "github.com/dapphub/ethsign"; + hardeningDisable = ["fortify"]; + src = ./.; + + extraSrcs = [ + { + goPackagePath = "github.com/ethereum/go-ethereum"; + src = fetchFromGitHub { + owner = "ethereum"; + repo = "go-ethereum"; + rev = "v1.8.1"; + sha256 = "0k7ly9cw68ranksa1fdn7v2lncmlqgabw3qiiyqya2xz3s4aazlf"; + }; + } + { + goPackagePath = "gopkg.in/urfave/cli.v1"; + src = fetchFromGitHub { + owner = "urfave"; + repo = "cli"; + rev = "v1.19.1"; + sha256 = "1ny63c7bfwfrsp7vfkvb4i0xhq4v7yxqnwxa52y4xlfxs4r6v6fg"; + }; + } + { + goPackagePath = "golang.org/x/crypto"; + src = fetchgit { + url = "https://go.googlesource.com/crypto"; + rev = "94eea52f7b742c7cbe0b03b22f0c4c8631ece122"; + sha256 = "095zyvjb0m2pz382500miqadhk7w3nis8z3j941z8cq4rdafijvi"; + }; + } + { + goPackagePath = "golang.org/x/sys"; + src = fetchgit { + url = "https://go.googlesource.com/sys"; + rev = "53aa286056ef226755cd898109dbcdaba8ac0b81"; + sha256 = "1yd17ccklby099cpdcsgx6lf0lj968hsnppp16mwh9009ldf72r1"; + }; + } + ]; + + meta = with stdenv.lib; { + homepage = http://github.com/dapphub/ethsign; + description = "Make raw signed Ethereum transactions"; + license = [licenses.gpl3]; + }; +} diff --git a/nix/overlay/upstream/master/hevm.nix b/nix/overlay/upstream/master/hevm.nix new file mode 100644 index 000000000..88f0f88b4 --- /dev/null +++ b/nix/overlay/upstream/master/hevm.nix @@ -0,0 +1,44 @@ +{ mkDerivation, abstract-par, aeson, ansi-wl-pprint, async, base +, base16-bytestring, base64-bytestring, binary, brick, bytestring +, cereal, containers, cryptonite, data-dword, deepseq, directory +, ethjet, fgl, filepath, ghci-pretty, haskeline, here, HUnit, lens +, lens-aeson, megaparsec, memory, monad-par, mtl, multiset +, operational, optparse-generic, process, QuickCheck +, quickcheck-text, readline, regex-tdfa, restless-git, rosezipper +, scientific, stdenv, tasty, tasty-hunit, tasty-quickcheck +, temporary, text, text-format, time, transformers, tree-view +, unordered-containers, vector, vty, wreq +}: +mkDerivation { + pname = "hevm"; + version = "0.15"; + src = ./.; + isLibrary = true; + isExecutable = true; + enableSeparateDataOutput = true; + libraryHaskellDepends = [ + abstract-par aeson ansi-wl-pprint base base16-bytestring + base64-bytestring binary brick bytestring cereal containers + cryptonite data-dword deepseq directory ethjet fgl filepath + ghci-pretty haskeline lens lens-aeson megaparsec memory monad-par + mtl multiset operational optparse-generic process QuickCheck + quickcheck-text readline restless-git rosezipper scientific + temporary text text-format time transformers tree-view + unordered-containers vector vty wreq + ]; + executableHaskellDepends = [ + aeson ansi-wl-pprint async base base16-bytestring base64-bytestring + binary brick bytestring containers cryptonite data-dword deepseq + directory filepath ghci-pretty lens lens-aeson memory mtl + optparse-generic process QuickCheck quickcheck-text readline + regex-tdfa temporary text text-format unordered-containers vector + vty + ]; + testHaskellDepends = [ + base binary bytestring ghci-pretty here HUnit lens mtl QuickCheck + tasty tasty-hunit tasty-quickcheck text vector + ]; + homepage = "https://github.com/mbrock/hevm"; + description = "Ethereum virtual machine evaluator"; + license = stdenv.lib.licenses.agpl3; +} diff --git a/nix/overlay/upstream/master/jays.nix b/nix/overlay/upstream/master/jays.nix new file mode 100644 index 000000000..8318dc458 --- 
/dev/null +++ b/nix/overlay/upstream/master/jays.nix @@ -0,0 +1,22 @@ +{ mkDerivation, aeson, aeson-pretty, base, bytestring, containers +, HUnit, stdenv, tasty, tasty-hunit, text, unix +, unordered-containers, vector +}: +mkDerivation { + pname = "jays"; + version = "1.20171121"; + src = ./.; + isLibrary = true; + isExecutable = true; + libraryHaskellDepends = [ + aeson aeson-pretty base bytestring containers text + unordered-containers vector + ]; + executableHaskellDepends = [ base bytestring text unix ]; + testHaskellDepends = [ + aeson base bytestring HUnit tasty tasty-hunit text + ]; + homepage = "https://github.com/mbrock/jays"; + description = "Rewrite of jshon"; + license = stdenv.lib.licenses.gpl3; +} diff --git a/nix/overlay/upstream/master/libethjet-haskell.nix b/nix/overlay/upstream/master/libethjet-haskell.nix new file mode 100644 index 000000000..4e35424b8 --- /dev/null +++ b/nix/overlay/upstream/master/libethjet-haskell.nix @@ -0,0 +1,16 @@ +{ mkDerivation, base, base16-bytestring, bytestring, ethjet, HUnit +, secp256k1, stdenv, tasty, tasty-hunit +}: +mkDerivation { + pname = "ethjet"; + version = "0.5"; + src = ./.; + libraryHaskellDepends = [ base bytestring ]; + librarySystemDepends = [ ethjet secp256k1 ]; + testHaskellDepends = [ + base base16-bytestring bytestring HUnit tasty tasty-hunit + ]; + homepage = "https://github.com/dapphub/libethjet"; + description = "Binding to libethjet for Ethereum precompiled contracts"; + license = stdenv.lib.licenses.gpl3; +} diff --git a/nix/overlay/upstream/master/libethjet.nix b/nix/overlay/upstream/master/libethjet.nix new file mode 100644 index 000000000..1c0f59a0f --- /dev/null +++ b/nix/overlay/upstream/master/libethjet.nix @@ -0,0 +1,16 @@ +{ stdenv, secp256k1 }: + +stdenv.mkDerivation rec { + name = "libethjet-${version}"; + version = "0.5"; + src = ./.; + meta = with stdenv.lib; { + description = "C library for Ethereum precompiled contracts"; + homepage = https://github.com/dapphub/libethjet; + license = [licenses.mit]; + platforms = platforms.unix; + }; + + buildInputs = [secp256k1]; + installFlags = ["PREFIX=$(out)"]; +} diff --git a/nix/overlay/upstream/master/oasis-orders.nix b/nix/overlay/upstream/master/oasis-orders.nix new file mode 100644 index 000000000..470ff69d4 --- /dev/null +++ b/nix/overlay/upstream/master/oasis-orders.nix @@ -0,0 +1,19 @@ +{ mkDerivation, aeson, base, base16-bytestring, binary, bytestring +, containers, data-dword, directory, hevm, lens, lens-aeson, pipes +, pipes-text, stdenv, text, vector +}: +mkDerivation { + pname = "oasis-orders"; + version = "0.7.1"; + src = ./.; + isLibrary = false; + isExecutable = true; + executableHaskellDepends = [ + aeson base base16-bytestring binary bytestring containers + data-dword directory hevm lens lens-aeson pipes pipes-text text + vector + ]; + homepage = "https://github.com/mbrock/oasis"; + description = "Parse order books from OasisDEX JSON logs"; + license = stdenv.lib.licenses.gpl3; +} diff --git a/nix/overlay/upstream/master/restless-git.nix b/nix/overlay/upstream/master/restless-git.nix new file mode 100644 index 000000000..347ce3233 --- /dev/null +++ b/nix/overlay/upstream/master/restless-git.nix @@ -0,0 +1,17 @@ +{ mkDerivation, base, bytestring, clock, containers, HSH, stdenv +, tasty, tasty-hunit, temporary, text, time +}: +mkDerivation { + pname = "restless-git"; + version = "0.5"; + src = ./.; + libraryHaskellDepends = [ + base bytestring clock containers HSH text time + ]; + testHaskellDepends = [ + base bytestring containers tasty tasty-hunit 
temporary text + ]; + homepage = "https://github.com/lessrest/restless-git"; + description = "Easy Git repository serialization"; + license = stdenv.lib.licenses.gpl3; +} diff --git a/nix/overlay/upstream/master/seth.nix b/nix/overlay/upstream/master/seth.nix new file mode 100644 index 000000000..0120d9e44 --- /dev/null +++ b/nix/overlay/upstream/master/seth.nix @@ -0,0 +1,25 @@ +{ stdenv, makeWrapper, lib, fetchFromGitHub +, bc, coreutils, curl, ethabi, ethsign, git, gnused, jshon, perl, solc, which, nodejs }: + +stdenv.mkDerivation rec { + name = "seth-${version}"; + version = "0.6.3"; + src = ./.; + + nativeBuildInputs = [makeWrapper]; + buildPhase = "true"; + makeFlags = ["prefix=$(out)"]; + postInstall = let path = lib.makeBinPath [ + bc coreutils curl ethabi git gnused jshon perl solc which nodejs ethsign + ]; in '' + wrapProgram "$out/bin/seth" --prefix PATH : "${path}" + ''; + + meta = { + description = "Command-line client for talking to Ethereum nodes"; + homepage = https://github.com/dapphub/seth/; + maintainers = [stdenv.lib.maintainers.dbrock]; + license = lib.licenses.gpl3; + inherit version; + }; +} diff --git a/nix/overlay/upstream/master/setzer.nix b/nix/overlay/upstream/master/setzer.nix new file mode 100644 index 000000000..93e753617 --- /dev/null +++ b/nix/overlay/upstream/master/setzer.nix @@ -0,0 +1,26 @@ +{ stdenv, makeWrapper, lib, fetchFromGitHub +, seth, curl, jshon, bc, gnused, which, perl +, datamash +}: + +stdenv.mkDerivation rec { + name = "setzer-${version}"; + version = "0.1.18"; + src = ./.; + + nativeBuildInputs = [makeWrapper]; + buildPhase = "true"; + makeFlags = ["prefix=$(out)"]; + postInstall = let path = lib.makeBinPath [ + seth curl jshon bc gnused which perl datamash + ]; in '' + wrapProgram "$out/bin/setzer" --prefix PATH : "${path}" + ''; + + meta = with lib; { + description = "Ethereum price feed tool"; + homepage = https://github.com/makerdao/setzer; + license = licenses.gpl3; + inherit version; + }; +} diff --git a/nix/overlay/upstream/master/symbex.nix b/nix/overlay/upstream/master/symbex.nix new file mode 100644 index 000000000..50db1f55f --- /dev/null +++ b/nix/overlay/upstream/master/symbex.nix @@ -0,0 +1,17 @@ +{ mkDerivation, aeson, base, base16-bytestring, bytestring, mtl +, s-cargot, stdenv, text, uniplate +}: +mkDerivation { + pname = "symbex"; + version = "0.6.2"; + src = ./.; + isLibrary = true; + isExecutable = true; + libraryHaskellDepends = [ + aeson base base16-bytestring bytestring mtl s-cargot text uniplate + ]; + executableHaskellDepends = [ base ]; + homepage = "https://github.com/dapphub/symbex"; + description = "Ethereum symbolic execution engine"; + license = stdenv.lib.licenses.agpl3; +} diff --git a/nix/overlay/upstream/master/token.nix b/nix/overlay/upstream/master/token.nix new file mode 100644 index 000000000..e19021bc4 --- /dev/null +++ b/nix/overlay/upstream/master/token.nix @@ -0,0 +1,23 @@ +{ lib, stdenv, makeWrapper, coreutils, perl, seth }: + +stdenv.mkDerivation rec { + name = "token-${version}"; + version = "0.5"; + src = ./.; + + nativeBuildInputs = [makeWrapper]; + buildPhase = "true"; + makeFlags = ["prefix=$(out)"]; + postInstall = let path = lib.makeBinPath [ + coreutils perl seth + ]; in '' + wrapProgram "$out/bin/token" --prefix PATH : "${path}" + ''; + + meta = { + description = "Command-line tool for ERC20 tokens"; + homepage = https://github.com/dapphub/token; + license = lib.licenses.gpl3; + inherit version; + }; +} diff --git a/nix/overlay/upstream/stable/dai-cli.nix 
b/nix/overlay/upstream/stable/dai-cli.nix new file mode 100644 index 000000000..1a969ad97 --- /dev/null +++ b/nix/overlay/upstream/stable/dai-cli.nix @@ -0,0 +1,23 @@ +{ lib, stdenv, makeWrapper, coreutils, perl, gnugrep, nodejs, seth, jays, token }: + +stdenv.mkDerivation rec { + name = "dai-${version}"; + version = "0.6"; + src = ./.; + + nativeBuildInputs = [makeWrapper]; + buildPhase = "true"; + makeFlags = ["prefix=$(out)"]; + postInstall = let path = lib.makeBinPath [ + coreutils perl gnugrep nodejs seth jays token + ]; in '' + wrapProgram "$out/bin/dai" --prefix PATH : "${path}" + ''; + + meta = { + description = "Command-line tool for the Dai stablecoin system"; + homepage = https://github.com/makerdao/sai; + license = lib.licenses.gpl3; + inherit version; + }; +} diff --git a/nix/overlay/upstream/stable/dapp.nix b/nix/overlay/upstream/stable/dapp.nix new file mode 100644 index 000000000..659688969 --- /dev/null +++ b/nix/overlay/upstream/stable/dapp.nix @@ -0,0 +1,27 @@ +{ lib, stdenv, fetchFromGitHub, makeWrapper +, seth, git, solc, shellcheck, nodejs, hevm, jshon, nix, coreutils }: + +stdenv.mkDerivation rec { + name = "dapp-${version}"; + version = "0.8.2"; + src = ./.; + + nativeBuildInputs = [makeWrapper shellcheck coreutils]; + buildPhase = "true"; + doCheck = true; + checkPhase = "make test"; + makeFlags = ["prefix=$(out)"]; + postInstall = let path = lib.makeBinPath [ + nodejs solc git seth hevm jshon nix coreutils + ]; in '' + wrapProgram "$out/bin/dapp" --prefix PATH : "${path}" + ''; + + meta = { + description = "Simple tool for creating Ethereum-based dapps"; + homepage = https://github.com/dapphub/dapp/; + maintainers = [stdenv.lib.maintainers.dbrock]; + license = lib.licenses.gpl3; + inherit version; + }; +} diff --git a/nix/overlay/upstream/stable/ethsign.nix b/nix/overlay/upstream/stable/ethsign.nix new file mode 100644 index 000000000..b49d44891 --- /dev/null +++ b/nix/overlay/upstream/stable/ethsign.nix @@ -0,0 +1,53 @@ +{ stdenv, buildGoPackage, fetchFromGitHub, fetchgit, clang }: + +buildGoPackage rec { + name = "ethsign-${version}"; + version = "0.10"; + + goPackagePath = "github.com/dapphub/ethsign"; + hardeningDisable = ["fortify"]; + src = ./.; + + extraSrcs = [ + { + goPackagePath = "github.com/ethereum/go-ethereum"; + src = fetchFromGitHub { + owner = "ethereum"; + repo = "go-ethereum"; + rev = "v1.8.1"; + sha256 = "0k7ly9cw68ranksa1fdn7v2lncmlqgabw3qiiyqya2xz3s4aazlf"; + }; + } + { + goPackagePath = "gopkg.in/urfave/cli.v1"; + src = fetchFromGitHub { + owner = "urfave"; + repo = "cli"; + rev = "v1.19.1"; + sha256 = "1ny63c7bfwfrsp7vfkvb4i0xhq4v7yxqnwxa52y4xlfxs4r6v6fg"; + }; + } + { + goPackagePath = "golang.org/x/crypto"; + src = fetchgit { + url = "https://go.googlesource.com/crypto"; + rev = "94eea52f7b742c7cbe0b03b22f0c4c8631ece122"; + sha256 = "095zyvjb0m2pz382500miqadhk7w3nis8z3j941z8cq4rdafijvi"; + }; + } + { + goPackagePath = "golang.org/x/sys"; + src = fetchgit { + url = "https://go.googlesource.com/sys"; + rev = "53aa286056ef226755cd898109dbcdaba8ac0b81"; + sha256 = "1yd17ccklby099cpdcsgx6lf0lj968hsnppp16mwh9009ldf72r1"; + }; + } + ]; + + meta = with stdenv.lib; { + homepage = http://github.com/dapphub/ethsign; + description = "Make raw signed Ethereum transactions"; + license = [licenses.gpl3]; + }; +} diff --git a/nix/overlay/upstream/stable/hevm.nix b/nix/overlay/upstream/stable/hevm.nix new file mode 100644 index 000000000..88f0f88b4 --- /dev/null +++ b/nix/overlay/upstream/stable/hevm.nix @@ -0,0 +1,44 @@ +{ mkDerivation, abstract-par, 
aeson, ansi-wl-pprint, async, base +, base16-bytestring, base64-bytestring, binary, brick, bytestring +, cereal, containers, cryptonite, data-dword, deepseq, directory +, ethjet, fgl, filepath, ghci-pretty, haskeline, here, HUnit, lens +, lens-aeson, megaparsec, memory, monad-par, mtl, multiset +, operational, optparse-generic, process, QuickCheck +, quickcheck-text, readline, regex-tdfa, restless-git, rosezipper +, scientific, stdenv, tasty, tasty-hunit, tasty-quickcheck +, temporary, text, text-format, time, transformers, tree-view +, unordered-containers, vector, vty, wreq +}: +mkDerivation { + pname = "hevm"; + version = "0.15"; + src = ./.; + isLibrary = true; + isExecutable = true; + enableSeparateDataOutput = true; + libraryHaskellDepends = [ + abstract-par aeson ansi-wl-pprint base base16-bytestring + base64-bytestring binary brick bytestring cereal containers + cryptonite data-dword deepseq directory ethjet fgl filepath + ghci-pretty haskeline lens lens-aeson megaparsec memory monad-par + mtl multiset operational optparse-generic process QuickCheck + quickcheck-text readline restless-git rosezipper scientific + temporary text text-format time transformers tree-view + unordered-containers vector vty wreq + ]; + executableHaskellDepends = [ + aeson ansi-wl-pprint async base base16-bytestring base64-bytestring + binary brick bytestring containers cryptonite data-dword deepseq + directory filepath ghci-pretty lens lens-aeson memory mtl + optparse-generic process QuickCheck quickcheck-text readline + regex-tdfa temporary text text-format unordered-containers vector + vty + ]; + testHaskellDepends = [ + base binary bytestring ghci-pretty here HUnit lens mtl QuickCheck + tasty tasty-hunit tasty-quickcheck text vector + ]; + homepage = "https://github.com/mbrock/hevm"; + description = "Ethereum virtual machine evaluator"; + license = stdenv.lib.licenses.agpl3; +} diff --git a/nix/overlay/upstream/stable/jays.nix b/nix/overlay/upstream/stable/jays.nix new file mode 100644 index 000000000..8318dc458 --- /dev/null +++ b/nix/overlay/upstream/stable/jays.nix @@ -0,0 +1,22 @@ +{ mkDerivation, aeson, aeson-pretty, base, bytestring, containers +, HUnit, stdenv, tasty, tasty-hunit, text, unix +, unordered-containers, vector +}: +mkDerivation { + pname = "jays"; + version = "1.20171121"; + src = ./.; + isLibrary = true; + isExecutable = true; + libraryHaskellDepends = [ + aeson aeson-pretty base bytestring containers text + unordered-containers vector + ]; + executableHaskellDepends = [ base bytestring text unix ]; + testHaskellDepends = [ + aeson base bytestring HUnit tasty tasty-hunit text + ]; + homepage = "https://github.com/mbrock/jays"; + description = "Rewrite of jshon"; + license = stdenv.lib.licenses.gpl3; +} diff --git a/nix/overlay/upstream/stable/libethjet-haskell.nix b/nix/overlay/upstream/stable/libethjet-haskell.nix new file mode 100644 index 000000000..4e35424b8 --- /dev/null +++ b/nix/overlay/upstream/stable/libethjet-haskell.nix @@ -0,0 +1,16 @@ +{ mkDerivation, base, base16-bytestring, bytestring, ethjet, HUnit +, secp256k1, stdenv, tasty, tasty-hunit +}: +mkDerivation { + pname = "ethjet"; + version = "0.5"; + src = ./.; + libraryHaskellDepends = [ base bytestring ]; + librarySystemDepends = [ ethjet secp256k1 ]; + testHaskellDepends = [ + base base16-bytestring bytestring HUnit tasty tasty-hunit + ]; + homepage = "https://github.com/dapphub/libethjet"; + description = "Binding to libethjet for Ethereum precompiled contracts"; + license = stdenv.lib.licenses.gpl3; +} diff --git 
a/nix/overlay/upstream/stable/libethjet.nix b/nix/overlay/upstream/stable/libethjet.nix new file mode 100644 index 000000000..1c0f59a0f --- /dev/null +++ b/nix/overlay/upstream/stable/libethjet.nix @@ -0,0 +1,16 @@ +{ stdenv, secp256k1 }: + +stdenv.mkDerivation rec { + name = "libethjet-${version}"; + version = "0.5"; + src = ./.; + meta = with stdenv.lib; { + description = "C library for Ethereum precompiled contracts"; + homepage = https://github.com/dapphub/libethjet; + license = [licenses.mit]; + platforms = platforms.unix; + }; + + buildInputs = [secp256k1]; + installFlags = ["PREFIX=$(out)"]; +} diff --git a/nix/overlay/upstream/stable/oasis-orders.nix b/nix/overlay/upstream/stable/oasis-orders.nix new file mode 100644 index 000000000..470ff69d4 --- /dev/null +++ b/nix/overlay/upstream/stable/oasis-orders.nix @@ -0,0 +1,19 @@ +{ mkDerivation, aeson, base, base16-bytestring, binary, bytestring +, containers, data-dword, directory, hevm, lens, lens-aeson, pipes +, pipes-text, stdenv, text, vector +}: +mkDerivation { + pname = "oasis-orders"; + version = "0.7.1"; + src = ./.; + isLibrary = false; + isExecutable = true; + executableHaskellDepends = [ + aeson base base16-bytestring binary bytestring containers + data-dword directory hevm lens lens-aeson pipes pipes-text text + vector + ]; + homepage = "https://github.com/mbrock/oasis"; + description = "Parse order books from OasisDEX JSON logs"; + license = stdenv.lib.licenses.gpl3; +} diff --git a/nix/overlay/upstream/stable/restless-git.nix b/nix/overlay/upstream/stable/restless-git.nix new file mode 100644 index 000000000..eaf823756 --- /dev/null +++ b/nix/overlay/upstream/stable/restless-git.nix @@ -0,0 +1,17 @@ +{ mkDerivation, base, bytestring, containers, HSH, stdenv, tasty +, tasty-hunit, temporary, text, time +}: +mkDerivation { + pname = "restless-git"; + version = "0.5.0"; + src = ./.; + libraryHaskellDepends = [ + base bytestring containers HSH text time + ]; + testHaskellDepends = [ + base bytestring containers tasty tasty-hunit temporary text + ]; + homepage = "https://github.com/lessrest/restless-git"; + description = "Easy Git repository serialization"; + license = stdenv.lib.licenses.gpl3; +} diff --git a/nix/overlay/upstream/stable/seth.nix b/nix/overlay/upstream/stable/seth.nix new file mode 100644 index 000000000..0120d9e44 --- /dev/null +++ b/nix/overlay/upstream/stable/seth.nix @@ -0,0 +1,25 @@ +{ stdenv, makeWrapper, lib, fetchFromGitHub +, bc, coreutils, curl, ethabi, ethsign, git, gnused, jshon, perl, solc, which, nodejs }: + +stdenv.mkDerivation rec { + name = "seth-${version}"; + version = "0.6.3"; + src = ./.; + + nativeBuildInputs = [makeWrapper]; + buildPhase = "true"; + makeFlags = ["prefix=$(out)"]; + postInstall = let path = lib.makeBinPath [ + bc coreutils curl ethabi git gnused jshon perl solc which nodejs ethsign + ]; in '' + wrapProgram "$out/bin/seth" --prefix PATH : "${path}" + ''; + + meta = { + description = "Command-line client for talking to Ethereum nodes"; + homepage = https://github.com/dapphub/seth/; + maintainers = [stdenv.lib.maintainers.dbrock]; + license = lib.licenses.gpl3; + inherit version; + }; +} diff --git a/nix/overlay/upstream/stable/setzer.nix b/nix/overlay/upstream/stable/setzer.nix new file mode 100644 index 000000000..d6f40af5a --- /dev/null +++ b/nix/overlay/upstream/stable/setzer.nix @@ -0,0 +1,26 @@ +{ stdenv, makeWrapper, lib, fetchFromGitHub +, seth, curl, jshon, bc, gnused, which, perl +, datamash +}: + +stdenv.mkDerivation rec { + name = "setzer-${version}"; + version = 
"0.1.17"; + src = ./.; + + nativeBuildInputs = [makeWrapper]; + buildPhase = "true"; + makeFlags = ["prefix=$(out)"]; + postInstall = let path = lib.makeBinPath [ + seth curl jshon bc gnused which perl datamash + ]; in '' + wrapProgram "$out/bin/setzer" --prefix PATH : "${path}" + ''; + + meta = with lib; { + description = "Ethereum price feed tool"; + homepage = https://github.com/makerdao/setzer; + license = licenses.gpl3; + inherit version; + }; +} diff --git a/nix/overlay/upstream/stable/symbex.nix b/nix/overlay/upstream/stable/symbex.nix new file mode 100644 index 000000000..50db1f55f --- /dev/null +++ b/nix/overlay/upstream/stable/symbex.nix @@ -0,0 +1,17 @@ +{ mkDerivation, aeson, base, base16-bytestring, bytestring, mtl +, s-cargot, stdenv, text, uniplate +}: +mkDerivation { + pname = "symbex"; + version = "0.6.2"; + src = ./.; + isLibrary = true; + isExecutable = true; + libraryHaskellDepends = [ + aeson base base16-bytestring bytestring mtl s-cargot text uniplate + ]; + executableHaskellDepends = [ base ]; + homepage = "https://github.com/dapphub/symbex"; + description = "Ethereum symbolic execution engine"; + license = stdenv.lib.licenses.agpl3; +} diff --git a/nix/overlay/upstream/stable/token.nix b/nix/overlay/upstream/stable/token.nix new file mode 100644 index 000000000..e19021bc4 --- /dev/null +++ b/nix/overlay/upstream/stable/token.nix @@ -0,0 +1,23 @@ +{ lib, stdenv, makeWrapper, coreutils, perl, seth }: + +stdenv.mkDerivation rec { + name = "token-${version}"; + version = "0.5"; + src = ./.; + + nativeBuildInputs = [makeWrapper]; + buildPhase = "true"; + makeFlags = ["prefix=$(out)"]; + postInstall = let path = lib.makeBinPath [ + coreutils perl seth + ]; in '' + wrapProgram "$out/bin/token" --prefix PATH : "${path}" + ''; + + meta = { + description = "Command-line tool for ERC20 tokens"; + homepage = https://github.com/dapphub/token; + license = lib.licenses.gpl3; + inherit version; + }; +} diff --git a/nix/overlay/versions.json b/nix/overlay/versions.json new file mode 100644 index 000000000..dfdcda437 --- /dev/null +++ b/nix/overlay/versions.json @@ -0,0 +1,197 @@ +{ + "dai-cli": { + "master": { + "owner": "makerdao", + "repo": "dai-cli", + "rev": "b5120d2e36c776a96aefc0d654cf98757179551e", + "sha256": "00qyiawrn1fr2zsmbnzkhkhnkpj2hhaaadardmw60a480999w38c" + }, + "stable": { + "owner": "makerdao", + "repo": "dai-cli", + "rev": "b5120d2e36c776a96aefc0d654cf98757179551e", + "sha256": "00qyiawrn1fr2zsmbnzkhkhnkpj2hhaaadardmw60a480999w38c" + }, + "version": "0.6" + }, + "dapp": { + "master": { + "owner": "dapphub", + "repo": "dapp", + "rev": "a426596705be4dfcdd60e7965163453574459dcf", + "sha256": "14lwwyl9vzx02hcglh20vk6ad0z8h3gl7yavjck9aica4n8pf57s" + }, + "stable": { + "owner": "dapphub", + "repo": "dapp", + "rev": "e985489db15256f1b3d57bb46a0930f8d3784422", + "sha256": "09ywlnpgmppr19m138bbmmyzvk0fl2bld38af704rfccqzi8n148" + }, + "version": "0.8.2" + }, + "ethsign": { + "master": { + "owner": "dapphub", + "repo": "ethsign", + "rev": "d7591c9cac762f15c7087c2d066176bb75ba8095", + "sha256": "1qm8gwgk3l6qcjxr7i44bnav16nmmmpm0a0im7f19vayp5xx1vvc" + }, + "stable": { + "owner": "dapphub", + "repo": "ethsign", + "rev": "d7591c9cac762f15c7087c2d066176bb75ba8095", + "sha256": "1qm8gwgk3l6qcjxr7i44bnav16nmmmpm0a0im7f19vayp5xx1vvc" + }, + "version": "0.10" + }, + "hevm": { + "master": { + "owner": "dapphub", + "repo": "hevm", + "rev": "991223a4a4e0f0e8cfdaf44870d8cce591248fb5", + "sha256": "03zf08n9biz3nj0bqzcdviwp4kim11522c55jnr9sn1vgc282m1x" + }, + "stable": { + "owner": 
"dapphub", + "repo": "hevm", + "rev": "991223a4a4e0f0e8cfdaf44870d8cce591248fb5", + "sha256": "03zf08n9biz3nj0bqzcdviwp4kim11522c55jnr9sn1vgc282m1x" + }, + "version": "0.15" + }, + "jays": { + "master": { + "owner": "mbrock", + "repo": "jays", + "rev": "7b8ec3fa1a8489764c01db89d5aa2f2a200cc79b", + "sha256": "0f1f0r1336cyl9l0y0a5147zhp3x5zlfsxkspwbkf3v57kynvazw" + }, + "stable": { + "owner": "mbrock", + "repo": "jays", + "rev": "7b8ec3fa1a8489764c01db89d5aa2f2a200cc79b", + "sha256": "0f1f0r1336cyl9l0y0a5147zhp3x5zlfsxkspwbkf3v57kynvazw" + }, + "version": "1.20171121" + }, + "libethjet": { + "master": { + "owner": "dapphub", + "repo": "libethjet", + "rev": "6b0b519e276a8b73cdfd8200a8c226a9403745b3", + "sha256": "1f3ixnbk7ib36z4266m2qir62v09lcajdlmk3ccr7f4b0ncxlslb" + }, + "stable": { + "owner": "dapphub", + "repo": "libethjet", + "rev": "6b0b519e276a8b73cdfd8200a8c226a9403745b3", + "sha256": "1f3ixnbk7ib36z4266m2qir62v09lcajdlmk3ccr7f4b0ncxlslb" + }, + "version": "0.5.2" + }, + "libethjet-haskell": { + "master": { + "owner": "dapphub", + "repo": "libethjet-haskell", + "rev": "a1f13e95ec7619ebac60497c15b7f86449bf1d2a", + "sha256": "01mf0gh301x043npjj3kzxa35rbbazx6s768f7jaz8rzhm0qz7pm" + }, + "stable": { + "owner": "dapphub", + "repo": "libethjet-haskell", + "rev": "a1f13e95ec7619ebac60497c15b7f86449bf1d2a", + "sha256": "01mf0gh301x043npjj3kzxa35rbbazx6s768f7jaz8rzhm0qz7pm" + }, + "version": "0.5.1" + }, + "oasis-orders": { + "master": { + "owner": "mbrock", + "repo": "oasis-orders", + "rev": "62d474cabc5b74e6c785f9f70a9bd75a184e8a72", + "sha256": "1zra5gmaifp78rfqr5wcl6k2ldwp4farb9n9jalbkd6xzr87gv2j" + }, + "stable": { + "owner": "mbrock", + "repo": "oasis-orders", + "rev": "62d474cabc5b74e6c785f9f70a9bd75a184e8a72", + "sha256": "1zra5gmaifp78rfqr5wcl6k2ldwp4farb9n9jalbkd6xzr87gv2j" + }, + "version": "0.7.1" + }, + "restless-git": { + "master": { + "owner": "lessrest", + "repo": "restless-git", + "rev": "2ff25ea30660ac9db3fb0ed80d44e6ca30bc1461", + "sha256": "1s7p1pcw1xaqbrn6p6ysgx9hbc31xilvdbnl3vzzqyl1wka7iyc1" + }, + "stable": { + "owner": "lessrest", + "repo": "restless-git", + "rev": "9b208c944b61dc6cb4b32e7a5b90cffe643fcfc5", + "sha256": "0l2n556fzgkgnw7rhwfsj7438qyid8y9sghlhd3is154dspg0p9v" + }, + "version": "0.5.0" + }, + "seth": { + "master": { + "owner": "dapphub", + "repo": "seth", + "rev": "de7048815c4953da391b93179af9c2c162e59b23", + "sha256": "084vl05dbwba8fdh2lz8sd8knihrys6mpgyvmp5db75qcdqdsvi4" + }, + "stable": { + "owner": "dapphub", + "repo": "seth", + "rev": "84a77d25ebbfd029fbc454e2497936534d5d4ff8", + "sha256": "0la2nfqsscpbq6zwa6hsd73nimdnrhilrmgyy77yr3jca2wjhsjk" + }, + "version": "0.6.3" + }, + "setzer": { + "master": { + "owner": "makerdao", + "repo": "setzer", + "rev": "451ccd5f1c6acf4ad9a5edada6e967a84f3d4a86", + "sha256": "1gfjqwxhnzw3wmad7vgcnjwppwxkp32nrbiz1x059c4bvvj5nkxy" + }, + "stable": { + "owner": "makerdao", + "repo": "setzer", + "rev": "3afae0fc50d35f7ccfc36a51cbd6f1baad683b0e", + "sha256": "1bky5lfvpwhkc65xi67cx7nag4jssplyl2ymbc8x885zw771v1y7" + }, + "version": "0.1.17" + }, + "symbex": { + "master": { + "owner": "mbrock", + "repo": "symbex", + "rev": "8b72df303200743cd0b10cbcc0ce78520ae0a805", + "sha256": "0kpxnxmkczb28bbdfwmlfq6x5r1wwgrg02fgypv59l18kqlvp5hb" + }, + "stable": { + "owner": "mbrock", + "repo": "symbex", + "rev": "8b72df303200743cd0b10cbcc0ce78520ae0a805", + "sha256": "0kpxnxmkczb28bbdfwmlfq6x5r1wwgrg02fgypv59l18kqlvp5hb" + }, + "version": "0.6.2" + }, + "token": { + "master": { + "owner": "dapphub", + "repo": "token", + "rev": 
"8cb91c1c24c10f6bb45dea3e110f571c422f7b81", + "sha256": "0s7g5vfz3xwappbagzsxn6v59421hzpqq7gygk2hdlp2099aj07l" + }, + "stable": { + "owner": "dapphub", + "repo": "token", + "rev": "8cb91c1c24c10f6bb45dea3e110f571c422f7b81", + "sha256": "0s7g5vfz3xwappbagzsxn6v59421hzpqq7gygk2hdlp2099aj07l" + }, + "version": "0.5" + } +} diff --git a/nix/release-dapps.nix b/nix/release-dapps.nix new file mode 100644 index 000000000..b6186077a --- /dev/null +++ b/nix/release-dapps.nix @@ -0,0 +1,9 @@ +{ ... }: + +let + system = (system: (import ./default.nix { inherit system; })); + linux = system "x86_64-linux"; + +in rec { + dappsys = linux.pkgs.dappsys; +} diff --git a/nix/release-ethos.nix b/nix/release-ethos.nix new file mode 100644 index 000000000..bf3d7f91a --- /dev/null +++ b/nix/release-ethos.nix @@ -0,0 +1,10 @@ +{ ... }: + +let + system = (system: (import ./default.nix { inherit system; })); + linux = system "x86_64-linux"; + +in rec { + dapphub.ethos-iso = linux.pkgs.ethos-iso; + dapphub.ethos-iso-hidpi = linux.pkgs.ethos-iso-hidpi; +} diff --git a/nix/release.nix b/nix/release.nix new file mode 100644 index 000000000..240a6dbaf --- /dev/null +++ b/nix/release.nix @@ -0,0 +1,95 @@ +{ ... }: + +let + system = (system: (import ./default.nix { inherit system; })); + linux = system "x86_64-linux"; + darwin = system "x86_64-darwin"; + + ethereum-test-suite = x: x.fetchFromGitHub { + owner = "ethereum"; + repo = "tests"; + rev = "7e361956bd68f5cac72fe41f29e5734ee94ae2de"; + sha256 = "0l5qalgbscr77vjhyf7b542055wnp4pddpfslnypp5sqws5w940w"; + }; + + hevmTestReport = x: x.runCommand "hevm-test-report" {} '' + mkdir -p $out/nix-support + export PATH=${x.pkgs.hevm}/bin:$PATH + ${x.pkgs.hevm}/bin/hevm vm-test-report \ + --tests ${ethereum-test-suite x} > $out/index.html + echo report testlog $out index.html > $out/nix-support/hydra-build-products + ''; + +in rec { + + dapphub.linux.stable = with linux.pkgs; { + inherit celf; + inherit dai; + inherit dapp-which; + inherit dapp; + inherit ds-chief; + inherit ethjet; + inherit ethsign; + inherit evmdis; + inherit go-ethereum-unlimited; + inherit go-ethereum; + inherit hevm; + inherit hevmas; + inherit hevml; + inherit keeper; + inherit mkbip39; + inherit myetherwallet; + inherit oasis-orders; + inherit qrtx-term; + inherit qrtx; + inherit seth; + inherit setzer; + inherit solc-versions; + inherit tla-plus; + inherit token; + inherit symbex; + inherit dafny; + + hevm-test-report = hevmTestReport linux; + } // linux.pkgs.dappsys; + + dapphub.linux.master = with linux.master.pkgs; { + inherit dapp; + inherit hevm; + inherit hevmas; + inherit seth; + + hevm-test-report = hevmTestReport linux.master; + } // linux.master.pkgs.dappsys; + + dapphub.darwin.stable = with darwin.pkgs; { + inherit celf; + inherit dai; + inherit dapp-which; + inherit dapp; + inherit ds-chief; + inherit ethjet; + inherit ethsign; + inherit evmdis; + inherit go-ethereum-unlimited; + inherit go-ethereum; + inherit hevm; + inherit hevml; + inherit mkbip39; + inherit myetherwallet; + inherit oasis-orders; + inherit qrtx-term; + inherit seth; + inherit setzer; + inherit solc-versions; + inherit token; + inherit symbex; + inherit dafny; + } // darwin.pkgs.dappsys; + + dapphub.darwin.master = with darwin.master.pkgs; { + inherit dapp; + inherit hevm; + inherit seth; + } // darwin.master.pkgs.dappsys; +}