Merge branch 'dev' of gitlab.com:ligolang/ligo into rinderknecht@pprint

Commit 2d88d2145e by Christian Rinderknecht, 2020-05-25 15:15:27 +02:00
85 changed files with 5189 additions and 3118 deletions


@ -1,105 +1,21 @@
# TODO: remove this as submodules aren't used anymore. # TODO: remove this as submodules aren't used anymore.
variables: variables:
GIT_SUBMODULE_STRATEGY: recursive GIT_SUBMODULE_STRATEGY: recursive
build_binary_script: "./scripts/distribution/generic/build.sh"
package_binary_script: "./scripts/distribution/generic/package.sh"
LIGO_REGISTRY_IMAGE_BASE_NAME: "${CI_PROJECT_PATH}/${CI_PROJECT_NAME}" LIGO_REGISTRY_IMAGE_BASE_NAME: "${CI_PROJECT_PATH}/${CI_PROJECT_NAME}"
WEBIDE_IMAGE_NAME: "registry.gitlab.com/${CI_PROJECT_PATH}/ligo_webide" WEBIDE_IMAGE_NAME: "registry.gitlab.com/${CI_PROJECT_PATH}/ligo_webide"
stages: stages:
- test - build
- build_and_package_binaries - push
- build_docker
- build_and_deploy
- ide-unit-test
- ide-build
- ide-e2e-test
- ide-deploy - ide-deploy
- nix
- nix-push
- versioning - versioning
# TODO provide sensible CI for master .docker-image:
dont-merge-to-master: stage: push
stage: test
script:
- "false"
only:
- master
.build_binary: &build_binary
stage: test # To run in sequence and save CPU usage, use stage: build_and_package_binaries
script:
- export COMMIT_DATE="$(git show --no-patch --format=%ci)"
- $build_binary_script "$target_os_family" "$target_os" "$target_os_version"
- $package_binary_script "$target_os_family" "$target_os" "$target_os_version"
artifacts:
paths:
- dist/package/**/*
.website_build: &website_build
stage: build_and_deploy
image: node:12
dependencies:
- build-and-package-debian-9
- build-and-package-debian-10
- build-and-package-ubuntu-18-04
- build-and-package-ubuntu-19-10
before_script:
- export COMMIT_DATE="$(git show --no-patch --format=%ci)"
- export TERM=dumb
- scripts/install_native_dependencies.sh
- scripts/install_opam.sh # TODO: or scripts/install_build_environment.sh ?
- export PATH="/usr/local/bin${PATH:+:}${PATH:-}"
- eval $(opam config env)
- scripts/setup_switch.sh
- eval $(opam config env)
- scripts/setup_repos.sh
# install deps for internal documentation
- scripts/install_vendors_deps.sh
- opam install -y odoc
- scripts/build_ligo_local.sh
# build with odoc
- dune build @doc
# copy .deb packages into website
- find dist -name \*.deb -exec sh -c 'cp {} gitlab-pages/website/static/deb/ligo_$(basename $(dirname {})).deb' \;
# yarn
- cd gitlab-pages/website
- yarn install
script:
- yarn build
# move internal odoc documentation to the website folder
- mv ../../_build/default/_doc/_html/ build/odoc
after_script:
- cp -r gitlab-pages/website/build public
- cp -r gitlab-pages/website/sitemap.xml public/sitemap.xml
artifacts:
paths:
- public
.docker: &docker
image: docker:19.03.5 image: docker:19.03.5
services: services:
- docker:19.03.5-dind - docker:19.03.5-dind
.before_script: &before_script
before_script:
# Install dependencies
# rsync is needed by opam to sync a package installed from a local directory with the copy in ~/.opam
- export COMMIT_DATE="$(git show --no-patch --format=%ci)"
- export TERM=dumb
- scripts/install_native_dependencies.sh
- scripts/install_opam.sh # TODO: or scripts/install_build_environment.sh ?
- export PATH="/usr/local/bin${PATH:+:}${PATH:-}"
- eval $(opam config env)
- scripts/setup_switch.sh
- eval $(opam config env)
- scripts/setup_repos.sh
version_scheduled_job: version_scheduled_job:
stage: versioning stage: versioning
script: script:
@ -107,192 +23,118 @@ version_scheduled_job:
only: only:
- schedules - schedules
local-dune-job: .nix:
<<: *before_script stage: build
stage: test tags:
script: - nix
- scripts/install_vendors_deps.sh before_script:
- scripts/build_ligo_local.sh - find "$CI_PROJECT_DIR" -path "$CI_PROJECT_DIR/.git" -prune -o "(" -type d -a -not -perm -u=w ")" -exec chmod --verbose u+w {} ";"
- dune runtest - nix-env -f channel:nixos-unstable -iA gnutar gitMinimal
- make coverage - export COMMIT_DATE="$(git show --no-patch --format=%ci)"
artifacts:
paths: # The binary produced is useless by itself
- _coverage_all binary:
extends: .nix
only: only:
- merge_requests - merge_requests
- dev - dev
- tags
- triggers
- /^.*-run-dev$/ - /^.*-run-dev$/
# Run a docker build without publishing to the registry
build-current-docker-image:
stage: build_docker
dependencies:
- build-and-package-debian-10
<<: *docker
script: script:
- export COMMIT_DATE="$(git show --no-patch --format=%ci)" - nix-build nix -A ligo-bin
- sh scripts/build_docker_image.sh next
- sh scripts/test_cli.sh doc:
extends: .nix
only: only:
- merge_requests - merge_requests
- dev
# When a MR/PR is merged to dev - /^.*-run-dev$/
# take the previous build and publish it to Docker Hub
build-and-publish-latest-docker-image:
stage: build_and_deploy
<<: *docker
dependencies:
- build-and-package-debian-10
script: script:
- sh scripts/build_docker_image.sh $(if test "$CI_COMMIT_REF_NAME" = "dev"; then echo next; else echo next-attempt; fi) - nix-build nix -A ligo-doc
- sh scripts/test_cli.sh - cp -Lr --no-preserve=mode,ownership,timestamps result/share/doc .
- echo ${LIGO_REGISTRY_PASSWORD} | docker login -u ${LIGO_REGISTRY_USER} --password-stdin artifacts:
- docker push ${LIGO_REGISTRY_IMAGE_BUILD:-ligolang/ligo}:$(if test "$CI_COMMIT_REF_NAME" = "dev"; then echo next; else echo next-attempt; fi) paths:
- doc
test:
extends: .nix
only:
- merge_requests
- dev
- /^.*-run-dev$/
script:
- nix-build nix -A ligo-coverage
- cp -Lr --no-preserve=mode,ownership,timestamps result/share/coverage .
artifacts:
paths:
- coverage
webide-e2e:
extends: .nix
only:
- merge_requests
- dev
- /^.*-run-dev$/
script:
- nix-build nix -A ligo-editor.e2e
docker:
extends: .nix
only:
- merge_requests
- dev
- /^.*-run-dev$/
script:
- nix-build nix -A ligo-docker
- cp -L result ligo.tar.gz
artifacts:
paths:
- ligo.tar.gz
docker-push:
extends: .docker-image
dependencies:
- docker
needs:
- docker
rules: rules:
# Only deploy docker when from the dev branch AND on the canonical ligolang/ligo repository # Only deploy docker when from the dev branch AND on the canonical ligolang/ligo repository
- if: '$CI_COMMIT_REF_NAME =~ /^(dev|.*-run-dev)$/ && $CI_PROJECT_PATH == "ligolang/ligo"' - if: '$CI_COMMIT_REF_NAME =~ /^(dev|.*-run-dev)$/ && $CI_PROJECT_PATH == "ligolang/ligo"'
when: always when: always
script:
- echo ${LIGO_REGISTRY_PASSWORD} | docker login -u ${LIGO_REGISTRY_USER} --password-stdin
- docker load -i=./ligo.tar.gz
- export LIGO_REGISTRY_FULL_NAME=${LIGO_REGISTRY_IMAGE_BUILD:-ligolang/ligo}:$(if test "$CI_COMMIT_REF_NAME" = "dev"; then echo next; else echo next-attempt; fi)
- docker tag ligo "${LIGO_REGISTRY_FULL_NAME}"
- docker push "${LIGO_REGISTRY_FULL_NAME}"
# It'd be a good idea to generate those jobs dynamically, webide-docker:
# based on desired targets extends: .nix
build-and-package-debian-9:
<<: *docker
# To run in sequence and save CPU usage, use stage: build_and_package_binaries
stage: test
variables:
target_os_family: "debian"
target_os: "debian"
target_os_version: "9"
<<: *build_binary
only:
- dev
- tags
- /^.*-run-dev$/
build-and-package-debian-10:
<<: *docker
# To run in sequence and save CPU usage, use stage: build_and_package_binaries
stage: test
variables:
target_os_family: "debian"
target_os: "debian"
target_os_version: "10"
<<: *build_binary
# this one is merge_requests and dev, because the debian 10 binary
# is used for build-current-docker-image and for
# build-and-publish-latest-docker-image
only: only:
- merge_requests - merge_requests
- dev - dev
- tags
- /^.*-run-dev$/ - /^.*-run-dev$/
script:
- nix-build nix -A ligo-editor-docker
- cp -L result webide.tar.gz
artifacts:
paths:
- webide.tar.gz
build-and-package-ubuntu-18-04: webide-push:
<<: *docker extends: .docker-image
# To run in sequence and save CPU usage, use stage: build_and_package_binaries
stage: test
variables:
target_os_family: "debian"
target_os: "ubuntu"
target_os_version: "18.04"
<<: *build_binary
only:
- dev
- tags
- /^.*-run-dev$/
build-and-package-ubuntu-19-10:
<<: *docker
# To run in sequence and save CPU usage, use stage: build_and_package_binaries
stage: test
variables:
target_os_family: "debian"
target_os: "ubuntu"
target_os_version: "19.10"
<<: *build_binary
only:
- dev
- tags
- /^.*-run-dev$/
# Pages are deployed from dev, be careful not to override 'next'
# in case something gets merged into 'dev' while releasing.
pages:
<<: *website_build
rules:
- if: '$CI_COMMIT_REF_NAME == "dev" && $CI_PROJECT_PATH == "ligolang/ligo"'
when: always
pages-attempt:
<<: *website_build
rules:
- if: '$CI_COMMIT_REF_NAME =~ /^.*-run-dev$/ && $CI_PROJECT_PATH == "ligolang/ligo"'
when: always
# WEBIDE jobs
run-webide-unit-tests:
stage: ide-unit-test
dependencies: dependencies:
- build-and-package-debian-10 - webide-docker
image: node:12-buster needs:
script: - webide-docker
- mv $(realpath dist/package/debian-10/*.deb) ligo_deb10.deb
- apt-get update && apt-get -y install libev-dev perl pkg-config libgmp-dev libhidapi-dev m4 libcap-dev bubblewrap rsync
- dpkg -i ligo_deb10.deb
- cd tools/webide/packages/server
- npm ci
- export LIGO_CMD=/bin/ligo && npm run test
rules: rules:
- if: '$TAG_JOB != "true"' # Only deploy docker when from the dev branch AND on the canonical ligolang/ligo repository
changes: - if: '$CI_COMMIT_REF_NAME =~ /^(dev|.*-run-dev)$/ && $CI_PROJECT_PATH == "ligolang/ligo"'
- tools/webide/**
when: always when: always
build-publish-ide-image:
stage: build_and_deploy
<<: *docker
script: script:
- ls -F
- find dist/
- find dist/package/ -name '*ligo_*deb'
- mv $(realpath dist/package/debian-10/*.deb) tools/webide/ligo_deb10.deb
- cp -r src/test/examples tools/webide/packages/client/examples
- cd tools/webide
- echo "${CI_BUILD_TOKEN}" | docker login -u gitlab-ci-token --password-stdin registry.gitlab.com - echo "${CI_BUILD_TOKEN}" | docker login -u gitlab-ci-token --password-stdin registry.gitlab.com
- > - docker load -i=./webide.tar.gz
docker build - docker tag ligo-editor "${WEBIDE_IMAGE_NAME}:${CI_COMMIT_SHORT_SHA}"
-t "${WEBIDE_IMAGE_NAME}:${CI_COMMIT_SHORT_SHA}"
--build-arg GIT_TAG="${CI_COMMIT_SHA}"
--build-arg GIT_COMMIT="${CI_COMMIT_SHORT_SHA}"
--build-arg EXAMPLES_DIR_SRC=packages/client/examples
.
- docker push "${WEBIDE_IMAGE_NAME}:${CI_COMMIT_SHORT_SHA}" - docker push "${WEBIDE_IMAGE_NAME}:${CI_COMMIT_SHORT_SHA}"
rules:
- if: '$TAG_JOB != "true"'
changes:
- tools/webide/**
when: always
if: '$CI_COMMIT_REF_NAME == "dev"'
when: always
run-webide-e2e-tests:
stage: ide-e2e-test
<<: *docker
image: tmaier/docker-compose
script:
- cd tools/webide/packages/e2e
- export WEBIDE_IMAGE="${WEBIDE_IMAGE_NAME}:${CI_COMMIT_SHORT_SHA}"
- docker-compose run e2e
rules:
- if: '$TAG_JOB != "true"'
changes:
- tools/webide/**
when: always
if: '$CI_COMMIT_REF_NAME == "dev"'
when: always
deploy-handoff: deploy-handoff:
# Handoff deployment duties to private repo # Handoff deployment duties to private repo
@ -305,161 +147,39 @@ deploy-handoff:
- if: '$CI_COMMIT_REF_NAME == "dev"' - if: '$CI_COMMIT_REF_NAME == "dev"'
when: always when: always
static-binary:
##### The following jobs will replace the ones above! ##### extends: .nix
# TODO: add jobs for deploying the website, build a docker image and deploy it
.prepare_nix: &prepare_nix
image: nixos/nix:latest
before_script:
- nix-env -f channel:nixos-unstable -iA gnutar gitMinimal cachix
- export COMMIT_DATE="$(git show --no-patch --format=%ci)"
- echo "sandbox = true" > /etc/nix/nix.conf
# A temporary caching solution
- cachix use balsoft
# TODO Don't upload everything, use a post-build-hook to only upload what can't be substituted
- cachix push -w balsoft &
# The binary produced is useless by itself
binary-nix:
stage: nix
<<: *prepare_nix
only: only:
- merge_requests - merge_requests
- dev - dev
- /^.*-run-dev$/ - /^.*-run-dev$/
script:
- nix-build nix -A ligo-bin
doc-nix:
stage: nix
<<: *prepare_nix
only:
- merge_requests
- dev
- /^.*-run-dev$/
script:
- nix-build nix -A ligo-doc
- cp -Lr result/share/doc result-doc
artifacts:
paths:
- result-doc
test-nix:
stage: nix
<<: *prepare_nix
only:
- merge_requests
- dev
- /^.*-run-dev$/
script:
- nix-build nix -A ligo-coverage
- cp -Lr result/share/coverage result-coverage
artifacts:
paths:
- result-coverage
# FIXME For some reason, e2e tests can't build on CI.
.webide-e2e-nix:
stage: nix
<<: *prepare_nix
rules:
- changes:
- tools/webide/**
when: always
- if: '$CI_COMMIT_REF_NAME =~ /^(dev|.*-run-dev)$/ && $CI_PROJECT_PATH == "ligolang/ligo"'
when: always
script:
- nix-build nix -A ligo-editor.e2e
docker-nix:
stage: nix
only:
- merge_requests
- dev
- /^.*-run-dev$/
<<: *prepare_nix
script:
- nix-build nix -A ligo-docker
- cp -L result ligo.tar.gz
artifacts:
paths:
- ligo.tar.gz
docker-push-nix:
stage: nix-push
<<: *docker
dependencies:
- docker-nix
needs:
- docker-nix
rules:
# Only deploy docker when from the dev branch AND on the canonical ligolang/ligo repository
- if: '$CI_COMMIT_REF_NAME =~ /^(dev|.*-run-dev)$/ && $CI_PROJECT_PATH == "ligolang/ligo"'
when: always
script:
- echo ${LIGO_REGISTRY_PASSWORD} | docker login -u ${LIGO_REGISTRY_USER} --password-stdin
- docker load -i=./ligo.tar.gz
- export LIGO_REGISTRY_FULL_NAME=${LIGO_REGISTRY_IMAGE_BUILD:-ligolang/ligo}:$(if test "$CI_COMMIT_REF_NAME" = "dev"; then echo next-nix; else echo next-attempt-nix; fi)
- docker tag ligo "${LIGO_REGISTRY_FULL_NAME}"
- docker push "${LIGO_REGISTRY_FULL_NAME}"
webide-docker-nix:
stage: nix
only:
- merge_requests
- dev
- /^.*-run-dev$/
<<: *prepare_nix
script:
- nix-build nix -A ligo-editor-docker
- cp -L result webide.tar.gz
artifacts:
paths:
- webide.tar.gz
webide-push-nix:
stage: nix-push
<<: *docker
dependencies:
- webide-docker-nix
needs:
- webide-docker-nix
rules:
# Only deploy docker when from the dev branch AND on the canonical ligolang/ligo repository
- if: '$CI_COMMIT_REF_NAME =~ /^(dev|.*-run-dev)$/ && $CI_PROJECT_PATH == "ligolang/ligo"'
when: always
script:
- echo "${CI_BUILD_TOKEN}" | docker login -u gitlab-ci-token --password-stdin registry.gitlab.com
- docker load -i=./webide.tar.gz
- docker tag ligo-editor "${WEBIDE_IMAGE_NAME}:nix${CI_COMMIT_SHORT_SHA}"
- docker push "${WEBIDE_IMAGE_NAME}:nix${CI_COMMIT_SHORT_SHA}"
static-binary-nix:
stage: nix
<<: *prepare_nix
only:
- dev
- /^.*-run-dev$/
script: script:
- nix-build nix -A ligo-static - nix-build nix -A ligo-static
# Check that the binary is truly static and has 0 dependencies # Check that the binary is truly static and has 0 dependencies
- test $(nix-store -q --references ./result | wc -l) -eq 0 - test $(nix-store -q --references ./result | wc -l) -eq 0
- cp -Lr result/bin result-static - cp -L result/bin/ligo ligo
- chmod +rwx ligo
artifacts: artifacts:
paths: paths:
- result-static - ligo
website-nix: .website:
stage: nix extends: .nix
<<: *prepare_nix
only:
- dev
- /^.*-run-dev$/
script: script:
- nix-build nix -A ligo-website - nix-build nix -A ligo-website
- cp -Lr result/ result-website - cp -Lr --no-preserve=mode,ownership,timestamps result/ public
artifacts: artifacts:
paths: paths:
- result-website - public
pages:
extends: .website
rules:
- if: '$CI_COMMIT_REF_NAME == "dev" && $CI_PROJECT_PATH == "ligolang/ligo"'
when: always
pages-attempt:
extends: .website
only:
- merge_requests
- /^.*-run-dev$/


@ -0,0 +1,730 @@
---
id: interop
title: Interop
---
import Syntax from '@theme/Syntax';
LIGO can work together with other smart contract languages on Tezos. However,
data structures may be represented differently in Michelson and not match the
standard LIGO types exactly.
## Michelson types and annotations
Michelson types consist of `or`'s and `pair`'s, combined with field annotations.
Field annotations add constraints on a Michelson type; for example,
`(pair (int %foo) (string %bar))` will only work with its exact equivalent or
with the same type stripped of its field annotations.
To clarify:
```michelson
(pair (int %foo) (string %bar))
```
works with
```michelson
(pair (int %foo) (string %bar))
```
works with
```michelson
(pair int string)
```
does not work with
```michelson
(pair (int %bar) (string %foo))
```
does not work with
```michelson
(pair (string %bar) (int %foo))
```
:::info
In the case of annotated entrypoints - the annotated `or` tree directly under
`parameter` in a contract - you should use annotations, as otherwise it would
become unclear which entrypoint you are referring to.
:::
## Entrypoints and annotations
It's possible for a contract to have multiple entrypoints, which translates in
LIGO to a `parameter` with a variant type as shown here:
<Syntax syntax="pascaligo">
```pascaligo
type storage is int
type parameter is
| Left of int
| Right of int
function main (const p: parameter; const x: storage): (list(operation) * storage) is
((nil: list(operation)), case p of
| Left(i) -> x - i
| Right(i) -> x + i
end)
```
</Syntax>
<Syntax syntax="cameligo">
```cameligo
type storage = int
type parameter =
| Left of int
| Right of int
let main ((p, x): (parameter * storage)): (operation list * storage) =
(([]: operation list), (match p with
| Left i -> x - i
| Right i -> x + i
))
```
</Syntax>
<Syntax syntax="reasonligo">
```reasonligo
type storage = int
type parameter =
| Left(int)
| Right(int)
let main = ((p, x): (parameter, storage)): (list(operation), storage) => {
([]: list(operation), (switch(p) {
| Left(i) => x - i
| Right(i) => x + i
}))
};
```
</Syntax>
This contract can be called by another contract, like this one:
<Syntax syntax="pascaligo">
```pascaligo group=get_entrypoint_opt
type storage is int
type parameter is int
type x is Left of int
function main (const p: parameter; const s: storage): (list(operation) * storage) is block {
const contract: contract(x) =
case (Tezos.get_entrypoint_opt("%left", ("tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx":address)): option(contract(x))) of
| Some (c) -> c
| None -> (failwith("not a correct contract") : contract(x))
end;
const result: (list(operation) * storage) = ((list [Tezos.transaction(Left(2), 2mutez, contract)]: list(operation)), s)
} with result
```
</Syntax>
<Syntax syntax="cameligo">
```cameligo group=get_entrypoint_opt
type storage = int
type parameter = int
type x = Left of int
let main (p, s: parameter * storage): operation list * storage = (
let contract: x contract =
match ((Tezos.get_entrypoint_opt "%left" ("tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx": address)): x contract option) with
| Some c -> c
| None -> (failwith "contract does not match": x contract)
in
(([
Tezos.transaction (Left 2) 2mutez contract;
]: operation list), s)
)
```
</Syntax>
<Syntax syntax="reasonligo">
```reasonligo group=get_entrypoint_opt
type storage = int;
type parameter = int;
type x = Left(int);
let main = ((p, s): (parameter, storage)): (list(operation), storage) => {
let contract: contract(x) =
switch (Tezos.get_entrypoint_opt("%left", ("tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx": address)): option(contract(x))) {
| Some c => c
| None => (failwith ("contract does not match"): contract(x))
};
([
Tezos.transaction(Left(2), 2mutez, contract)
]: list(operation), s);
};
```
</Syntax>
Notice how we directly use the `%left` entrypoint without mentioning the
`%right` entrypoint. This is done with the help of annotations. Without
annotations it wouldn't be clear which entrypoint our `int` refers to.
This currently only works for `or`'s or variant types in LIGO.
## Interop with Michelson
To interop with existing Michelson code or for compatibility with certain
development tooling, LIGO has two special interop types: `michelson_or` and
`michelson_pair`. These types give the flexibility to model the exact Michelson
output, including field annotations.
Take for example the following Michelson type that we want to interop with:
```michelson
(or
(unit %z)
(or %other
(unit %y)
(pair %other
(string %x)
(pair %other
(int %w)
(nat %v)))))
```
To reproduce this type we can use the following LIGO code:
<Syntax syntax="pascaligo">
```pascaligo
type w_and_v is michelson_pair(int, "w", nat, "v")
type x_and is michelson_pair(string, "x", w_and_v, "other")
type y_or is michelson_or(unit, "y", x_and, "other")
type z_or is michelson_or(unit, "z", y_or, "other")
```
</Syntax>
<Syntax syntax="cameligo">
```cameligo
type w_and_v = (int, "w", nat, "v") michelson_pair
type x_and = (string, "x", w_and_v, "other") michelson_pair
type y_or = (unit, "y", x_and, "other") michelson_or
type z_or = (unit, "z", y_or, "other") michelson_or
```
</Syntax>
<Syntax syntax="reasonligo">
```reasonligo
type w_and_v = michelson_pair(int, "w", nat, "v")
type x_and = michelson_pair(string, "x", w_and_v, "other")
type y_or = michelson_or(unit, "y", x_and, "other")
type z_or = michelson_or(unit, "z", y_or, "other")
```
</Syntax>
If you don't want an annotation, you need to provide an empty string (see the
sketch after this note).
:::info
Alternatively, if annotations are not important to you, you can also use plain
tuples for pairs instead. Plain tuples don't have any annotations.
:::
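Here is a minimal CameLIGO sketch (the type names are illustrative) contrasting
an unannotated `michelson_pair`, declared with empty strings, with a plain
tuple:

<Syntax syntax="cameligo">

```cameligo
(* Empty strings mean "no field annotation" on the resulting Michelson pair. *)
type no_annot_pair = (int, "", nat, "") michelson_pair

(* A plain tuple also compiles to a pair without field annotations. *)
type plain_pair = int * nat
```

</Syntax>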
To use variables of type `michelson_or` you have to use `M_left` and `M_right`.
`M_left` picks the left `or` case while `M_right` picks the right `or` case.
For `michelson_pair` you need to use tuples.
<Syntax syntax="pascaligo">
```pascaligo
const z: z_or = (M_left (unit) : z_or);
const y_1: y_or = (M_left (unit): y_or);
const y: z_or = (M_right (y_1) : z_or);
const x_pair: x_and = ("foo", (2, 3n));
const x_1: y_or = (M_right (x_pair): y_or);
const x: z_or = (M_right (x_1) : z_or);
```
</Syntax>
<Syntax syntax="cameligo">
```cameligo
let z: z_or = (M_left (unit) : z_or)
let y_1: y_or = (M_left (unit): y_or)
let y: z_or = (M_right (y_1) : z_or)
let x_pair: x_and = ("foo", (2, 3n))
let x_1: y_or = (M_right (x_pair): y_or)
let x: z_or = (M_right (x_1) : z_or)
```
</Syntax>
<Syntax syntax="reasonligo">
```reasonligo
let z: z_or = (M_left (unit) : z_or)
let y_1: y_or = (M_left (unit): y_or)
let y: z_or = (M_right (y_1) : z_or)
let x_pair: x_and = ("foo", (2, 3n))
let x_1: y_or = (M_right (x_pair): y_or)
let x: z_or = (M_right (x_1) : z_or)
```
</Syntax>
## Helper functions
Converting between different LIGO types and data structures can happen in two
ways. The first way is to use the provided layout conversion functions, and the
second way is to handle the layout conversion manually.
:::info
In both cases the conversion increases the size of the smart contract, and it
happens when the smart contract runs.
:::
### Converting left combed Michelson data structures
Here's an example of a left combed Michelson data structure using pairs:
```michelson
(pair %other
(pair %other
(string %s)
(int %w)
)
(nat %v)
)
```
This could correspond to the following record type:
<Syntax syntax="pascaligo">
```pascaligo
type l_record is record [
s: string;
w: int;
v: nat
]
```
</Syntax>
<Syntax syntax="cameligo">
```cameligo
type l_record = {
s: string;
w: int;
v: nat
}
```
</Syntax>
<Syntax syntax="reasonligo">
```reasonligo
type l_record = {
s: string,
w: int,
v: nat
}
```
</Syntax>
If we want to convert from the Michelson type to our record type and vice
versa, we can use the following code:
<Syntax syntax="pascaligo">
```pascaligo
type michelson is michelson_pair_left_comb(l_record)
function of_michelson (const f: michelson) : l_record is
block {
const p: l_record = Layout.convert_from_left_comb(f)
}
with p
function to_michelson (const f: l_record) : michelson is
block {
const p: michelson = Layout.convert_to_left_comb ((f: l_record))
}
with p
```
</Syntax>
<Syntax syntax="cameligo">
```cameligo
type michelson = l_record michelson_pair_left_comb
let of_michelson (f: michelson) : l_record =
let p: l_record = Layout.convert_from_left_comb f in
p
let to_michelson (f: l_record) : michelson =
let p = Layout.convert_to_left_comb (f: l_record) in
p
```
</Syntax>
<Syntax syntax="reasonligo">
```reasonligo
type michelson = michelson_pair_left_comb(l_record);
let of_michelson = (f: michelson) : l_record => {
let p: l_record = Layout.convert_from_left_comb(f);
p
};
let to_michelson = (f: l_record) : michelson => {
let p = Layout.convert_to_left_comb(f: l_record);
p
}
```
</Syntax>
In the case of a left combed Michelson `or` data structure that you want to
translate to a variant type, you can use the `michelson_or_left_comb` type.
For example:
<Syntax syntax="pascaligo">
```pascaligo
type vari is
| Foo of int
| Bar of nat
| Other of bool
type r is michelson_or_left_comb(vari)
```
</Syntax>
<Syntax syntax="cameligo">
```cameligo
type vari =
| Foo of int
| Bar of nat
| Other of bool
type r = vari michelson_or_left_comb
```
</Syntax>
<Syntax syntax="reasonligo">
```reasonligo
type vari =
| Foo(int)
| Bar(nat)
| Other(bool)
type r = michelson_or_left_comb(vari)
```
</Syntax>
You can then use these types with `Layout.convert_from_left_comb` or
`Layout.convert_to_left_comb`, similarly to the `pair` example above, like this:
<Syntax syntax="pascaligo">
```pascaligo
function of_michelson_or (const f: r) : vari is
block {
const p: vari = Layout.convert_from_left_comb(f)
}
with p
function to_michelson_or (const f: vari) : r is
block {
const p: r = Layout.convert_to_left_comb((f: vari))
}
with p
```
</Syntax>
<Syntax syntax="cameligo">
```cameligo
let of_michelson_or (f: r) : vari =
let p: vari = Layout.convert_from_left_comb f in
p
let to_michelson_or (f: vari) : r =
let p = Layout.convert_to_left_comb (f: vari) in
p
```
</Syntax>
<Syntax syntax="reasonligo">
```reasonligo
let of_michelson_or = (f: r) : vari => {
let p: vari = Layout.convert_from_left_comb(f);
p
};
let to_michelson_or = (f: vari) : r => {
let p = Layout.convert_to_left_comb(f: vari);
p
}
```
</Syntax>
### Converting right combed Michelson data structures
In the case of right combed data structures, like:
```michelson
(pair %other
(string %s)
(pair %other
(int %w)
(nat %v)
)
)
```
you can use almost the same code as for the left combed data structures, but
with `michelson_or_right_comb`, `michelson_pair_right_comb`,
`Layout.convert_from_right_comb`, and `Layout.convert_to_right_comb`
respectively, as sketched below.
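As a minimal CameLIGO sketch, mirroring the left combed example above and
assuming the right-comb counterparts are named as listed (the record and
function names here are illustrative):

<Syntax syntax="cameligo">

```cameligo
type r_record = {
  s: string;
  w: int;
  v: nat
}

(* The same record, laid out as a right comb of Michelson pairs. *)
type michelson_r = r_record michelson_pair_right_comb

let of_michelson_r (f: michelson_r) : r_record =
  let p: r_record = Layout.convert_from_right_comb f in
  p

let to_michelson_r (f: r_record) : michelson_r =
  let p = Layout.convert_to_right_comb (f: r_record) in
  p
```

</Syntax>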
### Manual data structure conversion
If you want to get your hands dirty, it's also possible to do manual data
structure conversion.
The following code can be used as inspiration:
<Syntax syntax="pascaligo">
```pascaligo group=helper_functions
type z_to_v is
| Z
| Y
| X
| W
| V
type w_or_v is michelson_or(unit, "w", unit, "v")
type x_or is michelson_or(unit, "x", w_or_v, "other")
type y_or is michelson_or(unit, "y", x_or, "other")
type z_or is michelson_or(unit, "z", y_or, "other")
type test is record [
z: string;
y: int;
x: string;
w: bool;
v: int;
]
function make_concrete_sum (const r: z_to_v) : z_or is block {
const z: z_or = (M_left (unit) : z_or);
const y_1: y_or = (M_left (unit): y_or);
const y: z_or = (M_right (y_1) : z_or);
const x_2: x_or = (M_left (unit): x_or);
const x_1: y_or = (M_right (x_2): y_or);
const x: z_or = (M_right (x_1) : z_or);
const w_3: w_or_v = (M_left (unit): w_or_v);
const w_2: x_or = (M_right (w_3): x_or);
const w_1: y_or = (M_right (w_2): y_or);
const w: z_or = (M_right (w_1) : z_or);
const v_3: w_or_v = (M_right (unit): w_or_v);
const v_2: x_or = (M_right (v_3): x_or);
const v_1: y_or = (M_right (v_2): y_or);
const v: z_or = (M_right (v_1) : z_or);
}
with (case r of
| Z -> z
| Y -> y
| X -> x
| W -> w
| V -> v
end)
function make_concrete_record (const r: test) : (string * int * string * bool * int) is
(r.z, r.y, r.x, r.w, r.v)
function make_abstract_sum (const z_or: z_or) : z_to_v is
(case z_or of
| M_left (n) -> Z
| M_right (y_or) ->
(case y_or of
| M_left (n) -> Y
| M_right (x_or) ->
(case x_or of
| M_left (n) -> X
| M_right (w_or) ->
(case (w_or) of
| M_left (n) -> W
| M_right (n) -> V
end)
end)
end)
end)
function make_abstract_record (const z: string; const y: int; const x: string; const w: bool; const v: int) : test is
record [ z = z; y = y; x = x; w = w; v = v ]
```
</Syntax>
<Syntax syntax="cameligo">
```cameligo group=helper_functions
type z_to_v =
| Z
| Y
| X
| W
| V
type w_or_v = (unit, "w", unit, "v") michelson_or
type x_or = (unit, "x", w_or_v, "other") michelson_or
type y_or = (unit, "y", x_or, "other") michelson_or
type z_or = (unit, "z", y_or, "other") michelson_or
type test = {
z: string;
y: int;
x: string;
w: bool;
v: int;
}
let make_concrete_sum (r: z_to_v) : z_or =
match r with
| Z -> (M_left (unit) : z_or)
| Y -> (M_right (M_left (unit): y_or) : z_or )
| X -> (M_right (M_right (M_left (unit): x_or): y_or) : z_or )
| W -> (M_right (M_right (M_right (M_left (unit): w_or_v): x_or): y_or) : z_or )
| V -> (M_right (M_right (M_right (M_right (unit): w_or_v): x_or): y_or) : z_or )
let make_concrete_record (r: test) : (string * int * string * bool * int) =
(r.z, r.y, r.x, r.w, r.v)
let make_abstract_sum (z_or: z_or) : z_to_v =
match z_or with
| M_left n -> Z
| M_right y_or ->
(match y_or with
| M_left n -> Y
| M_right x_or -> (
match x_or with
| M_left n -> X
| M_right w_or -> (
match w_or with
| M_left n -> W
| M_right n -> V)))
let make_abstract_record (z: string) (y: int) (x: string) (w: bool) (v: int) : test =
{ z = z; y = y; x = x; w = w; v = v }
```
</Syntax>
<Syntax syntax="reasonligo">
```reasonligo group=helper_functions
type z_to_v =
| Z
| Y
| X
| W
| V
type w_or_v = michelson_or(unit, "w", unit, "v")
type x_or = michelson_or(unit, "x", w_or_v, "other")
type y_or = michelson_or(unit, "y", x_or, "other")
type z_or = michelson_or(unit, "z", y_or, "other")
type test = {
z: string,
y: int,
x: string,
w: bool,
v: int
}
let make_concrete_sum = (r: z_to_v) : z_or =>
switch(r){
| Z => (M_left (unit) : z_or)
| Y => (M_right (M_left (unit): y_or) : z_or )
| X => (M_right (M_right (M_left (unit): x_or): y_or) : z_or )
| W => (M_right (M_right (M_right (M_left (unit): w_or_v): x_or): y_or) : z_or )
| V => (M_right (M_right (M_right (M_right (unit): w_or_v): x_or): y_or) : z_or )
}
let make_concrete_record = (r: test) : (string, int, string, bool, int) =>
(r.z, r.y, r.x, r.w, r.v)
let make_abstract_sum = (z_or: z_or) : z_to_v =>
switch (z_or) {
| M_left n => Z
| M_right y_or => (
switch (y_or) {
| M_left n => Y
| M_right x_or => (
switch (x_or) {
| M_left n => X
| M_right w_or => (
switch (w_or) {
| M_left n => W
| M_right n => V
})
})
})
}
let make_abstract_record = (z: string, y: int, x: string, w: bool, v: int) : test =>
{ z : z, y, x, w, v }
```
</Syntax>
## Amendment
With the upcoming 007 amendment to Tezos this will change, though, and `pair`'s
will also be able to be ordered differently.


@ -40,20 +40,31 @@ curl https://gitlab.com/ligolang/ligo/raw/master/scripts/installer.sh | bash -s
ligo --help
```
## Static Linux binary
The `ligo` executable is statically linked. It should run on most modern Linux distributions.
To use it, download it [here](/bin/linux/ligo), make it executable, and you're done!
```zsh
wget https://ligolang.org/bin/linux/ligo
chmod +x ./ligo
```
Optionally, you can put it somewhere in your `PATH` for easy access:
```zsh
sudo cp ./ligo /usr/local/bin
```
## Debian Linux package installation
-We have produced .deb packages for a few Debian Linux versions. They will install a global `ligo` executable.
+A `.deb` package containing the static `ligo` executable is also available.
-First download one of the packages below, and then install using:
+First download [the package](/deb/ligo.deb), and then install using:
+```zsh
+sudo apt install ./ligo.deb
```
-sudo apt install ./<package_name_here>.deb
-```
-- [Ubuntu 18.04](/deb/ligo_ubuntu-18.04.deb)
-- [Ubuntu 19.10](/deb/ligo_ubuntu-19.10.deb)
-- [Debian 9](/deb/ligo_debian-9.deb)
-- [Debian 10](/deb/ligo_debian-10.deb)
## Release schedule


@ -19,8 +19,9 @@
"advanced/entrypoints-contracts", "advanced/entrypoints-contracts",
"advanced/include", "advanced/include",
"advanced/first-contract", "advanced/first-contract",
"advanced/michelson-and-ligo", "advanced/michelson-and-ligo",
"advanced/inline" "advanced/inline",
"advanced/interop"
], ],
"Reference": [ "Reference": [
"api/cli-commands", "api/cli-commands",

nix/README.md (new file)

@ -0,0 +1,30 @@
# Nix expressions for building LIGO
Nix is a declarative package manager. Get it here: https://nixos.org/nix
These expressions are used in CI to reproducibly build the LIGO compiler, as well as the WebIDE and https://ligolang.org.
If you wish to build it yourself, do `nix build -f. $thing`, where `$thing` is
- `ligo`: executables, libraries, documentation, coverage reports
- `ligo-bin`: a dynamically linked binary (Linux, Mac)
- `ligo-static`: a statically linked binary (Linux only)
- `ligo-doc`: documentation generated by odoc
- `ligo-editor`: WebIDE, it can be started with `result/bin/ligo-editor`
- `ligo-website`: the website, website root is `result`
- `ligo-docker`: a docker image with LIGO binaries
- `ligo-editor-docker`: a docker image with webide
- `ligo-deb`: debian package with static binaries
The output of `nix build` can be found in the `result` directory.
## Quick maintenance guide
- `opam-repository` and `tezos-opam-repository` are pinned. To update them when required, run `niv update` (you can get niv with `nix shell 'nixpkgs#niv'`)
- `ocaml` version is pinned in `ocaml-overlay.nix`. If you want to update it, go there and change the version.
- If something fails, `nix repl pkgs.nix` can be very useful to investigate it.
## Known caveats
- This is not a [nix flake](https://gist.github.com/edolstra/40da6e3a4d4ee8fd019395365e0772e7). This will never be a flake if we want to keep this low-maintenance, because of the way `opam` sources are defined. Sometimes, the checksum is omitted there, so we have to use `fetchTarball` without the checksum, which won't work in restricted mode (which is required for flakes). The only solution would be to generate nix expressions for opam-repository separately, but it means a manual step in the process (and it's also impossible to make this work as a flake).
- For the same reason as above, evaluation can take a while because we need to download all the sources every `tarball-ttl` seconds. This can be mitigated by setting `tarball-ttl` to a high value.


@ -1,10 +1,7 @@
{ dockerTools, writeShellScriptBin, runCommand, mcpp, bash, coreutils, ligo, name ? "ligo" }:
-let
-tmp = runCommand "tmp" {} "mkdir -p $out/tmp";
-in
dockerTools.buildLayeredImage {
inherit name;
tag = "latest";
-contents = [ ligo tmp bash ];
+contents = [ ligo bash ];
config.Entrypoint = name;
}


@ -2,10 +2,12 @@
, writeShellScriptBin, makeFontsConf, buildEnv, rsync, sources
, chromium ? null }:
let
+# Use a common yarn.lock for everything
yarnLock = ../tools/webide/yarn.lock;
installPhase = "mkdir $out; cp -Lr node_modules $out/node_modules";
+# node_modules of the server
server = mkYarnPackage {
name = "webide-server";
src = ../tools/webide/packages/server;
@ -19,6 +21,8 @@ let
distPhase = "true";
inherit yarnLock installPhase;
};
+# node_modules of the client
client = mkYarnPackage rec {
name = "webide-client";
src = ../tools/webide/packages/client;
@ -42,6 +46,7 @@ let
*/
};
+# Perform the e2e tests; output is empty on purpose
e2e = mkYarnPackage rec {
name = "webide-e2e";
src = ../tools/webide/packages/e2e;
@ -61,6 +66,7 @@ let
inherit yarnLock;
};
+# Run the WebIDE server with all the needed env variables
ligo-editor = writeShellScriptBin "ligo-editor" ''
set -e
LIGO_CMD=${ligo-bin}/bin/ligo \


@ -9,7 +9,7 @@ buildNpmPackage {
'';
installPhase = ''
cp -Lr build $out
-cp -r ${ligo-deb}/* $out/deb
+cp -r ${ligo-deb}/*.deb $out/deb/ligo.deb
mkdir -p $out/bin/linux
cp -r ${ligo-static}/bin/ligo $out/bin/linux/ligo
cp -r ${ligo-doc}/share/doc $out/odoc


@ -1,4 +1,5 @@
self: super: {
+# Note: this overlay doesn't apply to nix-npm-buildpackage
nodejs = super.nodejs-12_x;
nodePackages = super.nodePackages_12_x;
nodejs-slim = super.nodejs-slim-12_x;


@ -1,3 +1,5 @@
+# An overlay that adds ligo to ocamlPackages
{ sources ? import ./sources.nix
, CI_COMMIT_SHA ? builtins.getEnv "CI_COMMIT_SHA"
, COMMIT_DATE ? builtins.getEnv "COMMIT_DATE" }:
@ -6,6 +8,7 @@ let
opam-nix = import sources.opam-nix (import sources.nixpkgs { });
inherit (import sources."gitignore.nix" { inherit (self) lib; })
gitignoreSource;
+# Remove list of directories or files from source (to stop unneeded rebuilds)
filterOut = xs:
self.lib.cleanSourceWith {
filter = p: type: !(builtins.elem (builtins.baseNameOf p) xs);
@ -14,6 +17,7 @@ let
in {
ocamlPackages = self.ocaml-ng.ocamlPackages_4_07.overrideScope'
(builtins.foldl' self.lib.composeExtensions (_: _: { }) [
+# Both opam-repository and tezos-opam-repository are updated manually with `niv update`
(opam-nix.traverseOPAMRepo' sources.opam-repository)
(opam-nix.traverseOPAMRepo sources.tezos-opam-repository)
(opam-nix.callOPAMPackage (filterOut [
@ -26,19 +30,23 @@ in {
"gitlab-pages"
]))
(oself: osuper: {
+# Strange naming in nixpkgs
ocamlfind = oself.findlib;
lablgtk = null;
lwt = oself.lwt4;
+# Native dependencies
conf-gmp = self.gmp;
conf-libev = self.libev;
conf-hidapi = self.hidapi;
conf-pkg-config = self.pkg-config;
+# Strange problems
bigstring = osuper.bigstring.overrideAttrs (_: { doCheck = false; });
xmldiff = osuper.xmldiff.overrideAttrs (_: { src = sources.xmldiff; });
getopt = osuper.getopt.overrideAttrs (_: { configurePhase = "true"; });
+# Force certain versions
ipaddr = osuper.ipaddr.versions."4.0.0";
conduit = osuper.conduit.versions."2.1.0";
conduit-lwt-unix = osuper.conduit-lwt-unix.versions."2.0.2";
@ -64,6 +72,7 @@ in {
propagatedBuildInputs = buildInputs;
});
+# A combination of executables, libraries, documentation and test coverage
ligo = self.buildEnv {
name = "ligo";
paths = with oself; [
@ -74,6 +83,7 @@ in {
];
};
+# LIGO executable and public libraries
ligo-out = osuper.ligo.overrideAttrs (oa: {
name = "ligo-out";
inherit CI_COMMIT_SHA COMMIT_DATE;
@ -82,6 +92,8 @@ in {
nativeBuildInputs = oa.nativeBuildInputs
++ [ self.buildPackages.rakudo ];
});
+# LIGO test suite; output empty on purpose
ligo-tests = osuper.ligo.overrideAttrs (oa: {
name = "ligo-tests";
src = filterOut [
@ -98,6 +110,7 @@ in {
++ [ self.buildPackages.rakudo ];
installPhase = "mkdir $out";
});
+# LIGO odoc documentation
ligo-doc = osuper.ligo.overrideAttrs (oa: {
name = "ligo-doc";
buildInputs = oa.buildInputs
@ -109,6 +122,7 @@ in {
installPhase =
"mkdir $out; cp -r _build/default/_doc/_html/ $out/doc";
});
+# LIGO test coverage reports
ligo-coverage = oself.ligo-tests.overrideAttrs (oa: {
name = "ligo-coverage";
nativeBuildInputs = oa.nativeBuildInputs


@ -1,3 +1,4 @@
+# Create a debian package from static executable
{ stdenv, lib, writeTextFile, ligo-static, dpkg }:
let
project = "ligo";


@ -1,20 +1,25 @@
+# nixpkgs extended with all the overlays for LIGO
{ sources ? import ./sources.nix }:
let
ocaml-overlay = import ./ocaml-overlay.nix { inherit sources; };
static-overlay = import ./static-overlay.nix pkgs;
mac-overlay = import ./mac-overlay.nix;
nodejs-overlay = import ./nodejs-overlay.nix;
-nix-npm-buildpackage = pkgs.callPackage sources.nix-npm-buildpackage { };
pkgs = import sources.nixpkgs {
overlays = [ ocaml-overlay nodejs-overlay ]
+# This is done here to prevent the need for bootstrap nixpkgs
++ (if builtins.currentSystem == "x86_64-darwin"
then [ mac-overlay ]
else [ ]);
};
+# Takes $pkg/ligo and creates a new package with $pkg/bin/ligo
separateBinary = pkg:
pkgs.runCommandNoCC "${pkg.name}-bin" { }
"mkdir -p $out/bin; cp -Lr ${pkg}/ligo $out/bin";
+nix-npm-buildpackage = pkgs.callPackage sources.nix-npm-buildpackage { };
in pkgs.extend (self: super: {
inherit (self.ocamlPackages) ligo ligo-out ligo-tests ligo-doc ligo-coverage;
ligo-bin = separateBinary self.ligo-out.bin;


@ -17,10 +17,10 @@
"homepage": "", "homepage": "",
"owner": "serokell", "owner": "serokell",
"repo": "nix-npm-buildpackage", "repo": "nix-npm-buildpackage",
"rev": "0450c7d88dc3d0a26461b05cfa36f45d551f4d63", "rev": "f2107f638f7df7450a5b7b77b96aaf9752b838d9",
"sha256": "1w0k4jxw141win67rk66nvg323j5i3s4m1w3icf1g1f0p2zyf531", "sha256": "02w8jxmmhxsq7fgzml75b8w8i9mdqxnaajia99jajg6rdiam8zfp",
"type": "tarball", "type": "tarball",
"url": "https://github.com/serokell/nix-npm-buildpackage/archive/0450c7d88dc3d0a26461b05cfa36f45d551f4d63.tar.gz", "url": "https://github.com/serokell/nix-npm-buildpackage/archive/f2107f638f7df7450a5b7b77b96aaf9752b838d9.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz" "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
}, },
"nixpkgs": { "nixpkgs": {


@ -1,3 +1,6 @@
+# An overlay that adds flags needed to build LIGO statically;
+# Supposed to be applied to pkgsMusl
+# Takes `native` as a package set that doesn't cause mass rebuilds (so that we don't have to build perl with musl)
native: self: super:
let dds = x: x.overrideAttrs (o: { dontDisableStatic = true; });
in {


@ -1,5 +1,6 @@
diff --git a/src/bin/dune b/src/bin/dune
index 162963b4b..29dfa5191 100644
+With this patch, a static executable is produced
--- a/src/bin/dune
+++ b/src/bin/dune
@@ -34,5 +34,6 @@


@ -1549,7 +1549,7 @@ let%expect_test _ =
let%expect_test _ =
run_ligo_bad [ "compile-contract" ; bad_contract "bad_contract.mligo" ; "main" ] ;
[%expect {|
-ligo: in file "", line 0, characters 0-0. badly typed contract: unexpected entrypoint type {"location":"in file \"\", line 0, characters 0-0","entrypoint":"main","entrypoint_type":"( nat * int ) -> int"}
+ligo: in file "bad_contract.mligo", line 4, characters 0-3. badly typed contract: unexpected entrypoint type {"location":"in file \"bad_contract.mligo\", line 4, characters 0-3","entrypoint":"main","entrypoint_type":"( nat * int ) -> int"}
If you're not sure how to fix this error, you can
@ -1562,7 +1562,7 @@ let%expect_test _ =
run_ligo_bad [ "compile-contract" ; bad_contract "bad_contract2.mligo" ; "main" ] ;
[%expect {|
-ligo: in file "", line 0, characters 0-0. bad return type: expected (type_operator: list(operation)), got string {"location":"in file \"\", line 0, characters 0-0","entrypoint":"main"}
+ligo: in file "bad_contract2.mligo", line 5, characters 0-3. bad return type: expected (type_operator: list(operation)), got string {"location":"in file \"bad_contract2.mligo\", line 5, characters 0-3","entrypoint":"main"}
If you're not sure how to fix this error, you can
@ -1575,7 +1575,7 @@ let%expect_test _ =
run_ligo_bad [ "compile-contract" ; bad_contract "bad_contract3.mligo" ; "main" ] ;
[%expect {|
-ligo: in file "", line 0, characters 0-0. badly typed contract: expected {int} and {string} to be the same in the entrypoint type {"location":"in file \"\", line 0, characters 0-0","entrypoint":"main","entrypoint_type":"( nat * int ) -> ( (type_operator: list(operation)) * string )"}
+ligo: in file "bad_contract3.mligo", line 5, characters 0-3. badly typed contract: expected {int} and {string} to be the same in the entrypoint type {"location":"in file \"bad_contract3.mligo\", line 5, characters 0-3","entrypoint":"main","entrypoint_type":"( nat * int ) -> ( (type_operator: list(operation)) * string )"}
If you're not sure how to fix this error, you can
@ -1692,4 +1692,56 @@ let%expect_test _ =
* Visit our documentation: https://ligolang.org/docs/intro/introduction
* Ask a question on our Discord: https://discord.gg/9rhYaEt
* Open a gitlab issue: https://gitlab.com/ligolang/ligo/issues/new
-* Check the changelog by running 'ligo changelog' |}]
+* Check the changelog by running 'ligo changelog' |}];
run_ligo_good ["print-ast"; contract "letin.mligo"];
[%expect {|
type storage = (int ,
int)
const main : (int ,
storage) -> ((TO_list(operation)) ,
storage) = lambda (n:Some((int ,
storage))) : None return let x = let x = 7 : int in (ADD(x ,
n.0) ,
ADD(n.1.0 ,
n.1.1)) : (int ,
int) in (list[] : (TO_list(operation)) ,
x)
const f0 = lambda (a:Some(string)) : None return true(unit)
const f1 = lambda (a:Some(string)) : None return true(unit)
const f2 = lambda (a:Some(string)) : None return true(unit)
const letin_nesting = lambda (_:Some(unit)) : None return let s = "test" in let p0 = (f0)@(s) in { ASSERTION(p0);
let p1 = (f1)@(s) in { ASSERTION(p1);
let p2 = (f2)@(s) in { ASSERTION(p2);
s}}}
const letin_nesting2 = lambda (x:Some(int)) : None return let y = 2 in let z = 3 in ADD(ADD(x ,
y) ,
z)
|}];
run_ligo_good ["print-ast"; contract "letin.religo"];
[%expect {|
type storage = (int ,
int)
const main : (int ,
storage) -> ((TO_list(operation)) ,
storage) = lambda (n:Some((int ,
storage))) : None return let x = let x = 7 : int in (ADD(x ,
n.0) ,
ADD(n.1.0 ,
n.1.1)) : (int ,
int) in (list[] : (TO_list(operation)) ,
x)
const f0 = lambda (a:Some(string)) : None return true(unit)
const f1 = lambda (a:Some(string)) : None return true(unit)
const f2 = lambda (a:Some(string)) : None return true(unit)
const letin_nesting = lambda (_:Some(unit)) : None return let s = "test" in let p0 = (f0)@(s) in { ASSERTION(p0);
let p1 = (f1)@(s) in { ASSERTION(p1);
let p2 = (f2)@(s) in { ASSERTION(p2);
s}}}
const letin_nesting2 = lambda (x:Some(int)) : None return let y = 2 in let z = 3 in ADD(ADD(x ,
y) ,
z)
|}];


@ -3,7 +3,7 @@ open Cli_expect
let%expect_test _ =
run_ligo_bad [ "compile-contract" ; "../../test/contracts/negative/error_function_annotation_1.mligo"; "main"];
[%expect {|
-ligo: in file "", line 0, characters 0-0. different type constructors: Expected these two constant type constructors to be the same, but they're different {"a":"unit","b":"int"}
+ligo: in file "error_function_annotation_1.mligo", line 1, characters 0-3. different type constructors: Expected these two constant type constructors to be the same, but they're different {"a":"unit","b":"int"}
If you're not sure how to fix this error, you can
@ -29,7 +29,7 @@ let%expect_test _ =
run_ligo_bad [ "compile-contract" ; "../../test/contracts/negative/error_function_annotation_3.mligo"; "f"];
[%expect {|
-ligo: in file "", line 0, characters 0-0. different kinds: {"a":"( (type_operator: list(operation)) * sum[Add -> int , Sub -> int] )","b":"sum[Add -> int , Sub -> int]"}
+ligo: in file "error_function_annotation_3.mligo", line 6, characters 0-3. different kinds: {"a":"( (type_operator: list(operation)) * sum[Add -> int , Sub -> int] )","b":"sum[Add -> int , Sub -> int]"}
If you're not sure how to fix this error, you can


@ -187,21 +187,22 @@ and field_decl = {
and type_tuple = (type_expr, comma) nsepseq par reg
and pattern =
PConstr of constr_pattern
| PUnit of the_unit reg
| PFalse of kwd_false
| PTrue of kwd_true
| PVar of variable
| PInt of (Lexer.lexeme * Z.t) reg
| PNat of (Lexer.lexeme * Z.t) reg
| PBytes of (Lexer.lexeme * Hex.t) reg
| PString of string reg
+| PVerbatim of string reg
| PWild of wild
| PList of list_pattern
| PTuple of (pattern, comma) nsepseq reg
| PPar of pattern par reg
| PRecord of field_pattern reg ne_injection reg
| PTyped of typed_pattern reg
and constr_pattern =
PNone of c_None
@ -269,8 +270,9 @@ and list_expr =
(*| Append of (expr * append * expr) reg*)
and string_expr =
Cat of cat bin_op reg
| String of string reg
+| Verbatim of string reg
and constr_expr =
ENone of c_None
@ -429,8 +431,8 @@ let pattern_to_region = function
| PTrue region | PFalse region
| PTuple {region;_} | PVar {region;_}
| PInt {region;_}
-| PString {region;_} | PWild region
-| PPar {region;_}
+| PString {region;_} | PVerbatim {region;_}
+| PWild region | PPar {region;_}
| PRecord {region; _} | PTyped {region; _}
| PNat {region; _} | PBytes {region; _}
-> region
@ -456,7 +458,7 @@ let arith_expr_to_region = function
| Nat {region; _} -> region
let string_expr_to_region = function
-String {region;_} | Cat {region;_} -> region
+Verbatim {region;_} | String {region;_} | Cat {region;_} -> region
let list_expr_to_region = function
ECons {region; _} | EListComp {region; _}


@ -78,14 +78,15 @@ type t =
(* Identifiers, labels, numbers and strings *)
| Ident of string Region.reg
| Constr of string Region.reg
| Int of (string * Z.t) Region.reg
| Nat of (string * Z.t) Region.reg
| Mutez of (string * Z.t) Region.reg
| String of string Region.reg
+| Verbatim of string Region.reg
| Bytes of (string * Hex.t) Region.reg
| Attr of string Region.reg
(* Keywords *)
@ -142,17 +143,18 @@ type sym_err = Invalid_symbol
type attr_err = Invalid_attribute
type kwd_err = Invalid_keyword
val mk_int : lexeme -> Region.t -> (token, int_err) result
val mk_nat : lexeme -> Region.t -> (token, nat_err) result
val mk_mutez : lexeme -> Region.t -> (token, int_err) result
val mk_ident : lexeme -> Region.t -> (token, ident_err) result
val mk_sym : lexeme -> Region.t -> (token, sym_err) result
val mk_kwd : lexeme -> Region.t -> (token, kwd_err) result
val mk_string : lexeme -> Region.t -> token
+val mk_verbatim : lexeme -> Region.t -> token
val mk_bytes : lexeme -> Region.t -> token
val mk_constr : lexeme -> Region.t -> token
val mk_attr : string -> lexeme -> Region.t -> (token, attr_err) result
val eof : Region.t -> token
(* Predicates *)


@ -62,14 +62,15 @@ type t =
(* Identifiers, labels, numbers and strings *)
| Ident of string Region.reg
| Constr of string Region.reg
| Int of (string * Z.t) Region.reg
| Nat of (string * Z.t) Region.reg
| Mutez of (string * Z.t) Region.reg
| String of string Region.reg
+| Verbatim of string Region.reg
| Bytes of (string * Hex.t) Region.reg
| Attr of string Region.reg
(* Keywords *)
@ -112,6 +113,8 @@ let proj_token = function
String Region.{region; value} ->
region, sprintf "String %s" value
+| Verbatim Region.{region; value} ->
+region, sprintf "Verbatim {|%s|}" value
| Bytes Region.{region; value = s,b} ->
region,
sprintf "Bytes (\"%s\", \"0x%s\")" s (Hex.show b)
@ -193,6 +196,7 @@ let to_lexeme = function
(* Literals *)
String s -> String.escaped s.Region.value
+| Verbatim v -> String.escaped v.Region.value
| Bytes b -> fst b.Region.value
| Int i
| Nat i
@ -405,6 +409,9 @@ and scan_constr region lexicon = parse
let mk_string lexeme region =
String Region.{region; value=lexeme}
+let mk_verbatim lexeme region =
+Verbatim Region.{region; value=lexeme}
let mk_bytes lexeme region =
let norm = Str.(global_replace (regexp "_") "" lexeme) in
let value = lexeme, `Hex norm

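The mk_string and mk_verbatim constructors above simply wrap the lexeme; mk_bytes additionally normalises it before pairing it with its hexadecimal payload. A standalone sketch of that normalisation, assuming only the str and hex opam libraries (the function name is made up for the example):

  (* Underscores are allowed in byte literals for readability; the token keeps
     both the original lexeme and the underscore-free hexadecimal payload. *)
  let bytes_payload (lexeme : string) : string * Hex.t =
    let norm = Str.(global_replace (regexp "_") "" lexeme) in
    lexeme, `Hex norm

  let () =
    let lexeme, hex = bytes_payload "dead_beef" in
    Printf.printf "%s -> 0x%s\n" lexeme (Hex.show hex)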
View File

@@ -5,14 +5,15 @@
 (* Literals *)

 %token <string Region.reg> String "<string>"
+%token <string Region.reg> Verbatim "<verbatim>"
 %token <(LexToken.lexeme * Hex.t) Region.reg> Bytes "<bytes>"
 %token <(string * Z.t) Region.reg> Int "<int>"
 %token <(string * Z.t) Region.reg> Nat "<nat>"
 %token <(string * Z.t) Region.reg> Mutez "<mutez>"
 %token <string Region.reg> Ident "<ident>"
 %token <string Region.reg> Constr "<constr>"
 %token <string Region.reg> Attr "<attr>"

 (* Symbols *)

View File

@ -147,8 +147,8 @@ cartesian:
in TProd {region; value} } in TProd {region; value} }
core_type: core_type:
type_name { TVar $1 } type_name { TVar $1 }
| par(type_expr) { TPar $1 } | par(type_expr) { TPar $1 }
| "<string>" { TString $1 } | "<string>" { TString $1 }
| module_name "." type_name { | module_name "." type_name {
let module_name = $1.value in let module_name = $1.value in
@@ -287,6 +287,7 @@ core_pattern:
 | "<nat>" { PNat $1 }
 | "<bytes>" { PBytes $1 }
 | "<string>" { PString $1 }
+| "<verbatim>" { PVerbatim $1 }
 | unit { PUnit $1 }
 | "false" { PFalse $1 }
 | "true" { PTrue $1 }
@@ -573,6 +574,7 @@ core_expr:
 | "<ident>" | module_field { EVar $1 }
 | projection { EProj $1 }
 | "<string>" { EString (String $1) }
+| "<verbatim>" { EString (Verbatim $1) }
 | unit { EUnit $1 }
 | "false" { ELogic (BoolExpr (False $1)) }
 | "true" { ELogic (BoolExpr (True $1)) }
@@ -656,8 +658,12 @@ field_assignment:
                      field_expr = $3}
     in {region; value} }

+path :
+  "<ident>"  { Name $1 }
+| projection { Path $1 }
+
 sequence:
-  "begin" sep_or_term_list(expr,";")? "end" {
+  "begin" series? "end" {
     let region   = cover $1 $3
     and compound = BeginEnd ($1,$3) in
     let elements, terminator =
@@ -668,6 +674,36 @@ sequence:
     let value = {compound; elements; terminator}
     in {region; value} }

-path :
-  "<ident>"  { Name $1 }
-| projection { Path $1 }
+series:
+  last_expr {
+    let expr, term = $1 in (expr, []), term
+  }
+| seq_expr ";" series {
+    let rest, term = $3 in
+    let seq = Utils.nsepseq_cons $1 $2 rest
+    in seq, term }
+
+last_expr:
+  seq_expr ";"?
+| fun_expr(seq_expr) ";"?
+| match_expr(seq_expr) ";"? {
+    $1,$2
+  }
+| "let" ioption("rec") let_binding seq(Attr) "in" series {
+    let seq, term = $6 in
+    let stop = nsepseq_to_region expr_to_region seq in
+    let region = cover $1 stop in
+    let compound = BeginEnd (Region.ghost, Region.ghost) in
+    let elements = Some seq in
+    let value = {compound; elements; terminator=term} in
+    let body = ESeq {region; value} in
+    let value = {kwd_let = $1;
+                 kwd_rec = $2;
+                 binding = $3;
+                 attributes = $4;
+                 kwd_in = $5;
+                 body}
+    in ELetIn {region; value}, term }
+
+seq_expr:
+  disj_expr_level | if_then_else (seq_expr) { $1 }

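The series and last_expr rules above are what allow a let ... in binding to occur in the middle of a begin ... end sequence, with everything after it becoming the body of the resulting ELetIn node. An illustrative snippet in plain OCaml, which shares this concrete syntax (ignore and the names are only for the example):

  let f (b : bool) : int =
    begin
      ignore b;     (* ordinary sequence element *)
      let x = 1 in  (* binding allowed mid-sequence *)
      ignore x;
      x + 1         (* the rest of the sequence is the let's body *)
    end

  let () = assert (f true = 2)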
View File

@@ -97,7 +97,13 @@ let print_uident state {region; value} =
 let print_string state {region; value} =
   let line =
-    sprintf "%s: String %s\n"
+    sprintf "%s: String %S\n"
             (compact state region) value
   in Buffer.add_string state#buffer line
+
+let print_verbatim state {region; value} =
+  let line =
+    sprintf "%s: Verbatim {|%s|}\n"
+            (compact state region) value
+  in Buffer.add_string state#buffer line
@ -279,6 +285,7 @@ and print_pattern state = function
| PNat i -> print_nat state i | PNat i -> print_nat state i
| PBytes b -> print_bytes state b | PBytes b -> print_bytes state b
| PString s -> print_string state s | PString s -> print_string state s
| PVerbatim v -> print_verbatim state v
| PWild wild -> print_token state wild "_" | PWild wild -> print_token state wild "_"
| PPar {value={lpar;inside=p;rpar}; _} -> | PPar {value={lpar;inside=p;rpar}; _} ->
print_token state lpar "("; print_token state lpar "(";
@ -458,6 +465,8 @@ and print_string_expr state = function
print_expr state arg2 print_expr state arg2
| String s -> | String s ->
print_string state s print_string state s
| Verbatim v ->
print_verbatim state v
and print_logic_expr state = function and print_logic_expr state = function
BoolExpr e -> print_bool_expr state e BoolExpr e -> print_bool_expr state e
@@ -606,7 +615,15 @@ let pp_node state name =
   let node = sprintf "%s%s\n" state#pad_path name
   in Buffer.add_string state#buffer node

-let pp_string state = pp_ident state
+let pp_string state {value=name; region} =
+  let reg = compact state region in
+  let node = sprintf "%s%S (%s)\n" state#pad_path name reg
+  in Buffer.add_string state#buffer node
+
+let pp_verbatim state {value=name; region} =
+  let reg = compact state region in
+  let node = sprintf "%s{|%s|} (%s)\n" state#pad_path name reg
+  in Buffer.add_string state#buffer node

 let pp_loc_node state name region =
   pp_ident state {value=name; region}
@ -692,6 +709,9 @@ and pp_pattern state = function
| PString s -> | PString s ->
pp_node state "PString"; pp_node state "PString";
pp_string (state#pad 1 0) s pp_string (state#pad 1 0) s
| PVerbatim v ->
pp_node state "PVerbatim";
pp_verbatim (state#pad 1 0) v
| PUnit {region; _} -> | PUnit {region; _} ->
pp_loc_node state "PUnit" region pp_loc_node state "PUnit" region
| PFalse region -> | PFalse region ->
@ -991,6 +1011,9 @@ and pp_string_expr state = function
| String s -> | String s ->
pp_node state "String"; pp_node state "String";
pp_string (state#pad 1 0) s pp_string (state#pad 1 0) s
| Verbatim v ->
pp_node state "Verbatim";
pp_string (state#pad 1 0) v
and pp_arith_expr state = function and pp_arith_expr state = function
Add {value; region} -> Add {value; region} ->

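The only change to print_string above is the %s to %S switch in the format string: %S prints the value quoted and escaped, whereas the new Verbatim printer deliberately shows the raw contents between {| and |}. A two-line illustration in plain OCaml:

  let () =
    Printf.printf "%s: String %S\n"       "ghost" "a\nb";  (* escapes the newline *)
    Printf.printf "%s: Verbatim {|%s|}\n" "ghost" "a\nb"   (* prints the newline as is *)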
View File

@@ -33,7 +33,9 @@ and pp_attributes = function
 and pp_ident {value; _} = string value

-and pp_string s = pp_ident s
+and pp_string s = string "\"" ^^ pp_ident s ^^ string "\""
+
+and pp_verbatim s = string "{|" ^^ pp_ident s ^^ string "|}"

 and pp_let_binding (binding : let_binding) =
   let {binders; lhs_type; let_rhs; _} = binding in
@@ -57,6 +59,7 @@ and pp_pattern = function
 | PNat n -> pp_nat n
 | PBytes b -> pp_bytes b
 | PString s -> pp_string s
+| PVerbatim s -> pp_verbatim s
 | PWild _ -> string "_"
 | PList l -> pp_plist l
 | PTuple t -> pp_ptuple t
@@ -226,6 +229,7 @@ and pp_mutez {value; _} =
 and pp_string_expr = function
   Cat e -> pp_bin_op "^" e
 | String e -> pp_string e
+| Verbatim e -> pp_verbatim e

 and pp_list_expr = function
   ECons e -> pp_bin_op "::" e

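These printers are built on the pprint document combinators, so the two cases differ only in the delimiters wrapped around the payload. A self-contained sketch, assuming the pprint opam library (the bare-string arguments stand in for the region-annotated values used above):

  open PPrint

  (* A standard string is re-quoted; a verbatim string keeps its {| ... |} delimiters. *)
  let pp_string s   = string "\"" ^^ string s ^^ string "\""
  let pp_verbatim s = string "{|" ^^ string s ^^ string "|}"

  let () =
    let doc = pp_string "abc" ^^ hardline ^^ pp_verbatim "abc" ^^ hardline in
    ToChannel.pretty 0.8 80 stdout doc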
View File

@@ -77,7 +77,8 @@ let rec vars_of_pattern env = function
   PConstr p -> vars_of_pconstr env p
 | PUnit _ | PFalse _ | PTrue _
 | PInt _ | PNat _ | PBytes _
-| PString _ | PWild _ -> env
+| PString _ | PVerbatim _
+| PWild _ -> env
 | PVar var ->
     if VarSet.mem var env then
       raise (Error (Non_linear_pattern var))

File diff suppressed because it is too large.

View File

@@ -555,8 +555,9 @@ and arith_expr =
 | Mutez of (Lexer.lexeme * Z.t) reg

 and string_expr =
   Cat of cat bin_op reg
 | String of Lexer.lexeme reg
+| Verbatim of Lexer.lexeme reg

 and list_expr =
   ECons of cons bin_op reg
@@ -726,8 +727,9 @@ and arith_expr_to_region = function
 | Mutez {region; _} -> region

 and string_expr_to_region = function
   Cat {region; _}
-| String {region; _} -> region
+| String {region; _}
+| Verbatim {region; _} -> region

 and annot_expr_to_region {region; _} = region

View File

@ -36,13 +36,14 @@ type attribute = {
type t = type t =
(* Literals *) (* Literals *)
String of lexeme Region.reg String of lexeme Region.reg
| Bytes of (lexeme * Hex.t) Region.reg | Verbatim of lexeme Region.reg
| Int of (lexeme * Z.t) Region.reg | Bytes of (lexeme * Hex.t) Region.reg
| Nat of (lexeme * Z.t) Region.reg | Int of (lexeme * Z.t) Region.reg
| Mutez of (lexeme * Z.t) Region.reg | Nat of (lexeme * Z.t) Region.reg
| Ident of lexeme Region.reg | Mutez of (lexeme * Z.t) Region.reg
| Constr of lexeme Region.reg | Ident of lexeme Region.reg
| Constr of lexeme Region.reg
(* Symbols *) (* Symbols *)
@ -149,17 +150,18 @@ type sym_err = Invalid_symbol
type attr_err = Invalid_attribute type attr_err = Invalid_attribute
type kwd_err = Invalid_keyword type kwd_err = Invalid_keyword
val mk_int : lexeme -> Region.t -> (token, int_err) result val mk_int : lexeme -> Region.t -> (token, int_err) result
val mk_nat : lexeme -> Region.t -> (token, nat_err) result val mk_nat : lexeme -> Region.t -> (token, nat_err) result
val mk_mutez : lexeme -> Region.t -> (token, int_err) result val mk_mutez : lexeme -> Region.t -> (token, int_err) result
val mk_ident : lexeme -> Region.t -> (token, ident_err) result val mk_ident : lexeme -> Region.t -> (token, ident_err) result
val mk_sym : lexeme -> Region.t -> (token, sym_err) result val mk_sym : lexeme -> Region.t -> (token, sym_err) result
val mk_kwd : lexeme -> Region.t -> (token, kwd_err) result val mk_kwd : lexeme -> Region.t -> (token, kwd_err) result
val mk_string : lexeme -> Region.t -> token val mk_string : lexeme -> Region.t -> token
val mk_bytes : lexeme -> Region.t -> token val mk_verbatim : lexeme -> Region.t -> token
val mk_constr : lexeme -> Region.t -> token val mk_bytes : lexeme -> Region.t -> token
val mk_attr : string -> lexeme -> Region.t -> (token, attr_err) result val mk_constr : lexeme -> Region.t -> token
val eof : Region.t -> token val mk_attr : string -> lexeme -> Region.t -> (token, attr_err) result
val eof : Region.t -> token
(* Predicates *) (* Predicates *)

View File

@ -24,13 +24,14 @@ type attribute = {
type t = type t =
(* Literals *) (* Literals *)
String of lexeme Region.reg String of lexeme Region.reg
| Bytes of (lexeme * Hex.t) Region.reg | Verbatim of lexeme Region.reg
| Int of (lexeme * Z.t) Region.reg | Bytes of (lexeme * Hex.t) Region.reg
| Nat of (lexeme * Z.t) Region.reg | Int of (lexeme * Z.t) Region.reg
| Mutez of (lexeme * Z.t) Region.reg | Nat of (lexeme * Z.t) Region.reg
| Ident of lexeme Region.reg | Mutez of (lexeme * Z.t) Region.reg
| Constr of lexeme Region.reg | Ident of lexeme Region.reg
| Constr of lexeme Region.reg
(* Symbols *) (* Symbols *)
@ -121,7 +122,11 @@ let proj_token = function
(* Literals *) (* Literals *)
String Region.{region; value} -> String Region.{region; value} ->
region, sprintf "String %s" value region, sprintf "String %S" value
| Verbatim Region.{region; value} ->
region, sprintf "Verbatim {|%s|}" value
| Bytes Region.{region; value = s,b} -> | Bytes Region.{region; value = s,b} ->
region, region,
sprintf "Bytes (\"%s\", \"0x%s\")" s (Hex.show b) sprintf "Bytes (\"%s\", \"0x%s\")" s (Hex.show b)
@ -221,6 +226,7 @@ let to_lexeme = function
(* Literals *) (* Literals *)
String s -> String.escaped s.Region.value String s -> String.escaped s.Region.value
| Verbatim v -> String.escaped v.Region.value
| Bytes b -> fst b.Region.value | Bytes b -> fst b.Region.value
| Int i | Int i
| Nat i | Nat i
@ -442,6 +448,8 @@ and scan_constr region lexicon = parse
let mk_string lexeme region = String Region.{region; value=lexeme} let mk_string lexeme region = String Region.{region; value=lexeme}
let mk_verbatim lexeme region = Verbatim Region.{region; value=lexeme}
let mk_bytes lexeme region = let mk_bytes lexeme region =
let norm = Str.(global_replace (regexp "_") "" lexeme) in let norm = Str.(global_replace (regexp "_") "" lexeme) in
let value = lexeme, `Hex norm let value = lexeme, `Hex norm

View File

@ -5,13 +5,14 @@
(* Literals *) (* Literals *)
%token <LexToken.lexeme Region.reg> String "<string>" %token <LexToken.lexeme Region.reg> String "<string>"
%token <(LexToken.lexeme * Hex.t) Region.reg> Bytes "<bytes>" %token <LexToken.lexeme Region.reg> Verbatim "<verbatim>"
%token <(LexToken.lexeme * Z.t) Region.reg> Int "<int>" %token <(LexToken.lexeme * Hex.t) Region.reg> Bytes "<bytes>"
%token <(LexToken.lexeme * Z.t) Region.reg> Nat "<nat>" %token <(LexToken.lexeme * Z.t) Region.reg> Int "<int>"
%token <(LexToken.lexeme * Z.t) Region.reg> Mutez "<mutez>" %token <(LexToken.lexeme * Z.t) Region.reg> Nat "<nat>"
%token <LexToken.lexeme Region.reg> Ident "<ident>" %token <(LexToken.lexeme * Z.t) Region.reg> Mutez "<mutez>"
%token <LexToken.lexeme Region.reg> Constr "<constr>" %token <LexToken.lexeme Region.reg> Ident "<ident>"
%token <LexToken.lexeme Region.reg> Constr "<constr>"
(* Symbols *) (* Symbols *)

View File

@@ -849,6 +849,7 @@ core_expr:
 | "<mutez>" { EArith (Mutez $1) }
 | "<ident>" | module_field { EVar $1 }
 | "<string>" { EString (String $1) }
+| "<verbatim>" { EString (Verbatim $1) }
 | "<bytes>" { EBytes $1 }
 | "False" { ELogic (BoolExpr (False $1)) }
 | "True" { ELogic (BoolExpr (True $1)) }

View File

@ -592,6 +592,8 @@ and print_string_expr state = function
print_expr state arg2 print_expr state arg2
| String s -> | String s ->
print_string state s print_string state s
| Verbatim v ->
print_string state v
and print_list_expr state = function and print_list_expr state = function
ECons {value = {arg1; op; arg2}; _} -> ECons {value = {arg1; op; arg2}; _} ->
@ -840,7 +842,15 @@ let pp_node state name =
let node = sprintf "%s%s\n" state#pad_path name let node = sprintf "%s%s\n" state#pad_path name
in Buffer.add_string state#buffer node in Buffer.add_string state#buffer node
let pp_string state = pp_ident state let pp_string state {value=name; region} =
let reg = compact state region in
let node = sprintf "%s%S (%s)\n" state#pad_path name reg
in Buffer.add_string state#buffer node
let pp_verbatim state {value=name; region} =
let reg = compact state region in
let node = sprintf "%s{|%s|} (%s)\n" state#pad_path name reg
in Buffer.add_string state#buffer node
let pp_loc_node state name region = let pp_loc_node state name region =
pp_ident state {value=name; region} pp_ident state {value=name; region}
@ -1572,6 +1582,9 @@ and pp_string_expr state = function
| String s -> | String s ->
pp_node state "String"; pp_node state "String";
pp_string (state#pad 1 0) s pp_string (state#pad 1 0) s
| Verbatim v ->
pp_node state "Verbatim";
pp_verbatim (state#pad 1 0) v
and pp_annotated state (expr, t_expr) = and pp_annotated state (expr, t_expr) =
pp_expr (state#pad 2 0) expr; pp_expr (state#pad 2 0) expr;

View File

@ -8,6 +8,7 @@ module Region = Simple_utils.Region
module ParErr = Parser_reasonligo.ParErr module ParErr = Parser_reasonligo.ParErr
module SyntaxError = Parser_reasonligo.SyntaxError module SyntaxError = Parser_reasonligo.SyntaxError
module SSet = Set.Make (String) module SSet = Set.Make (String)
module Pretty = Parser_cameligo.Pretty
(* Mock IOs TODO: Fill them with CLI options *) (* Mock IOs TODO: Fill them with CLI options *)

View File

@ -81,14 +81,15 @@ type t =
(* Identifiers, labels, numbers and strings *) (* Identifiers, labels, numbers and strings *)
| Ident of string Region.reg | Ident of string Region.reg
| Constr of string Region.reg | Constr of string Region.reg
| Int of (string * Z.t) Region.reg | Int of (string * Z.t) Region.reg
| Nat of (string * Z.t) Region.reg | Nat of (string * Z.t) Region.reg
| Mutez of (string * Z.t) Region.reg | Mutez of (string * Z.t) Region.reg
| String of string Region.reg | String of string Region.reg
| Bytes of (string * Hex.t) Region.reg | Verbatim of string Region.reg
| Attr of string Region.reg | Bytes of (string * Hex.t) Region.reg
| Attr of string Region.reg
(* Keywords *) (* Keywords *)
@ -141,17 +142,18 @@ type sym_err = Invalid_symbol
type attr_err = Invalid_attribute type attr_err = Invalid_attribute
type kwd_err = Invalid_keyword type kwd_err = Invalid_keyword
val mk_int : lexeme -> Region.t -> (token, int_err) result val mk_int : lexeme -> Region.t -> (token, int_err) result
val mk_nat : lexeme -> Region.t -> (token, nat_err) result val mk_nat : lexeme -> Region.t -> (token, nat_err) result
val mk_mutez : lexeme -> Region.t -> (token, int_err) result val mk_mutez : lexeme -> Region.t -> (token, int_err) result
val mk_ident : lexeme -> Region.t -> (token, ident_err) result val mk_ident : lexeme -> Region.t -> (token, ident_err) result
val mk_attr : string -> lexeme -> Region.t -> (token, attr_err) result val mk_attr : string -> lexeme -> Region.t -> (token, attr_err) result
val mk_sym : lexeme -> Region.t -> (token, sym_err) result val mk_sym : lexeme -> Region.t -> (token, sym_err) result
val mk_kwd : lexeme -> Region.t -> (token, kwd_err) result val mk_kwd : lexeme -> Region.t -> (token, kwd_err) result
val mk_string : lexeme -> Region.t -> token val mk_string : lexeme -> Region.t -> token
val mk_bytes : lexeme -> Region.t -> token val mk_verbatim : lexeme -> Region.t -> token
val mk_constr : lexeme -> Region.t -> token val mk_bytes : lexeme -> Region.t -> token
val eof : Region.t -> token val mk_constr : lexeme -> Region.t -> token
val eof : Region.t -> token
(* Predicates *) (* Predicates *)

View File

@ -67,14 +67,15 @@ type t =
(* Identifiers, labels, numbers and strings *) (* Identifiers, labels, numbers and strings *)
| Ident of string Region.reg | Ident of string Region.reg
| Constr of string Region.reg | Constr of string Region.reg
| Int of (string * Z.t) Region.reg | Int of (string * Z.t) Region.reg
| Nat of (string * Z.t) Region.reg | Nat of (string * Z.t) Region.reg
| Mutez of (string * Z.t) Region.reg | Mutez of (string * Z.t) Region.reg
| String of string Region.reg | String of string Region.reg
| Bytes of (string * Hex.t) Region.reg | Verbatim of string Region.reg
| Attr of string Region.reg | Bytes of (string * Hex.t) Region.reg
| Attr of string Region.reg
(* Keywords *) (* Keywords *)
@ -108,6 +109,8 @@ let proj_token = function
String Region.{region; value} -> String Region.{region; value} ->
region, sprintf "String %s" value region, sprintf "String %s" value
| Verbatim Region.{region; value} ->
region, sprintf "Verbatim {|%s|}" value
| Bytes Region.{region; value = s,b} -> | Bytes Region.{region; value = s,b} ->
region, region,
sprintf "Bytes (\"%s\", \"0x%s\")" s (Hex.show b) sprintf "Bytes (\"%s\", \"0x%s\")" s (Hex.show b)
@ -172,6 +175,7 @@ let to_lexeme = function
(* Literals *) (* Literals *)
String s -> s.Region.value String s -> s.Region.value
| Verbatim v -> String.escaped v.Region.value
| Bytes b -> fst b.Region.value | Bytes b -> fst b.Region.value
| Int i | Int i
| Nat i | Nat i
@ -385,6 +389,8 @@ let line_comment_start lexeme = lexeme = "//"
let mk_string lexeme region = String Region.{region; value=lexeme} let mk_string lexeme region = String Region.{region; value=lexeme}
let mk_verbatim lexeme region = Verbatim Region.{region; value=lexeme}
let mk_bytes lexeme region = let mk_bytes lexeme region =
let norm = Str.(global_replace (regexp "_") "" lexeme) in let norm = Str.(global_replace (regexp "_") "" lexeme) in
let value = lexeme, `Hex norm let value = lexeme, `Hex norm

View File

@ -5,14 +5,15 @@
(* Literals *) (* Literals *)
%token <string Region.reg> String "<string>" %token <string Region.reg> String "<string>"
%token <(LexToken.lexeme * Hex.t) Region.reg> Bytes "<bytes>" %token <string Region.reg> Verbatim "<verbatim>"
%token <(string * Z.t) Region.reg> Int "<int>" %token <(LexToken.lexeme * Hex.t) Region.reg> Bytes "<bytes>"
%token <(string * Z.t) Region.reg> Nat "<nat>" %token <(string * Z.t) Region.reg> Int "<int>"
%token <(string * Z.t) Region.reg> Mutez "<mutez>" %token <(string * Z.t) Region.reg> Nat "<nat>"
%token <string Region.reg> Ident "<ident>" %token <(string * Z.t) Region.reg> Mutez "<mutez>"
%token <string Region.reg> Constr "<constr>" %token <string Region.reg> Ident "<ident>"
%token <string Region.reg> Attr "<attr>" %token <string Region.reg> Constr "<constr>"
%token <string Region.reg> Attr "<attr>"
(* Symbols *) (* Symbols *)

View File

@ -8,20 +8,6 @@ open Region
module AST = Parser_cameligo.AST module AST = Parser_cameligo.AST
open! AST open! AST
type 'a sequence_elements = {
s_elts : ('a, semi) Utils.nsepseq;
s_terminator : semi option
}
type 'a record_elements = {
r_elts : (field_assign reg, semi) Utils.nsepseq;
r_terminator : semi option
}
type 'a sequence_or_record =
PaSequence of 'a sequence_elements
| PaRecord of 'a record_elements
let (<@) f g x = f (g x) let (<@) f g x = f (g x)
(* (*
@ -58,7 +44,7 @@ let wild_error e =
%type <AST.t> contract %type <AST.t> contract
%type <AST.expr> interactive_expr %type <AST.expr> interactive_expr
(* Solves a shift/reduce problem that happens with record and (* Solves a shift/reduce problem that happens with records and
sequences. To elaborate: [sequence_or_record_in] sequences. To elaborate: [sequence_or_record_in]
can be reduced to [expr -> Ident], but also to can be reduced to [expr -> Ident], but also to
[field_assignment -> Ident]. [field_assignment -> Ident].
@ -205,9 +191,9 @@ type_args:
| fun_type { $1, [] } | fun_type { $1, [] }
core_type: core_type:
type_name { TVar $1 } type_name { TVar $1 }
| "<string>" { TString $1 } | "<string>" { TString $1 }
| par(fun_type) { TPar $1 } | par(fun_type) { TPar $1 }
| module_name "." type_name { | module_name "." type_name {
let module_name = $1.value in let module_name = $1.value in
let type_name = $3.value in let type_name = $3.value in
@ -264,8 +250,11 @@ let_declaration:
let kwd_rec = $3 in let kwd_rec = $3 in
let binding = $4 in let binding = $4 in
let value = kwd_let, kwd_rec, binding, attributes in let value = kwd_let, kwd_rec, binding, attributes in
let stop = expr_to_region binding.let_rhs in let start = match $1 with
let region = cover $2 stop [] -> $2
| l -> last (fun x -> x.region) l
and stop = expr_to_region binding.let_rhs in
let region = cover start stop
in {region; value} } in {region; value} }
let_binding: let_binding:
@ -354,19 +343,20 @@ sub_pattern:
| core_pattern { $1 } | core_pattern { $1 }
core_pattern: core_pattern:
"<ident>" { PVar $1 } "<ident>" { PVar $1 }
| "_" { PWild $1 } | "_" { PWild $1 }
| unit { PUnit $1 } | unit { PUnit $1 }
| "<int>" { PInt $1 } | "<int>" { PInt $1 }
| "<nat>" { PNat $1 } | "<nat>" { PNat $1 }
| "<bytes>" { PBytes $1 } | "<bytes>" { PBytes $1 }
| "true" { PTrue $1 } | "true" { PTrue $1 }
| "false" { PFalse $1 } | "false" { PFalse $1 }
| "<string>" { PString $1 } | "<string>" { PString $1 }
| par(ptuple) { PPar $1 } | "<verbatim>" { PVerbatim $1 }
| par(ptuple) { PPar $1 }
| list__(sub_pattern) { PList (PListComp $1) } | list__(sub_pattern) { PList (PListComp $1) }
| constr_pattern { PConstr $1 } | constr_pattern { PConstr $1 }
| record_pattern { PRecord $1 } | record_pattern { PRecord $1 }
record_pattern: record_pattern:
"{" sep_or_term_list(field_pattern,",") "}" { "{" sep_or_term_list(field_pattern,",") "}" {
@ -416,15 +406,12 @@ interactive_expr:
expr_with_let_expr EOF { $1 } expr_with_let_expr EOF { $1 }
expr: expr:
base_cond__open(expr) | switch_expr(base_cond) { $1 } base_cond | switch_expr(base_cond) { $1 }
base_cond__open(x):
base_expr(x) | conditional(expr_with_let_expr) {
wild_error $1;
$1 }
base_cond: base_cond:
base_cond__open(base_cond) { $1 } base_expr | conditional(expr_with_let_expr) {
wild_error $1;
$1 }
type_expr_simple_args: type_expr_simple_args:
par(nsepseq(type_expr_simple, ",")) { $1 } par(nsepseq(type_expr_simple, ",")) { $1 }
@ -448,8 +435,8 @@ type_expr_simple:
type_annotation_simple: type_annotation_simple:
":" type_expr_simple { $1,$2 } ":" type_expr_simple { $1,$2 }
fun_expr: fun_expr(right_expr):
disj_expr_level "=>" expr { disj_expr_level "=>" right_expr {
let arrow, body = $2, $3 let arrow, body = $2, $3
and kwd_fun = ghost in and kwd_fun = ghost in
let start = expr_to_region $1 let start = expr_to_region $1
@ -570,8 +557,8 @@ fun_expr:
} }
in EFun {region; value=f} } in EFun {region; value=f} }
base_expr(right_expr): base_expr:
disj_expr_level | fun_expr { $1 } disj_expr_level | fun_expr(expr) { $1 }
conditional(right_expr): conditional(right_expr):
if_then_else(right_expr) | if_then(right_expr) { $1 } if_then_else(right_expr) | if_then(right_expr) { $1 }
@ -605,7 +592,7 @@ if_then_else(right_expr):
in ECond {region; value} } in ECond {region; value} }
base_if_then_else__open(x): base_if_then_else__open(x):
base_expr(x) | if_then_else(x) { $1 } base_expr | if_then_else(x) { $1 }
base_if_then_else: base_if_then_else:
base_if_then_else__open(base_if_then_else) { $1 } base_if_then_else__open(base_if_then_else) { $1 }
@ -800,6 +787,7 @@ common_expr:
| "_" { EVar {value = "_"; region = $1} } | "_" { EVar {value = "_"; region = $1} }
| update_record { EUpdate $1 } | update_record { EUpdate $1 }
| "<string>" { EString (String $1) } | "<string>" { EString (String $1) }
| "<verbatim>" { EString (Verbatim $1) }
| unit { EUnit $1 } | unit { EUnit $1 }
| "false" { ELogic (BoolExpr (False $1)) } | "false" { ELogic (BoolExpr (False $1)) }
| "true" { ELogic (BoolExpr (True $1)) } | "true" { ELogic (BoolExpr (True $1)) }
@ -836,9 +824,10 @@ list_or_spread:
core_expr: core_expr:
common_expr common_expr
| list_or_spread | list_or_spread { $1 }
| sequence_or_record { $1 } | sequence { ESeq $1 }
| par(expr) { EPar $1 } | record { ERecord $1 }
| par(expr) { EPar $1 }
module_field: module_field:
module_name "." module_fun { module_name "." module_fun {
@ -897,67 +886,104 @@ update_record:
let region = cover $1 $6 in let region = cover $1 $6 in
let ne_elements, terminator = $5 in let ne_elements, terminator = $5 in
let value = { let value = {
lbrace = $1; lbrace = $1;
record = $3; record = $3;
kwd_with = $4; kwd_with = $4;
updates = { value = {compound = Braces($1,$6); updates = {value = {compound = Braces($1,$6);
ne_elements; ne_elements;
terminator}; terminator};
region = cover $4 $6}; region = cover $4 $6};
rbrace = $6} rbrace = $6}
in {region; value} } in {region; value} }
expr_with_let_expr: expr_with_let_expr:
expr { $1 } expr
| let_expr(expr_with_let_expr) { $1 } | let_expr(expr_with_let_expr) { $1 }
exprs:
expr_with_let_expr ";"? {
(($1, []), $2)
}
| expr_with_let_expr ";" exprs {
let rec fix_let_in a b c =
match a with
| ELetIn {value = {body; _} as v; _} -> (
let end_ = (nsepseq_to_region expr_to_region (fst c)) in
let sequence_region =
cover (expr_to_region body) end_
in
let val_ =
match body with
| ELetIn _ -> fst (fix_let_in body b c)
| e -> Utils.nsepseq_cons e b (fst c)
in
let sequence = ESeq {
value = {
compound = BeginEnd(Region.ghost, Region.ghost);
elements = Some val_;
terminator = (snd c)
};
region = sequence_region
}
in
let region =
cover (expr_to_region a) end_
in
let let_in =
ELetIn {
value = {
v with
body = sequence
};
region
}
in
((let_in, []), snd c)
)
| e -> Utils.nsepseq_cons e b (fst c), None
in
fix_let_in $1 $2 $3
}
more_field_assignments: more_field_assignments:
"," sep_or_term_list(field_assignment_punning,",") { "," sep_or_term_list(field_assignment_punning,",") {
let elts, _region = $2
in $1, elts }
sequence:
"{" exprs "}" {
let elts, _region = $2 in let elts, _region = $2 in
$1, elts let compound = Braces ($1, $3) in
} let value = {compound;
elements = Some elts;
terminator = None} in
let region = cover $1 $3
in {region; value} }
record:
"{" field_assignment more_field_assignments? "}" {
let compound = Braces ($1,$4) in
let region = cover $1 $4 in
sequence_or_record_in: match $3 with
sep_or_term_list(expr_with_let_expr,";") {
let elts, _region = $1 in
PaSequence {s_elts = elts; s_terminator=None}
}
| field_assignment more_field_assignments? {
match $2 with
| Some (comma, elts) -> | Some (comma, elts) ->
let r_elts = Utils.nsepseq_cons $1 comma elts in let ne_elements = Utils.nsepseq_cons $2 comma elts in
PaRecord {r_elts; r_terminator = None} { value = {compound; ne_elements; terminator = None}; region }
| None -> | None ->
PaRecord {r_elts = ($1, []); r_terminator = None} let ne_elements = ($2,[]) in
{ value = {compound; ne_elements; terminator = None}; region }
} }
| field_name more_field_assignments { | "{" field_name more_field_assignments "}" {
let value = { let value = {
field_name = $1; field_name = $2;
assignment = ghost; assignment = ghost;
field_expr = EVar $1 } field_expr = EVar $2 } in
in let field_name = {$2 with value} in
let field_name = {$1 with value} in let comma, elts = $3 in
let (comma, elts) = $2 in let ne_elements = Utils.nsepseq_cons field_name comma elts in
let r_elts = Utils.nsepseq_cons field_name comma elts in let compound = Braces ($1,$4) in
PaRecord {r_elts; r_terminator = None} let region = cover $1 $4 in
} {value = {compound; ne_elements; terminator = None}; region} }
sequence_or_record:
"{" sequence_or_record_in "}" {
let compound = Braces ($1,$3) in
let region = cover $1 $3 in
match $2 with
PaSequence s ->
let value = {compound;
elements = Some s.s_elts;
terminator = s.s_terminator}
in ESeq {region; value}
| PaRecord r ->
let value = {compound;
ne_elements = r.r_elts;
terminator = r.r_terminator}
in ERecord {region; value}}
field_assignment_punning: field_assignment_punning:
(* This can only happen with multiple fields - (* This can only happen with multiple fields -
@ -967,12 +993,9 @@ field_assignment_punning:
field_name = $1; field_name = $1;
assignment = ghost; assignment = ghost;
field_expr = EVar $1 } field_expr = EVar $1 }
in in {$1 with value}
{$1 with value}
} }
| field_assignment { | field_assignment { $1 }
$1
}
field_assignment: field_assignment:
field_name ":" expr { field_name ":" expr {

File diff suppressed because it is too large.

View File

@ -69,16 +69,17 @@ module type TOKEN =
(* Injections *) (* Injections *)
val mk_int : lexeme -> Region.t -> (token, int_err) result val mk_int : lexeme -> Region.t -> (token, int_err) result
val mk_nat : lexeme -> Region.t -> (token, nat_err) result val mk_nat : lexeme -> Region.t -> (token, nat_err) result
val mk_mutez : lexeme -> Region.t -> (token, int_err) result val mk_mutez : lexeme -> Region.t -> (token, int_err) result
val mk_ident : lexeme -> Region.t -> (token, ident_err) result val mk_ident : lexeme -> Region.t -> (token, ident_err) result
val mk_sym : lexeme -> Region.t -> (token, sym_err) result val mk_sym : lexeme -> Region.t -> (token, sym_err) result
val mk_string : lexeme -> Region.t -> token val mk_string : lexeme -> Region.t -> token
val mk_bytes : lexeme -> Region.t -> token val mk_verbatim : lexeme -> Region.t -> token
val mk_constr : lexeme -> Region.t -> token val mk_bytes : lexeme -> Region.t -> token
val mk_attr : string -> lexeme -> Region.t -> (token, attr_err) result val mk_constr : lexeme -> Region.t -> token
val eof : Region.t -> token val mk_attr : string -> lexeme -> Region.t -> (token, attr_err) result
val eof : Region.t -> token
(* Predicates *) (* Predicates *)

View File

@ -33,16 +33,17 @@ module type TOKEN =
(* Injections *) (* Injections *)
val mk_int : lexeme -> Region.t -> (token, int_err) result val mk_int : lexeme -> Region.t -> (token, int_err) result
val mk_nat : lexeme -> Region.t -> (token, nat_err) result val mk_nat : lexeme -> Region.t -> (token, nat_err) result
val mk_mutez : lexeme -> Region.t -> (token, int_err) result val mk_mutez : lexeme -> Region.t -> (token, int_err) result
val mk_ident : lexeme -> Region.t -> (token, ident_err) result val mk_ident : lexeme -> Region.t -> (token, ident_err) result
val mk_sym : lexeme -> Region.t -> (token, sym_err) result val mk_sym : lexeme -> Region.t -> (token, sym_err) result
val mk_string : lexeme -> Region.t -> token val mk_string : lexeme -> Region.t -> token
val mk_bytes : lexeme -> Region.t -> token val mk_verbatim : lexeme -> Region.t -> token
val mk_constr : lexeme -> Region.t -> token val mk_bytes : lexeme -> Region.t -> token
val mk_attr : string -> lexeme -> Region.t -> (token, attr_err) result val mk_constr : lexeme -> Region.t -> token
val eof : Region.t -> token val mk_attr : string -> lexeme -> Region.t -> (token, attr_err) result
val eof : Region.t -> token
(* Predicates *) (* Predicates *)
@@ -111,6 +112,7 @@ module Make (Token : TOKEN) : (S with module Token = Token) =
   | Unexpected_character of char
   | Undefined_escape_sequence
   | Unterminated_string
+  | Unterminated_verbatim
   | Unterminated_comment of string
   | Non_canonical_zero
   | Broken_string
@@ -133,6 +135,9 @@ module Make (Token : TOKEN) : (S with module Token = Token) =
   | Unterminated_string ->
       "Unterminated string.\n\
        Hint: Close with double quotes."
+  | Unterminated_verbatim ->
+      "Unterminated verbatim.\n\
+       Hint: Close with \"|}\"."
   | Unterminated_comment ending ->
       sprintf "Unterminated comment.\n\
                Hint: Close with \"%s\"." ending
@@ -179,6 +184,14 @@ module Make (Token : TOKEN) : (S with module Token = Token) =
     let token = Token.mk_string lexeme region
     in state#enqueue token

+  let mk_verbatim (thread, state) =
+    let start = thread#opening#start in
+    let stop = state#pos in
+    let region = Region.make ~start ~stop in
+    let lexeme = thread#to_string in
+    let token = Token.mk_verbatim lexeme region
+    in state#enqueue token
+
   let mk_bytes bytes state buffer =
     let region, _, state = state#sync buffer in
     let token = Token.mk_bytes bytes region
@@ -414,10 +427,14 @@ and scan state = parse
 (* String *)

-| '"'  { let opening, lexeme, state = state#sync lexbuf in
-         let thread = LexerLib.mk_thread opening lexeme in
+| '"'  { let opening, _, state = state#sync lexbuf in
+         let thread = LexerLib.mk_thread opening "" in
          scan_string thread state lexbuf |> mk_string }

+| "{|" { let opening, _, state = state#sync lexbuf in
+         let thread = LexerLib.mk_thread opening "" in
+         scan_verbatim thread state lexbuf |> mk_verbatim }
+
 (* Comments *)

 | block_comment_openings {
@@ -484,7 +501,7 @@ and scan_string thread state = parse
            { let region, _, _ = state#sync lexbuf
              in fail region Invalid_character_in_string }
 | '"'      { let _, _, state = state#sync lexbuf
-             in thread#push_char '"', state }
+             in thread, state }
 | esc      { let _, lexeme, state = state#sync lexbuf in
              let thread = thread#push_string lexeme
              in scan_string thread state lexbuf }
@@ -493,6 +510,13 @@ and scan_string thread state = parse
 | _ as c   { let _, _, state = state#sync lexbuf in
              scan_string (thread#push_char c) state lexbuf }

+and scan_verbatim thread state = parse
+| eof      { fail thread#opening Unterminated_verbatim }
+| "|}"     { let _, _, state = state#sync lexbuf
+             in thread, state }
+| _ as c   { let _, _, state = state#sync lexbuf in
+             scan_verbatim (thread#push_char c) state lexbuf }
+
 (* Finishing a block comment

    (For Emacs: ("(*") The lexing of block comments must take care of

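The scan_verbatim rule above accumulates characters unchanged until the closing |}, failing with Unterminated_verbatim at end of input. A standalone sketch of the same loop without ocamllex (the names and positions are simplified; this is not the LexerLib API):

  exception Unterminated_verbatim

  let scan_verbatim (src : string) (start : int) : string * int =
    let len = String.length src in
    let buf = Buffer.create 16 in
    let rec go i =
      if i + 1 < len && src.[i] = '|' && src.[i + 1] = '}'
      then Buffer.contents buf, i + 2                     (* position just after the closing "|}" *)
      else if i >= len then raise Unterminated_verbatim
      else (Buffer.add_char buf src.[i]; go (i + 1))
    in
    go start

  let () =
    let src = "{|no \\n escapes here|} rest" in
    let value, stop = scan_verbatim src 2 in              (* 2 = length of the opening "{|" *)
    Printf.printf "%S ends at %d\n" value stop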
View File

@@ -258,7 +258,7 @@ and eval_literal : Ast_typed.literal -> value result = function
 | Literal_int i -> ok @@ V_Ct (C_int i)
 | Literal_nat n -> ok @@ V_Ct (C_nat n)
 | Literal_timestamp i -> ok @@ V_Ct (C_timestamp i)
-| Literal_string s -> ok @@ V_Ct (C_string s)
+| Literal_string s -> ok @@ V_Ct (C_string (Ligo_string.extract s))
 | Literal_bytes s -> ok @@ V_Ct (C_bytes s)
 | Literal_mutez t -> ok @@ V_Ct (C_mutez t)
 | Literal_address s -> ok @@ V_Ct (C_address s)

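This interpreter hunk is one of several call sites switched to Ligo_string.extract: string literals now remember whether they were written with quotes or with {| ... |}, and extract recovers the payload wherever only the characters matter. A hypothetical mirror of that wrapper, for orientation only (the real module is not part of the hunks shown here):

  module Ligo_string = struct
    type t = Standard of string | Verbatim of string
    let extract = function Standard s | Verbatim s -> s
  end

  let () =
    assert (Ligo_string.extract (Ligo_string.Standard "abc")
            = Ligo_string.extract (Ligo_string.Verbatim "abc"))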
View File

@ -234,7 +234,7 @@ let transpile_constant' : AST.constant' -> constant' = function
| C_CONVERT_FROM_RIGHT_COMB -> C_CONVERT_FROM_RIGHT_COMB | C_CONVERT_FROM_RIGHT_COMB -> C_CONVERT_FROM_RIGHT_COMB
let rec transpile_type (t:AST.type_expression) : type_expression result = let rec transpile_type (t:AST.type_expression) : type_expression result =
let return tc = ok @@ Expression.make_t @@ tc in let return tc = ok @@ Expression.make_t ~loc:t.location @@ tc in
match t.type_content with match t.type_content with
| T_variable (name) when Var.equal name Stage_common.Constant.t_bool -> return (T_base TB_bool) | T_variable (name) when Var.equal name Stage_common.Constant.t_bool -> return (T_base TB_bool)
| t when (compare t (t_bool ()).type_content) = 0-> return (T_base TB_bool) | t when (compare t (t_bool ()).type_content) = 0-> return (T_base TB_bool)
@ -372,7 +372,7 @@ let rec transpile_literal : AST.literal -> value = fun l -> match l with
| Literal_timestamp n -> D_timestamp n | Literal_timestamp n -> D_timestamp n
| Literal_mutez n -> D_mutez n | Literal_mutez n -> D_mutez n
| Literal_bytes s -> D_bytes s | Literal_bytes s -> D_bytes s
| Literal_string s -> D_string s | Literal_string s -> D_string (Ligo_string.extract s)
| Literal_address s -> D_string s | Literal_address s -> D_string s
| Literal_signature s -> D_string s | Literal_signature s -> D_string s
| Literal_key s -> D_string s | Literal_key s -> D_string s
@ -392,7 +392,7 @@ and tree_of_sum : AST.type_expression -> (AST.constructor' * AST.type_expression
and transpile_annotated_expression (ae:AST.expression) : expression result = and transpile_annotated_expression (ae:AST.expression) : expression result =
let%bind tv = transpile_type ae.type_expression in let%bind tv = transpile_type ae.type_expression in
let return ?(tv = tv) expr = ok @@ Combinators.Expression.make_tpl (expr, tv) in let return ?(tv = tv) expr = ok @@ Combinators.Expression.make_tpl ~loc:ae.location (expr, tv) in
let info = let info =
let title () = "translating expression" in let title () = "translating expression" in
let content () = Format.asprintf "%a" Location.pp ae.location in let content () = Format.asprintf "%a" Location.pp ae.location in
@ -474,10 +474,12 @@ and transpile_annotated_expression (ae:AST.expression) : expression result =
let aux = fun pred (ty, lr) -> let aux = fun pred (ty, lr) ->
let c = match lr with let c = match lr with
| `Left -> C_CAR | `Left -> C_CAR
| `Right -> C_CDR in | `Right -> C_CDR
Combinators.Expression.make_tpl (E_constant {cons_name=c;arguments=[pred]} , ty) in in
return ~tv:ty @@ E_constant {cons_name=c;arguments=[pred]}
in
let%bind record' = transpile_annotated_expression record in let%bind record' = transpile_annotated_expression record in
let expr = List.fold_left aux record' path in let%bind expr = bind_fold_list aux record' path in
ok expr ok expr
| E_record_update {record; path; update} -> | E_record_update {record; path; update} ->
let rec aux res (r,p,up) = let rec aux res (r,p,up) =
@ -654,14 +656,14 @@ and transpile_lambda l (input_type , output_type) =
let tv = Combinators.t_function input output in let tv = Combinators.t_function input output in
let binder = binder in let binder = binder in
let closure = E_closure { binder; body = result'} in let closure = E_closure { binder; body = result'} in
ok @@ Combinators.Expression.make_tpl (closure , tv) ok @@ Combinators.Expression.make_tpl ~loc:result.location (closure , tv)
and transpile_recursive {fun_name; fun_type; lambda} = and transpile_recursive {fun_name; fun_type; lambda} =
let rec map_lambda : AST.expression_variable -> type_expression -> AST.expression -> (expression * expression_variable list) result = fun fun_name loop_type e -> let rec map_lambda : AST.expression_variable -> type_expression -> AST.expression -> (expression * expression_variable list) result = fun fun_name loop_type e ->
match e.expression_content with match e.expression_content with
E_lambda {binder;result} -> E_lambda {binder;result} ->
let%bind (body,l) = map_lambda fun_name loop_type result in let%bind (body,l) = map_lambda fun_name loop_type result in
ok @@ (Expression.make (E_closure {binder;body}) loop_type, binder::l) ok @@ (Expression.make ~loc:e.location (E_closure {binder;body}) loop_type, binder::l)
| _ -> | _ ->
let%bind res = replace_callback fun_name loop_type false e in let%bind res = replace_callback fun_name loop_type false e in
ok @@ (res, []) ok @@ (res, [])

View File

@ -92,6 +92,7 @@ let rec untranspile (v : value) (t : AST.type_expression) : AST.expression resul
let%bind n = let%bind n =
trace_strong (wrong_mini_c_value "string" v) @@ trace_strong (wrong_mini_c_value "string" v) @@
get_string v in get_string v in
let n = Ligo_string.Standard n in
return (E_literal (Literal_string n)) return (E_literal (Literal_string n))
) )
| TC_bytes -> ( | TC_bytes -> (
@ -246,6 +247,7 @@ let rec untranspile (v : value) (t : AST.type_expression) : AST.expression resul
let%bind n = let%bind n =
trace_strong (wrong_mini_c_value "lambda as string" v) @@ trace_strong (wrong_mini_c_value "lambda as string" v) @@
get_string v in get_string v in
let n = Ligo_string.Standard n in
return (E_literal (Literal_string n)) return (E_literal (Literal_string n))
| T_variable _ -> | T_variable _ ->
fail @@ corner_case ~loc:__LOC__ "trying to untranspile at variable type" fail @@ corner_case ~loc:__LOC__ "trying to untranspile at variable type"

View File

@@ -504,16 +504,17 @@ and translate_function_body ({body ; binder} : anon_function) lst input : michelson
 and translate_function anon env input_ty output_ty : michelson result =
   let fvs = Mini_c.Free_variables.lambda [] anon in
   let small_env = Mini_c.Environment.select fvs env in
-  let%bind lambda_ty = Compiler_type.lambda_closure (small_env , input_ty , output_ty) in
+  let%bind (_lambda_ty , input_ty' , output_ty') =
+    Compiler_type.lambda_closure_with_ty (small_env , input_ty , output_ty) in
   let%bind lambda_body_code = translate_function_body anon small_env input_ty in
   match fvs with
-  | [] -> ok @@ seq [ i_push lambda_ty lambda_body_code ]
+  | [] -> ok @@ seq [ i_lambda input_ty' output_ty' lambda_body_code ]
   | _ :: _ ->
     let selector = List.map fst small_env in
     let%bind closure_pack_code = Compiler_environment.pack_closure env selector in
     ok @@ seq [
       closure_pack_code ;
-      i_push lambda_ty lambda_body_code ;
+      i_lambda input_ty' output_ty' lambda_body_code ;
       i_swap ;
       i_apply ;
     ]

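The instruction-selection change above stops pushing a literal of lambda type and instead emits Michelson's LAMBDA instruction, which needs the argument and return types spelled out separately; lambda_closure_with_ty in the next file exists to return exactly those two types. A rough sketch of the two shapes with hand-rolled constructors (these are not the compiler's helpers):

  (* Simplified Micheline-like tree, just enough to contrast the two encodings. *)
  type node = Prim of string * node list | Seq of node list

  let i_push ty literal     = Prim ("PUSH",   [ty; literal])
  let i_lambda arg ret code = Prim ("LAMBDA", [arg; ret; code])

  let () =
    let int_t = Prim ("int", []) in
    let body  = Seq [] in                                    (* code block elided *)
    ignore (i_push (Prim ("lambda", [int_t; int_t])) body);  (* old shape *)
    ignore (i_lambda int_t int_t body)                       (* new shape *)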
View File

@@ -265,13 +265,19 @@ and environment = fun env ->
   @@ List.map snd env

 and lambda_closure = fun (c , arg , ret) ->
+  let%bind (lambda , _arg' , _ret') =
+    lambda_closure_with_ty (c , arg , ret) in
+  ok lambda
+
+and lambda_closure_with_ty = fun (c , arg , ret) ->
   let%bind arg = type_ arg in
   let%bind ret = type_ ret in
   match c with
-  | [] -> ok @@ O.t_lambda arg ret
+  | [] -> ok @@ (O.t_lambda arg ret , arg , ret)
   | _ :: _ ->
     let%bind capture = environment_closure c in
-    ok @@ O.t_lambda (O.t_pair capture arg) ret
+    let arg' = O.t_pair capture arg in
+    ok @@ (O.t_lambda arg' ret , arg' , ret)

 and environment_closure =
   function

View File

@@ -87,6 +87,8 @@ val environment_element : string * type_expression -> (int, O.prim) Tezos_micheline.Micheline.node result
 val environment : ( 'a * type_expression ) list -> O.t list result
 val lambda_closure : environment * type_expression * type_expression -> (int, O.prim) Tezos_micheline.Micheline.node result
+val lambda_closure_with_ty : environment * type_expression * type_expression ->
+  (O.michelson * O.michelson * O.michelson) result
 val environment_closure : environment -> (int , O.prim ) Tezos_micheline.Micheline.node result
(* (*

View File

@ -37,13 +37,13 @@ module Errors = struct
Raw.pattern_to_region actual)] Raw.pattern_to_region actual)]
in error ~data title message in error ~data title message
let unsupported_let_in_function (patterns : Raw.pattern list) = let unsupported_let_in_function (region : Region.t) (patterns : Raw.pattern list) =
let title () = "" in let title () = "" in
let message () = "\nDefining functions with \"let ... in\" \ let message () = "\nDefining functions with \"let ... in\" \
is not supported yet.\n" in is not supported yet.\n" in
let patterns_loc = let patterns_loc =
List.fold_left (fun a p -> Region.cover a (Raw.pattern_to_region p)) List.fold_left (fun a p -> Region.cover a (Raw.pattern_to_region p))
Region.ghost patterns in region patterns in
let data = [ let data = [
("location", ("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ patterns_loc)] fun () -> Format.asprintf "%a" Location.pp_lift @@ patterns_loc)]
@ -169,7 +169,7 @@ open Operators.Concrete_to_imperative.Cameligo
let r_split = Location.r_split let r_split = Location.r_split
let get_t_string_singleton_opt = function let get_t_string_singleton_opt = function
| Raw.TString s -> Some (String.(sub s.value 1 (length s.value - 2))) | Raw.TString s -> Some s.value
| _ -> None | _ -> None
let rec pattern_to_var : Raw.pattern -> _ = fun p -> let rec pattern_to_var : Raw.pattern -> _ = fun p ->
@ -218,7 +218,8 @@ let rec typed_pattern_to_typed_vars : Raw.pattern -> _ = fun pattern ->
let (p,t) = pt.value.pattern,pt.value.type_expr in let (p,t) = pt.value.pattern,pt.value.type_expr in
let%bind p = tuple_pattern_to_vars p in let%bind p = tuple_pattern_to_vars p in
let%bind t = compile_type_expression t in let%bind t = compile_type_expression t in
ok @@ (p,t) let l = Location.lift pt.region in
ok @@ (p,t,l)
| other -> (fail @@ wrong_pattern "parenthetical or type annotation" other) | other -> (fail @@ wrong_pattern "parenthetical or type annotation" other)
and unpar_pattern : Raw.pattern -> Raw.pattern = function and unpar_pattern : Raw.pattern -> Raw.pattern = function
@ -398,19 +399,21 @@ let rec compile_expression :
match t with match t with
Raw.ELetIn e -> Raw.ELetIn e ->
let Raw.{kwd_rec; binding; body; attributes; _} = e.value in let Raw.{kwd_rec; binding; body; attributes; _} = e.value in
let region = e.region in
let loc = Location.lift region in
let inline = List.exists (fun (a: Raw.attribute) -> a.value = "inline") attributes in let inline = List.exists (fun (a: Raw.attribute) -> a.value = "inline") attributes in
let Raw.{binders; lhs_type; let_rhs; _} = binding in let Raw.{binders; lhs_type; let_rhs; _} = binding in
begin match binders with begin match binders with
| (p, []) -> | (p, []) ->
let%bind variables = tuple_pattern_to_typed_vars p in let%bind variables = tuple_pattern_to_typed_vars p in
let%bind ty_opt = let%bind ty_opt =
bind_map_option (fun (_,te) -> compile_type_expression te) lhs_type in bind_map_option (fun (re,te) -> let%bind te = compile_type_expression te in ok(Location.lift re,te)) lhs_type in
let%bind rhs = compile_expression let_rhs in let%bind rhs = compile_expression let_rhs in
let rhs_b = Var.fresh ~name: "rhs" () in let rhs_b = Var.fresh ~name: "rhs" () in
let rhs',rhs_b_expr = let rhs',rhs_b_expr =
match ty_opt with match ty_opt with
None -> rhs, e_variable rhs_b None -> rhs, e_variable ~loc rhs_b
| Some ty -> (e_annotation rhs ty), e_annotation (e_variable rhs_b) ty in | Some (lt,ty) -> (e_annotation ~loc:lt rhs ty), e_annotation ~loc:lt (e_variable ~loc rhs_b) ty in
let%bind body = compile_expression body in let%bind body = compile_expression body in
let prepare_variable (ty_var: Raw.variable * Raw.type_expr option) = let prepare_variable (ty_var: Raw.variable * Raw.type_expr option) =
let variable, ty_opt = ty_var in let variable, ty_opt = ty_var in
@ -435,12 +438,12 @@ let rec compile_expression :
match variables with match variables with
| hd :: [] -> | hd :: [] ->
if (List.length prep_vars = 1) if (List.length prep_vars = 1)
then e_let_in hd inline rhs_b_expr body then e_let_in ~loc hd inline rhs_b_expr body
else e_let_in hd inline (e_record_accessor rhs_b_expr (string_of_int ((List.length prep_vars) - 1))) body else e_let_in ~loc hd inline (e_record_accessor ~loc rhs_b_expr (string_of_int ((List.length prep_vars) - 1))) body
| hd :: tl -> | hd :: tl ->
e_let_in hd e_let_in ~loc hd
inline inline
(e_record_accessor rhs_b_expr (string_of_int ((List.length prep_vars) - (List.length tl) - 1))) (e_record_accessor ~loc rhs_b_expr (string_of_int ((List.length prep_vars) - (List.length tl) - 1)))
(chain_let_in tl body) (chain_let_in tl body)
| [] -> body (* Precluded by corner case assertion above *) | [] -> body (* Precluded by corner case assertion above *)
in in
@ -450,11 +453,11 @@ let rec compile_expression :
let f_args = nseq_to_list (binders) in let f_args = nseq_to_list (binders) in
let%bind lhs_type' = bind_map_option (fun x -> compile_type_expression (snd x)) lhs_type in let%bind lhs_type' = bind_map_option (fun x -> compile_type_expression (snd x)) lhs_type in
let%bind ty = bind_map_list typed_pattern_to_typed_vars f_args in let%bind ty = bind_map_list typed_pattern_to_typed_vars f_args in
let aux acc ty = Option.map (t_function (snd ty)) acc in let aux acc (_,ty,loc) = Option.map (t_function ~loc ty) acc in
ok @@ (List.fold_right' aux lhs_type' ty) ok @@ (List.fold_right' aux lhs_type' ty)
| _ -> ok None | _ -> ok None
) )
| Some t -> ok @@ Some t | Some (_,t) -> ok @@ Some t
in in
let%bind ret_expr = if List.length prep_vars = 1 let%bind ret_expr = if List.length prep_vars = 1
then ok (chain_let_in prep_vars body) then ok (chain_let_in prep_vars body)
@ -491,7 +494,7 @@ let rec compile_expression :
(* let f p1 ps... = rhs in body *) (* let f p1 ps... = rhs in body *)
| (f, p1 :: ps) -> | (f, p1 :: ps) ->
fail @@ unsupported_let_in_function (f :: p1 :: ps) fail @@ unsupported_let_in_function e.region (f :: p1 :: ps)
end end
| Raw.EAnnot a -> | Raw.EAnnot a ->
let Raw.{inside=expr, _, type_expr; _}, loc = r_split a in let Raw.{inside=expr, _, type_expr; _}, loc = r_split a in
@ -583,11 +586,11 @@ let rec compile_expression :
| EArith (Neg e) -> compile_unop "NEG" e | EArith (Neg e) -> compile_unop "NEG" e
| EString (String s) -> ( | EString (String s) -> (
let (s , loc) = r_split s in let (s , loc) = r_split s in
let s' = return @@ e_literal ~loc (Literal_string (Standard s))
let s = s in )
String.(sub s 1 ((length s) - 2)) | EString (Verbatim v) -> (
in let (v , loc) = r_split v in
return @@ e_literal ~loc (Literal_string s') return @@ e_literal ~loc (Literal_string (Verbatim v))
) )
| EString (Cat c) -> | EString (Cat c) ->
let (c, loc) = r_split c in let (c, loc) = r_split c in
@ -680,12 +683,12 @@ and compile_fun lamb' : expr result =
let pt_pattern = unpar_pattern pt.value.pattern in let pt_pattern = unpar_pattern pt.value.pattern in
match pt_pattern with match pt_pattern with
| Raw.PVar _ -> params | Raw.PVar _ -> params
| Raw.PTuple _ -> | Raw.PTuple t ->
[Raw.PTyped [Raw.PTyped
{region=Region.ghost; {region=t.region;
value= value=
{ pt.value with pattern= { pt.value with pattern=
Raw.PVar {region=Region.ghost; Raw.PVar {region=pt.region;
value="#P"}}}] value="#P"}}}]
| _ -> params | _ -> params
end end
@ -727,7 +730,7 @@ and compile_fun lamb' : expr result =
{binders = (PTuple vars, []) ; {binders = (PTuple vars, []) ;
lhs_type=None; lhs_type=None;
eq=Region.ghost; eq=Region.ghost;
let_rhs=(Raw.EVar {region=Region.ghost; value="#P"}); let_rhs=(Raw.EVar {region=pt.region; value="#P"});
} }
in in
let let_in: Raw.let_in = let let_in: Raw.let_in =
@ -741,7 +744,7 @@ and compile_fun lamb' : expr result =
in in
ok (Raw.ELetIn ok (Raw.ELetIn
{ {
region=Region.ghost; region=pt.region;
value=let_in value=let_in
}) })
| Raw.PVar _ -> ok lamb.body | Raw.PVar _ -> ok lamb.body
@ -842,7 +845,7 @@ and compile_declaration : Raw.declaration -> declaration Location.wrap list resu
let%bind type_expression = compile_type_expression type_expr in let%bind type_expression = compile_type_expression type_expr in
ok @@ [loc x @@ Declaration_type (Var.of_name name.value , type_expression)] ok @@ [loc x @@ Declaration_type (Var.of_name name.value , type_expression)]
| Let x -> ( | Let x -> (
let (_, recursive, let_binding, attributes), _ = r_split x in let (region, recursive, let_binding, attributes), _ = r_split x in
let inline = List.exists (fun (a: Raw.attribute) -> a.value = "inline") attributes in let inline = List.exists (fun (a: Raw.attribute) -> a.value = "inline") attributes in
let binding = let_binding in let binding = let_binding in
let {binders; lhs_type; let_rhs} = binding in let {binders; lhs_type; let_rhs} = binding in
@ -876,7 +879,7 @@ and compile_declaration : Raw.declaration -> declaration Location.wrap list resu
field_path = field_path =
( (
(Component (Component
{region = Region.ghost; {region = v.region;
value = name, Z.of_int i;} : Raw.selection) value = name, Z.of_int i;} : Raw.selection)
, []); , []);
} }
@ -926,8 +929,8 @@ and compile_declaration : Raw.declaration -> declaration Location.wrap list resu
} in } in
let f_args = nseq_to_list (param1,others) in let f_args = nseq_to_list (param1,others) in
let%bind ty = bind_map_list typed_pattern_to_typed_vars f_args in let%bind ty = bind_map_list typed_pattern_to_typed_vars f_args in
let aux acc ty = Option.map (t_function (snd ty)) acc in let aux acc (_,ty,loc) = Option.map (t_function ~loc ty) acc in
ok (Raw.EFun {region=Region.ghost ; value=fun_},List.fold_right' aux lhs_type' ty) ok (Raw.EFun {region; value=fun_},List.fold_right' aux lhs_type' ty)
in in
let%bind rhs' = compile_expression let_rhs in let%bind rhs' = compile_expression let_rhs in
let%bind lhs_type = match lhs_type with let%bind lhs_type = match lhs_type with
@ -936,7 +939,7 @@ and compile_declaration : Raw.declaration -> declaration Location.wrap list resu
let f_args = nseq_to_list (binders) in let f_args = nseq_to_list (binders) in
let%bind lhs_type' = bind_map_option (fun x -> compile_type_expression (snd x)) lhs_type in let%bind lhs_type' = bind_map_option (fun x -> compile_type_expression (snd x)) lhs_type in
let%bind ty = bind_map_list typed_pattern_to_typed_vars f_args in let%bind ty = bind_map_list typed_pattern_to_typed_vars f_args in
let aux acc ty = Option.map (t_function (snd ty)) acc in let aux acc (_,ty,loc) = Option.map (t_function ~loc ty) acc in
ok @@ (List.fold_right' aux lhs_type' ty) ok @@ (List.fold_right' aux lhs_type' ty)
| _ -> ok None | _ -> ok None
) )
@ -982,10 +985,10 @@ and compile_cases : type a . (Raw.pattern * a) list -> (a, unit) matching_conten
| PConstr v -> | PConstr v ->
let const, pat_opt = let const, pat_opt =
match v with match v with
PConstrApp {value; _} -> PConstrApp {value; region} ->
(match value with (match value with
| constr, None -> | constr, None ->
constr, Some (PVar {value = "unit"; region = Region.ghost}) constr, Some (PVar {value = "unit"; region})
| _ -> value) | _ -> value)
| PSomeApp {value=region,pat; _} -> | PSomeApp {value=region,pat; _} ->
{value="Some"; region}, Some pat {value="Some"; region}, Some pat
View File
@ -152,7 +152,7 @@ let return_statement expr = ok @@ fun expr'_opt ->
| Some expr' -> ok @@ e_sequence expr expr' | Some expr' -> ok @@ e_sequence expr expr'
let get_t_string_singleton_opt = function let get_t_string_singleton_opt = function
| Raw.TString s -> Some (String.(sub s.value 1 (length s.value -2))) | Raw.TString s -> Some s.value
| _ -> None | _ -> None
@ -384,11 +384,10 @@ let rec compile_expression (t:Raw.expr) : expr result =
| EArith (Neg e) -> compile_unop "NEG" e | EArith (Neg e) -> compile_unop "NEG" e
| EString (String s) -> | EString (String s) ->
let (s , loc) = r_split s in let (s , loc) = r_split s in
let s' = return @@ e_literal ~loc (Literal_string (Standard s))
(* S contains quotes *) | EString (Verbatim v) ->
String.(sub s 1 (length s - 2)) let (v , loc) = r_split v in
in return @@ e_literal ~loc (Literal_string (Verbatim v))
return @@ e_literal ~loc (Literal_string s')
| EString (Cat bo) -> | EString (Cat bo) ->
let (bo , loc) = r_split bo in let (bo , loc) = r_split bo in
let%bind sl = compile_expression bo.arg1 in let%bind sl = compile_expression bo.arg1 in
View File
@ -17,21 +17,23 @@ let peephole_expression : expression -> expression result = fun e ->
match e.expression_content with match e.expression_content with
| E_ascription {anno_expr=e'; type_annotation=t} as e -> ( | E_ascription {anno_expr=e'; type_annotation=t} as e -> (
match (e'.expression_content , t.type_content) with match (e'.expression_content , t.type_content) with
| (E_literal (Literal_string s) , T_constant (TC_key_hash)) -> return @@ E_literal (Literal_key_hash s) | (E_literal (Literal_string s) , T_constant (TC_key_hash)) -> return @@ E_literal (Literal_key_hash (Ligo_string.extract s))
| (E_literal (Literal_string s) , T_constant (TC_signature)) -> return @@ E_literal (Literal_signature s) | (E_literal (Literal_string s) , T_constant (TC_signature)) -> return @@ E_literal (Literal_signature (Ligo_string.extract s))
| (E_literal (Literal_string s) , T_constant (TC_key)) -> return @@ E_literal (Literal_key s) | (E_literal (Literal_string s) , T_constant (TC_key)) -> return @@ E_literal (Literal_key (Ligo_string.extract s))
| (E_literal (Literal_int i) , T_constant (TC_timestamp)) -> return @@ E_literal (Literal_timestamp i) | (E_literal (Literal_int i) , T_constant (TC_timestamp)) -> return @@ E_literal (Literal_timestamp i)
| (E_literal (Literal_string str) , T_constant (TC_timestamp)) -> | (E_literal (Literal_string str) , T_constant (TC_timestamp)) ->
let str = Ligo_string.extract str in
let%bind time = let%bind time =
trace_option (bad_string_timestamp str e'.location) trace_option (bad_string_timestamp str e'.location)
@@ Memory_proto_alpha.Protocol.Alpha_context.Timestamp.of_notation str in @@ Memory_proto_alpha.Protocol.Alpha_context.Timestamp.of_notation str in
let itime = Z.of_int64 @@ Tezos_utils.Time.Protocol.to_seconds time in let itime = Z.of_int64 @@ Tezos_utils.Time.Protocol.to_seconds time in
return @@ E_literal (Literal_timestamp itime) return @@ E_literal (Literal_timestamp itime)
| (E_literal (Literal_string str) , T_constant (TC_address)) -> return @@ E_literal (Literal_address str) | (E_literal (Literal_string str) , T_constant (TC_address)) -> return @@ E_literal (Literal_address (Ligo_string.extract str))
| (E_literal (Literal_string str) , T_constant (TC_bytes)) -> ( | (E_literal (Literal_string str) , T_constant (TC_bytes)) -> (
let%bind e' = e'_bytes str in let str = Ligo_string.extract str in
return e' let%bind e' = e'_bytes str in
) return e'
)
| _ -> return e | _ -> return e
) )
| e -> return e | e -> return e
View File
@ -253,7 +253,7 @@ and compile_expression' : I.expression -> (O.expression option -> O.expression)
let%bind element = compile_expression element in let%bind element = compile_expression element in
return @@ O.e_constructor ~loc constructor element return @@ O.e_constructor ~loc constructor element
| I.E_matching m -> | I.E_matching m ->
let%bind m = compile_matching m in let%bind m = compile_matching m loc in
ok @@ m ok @@ m
| I.E_record record -> | I.E_record record ->
let record = I.LMap.to_kv_list record in let record = I.LMap.to_kv_list record in
@ -385,8 +385,8 @@ and compile_lambda : I.lambda -> O.lambda result =
let%bind result = compile_expression result in let%bind result = compile_expression result in
ok @@ O.{binder;input_type;output_type;result} ok @@ O.{binder;input_type;output_type;result}
and compile_matching : I.matching -> (O.expression option -> O.expression) result = and compile_matching : I.matching -> Location.t -> (O.expression option -> O.expression) result =
fun {matchee;cases} -> fun {matchee;cases} loc ->
let return expr = ok @@ function let return expr = ok @@ function
| None -> expr | None -> expr
| Some e -> O.e_sequence expr e | Some e -> O.e_sequence expr e
@ -412,7 +412,7 @@ and compile_matching : I.matching -> (O.expression option -> O.expression) resul
in in
ok @@ restore_mutable_variable return_expr free_vars env ok @@ restore_mutable_variable return_expr free_vars env
else else
return @@ O.e_matching matchee @@ O.Match_option {match_none=match_none'; match_some=(n,expr',tv)} return @@ O.e_matching ~loc matchee @@ O.Match_option {match_none=match_none'; match_some=(n,expr',tv)}
| I.Match_list {match_nil;match_cons} -> | I.Match_list {match_nil;match_cons} ->
let%bind match_nil' = compile_expression match_nil in let%bind match_nil' = compile_expression match_nil in
let (hd,tl,expr,tv) = match_cons in let (hd,tl,expr,tv) = match_cons in
@ -432,10 +432,10 @@ and compile_matching : I.matching -> (O.expression option -> O.expression) resul
in in
ok @@ restore_mutable_variable return_expr free_vars env ok @@ restore_mutable_variable return_expr free_vars env
else else
return @@ O.e_matching matchee @@ O.Match_list {match_nil=match_nil'; match_cons=(hd,tl,expr',tv)} return @@ O.e_matching ~loc matchee @@ O.Match_list {match_nil=match_nil'; match_cons=(hd,tl,expr',tv)}
| I.Match_tuple ((lst,expr), tv) -> | I.Match_tuple ((lst,expr), tv) ->
let%bind expr = compile_expression expr in let%bind expr = compile_expression expr in
return @@ O.e_matching matchee @@ O.Match_tuple ((lst,expr), tv) return @@ O.e_matching ~loc matchee @@ O.Match_tuple ((lst,expr), tv)
| I.Match_variant (lst,tv) -> | I.Match_variant (lst,tv) ->
let env = Var.fresh () in let env = Var.fresh () in
let aux fv ((c,n),expr) = let aux fv ((c,n),expr) =
@ -448,7 +448,7 @@ and compile_matching : I.matching -> (O.expression option -> O.expression) resul
let free_vars = List.sort_uniq Var.compare @@ List.concat fv in let free_vars = List.sort_uniq Var.compare @@ List.concat fv in
if (List.length free_vars == 0) then ( if (List.length free_vars == 0) then (
let cases = List.map (fun case -> let (a,_,b) = case in (a,b)) cases in let cases = List.map (fun case -> let (a,_,b) = case in (a,b)) cases in
return @@ O.e_matching matchee @@ O.Match_variant (cases,tv) return @@ O.e_matching ~loc matchee @@ O.Match_variant (cases,tv)
) else ( ) else (
let cases = List.map (fun case -> let (a,b,_) = case in (a,b)) cases in let cases = List.map (fun case -> let (a,b,_) = case in (a,b)) cases in
let match_expr = O.e_matching matchee @@ O.Match_variant (cases,tv) in let match_expr = O.e_matching matchee @@ O.Match_variant (cases,tv) in
View File
@ -59,7 +59,7 @@ let self_typing : contract_pass_data -> expression -> (bool * contract_pass_data
match e.expression_content , e.type_expression with match e.expression_content , e.type_expression with
| E_constant {cons_name=C_SELF ; arguments=[entrypoint_exp]}, {type_content = T_operator (TC_contract t) ; type_meta=_} -> | E_constant {cons_name=C_SELF ; arguments=[entrypoint_exp]}, {type_content = T_operator (TC_contract t) ; type_meta=_} ->
let%bind entrypoint = match entrypoint_exp.expression_content with let%bind entrypoint = match entrypoint_exp.expression_content with
| E_literal (Literal_string ep) -> check_entrypoint_annotation_format ep entrypoint_exp | E_literal (Literal_string ep) -> check_entrypoint_annotation_format (Ligo_string.extract ep) entrypoint_exp
| _ -> fail @@ Errors.entrypoint_annotation_not_literal entrypoint_exp.location in | _ -> fail @@ Errors.entrypoint_annotation_not_literal entrypoint_exp.location in
let%bind entrypoint_t = match dat.contract_type.parameter.type_content with let%bind entrypoint_t = match dat.contract_type.parameter.type_content with
| T_sum cmap -> | T_sum cmap ->
View File
@ -18,7 +18,25 @@ module Typer = struct
("b" , fun () -> Format.asprintf "%a" PP.type_expression b ) ("b" , fun () -> Format.asprintf "%a" PP.type_expression b )
] in ] in
error ~data title message () error ~data title message ()
let error_comparator_composed a () =
let title () = "We only allow composed types of not more than two element to be compared" in
let message () = "" in
let data = [
("received" , fun () -> Format.asprintf "%a" PP.type_expression a);
] in
error ~data title message ()
let error_first_field_comp_pair a () =
let title () = "this field is not allowed at the left of a comparable pair" in
let message () = "" in
let data = [
("received" , fun () -> Format.asprintf "%a" PP.type_expression a);
] in
error ~data title message ()
end end
open Errors open Errors
type type_result = type_expression type type_result = type_expression
@ -105,7 +123,7 @@ module Typer = struct
let assert_eq_1 ?msg a b = Assert.assert_true ?msg (eq_1 a b) let assert_eq_1 ?msg a b = Assert.assert_true ?msg (eq_1 a b)
let comparator : string -> typer = fun s -> typer_2 s @@ fun a b -> let simple_comparator : string -> typer = fun s -> typer_2 s @@ fun a b ->
let%bind () = let%bind () =
trace_strong (error_uncomparable_types a b) @@ trace_strong (error_uncomparable_types a b) @@
Assert.assert_true @@ Assert.assert_true @@
@ -122,6 +140,24 @@ module Typer = struct
] in ] in
ok @@ t_bool () ok @@ t_bool ()
let rec pair_comparator : string -> typer = fun s -> typer_2 s @@ fun a b ->
let%bind () =
trace_strong (error_uncomparable_types a b) @@
Assert.assert_true @@ eq_1 a b
in
let%bind (a_k, a_v) =
trace_strong (error_comparator_composed a) @@
get_t_pair a in
let%bind (b_k, b_v) = get_t_pair b in
let%bind _ =
trace_strong (error_first_field_comp_pair a) @@
simple_comparator s [a_k;b_k] None
in
comparator s [a_v;b_v] None
and comparator : string -> typer = fun s -> typer_2 s @@ fun a b ->
bind_or (simple_comparator s [a;b] None, pair_comparator s [a;b] None)
let boolean_operator_2 : string -> typer = fun s -> typer_2 s @@ fun a b -> let boolean_operator_2 : string -> typer = fun s -> typer_2 s @@ fun a b ->
let%bind () = let%bind () =
trace_strong (simple_error "A isn't of type bool") @@ trace_strong (simple_error "A isn't of type bool") @@
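To illustrate the rule encoded by pair_comparator / comparator above: a pair is comparable only when its left component is a simple comparable type and its right component is itself comparable, so right-combed pairs such as int * (int * nat) are accepted while pairs nested on the left are rejected (hence the two new errors). A minimal standalone sketch of that rule, using a deliberately simplified type representation rather than the compiler's type_expression:

(* Hypothetical, simplified model of comparable types; not the compiler's AST. *)
type ty = Int | Nat | String | Timestamp | Pair of ty * ty

let simple_comparable = function
  | Int | Nat | String | Timestamp -> true
  | Pair _ -> false

(* Mirrors pair_comparator: the left component must be simply comparable,
   and the right component may itself be a (right-combed) comparable pair. *)
let rec comparable = function
  | Pair (l, r) -> simple_comparable l && comparable r
  | t -> simple_comparable t

let () =
  assert (comparable (Pair (String, Timestamp)));          (* simple pair: accepted *)
  assert (comparable (Pair (Int, Pair (Int, Nat))));       (* right comb: accepted *)
  assert (not (comparable (Pair (Pair (Int, Int), Nat))))  (* left-nested pair: rejected *)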
View File
@ -96,7 +96,8 @@ let e_nat_z ?loc n : expression = make_e ?loc @@ E_literal (Literal_nat n)
let e_nat ?loc n : expression = e_nat_z ?loc @@ Z.of_int n let e_nat ?loc n : expression = e_nat_z ?loc @@ Z.of_int n
let e_timestamp_z ?loc n : expression = make_e ?loc @@ E_literal (Literal_timestamp n) let e_timestamp_z ?loc n : expression = make_e ?loc @@ E_literal (Literal_timestamp n)
let e_timestamp ?loc n : expression = e_timestamp_z ?loc @@ Z.of_int n let e_timestamp ?loc n : expression = e_timestamp_z ?loc @@ Z.of_int n
let e_string ?loc s : expression = make_e ?loc @@ E_literal (Literal_string s) let e_string ?loc s : expression = make_e ?loc @@ E_literal (Literal_string (Standard s))
let e_verbatim ?loc v : expression = make_e ?loc @@ E_literal (Literal_string (Verbatim v))
let e_address ?loc s : expression = make_e ?loc @@ E_literal (Literal_address s) let e_address ?loc s : expression = make_e ?loc @@ E_literal (Literal_address s)
let e_mutez_z ?loc s : expression = make_e ?loc @@ E_literal (Literal_mutez s) let e_mutez_z ?loc s : expression = make_e ?loc @@ E_literal (Literal_mutez s)
let e_mutez ?loc s : expression = e_mutez_z ?loc @@ Z.of_int s let e_mutez ?loc s : expression = e_mutez_z ?loc @@ Z.of_int s
View File
@ -49,6 +49,7 @@ val t_michelson_pair : ?loc:Location.t -> type_expression -> michelson_prct_anno
val t_operator : ?loc:Location.t -> type_operator -> type_expression list -> type_expression result val t_operator : ?loc:Location.t -> type_operator -> type_expression list -> type_expression result
val t_set : ?loc:Location.t -> type_expression -> type_expression val t_set : ?loc:Location.t -> type_expression -> type_expression
val t_contract : ?loc:Location.t -> type_expression -> type_expression
val make_e : ?loc:Location.t -> expression_content -> expression val make_e : ?loc:Location.t -> expression_content -> expression
@ -62,6 +63,7 @@ val e_nat : ?loc:Location.t -> int -> expression
val e_timestamp : ?loc:Location.t -> int -> expression val e_timestamp : ?loc:Location.t -> int -> expression
val e_bool : ?loc:Location.t -> bool -> expression val e_bool : ?loc:Location.t -> bool -> expression
val e_string : ?loc:Location.t -> string -> expression val e_string : ?loc:Location.t -> string -> expression
val e_verbatim : ?loc:Location.t -> string -> expression
val e_address : ?loc:Location.t -> string -> expression val e_address : ?loc:Location.t -> string -> expression
val e_signature : ?loc:Location.t -> string -> expression val e_signature : ?loc:Location.t -> string -> expression
val e_key : ?loc:Location.t -> string -> expression val e_key : ?loc:Location.t -> string -> expression
View File
@ -53,7 +53,7 @@ val e_int : ?loc:Location.t -> Z.t -> expression
val e_nat : ?loc:Location.t -> Z.t -> expression val e_nat : ?loc:Location.t -> Z.t -> expression
val e_timestamp : ?loc:Location.t -> Z.t -> expression val e_timestamp : ?loc:Location.t -> Z.t -> expression
val e_bool : ?loc:Location.t -> bool -> expression val e_bool : ?loc:Location.t -> bool -> expression
val e_string : ?loc:Location.t -> string -> expression val e_string : ?loc:Location.t -> ligo_string -> expression
val e_address : ?loc:Location.t -> string -> expression val e_address : ?loc:Location.t -> string -> expression
val e_signature : ?loc:Location.t -> string -> expression val e_signature : ?loc:Location.t -> string -> expression
val e_key : ?loc:Location.t -> string -> expression val e_key : ?loc:Location.t -> string -> expression
View File
@ -54,7 +54,7 @@ val e_int : ?loc:Location.t -> Z.t -> expression
val e_nat : ?loc:Location.t -> Z.t -> expression val e_nat : ?loc:Location.t -> Z.t -> expression
val e_timestamp : ?loc:Location.t -> Z.t -> expression val e_timestamp : ?loc:Location.t -> Z.t -> expression
val e_bool : ?loc:Location.t -> bool -> expression val e_bool : ?loc:Location.t -> bool -> expression
val e_string : ?loc:Location.t -> string -> expression val e_string : ?loc:Location.t -> ligo_string -> expression
val e_address : ?loc:Location.t -> string -> expression val e_address : ?loc:Location.t -> string -> expression
val e_signature : ?loc:Location.t -> string -> expression val e_signature : ?loc:Location.t -> string -> expression
val e_key : ?loc:Location.t -> string -> expression val e_key : ?loc:Location.t -> string -> expression
View File
@ -188,7 +188,7 @@ let literal ppf (l : literal) =
| Literal_nat z -> fprintf ppf "+%a" Z.pp_print z | Literal_nat z -> fprintf ppf "+%a" Z.pp_print z
| Literal_timestamp z -> fprintf ppf "+%a" Z.pp_print z | Literal_timestamp z -> fprintf ppf "+%a" Z.pp_print z
| Literal_mutez z -> fprintf ppf "%amutez" Z.pp_print z | Literal_mutez z -> fprintf ppf "%amutez" Z.pp_print z
| Literal_string s -> fprintf ppf "%S" s | Literal_string s -> fprintf ppf "%a" Ligo_string.pp s
| Literal_bytes b -> fprintf ppf "0x%a" Hex.pp (Hex.of_bytes b) | Literal_bytes b -> fprintf ppf "0x%a" Hex.pp (Hex.of_bytes b)
| Literal_address s -> fprintf ppf "@%S" s | Literal_address s -> fprintf ppf "@%S" s
| Literal_operation _ -> fprintf ppf "Operation(...bytes)" | Literal_operation _ -> fprintf ppf "Operation(...bytes)"
View File
@ -15,6 +15,7 @@ let needs_parens = {
int = (fun _ _ _ -> false) ; int = (fun _ _ _ -> false) ;
z = (fun _ _ _ -> false) ; z = (fun _ _ _ -> false) ;
string = (fun _ _ _ -> false) ; string = (fun _ _ _ -> false) ;
ligo_string = (fun _ _ _ -> false) ;
bytes = (fun _ _ _ -> false) ; bytes = (fun _ _ _ -> false) ;
unit = (fun _ _ _ -> false) ; unit = (fun _ _ _ -> false) ;
packed_internal_operation = (fun _ _ _ -> false) ; packed_internal_operation = (fun _ _ _ -> false) ;
@ -54,6 +55,7 @@ let op ppf = {
bool = (fun _visitor () b -> fprintf ppf "%s" (if b then "true" else "false")) ; bool = (fun _visitor () b -> fprintf ppf "%s" (if b then "true" else "false")) ;
z = (fun _visitor () i -> fprintf ppf "%a" Z.pp_print i) ; z = (fun _visitor () i -> fprintf ppf "%a" Z.pp_print i) ;
string = (fun _visitor () str -> fprintf ppf "\"%s\"" str) ; string = (fun _visitor () str -> fprintf ppf "\"%s\"" str) ;
ligo_string = (fun _visitor () str -> fprintf ppf "%a" Ligo_string.pp str) ;
bytes = (fun _visitor () _bytes -> fprintf ppf "bytes...") ; bytes = (fun _visitor () _bytes -> fprintf ppf "bytes...") ;
unit = (fun _visitor () () -> fprintf ppf "()") ; unit = (fun _visitor () () -> fprintf ppf "()") ;
packed_internal_operation = (fun _visitor () _op -> fprintf ppf "Operation(...bytes)") ; packed_internal_operation = (fun _visitor () _op -> fprintf ppf "Operation(...bytes)") ;
View File
@ -120,7 +120,7 @@ val e_int : Z.t -> expression_content
val e_nat : Z.t -> expression_content val e_nat : Z.t -> expression_content
val e_mutez : Z.t -> expression_content val e_mutez : Z.t -> expression_content
val e_bool : bool -> environment -> expression_content val e_bool : bool -> environment -> expression_content
val e_string : string -> expression_content val e_string : ligo_string -> expression_content
val e_bytes : bytes -> expression_content val e_bytes : bytes -> expression_content
val e_timestamp : Z.t -> expression_content val e_timestamp : Z.t -> expression_content
val e_address : string -> expression_content val e_address : string -> expression_content
@ -140,7 +140,7 @@ val e_a_int : Z.t -> environment -> expression
val e_a_nat : Z.t -> environment -> expression val e_a_nat : Z.t -> environment -> expression
val e_a_mutez : Z.t -> environment -> expression val e_a_mutez : Z.t -> environment -> expression
val e_a_bool : bool -> environment -> expression val e_a_bool : bool -> environment -> expression
val e_a_string : string -> environment -> expression val e_a_string : ligo_string -> environment -> expression
val e_a_address : string -> environment -> expression val e_a_address : string -> environment -> expression
val e_a_pair : expression -> expression -> environment -> expression val e_a_pair : expression -> expression -> environment -> expression
val e_a_some : expression -> environment -> expression val e_a_some : expression -> environment -> expression
View File
@ -7,7 +7,7 @@ val e_a_empty_int : Z.t -> expression
val e_a_empty_nat : Z.t -> expression val e_a_empty_nat : Z.t -> expression
val e_a_empty_mutez : Z.t -> expression val e_a_empty_mutez : Z.t -> expression
val e_a_empty_bool : bool -> expression val e_a_empty_bool : bool -> expression
val e_a_empty_string : string -> expression val e_a_empty_string : ligo_string -> expression
val e_a_empty_address : string -> expression val e_a_empty_address : string -> expression
val e_a_empty_pair : expression -> expression -> expression val e_a_empty_pair : expression -> expression -> expression
val e_a_empty_some : expression -> expression val e_a_empty_some : expression -> expression
View File
@ -80,7 +80,7 @@ type literal =
| Literal_nat of z | Literal_nat of z
| Literal_timestamp of z | Literal_timestamp of z
| Literal_mutez of z | Literal_mutez of z
| Literal_string of string | Literal_string of ligo_string
| Literal_bytes of bytes | Literal_bytes of bytes
| Literal_address of string | Literal_address of string
| Literal_signature of string | Literal_signature of string
View File
@ -11,6 +11,7 @@ type expression_variable = Stage_common.Types.expression_variable
type type_ = Stage_common.Types.type_ type type_ = Stage_common.Types.type_
type type_variable = Stage_common.Types.type_variable type type_variable = Stage_common.Types.type_variable
type z = Z.t type z = Z.t
type ligo_string = Stage_common.Types.ligo_string
type constructor' = type constructor' =
| Constructor of string | Constructor of string
View File
@ -259,8 +259,8 @@ let%expect_test _ =
let%expect_test _ = let%expect_test _ =
let pp = expression_content Format.std_formatter in let pp = expression_content Format.std_formatter in
let dummy_type = {type_content=T_base TB_unit} in let dummy_type = {type_content=T_base TB_unit;location=Location.generated} in
let wrap e = { content = e ; type_expression = dummy_type} in let wrap e = { content = e ; type_expression = dummy_type ; location = Location.generated} in
pp @@ E_closure { binder = Var.of_name "y" ; body = wrap (E_variable (Var.of_name "y")) } ; pp @@ E_closure { binder = Var.of_name "y" ; body = wrap (E_variable (Var.of_name "y")) } ;
[%expect{| [%expect{|
fun y -> (y) fun y -> (y)
View File
@ -8,18 +8,21 @@ module Expression = struct
let get_content : t -> t' = fun e -> e.content let get_content : t -> t' = fun e -> e.content
let get_type : t -> type_expression = fun e -> e.type_expression let get_type : t -> type_expression = fun e -> e.type_expression
let make_t = fun tc -> { let make_t ?(loc=Location.generated) = fun tc -> {
type_content = tc; type_content = tc;
location = loc;
} }
let make = fun e' t -> { let make ?(loc=Location.generated) = fun e' t -> {
content = e' ; content = e' ;
type_expression = t ; type_expression = t ;
location = loc;
} }
let make_tpl = fun (e' , t) -> { let make_tpl ?(loc=Location.generated) = fun (e' , t) -> {
content = e' ; content = e' ;
type_expression = t ; type_expression = t ;
location = loc;
} }
let pair : t -> t -> t' = fun a b -> E_constant { cons_name = C_PAIR; arguments = [ a ; b ]} let pair : t -> t -> t' = fun a b -> E_constant { cons_name = C_PAIR; arguments = [ a ; b ]}
@ -164,24 +167,24 @@ let get_operation (v:value) = match v with
| _ -> simple_fail "not an operation" | _ -> simple_fail "not an operation"
let t_int () : type_expression = Expression.make_t @@ T_base TB_int let t_int ?loc () : type_expression = Expression.make_t ?loc @@ T_base TB_int
let t_unit () : type_expression = Expression.make_t @@ T_base TB_unit let t_unit ?loc () : type_expression = Expression.make_t ?loc @@ T_base TB_unit
let t_nat () : type_expression = Expression.make_t @@ T_base TB_nat let t_nat ?loc () : type_expression = Expression.make_t ?loc @@ T_base TB_nat
let t_function x y : type_expression = Expression.make_t @@ T_function ( x , y ) let t_function ?loc x y : type_expression = Expression.make_t ?loc @@ T_function ( x , y )
let t_pair x y : type_expression = Expression.make_t @@ T_pair ( x , y ) let t_pair ?loc x y : type_expression = Expression.make_t ?loc @@ T_pair ( x , y )
let t_union x y : type_expression = Expression.make_t @@ T_or ( x , y ) let t_union ?loc x y : type_expression = Expression.make_t ?loc @@ T_or ( x , y )
let e_int expr : expression = Expression.make_tpl (expr, t_int ()) let e_int ?loc expr : expression = Expression.make_tpl ?loc (expr, t_int ())
let e_unit : expression = Expression.make_tpl (E_literal D_unit, t_unit ()) let e_unit ?loc () : expression = Expression.make_tpl ?loc (E_literal D_unit, t_unit ())
let e_skip : expression = Expression.make_tpl (E_skip, t_unit ()) let e_skip ?loc () : expression = Expression.make_tpl ?loc (E_skip, t_unit ())
let e_var_int name : expression = e_int (E_variable name) let e_var_int ?loc name : expression = e_int ?loc (E_variable name)
let e_let_in v tv inline expr body : expression = Expression.(make_tpl ( let e_let_in ?loc v tv inline expr body : expression = Expression.(make_tpl ?loc(
E_let_in ((v , tv) , inline, expr , body) , E_let_in ((v , tv) , inline, expr , body) ,
get_type body get_type body
)) ))
let ez_e_sequence a b : expression = Expression.(make_tpl (E_sequence (make_tpl (a , t_unit ()) , b) , get_type b)) let ez_e_sequence ?loc a b : expression = Expression.(make_tpl (E_sequence (make_tpl ?loc (a , t_unit ()) , b) , get_type b))
let d_unit : value = D_unit let d_unit : value = D_unit

View File
(* (*
val is_toplevel : t -> bool val is_toplevel : t -> bool
*) *)
val make_t : type_content -> type_expression val make_t : ?loc:Location.t -> type_content -> type_expression
val make : t' -> type_expression -> t val make : ?loc:Location.t -> t' -> type_expression -> t
val make_tpl : t' * type_expression -> t val make_tpl : ?loc:Location.t -> t' * type_expression -> t
val pair : t -> t -> t' val pair : t -> t -> t'
end end
@ -53,24 +53,24 @@ val get_t_contract : type_expression -> type_expression result
val get_t_operation : type_expression -> type_expression result val get_t_operation : type_expression -> type_expression result
val get_operation : value -> Memory_proto_alpha.Protocol.Alpha_context.packed_internal_operation result val get_operation : value -> Memory_proto_alpha.Protocol.Alpha_context.packed_internal_operation result
val t_int : unit -> type_expression val t_int : ?loc:Location.t -> unit -> type_expression
val t_unit : unit -> type_expression val t_unit : ?loc:Location.t -> unit -> type_expression
val t_nat : unit -> type_expression val t_nat : ?loc:Location.t -> unit -> type_expression
val t_function : type_expression -> type_expression -> type_expression val t_function : ?loc:Location.t -> type_expression -> type_expression -> type_expression
val t_pair : type_expression annotated -> type_expression annotated -> type_expression val t_pair : ?loc:Location.t -> type_expression annotated -> type_expression annotated -> type_expression
val t_union : type_expression annotated -> type_expression annotated -> type_expression val t_union : ?loc:Location.t -> type_expression annotated -> type_expression annotated -> type_expression
(* (*
val quote : string -> type_value -> type_value -> Expression.t -> anon_function val quote : string -> type_value -> type_value -> Expression.t -> anon_function
val e_int : Expression.t' -> Expression.t val e_int : Expression.t' -> Expression.t
*) *)
val e_unit : Expression.t val e_unit : ?loc:Location.t -> unit -> Expression.t
val e_skip : Expression.t val e_skip : ?loc:Location.t -> unit -> Expression.t
val e_var_int : expression_variable -> Expression.t val e_var_int : ?loc:Location.t -> expression_variable -> Expression.t
val e_let_in : expression_variable -> type_expression -> inline -> Expression.t -> Expression.t -> Expression.t val e_let_in : ?loc:Location.t -> expression_variable -> type_expression -> inline -> Expression.t -> Expression.t -> Expression.t
val ez_e_sequence : Expression.t' -> Expression.t -> expression val ez_e_sequence : ?loc:Location.t -> Expression.t' -> Expression.t -> expression
(* (*
val ez_e_return : Expression.t -> Expression.t val ez_e_return : Expression.t -> Expression.t
*) *)
View File
@ -155,6 +155,7 @@ let aggregate_entry (lst : program) (form : form_t) : expression result =
let e' = { let e' = {
content = E_closure l' ; content = E_closure l' ;
type_expression = entry_expression.type_expression ; type_expression = entry_expression.type_expression ;
location = entry_expression.location;
} in } in
ok e' ok e'
) )
View File
@ -16,6 +16,7 @@ type type_content =
and type_expression = { and type_expression = {
type_content : type_content; type_content : type_content;
location : Location.t;
} }
and type_base = and type_base =
@ -94,6 +95,7 @@ and expression_content =
and expression = { and expression = {
content : expression_content ; content : expression_content ;
type_expression : type_expression ; type_expression : type_expression ;
location : Location.t;
} }
and constant = { and constant = {

View File
| Literal_nat z -> fprintf ppf "+%a" Z.pp_print z | Literal_nat z -> fprintf ppf "+%a" Z.pp_print z
| Literal_timestamp z -> fprintf ppf "+%a" Z.pp_print z | Literal_timestamp z -> fprintf ppf "+%a" Z.pp_print z
| Literal_mutez z -> fprintf ppf "%amutez" Z.pp_print z | Literal_mutez z -> fprintf ppf "%amutez" Z.pp_print z
| Literal_string s -> fprintf ppf "%S" s | Literal_string s -> fprintf ppf "%a" Ligo_string.pp s
| Literal_bytes b -> fprintf ppf "0x%a" Hex.pp (Hex.of_bytes b) | Literal_bytes b -> fprintf ppf "0x%a" Hex.pp (Hex.of_bytes b)
| Literal_address s -> fprintf ppf "@%S" s | Literal_address s -> fprintf ppf "@%S" s
| Literal_operation _ -> fprintf ppf "Operation(...bytes)" | Literal_operation _ -> fprintf ppf "Operation(...bytes)"
View File
@ -3,6 +3,7 @@ and expression_variable = expression_ Var.t
type type_ type type_
and type_variable = type_ Var.t and type_variable = type_ Var.t
type ligo_string = Simple_utils.Ligo_string.t
type constructor' = Constructor of string type constructor' = Constructor of string
type label = Label of string type label = Label of string
@ -178,7 +179,7 @@ type literal =
| Literal_nat of Z.t | Literal_nat of Z.t
| Literal_timestamp of Z.t | Literal_timestamp of Z.t
| Literal_mutez of Z.t | Literal_mutez of Z.t
| Literal_string of string | Literal_string of ligo_string
| Literal_bytes of bytes | Literal_bytes of bytes
| Literal_address of string | Literal_address of string
| Literal_signature of string | Literal_signature of string
View File
@ -0,0 +1,116 @@
type tokens is big_map (address, nat)
type allowances is big_map (address * address, nat) (* (sender,account) -> value *)
type storage is record [
tokens : tokens;
allowances : allowances;
total_amount : nat;
]
type transfer is record [
address_from : address;
address_to : address;
value : nat;
]
type approve is record [
spender : address;
value : nat;
]
type getAllowance is record [
owner : address;
spender : address;
callback : contract (nat);
]
type getBalance is record [
owner : address;
callback : contract (nat);
]
type getTotalSupply is record [
callback : contract (nat);
]
type action is
Transfer of transfer
| Approve of approve
| GetAllowance of getAllowance
| GetBalance of getBalance
| GetTotalSupply of getTotalSupply
function transfer (const p : transfer; const s: storage) : list (operation) * storage is block {
var new_allowances : allowances := Big_map.empty;
if Tezos.sender = p.address_from
then { new_allowances := s.allowances; }
else {
var authorized_value : nat :=
case (Big_map.find_opt ((Tezos.sender,p.address_from), s.allowances)) of
Some (value) -> value
| None -> 0n
end;
if (authorized_value < p.value)
then { failwith("Not Enough Allowance")}
else { new_allowances := Big_map.update ((Tezos.sender,p.address_from), (Some (abs(authorized_value - p.value))), s.allowances) }
};
var sender_balance : nat := case (Big_map.find_opt (p.address_from, s.tokens)) of
Some (value) -> value
| None -> 0n
end;
var new_tokens : tokens := Big_map.empty;
if (sender_balance < p.value)
then { failwith ("Not Enough Balance")}
else {
new_tokens := Big_map.update (p.address_from, (Some (abs(sender_balance - p.value))), s.tokens);
var receiver_balance : nat := case (Big_map.find_opt (p.address_to, s.tokens)) of
Some (value) -> value
| None -> 0n
end;
new_tokens := Big_map.update (p.address_to, (Some (receiver_balance + p.value)), new_tokens);
}
} with ((nil: list (operation)), s with record [tokens = new_tokens; allowances = new_allowances])
function approve (const p : approve; const s : storage) : list (operation) * storage is block {
var previous_value : nat := case Big_map.find_opt ((p.spender, Tezos.sender), s.allowances) of
Some (value) -> value
| None -> 0n
end;
var new_allowances : allowances := Big_map.empty;
if previous_value > 0n and p.value > 0n
then { failwith ("Unsafe Allowance Change")}
else {
new_allowances := Big_map.update ((p.spender, Tezos.sender), (Some (p.value)), s.allowances);
}
} with ((nil: list (operation)), s with record [allowances = new_allowances])
function getAllowance (const p : getAllowance; const s : storage) : list (operation) * storage is block {
var value : nat := case Big_map.find_opt ((p.owner, p.spender), s.allowances) of
Some (value) -> value
| None -> 0n
end;
var op : operation := Tezos.transaction (value, 0mutez, p.callback);
} with (list [op],s)
function getBalance (const p : getBalance; const s : storage) : list (operation) * storage is block {
var value : nat := case Big_map.find_opt (p.owner, s.tokens) of
Some (value) -> value
| None -> 0n
end;
var op : operation := Tezos.transaction (value, 0mutez, p.callback);
} with (list [op],s)
function getTotalSupply (const p : getTotalSupply; const s : storage) : list (operation) * storage is block {
var total : nat := s.total_amount;
var op : operation := Tezos.transaction (total, 0mutez, p.callback);
} with (list [op],s)
function main (const a : action; const s : storage) : list (operation) * storage is
case a of
Transfer (p) -> transfer (p,s)
| Approve (p) -> approve (p,s)
| GetAllowance (p) -> getAllowance (p,s)
| GetBalance (p) -> getBalance (p,s)
| GetTotalSupply (p) -> getTotalSupply (p,s)
end;
View File
@ -0,0 +1,109 @@
type tokens = (address, nat) big_map
type allowances = (address * address, nat) big_map (* (sender,account) -> value *)
type storage = {
tokens : tokens;
allowances : allowances;
total_amount : nat;
}
type transfer = {
address_from : address;
address_to : address;
value : nat;
}
type approve = {
spender : address;
value : nat;
}
type getAllowance = {
owner : address;
spender : address;
callback : nat contract;
}
type getBalance = {
owner : address;
callback : nat contract;
}
type getTotalSupply = {
callback : nat contract;
}
type action =
Transfer of transfer
| Approve of approve
| GetAllowance of getAllowance
| GetBalance of getBalance
| GetTotalSupply of getTotalSupply
let transfer (p,s : transfer * storage) : operation list * storage =
let new_allowances =
if Tezos.sender = p.address_from then s.allowances
else
let authorized_value = match Big_map.find_opt (Tezos.sender,p.address_from) s.allowances with
Some value -> value
| None -> 0n
in
if (authorized_value < p.value)
then (failwith "Not Enough Allowance" : allowances)
else Big_map.update (Tezos.sender,p.address_from) (Some (abs(authorized_value - p.value))) s.allowances
in
let sender_balance = match Big_map.find_opt p.address_from s.tokens with
Some value -> value
| None -> 0n
in
if (sender_balance < p.value)
then (failwith "Not Enough Balance" : operation list * storage)
else
let new_tokens = Big_map.update p.address_from (Some (abs(sender_balance - p.value))) s.tokens in
let receiver_balance = match Big_map.find_opt p.address_to s.tokens with
Some value -> value
| None -> 0n
in
let new_tokens = Big_map.update p.address_to (Some (receiver_balance + p.value)) new_tokens in
([]:operation list), {s with tokens = new_tokens; allowances = new_allowances}
let approve (p,s : approve * storage) : operation list * storage =
let previous_value = match Big_map.find_opt (p.spender, Tezos.sender) s.allowances with
Some value -> value
| None -> 0n
in
if previous_value > 0n && p.value > 0n
then (failwith "Unsafe Allowance Change" : operation list * storage)
else
let new_allowances = Big_map.update (p.spender, Tezos.sender) (Some (p.value)) s.allowances in
([] : operation list), {s with allowances = new_allowances}
let getAllowance (p,s : getAllowance * storage) : operation list * storage =
let value = match Big_map.find_opt (p.owner, p.spender) s.allowances with
Some value -> value
| None -> 0n
in
let op = Tezos.transaction value 0mutez p.callback in
([op],s)
let getBalance (p,s : getBalance * storage) : operation list * storage =
let value = match Big_map.find_opt p.owner s.tokens with
Some value -> value
| None -> 0n
in
let op = Tezos.transaction value 0mutez p.callback in
([op],s)
let getTotalSupply (p,s : getTotalSupply * storage) : operation list * storage =
let total = s.total_amount in
let op = Tezos.transaction total 0mutez p.callback in
([op],s)
let main (a,s:action * storage) =
match a with
Transfer p -> transfer (p,s)
| Approve p -> approve (p,s)
| GetAllowance p -> getAllowance (p,s)
| GetBalance p -> getBalance (p,s)
| GetTotalSupply p -> getTotalSupply (p,s)
View File
@ -0,0 +1,115 @@
type tokens = big_map (address, nat)
type allowances = big_map ((address, address), nat) /* (sender,account) -> value */
type storage = {
tokens : tokens,
allowances : allowances,
total_amount : nat,
}
type transfer = {
address_from : address,
address_to : address,
value : nat,
}
type approve = {
spender : address,
value : nat,
}
type getAllowance = {
owner : address,
spender : address,
callback : contract (nat),
}
type getBalance = {
owner : address,
callback : contract (nat),
}
type getTotalSupply = {
callback : contract (nat),
}
type action =
| Transfer ( transfer )
| Approve ( approve )
| GetAllowance ( getAllowance )
| GetBalance ( getBalance )
| GetTotalSupply ( getTotalSupply )
let transfer = ((p,s) : (transfer, storage)) : (list (operation), storage) => {
let new_allowances =
if (Tezos.sender == p.address_from) { s.allowances; }
else {
let authorized_value = switch (Big_map.find_opt ((Tezos.sender,p.address_from), s.allowances)) {
| Some value => value
| None => 0n
};
if (authorized_value < p.value) { (failwith ("Not Enough Allowance") : allowances); }
else { Big_map.update ((Tezos.sender,p.address_from), (Some (abs(authorized_value - p.value))), s.allowances); };
};
let sender_balance = switch (Big_map.find_opt (p.address_from, s.tokens)) {
| Some value => value
| None => 0n
};
if (sender_balance < p.value) { (failwith ("Not Enough Balance") : (list (operation), storage)); }
else {
let new_tokens = Big_map.update (p.address_from, (Some (abs(sender_balance - p.value))), s.tokens);
let receiver_balance = switch (Big_map.find_opt (p.address_to, s.tokens)) {
| Some value => value
| None => 0n
};
let new_tokens = Big_map.update (p.address_to, (Some (receiver_balance + p.value)), new_tokens);
(([]: list (operation)), { ...s,tokens:new_tokens, allowances:new_allowances});
};
};
let approve = ((p,s) : (approve, storage)) : (list (operation), storage) => {
let previous_value = switch (Big_map.find_opt ((p.spender, Tezos.sender), s.allowances)){
| Some value => value
| None => 0n
};
if (previous_value > 0n && p.value > 0n)
{ (failwith ("Unsafe Allowance Change") : (list (operation), storage)); }
else {
let new_allowances = Big_map.update ((p.spender, Tezos.sender), (Some (p.value)), s.allowances);
(([] : list (operation)), { ...s, allowances : new_allowances});
};
};
let getAllowance = ((p,s) : (getAllowance, storage)) : (list (operation), storage) => {
let value = switch (Big_map.find_opt ((p.owner, p.spender), s.allowances)) {
| Some value => value
| None => 0n
};
let op = Tezos.transaction (value, 0mutez, p.callback);
([op],s)
};
let getBalance = ((p,s) : (getBalance, storage)) : (list (operation), storage) => {
let value = switch (Big_map.find_opt (p.owner, s.tokens)) {
| Some value => value
| None => 0n
};
let op = Tezos.transaction (value, 0mutez, p.callback);
([op],s)
};
let getTotalSupply = ((p,s) : (getTotalSupply, storage)) : (list (operation), storage) => {
let total = s.total_amount;
let op = Tezos.transaction (total, 0mutez, p.callback);
([op],s)
};
let main = ((a,s): (action, storage)) =>
switch a {
| Transfer p => transfer ((p,s))
| Approve p => approve ((p,s))
| GetAllowance p => getAllowance ((p,s))
| GetBalance p => getBalance ((p,s))
| GetTotalSupply p => getTotalSupply ((p,s))
};
View File
@ -0,0 +1,30 @@
(* This test checks that the types are comparable *)
let int_ (a: int) = a < a
let nat_ (a: nat) = a < a
let bool_ (a: bool) = a < a
let mutez_ (a: tez) = a < a
let string_ (a: string) = a < a
let bytes_ (a: bytes) = a < a
let address_ (a: address) = a < a
let timestamp_ (a: timestamp) = a < a
let key_hash_ (a: key_hash) = a < a
type comp_pair = int * int
let comp_pair (a: comp_pair) = a < a
(*
type uncomp_pair_1 = int * int * int
let uncomp_pair_1 (a: uncomp_pair_1) = a < a
type uncomp_pair_2 = comp_pair * int
let uncomp_pair_2 (a: uncomp_pair_2) = a < a
*)
type inner_record = (int,"one",nat,"two") michelson_pair
type comb_record = (int,"three",inner_record,"four") michelson_pair
let comb_record (a : comb_record) = a < a
View File
@ -5,3 +5,25 @@ let main (n : int * storage) : operation list * storage =
let x : int = 7 let x : int = 7
in x + n.0, n.1.0 + n.1.1 in x + n.0, n.1.0 + n.1.1
in ([] : operation list), x in ([] : operation list), x
let f0 (a: string) = true
let f1 (a: string) = true
let f2 (a: string) = true
let letin_nesting (_: unit) =
begin
let s = "test" in
let p0 = f0 s in
assert p0;
let p1 = f1 s in
assert p1;
let p2 = f2 s in
assert p2;
s
end
let letin_nesting2 (x: int) =
let y = 2 in
let z = 3 in
x + y + z
View File
@ -7,3 +7,24 @@ let main = (n : (int, storage)) : (list (operation), storage) => {
}; };
([]: list (operation), x); ([]: list (operation), x);
}; };
let f0 = (a: string) => true
let f1 = (a: string) => true
let f2 = (a: string) => true
let letin_nesting = (_: unit) => {
let s = "test";
let p0 = f0(s);
assert(p0);
let p1 = f1(s);
assert(p1);
let p2 = f2(s);
assert(p2);
s
}
let letin_nesting2 = (x: int) => {
let y = 2;
let z = 3;
x + y + z
}
View File
@ -1,3 +1,4 @@
const s : string = "toto" const s : string = "toto"
const x : string = s ^ "bar" const x : string = s ^ "bar"
const y : string = "foo" ^ x const y : string = "foo" ^ x
const v : string = {|deadbeef|}
View File
@ -433,6 +433,30 @@ let bytes_arithmetic () : unit result =
let%bind () = Assert.assert_fail @@ Ast_core.Misc.assert_value_eq (b3 , b1) in let%bind () = Assert.assert_fail @@ Ast_core.Misc.assert_value_eq (b3 , b1) in
ok () ok ()
let comparable_mligo () : unit result =
let%bind program = mtype_file "./contracts/comparable.mligo" in
let%bind () = expect_eq program "int_" (e_int 1) (e_bool false) in
let%bind () = expect_eq program "nat_" (e_nat 1) (e_bool false) in
let%bind () = expect_eq program "bool_" (e_bool true) (e_bool false) in
let%bind () = expect_eq program "mutez_" (e_mutez 1) (e_bool false) in
let%bind () = expect_eq program "string_" (e_string "foo") (e_bool false) in
let%bind () = expect_eq program "bytes_" (e_bytes_string "deadbeaf") (e_bool false) in
let%bind () = expect_eq program "address_" (e_address "tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx") (e_bool false) in
let%bind () = expect_eq program "timestamp_" (e_timestamp 101112) (e_bool false) in
let open Tezos_crypto in
let pkh, _, _ = Signature.generate_key () in
let key_hash = Signature.Public_key_hash.to_b58check @@ pkh in
let%bind () = expect_eq program "key_hash_" (e_key_hash key_hash) (e_bool false) in
let pair = e_pair (e_int 1) (e_int 2) in
let%bind () = expect_eq program "comp_pair" pair (e_bool false) in
(* let tuple = e_tuple [e_int 1; e_int 2; e_int 3] in
let%bind () = expect_string_failwith program "uncomp_pair_1" tuple "" in
let pair = e_pair pair (e_int 3) in
let%bind () = expect_string_failwith program "uncomp_pair_2" pair "" in *)
let comb = e_pair (e_int 3) (e_pair (e_int 1) (e_nat 2)) in
let%bind () = expect_eq program "comb_record" comb (e_bool false) in
ok ()
let crypto () : unit result = let crypto () : unit result =
let%bind program = type_file "./contracts/crypto.ligo" in let%bind program = type_file "./contracts/crypto.ligo" in
let%bind foo = e_bytes_hex "0f00" in let%bind foo = e_bytes_hex "0f00" in
@ -1571,18 +1595,37 @@ let counter_religo () : unit result =
let let_in_mligo () : unit result = let let_in_mligo () : unit result =
let%bind program = mtype_file "./contracts/letin.mligo" in let%bind program = mtype_file "./contracts/letin.mligo" in
let make_input n = e_pair (e_int n) (e_pair (e_int 3) (e_int 5)) in let%bind () =
let make_expected n = let make_input n = e_pair (e_int n) (e_pair (e_int 3) (e_int 5)) in
e_pair (e_typed_list [] (t_operation ())) (e_pair (e_int (7+n)) (e_int (3+5))) let make_expected n =
in expect_eq_n program "main" make_input make_expected e_pair (e_typed_list [] (t_operation ())) (e_pair (e_int (7+n)) (e_int (3+5)))
in
expect_eq_n program "main" make_input make_expected
in
let%bind () =
expect_eq program "letin_nesting" (e_unit ()) (e_string "test")
in
let%bind () =
expect_eq program "letin_nesting2" (e_int 4) (e_int 9)
in
ok ()
let let_in_religo () : unit result = let let_in_religo () : unit result =
let%bind program = retype_file "./contracts/letin.religo" in let%bind program = retype_file "./contracts/letin.religo" in
let make_input n = e_pair (e_int n) (e_pair (e_int 3) (e_int 5)) in let%bind () =
let make_expected n = let make_input n = e_pair (e_int n) (e_pair (e_int 3) (e_int 5)) in
e_pair (e_typed_list [] (t_operation ())) (e_pair (e_int (7+n)) (e_int (3+5))) let make_expected n =
in expect_eq_n program "main" make_input make_expected e_pair (e_typed_list [] (t_operation ())) (e_pair (e_int (7+n)) (e_int (3+5)))
in
expect_eq_n program "main" make_input make_expected
in
let%bind () =
expect_eq program "letin_nesting" (e_unit ()) (e_string "test")
in
let%bind () =
expect_eq program "letin_nesting2" (e_int 4) (e_int 9)
in
ok ()
let match_variant () : unit result = let match_variant () : unit result =
let%bind program = mtype_file "./contracts/match.mligo" in let%bind program = mtype_file "./contracts/match.mligo" in
@ -2417,6 +2460,7 @@ let main = test_suite "Integration (End to End)" [
test "bytes_arithmetic" bytes_arithmetic ; test "bytes_arithmetic" bytes_arithmetic ;
test "bytes_arithmetic (mligo)" bytes_arithmetic_mligo ; test "bytes_arithmetic (mligo)" bytes_arithmetic_mligo ;
test "bytes_arithmetic (religo)" bytes_arithmetic_religo ; test "bytes_arithmetic (religo)" bytes_arithmetic_religo ;
test "comparable (mligo)" comparable_mligo;
test "crypto" crypto ; test "crypto" crypto ;
test "crypto (mligo)" crypto_mligo ; test "crypto (mligo)" crypto_mligo ;
test "crypto (religo)" crypto_religo ; test "crypto (religo)" crypto_religo ;
View File
@ -123,6 +123,7 @@ let md_files = [
"/gitlab-pages/docs/advanced/entrypoints-contracts.md"; "/gitlab-pages/docs/advanced/entrypoints-contracts.md";
"/gitlab-pages/docs/advanced/timestamps-addresses.md"; "/gitlab-pages/docs/advanced/timestamps-addresses.md";
"/gitlab-pages/docs/advanced/inline.md"; "/gitlab-pages/docs/advanced/inline.md";
"/gitlab-pages/docs/advanced/interop.md";
"/gitlab-pages/docs/api/cli-commands.md"; "/gitlab-pages/docs/api/cli-commands.md";
"/gitlab-pages/docs/api/cheat-sheet.md"; "/gitlab-pages/docs/api/cheat-sheet.md";
"/gitlab-pages/docs/reference/toplevel.md"; "/gitlab-pages/docs/reference/toplevel.md";
View File
@ -18,5 +18,6 @@ let () =
Hash_lock_tests.main ; Hash_lock_tests.main ;
Time_lock_repeat_tests.main ; Time_lock_repeat_tests.main ;
Pledge_tests.main ; Pledge_tests.main ;
Tzip12_tests.main ;
] ; ] ;
() ()
View File
@ -39,7 +39,7 @@ module TestExpressions = struct
let unit () : unit result = test_expression I.(e_unit ()) O.(t_unit ()) let unit () : unit result = test_expression I.(e_unit ()) O.(t_unit ())
let int () : unit result = test_expression I.(e_int (Z.of_int 32)) O.(t_int ()) let int () : unit result = test_expression I.(e_int (Z.of_int 32)) O.(t_int ())
let bool () : unit result = test_expression I.(e_bool true) O.(t_bool ()) let bool () : unit result = test_expression I.(e_bool true) O.(t_bool ())
let string () : unit result = test_expression I.(e_string "s") O.(t_string ()) let string () : unit result = test_expression I.(e_string (Standard "s")) O.(t_string ())
let bytes () : unit result = let bytes () : unit result =
let%bind b = I.e_bytes_hex "0b" in let%bind b = I.e_bytes_hex "0b" in
test_expression b O.(t_bytes ()) test_expression b O.(t_bytes ())
@ -51,7 +51,7 @@ module TestExpressions = struct
let tuple () : unit result = let tuple () : unit result =
test_expression test_expression
I.(e_record @@ LMap.of_list [(Label "0",e_int (Z.of_int 32)); (Label "1",e_string "foo")]) I.(e_record @@ LMap.of_list [(Label "0",e_int (Z.of_int 32)); (Label "1", e_string (Standard "foo"))])
O.(make_t_ez_record [("0",t_int ()); ("1",t_string ())]) O.(make_t_ez_record [("0",t_int ()); ("1",t_string ())])
let constructor () : unit result = let constructor () : unit result =
@ -65,7 +65,7 @@ module TestExpressions = struct
let record () : unit result = let record () : unit result =
test_expression test_expression
I.(e_record @@ LMap.of_list [(Label "foo", e_int (Z.of_int 32)); (Label "bar", e_string "foo")]) I.(e_record @@ LMap.of_list [(Label "foo", e_int (Z.of_int 32)); (Label "bar", e_string (Standard "foo"))])
O.(make_t_ez_record [("foo", t_int ()); ("bar", t_string ())]) O.(make_t_ez_record [("foo", t_int ()); ("bar", t_string ())])
190
src/test/tzip12_tests.ml Normal file
View File
@ -0,0 +1,190 @@
open Trace
open Test_helpers
let file_FA12 = "./contracts/FA1.2.ligo"
let mfile_FA12 = "./contracts/FA1.2.mligo"
let refile_FA12 = "./contracts/FA1.2.religo"
let type_file f s =
let%bind typed,state = Ligo.Compile.Utils.type_file f s (Contract "main") in
ok @@ (typed,state)
let get_program f st =
let s = ref None in
fun () -> match !s with
| Some s -> ok s
| None -> (
let%bind program = type_file f st in
s := Some program ;
ok program
)
let compile_main f s () =
let%bind typed_prg,_ = get_program f s () in
let%bind mini_c_prg = Ligo.Compile.Of_typed.compile typed_prg in
let%bind michelson_prg = Ligo.Compile.Of_mini_c.aggregate_and_compile_contract mini_c_prg "main" in
let%bind (_contract: Tezos_utils.Michelson.michelson) =
(* fails if the given entry point is not a valid contract *)
Ligo.Compile.Of_michelson.build_contract michelson_prg in
ok ()
open Ast_imperative
let (sender , contract) =
let open Proto_alpha_utils.Memory_proto_alpha in
let id = List.nth dummy_environment.identities 0 in
let kt = id.implicit_contract in
Protocol.Alpha_context.Contract.to_b58check kt , kt
let external_contract =
let open Proto_alpha_utils.Memory_proto_alpha in
let id = List.nth dummy_environment.identities 4 in
let kh = id.public_key_hash in
Tezos_utils.Signature.Public_key_hash.to_string kh
let from_ = e_address @@ addr 5
let to_ = e_address @@ addr 2
let sender = e_address @@ sender
let external_contract = e_annotation (e_constant C_IMPLICIT_ACCOUNT [e_key_hash external_contract]) (t_contract (t_nat ()))
let transfer f s () =
let%bind program,_ = get_program f s () in
let storage = e_record_ez [
("tokens", e_big_map [(sender, e_nat 100); (from_, e_nat 100); (to_, e_nat 100)]);
("allowances", e_big_map [(e_pair sender from_, e_nat 100)]);
("total_amount",e_nat 300);
] in
let parameter = e_record_ez [("address_from", from_);("address_to",to_); ("value",e_nat 10)] in
let new_storage = e_record_ez [
("tokens", e_big_map [(sender, e_nat 100); (from_, e_nat 90); (to_, e_nat 110)]);
("allowances", e_big_map [(e_pair sender from_, e_nat 90)]);
("total_amount",e_nat 300);
] in
let input = e_pair parameter storage in
let expected = e_pair (e_typed_list [] (t_operation ())) new_storage in
let options = Proto_alpha_utils.Memory_proto_alpha.make_options () in
expect_eq program ~options "transfer" input expected
let transfer_not_e_allowance f s () =
let%bind program,_ = get_program f s () in
let storage = e_record_ez [
("tokens", e_big_map [(sender, e_nat 100); (from_, e_nat 100); (to_, e_nat 100)]);
("allowances", e_big_map [(e_pair sender from_, e_nat 0)]);
("total_amount",e_nat 300);
] in
let parameter = e_record_ez [("address_from", from_);("address_to",to_); ("value",e_nat 10)] in
let input = e_pair parameter storage in
let options = Proto_alpha_utils.Memory_proto_alpha.make_options () in
expect_string_failwith ~options program "transfer" input
"Not Enough Allowance"
let transfer_not_e_balance f s () =
let%bind program,_ = get_program f s () in
let storage = e_record_ez [
("tokens", e_big_map [(sender, e_nat 100); (from_, e_nat 0); (to_, e_nat 100)]);
("allowances", e_big_map [(e_pair sender from_, e_nat 100)]);
("total_amount",e_nat 300);
] in
let parameter = e_record_ez [("address_from", from_);("address_to",to_); ("value",e_nat 10)] in
let input = e_pair parameter storage in
let options = Proto_alpha_utils.Memory_proto_alpha.make_options () in
expect_string_failwith ~options program "transfer" input
"Not Enough Balance"
let approve f s () =
let%bind program,_ = get_program f s () in
let storage = e_record_ez [
("tokens", e_big_map [(sender, e_nat 100); (from_, e_nat 100); (to_, e_nat 100)]);
("allowances", e_big_map [(e_pair from_ sender, e_nat 0)]);
("total_amount",e_nat 300);
] in
let parameter = e_record_ez [("spender", from_);("value",e_nat 100)] in
let new_storage = e_record_ez [
("tokens", e_big_map [(sender, e_nat 100); (from_, e_nat 100); (to_, e_nat 100)]);
("allowances", e_big_map [(e_pair from_ sender, e_nat 100)]);
("total_amount",e_nat 300);
] in
let input = e_pair parameter storage in
let expected = e_pair (e_typed_list [] (t_operation ())) new_storage in
let options = Proto_alpha_utils.Memory_proto_alpha.make_options () in
expect_eq program ~options "approve" input expected
let approve_unsafe f s () =
let%bind program,_ = get_program f s () in
let storage = e_record_ez [
("tokens", e_big_map [(sender, e_nat 100); (from_, e_nat 100); (to_, e_nat 100)]);
("allowances", e_big_map [(e_pair from_ sender, e_nat 100)]);
("total_amount",e_nat 300);
] in
let parameter = e_record_ez [("spender", from_);("value",e_nat 100)] in
let input = e_pair parameter storage in
let options = Proto_alpha_utils.Memory_proto_alpha.make_options () in
expect_string_failwith ~options program "approve" input
"Unsafe Allowance Change"
let get_allowance f s () =
let%bind program,_ = get_program f s () in
let storage = e_record_ez [
("tokens", e_big_map [(sender, e_nat 100); (from_, e_nat 100); (to_, e_nat 100)]);
("allowances", e_big_map [(e_pair from_ sender, e_nat 100)]);
("total_amount",e_nat 300);
] in
let parameter = e_record_ez [("owner", from_);("spender",sender); ("callback", external_contract)] in
let input = e_pair parameter storage in
let expected = e_pair (e_typed_list [] (t_operation ())) storage in
let options = Proto_alpha_utils.Memory_proto_alpha.make_options () in
expect_eq program ~options "getAllowance" input expected
let get_balance f s () =
let%bind program,_ = get_program f s () in
let storage = e_record_ez [
("tokens", e_big_map [(sender, e_nat 100); (from_, e_nat 100); (to_, e_nat 100)]);
("allowances", e_big_map [(e_pair from_ sender, e_nat 100)]);
("total_amount",e_nat 300);
] in
let parameter = e_record_ez [("owner", from_);("callback", external_contract)] in
let input = e_pair parameter storage in
let expected = e_pair (e_typed_list [] (t_operation ())) storage in
let options = Proto_alpha_utils.Memory_proto_alpha.make_options () in
expect_eq program ~options "getBalance" input expected
let get_total_supply f s () =
let%bind program,_ = get_program f s () in
let storage = e_record_ez [
("tokens", e_big_map [(sender, e_nat 100); (from_, e_nat 100); (to_, e_nat 100)]);
("allowances", e_big_map [(e_pair from_ sender, e_nat 100)]);
("total_amount",e_nat 300);
] in
let parameter = e_record_ez [("callback", external_contract)] in
let input = e_pair parameter storage in
let expected = e_pair (e_typed_list [] (t_operation ())) storage in
let options = Proto_alpha_utils.Memory_proto_alpha.make_options () in
expect_eq program ~options "getTotalSupply" input expected
let main = test_suite "tzip-12" [
test "transfer" (transfer file_FA12 "pascaligo");
test "transfer (not enough allowance)" (transfer_not_e_allowance file_FA12 "pascaligo");
test "transfer (not enough balance)" (transfer_not_e_balance file_FA12 "pascaligo");
test "approve" (approve file_FA12 "pascaligo");
test "approve (unsafe allowance change)" (approve_unsafe file_FA12 "pascaligo");
(* test "getAllowance" (get_allowance file_FA12 "pascaligo");
test "getBalance" (get_balance file_FA12 "pascaligo");
test "getTotalSupply" (get_total_supply file_FA12 "pascaligo"); waiting for a dummy_contract with type nat contractt*)
test "transfer" (transfer mfile_FA12 "cameligo");
test "transfer (not enough allowance)" (transfer_not_e_allowance mfile_FA12 "cameligo");
test "transfer (not enough balance)" (transfer_not_e_balance mfile_FA12 "cameligo");
test "approve" (approve mfile_FA12 "cameligo");
test "approve (unsafe allowance change)" (approve_unsafe mfile_FA12 "cameligo");
(* test "getAllowance" (get_allowance mfile_FA12 "cameligo");
test "getBalance" (get_balance mfile_FA12 "cameligo");
test "getTotalSupply" (get_total_supply mfile_FA12 "cameligo"); waiting for a dummy_contract with type nat contractt*)
test "transfer" (transfer refile_FA12 "reasonligo");
test "transfer (not enough allowance)" (transfer_not_e_allowance refile_FA12 "reasonligo");
test "transfer (not enough balance)" (transfer_not_e_balance refile_FA12 "reasonligo");
test "approve" (approve refile_FA12 "reasonligo");
test "approve (unsafe allowance change)" (approve_unsafe refile_FA12 "reasonligo");
(* test "getAllowance" (get_allowance refile_FA12 "reasonligo");
test "getBalance" (get_balance refile_FA12 "reasonligo");
test "getTotalSupply" (get_total_supply refile_FA12 "reasonligo"); waiting for a dummy_contract with type nat contractt*)
]
View File
@ -36,7 +36,7 @@ export const HeaderComponent = () => {
<Container> <Container>
<Group> <Group>
<a href="https://ligolang.org"> <a href="https://ligolang.org">
<Logo src="logo.svg" /> <Logo src="/logo.svg" />
</a> </a>
</Group> </Group>
<Group> <Group>
View File
@ -14,4 +14,4 @@ module Tree = Tree
module Region = Region module Region = Region
module Pos = Pos module Pos = Pos
module Var = Var module Var = Var
module Ligo_string = X_string
View File
@ -0,0 +1,11 @@
type t =
Standard of string
| Verbatim of string
let pp ppf = function
Standard s -> Format.fprintf ppf "%S" s
| Verbatim v -> Format.fprintf ppf "{|%s|}" v
let extract = function
Standard s -> s
| Verbatim v -> v
View File
@ -0,0 +1,11 @@
(*
Ligo_string represents strings as they are written in a LIGO program,
delimited either by double quotes (Standard) or by `{|...|}` (Verbatim).
*)
type t =
Standard of string
| Verbatim of string
val pp : Format.formatter -> t -> unit
val extract : t -> string
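As a usage note, here is a minimal sketch of how the new Ligo_string.t wrapper behaves, assuming the module is in scope as Ligo_string (re-exported from Simple_utils above): standard strings pretty-print quoted, verbatim strings print in {|...|} form, and extract recovers the raw payload in both cases.

let () =
  let s = Ligo_string.Standard "hello" in
  let v = Ligo_string.Verbatim "hello" in
  (* prints: "hello" {|hello|} *)
  Format.printf "%a %a@." Ligo_string.pp s Ligo_string.pp v;
  (* both constructors carry the same underlying characters *)
  assert (Ligo_string.extract s = Ligo_string.extract v)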