Merge branch 'dev' of gitlab.com:ligolang/ligo into rinderknecht-dev
commit de0657e8f2
@@ -14,17 +14,20 @@ Here's how a custom map type is defined:
 <!--DOCUSAURUS_CODE_TABS-->
 <!--Pascaligo-->
 ```pascaligo
-type ledger is map(address, tez);
+type move is (int * int);
+type moveset is map(address, move);
 ```

 <!--CameLIGO-->
 ```cameligo
-type ledger = (address, tez) map
+type move = int * int
+type moveset = (address, move) map
 ```

 <!--ReasonLIGO-->
 ```reasonligo
-type ledger = map(address, tez);
+type move = (int, int);
+type moveset = map(address, move);
 ```

 <!--END_DOCUSAURUS_CODE_TABS-->
@@ -35,9 +38,9 @@ And here's how a map value is populated:
 <!--Pascaligo-->

 ```pascaligo
-const ledger: ledger = map
-("tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx": address) -> 1000mutez;
-("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) -> 2000mutez;
+const moves: moveset = map
+("tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx": address) -> (1, 2);
+("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) -> (0, 3);
 end
 ```
 > Notice the `->` between the key and its value and `;` to separate individual map entries.
@@ -47,9 +50,9 @@ end
 <!--CameLIGO-->

 ```cameligo
-let ledger: ledger = Map.literal
-[ (("tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx": address), 1000mutez) ;
-(("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address), 2000mutez) ;
+let moves: moveset = Map.literal
+[ (("tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx": address), (1, 2)) ;
+(("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address), (0, 3)) ;
 ]
 ```
 > Map.literal constructs the map from a list of key-value pair tuples, `(<key>, <value>)`.
@@ -60,10 +63,10 @@ let ledger: ledger = Map.literal
 <!--ReasonLIGO-->

 ```reasonligo
-let ledger: ledger =
+let moves: moveset =
 Map.literal([
-("tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx": address, 1000mutez),
-("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address, 2000mutez),
+("tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx": address, (1, 2)),
+("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address, (0, 3)),
 ]);
 ```
 > Map.literal constructs the map from a list of key-value pair tuples, `(<key>, <value>)`.
@@ -74,25 +77,25 @@ let ledger: ledger =

 ### Accessing map values by key

-If we want to access a balance from our ledger above, we can use the `[]` operator/accessor to read the associated `tez` value. However, the value we'll get will be wrapped as an optional; in our case `option(tez)`. Here's an example:
+If we want to access a move from our moveset above, we can use the `[]` operator/accessor to read the associated `move` value. However, the value we'll get will be wrapped as an optional; in our case `option(move)`. Here's an example:

 <!--DOCUSAURUS_CODE_TABS-->
 <!--Pascaligo-->
 ```pascaligo
-const balance: option(tez) = ledger[("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address)];
+const balance: option(move) = moves[("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address)];
 ```

 <!--CameLIGO-->

 ```cameligo
-let balance: tez option = Map.find_opt ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) ledger
+let balance: move option = Map.find_opt ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) moves
 ```

 <!--ReasonLIGO-->

 ```reasonligo
-let balance: option(tez) =
-Map.find_opt("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address, ledger);
+let balance: option(move) =
+Map.find_opt("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address, moves);
 ```
 <!--END_DOCUSAURUS_CODE_TABS-->

@@ -103,24 +106,61 @@ Accessing a value in a map yields an option, however you can also get the value
 <!--DOCUSAURUS_CODE_TABS-->
 <!--Pascaligo-->
 ```pascaligo
-const balance: tez = get_force(("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address), ledger);
+const balance: move = get_force(("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address), moves);
 ```

 <!--CameLIGO-->

 ```cameligo
-let balance: tez = Map.find ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) ledger
+let balance: move = Map.find ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) moves
 ```

 <!--ReasonLIGO-->

 ```reasonligo
-let balance: tez =
-Map.find("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address, ledger);
+let balance: move =
+Map.find("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address, moves);
 ```

 <!--END_DOCUSAURUS_CODE_TABS-->

+### Updating the contents of a map
+
+<!--DOCUSAURUS_CODE_TABS-->
+
+<!--Pascaligo-->
+
+The values of a PascaLIGO map can be updated using the ordinary assignment syntax:
+
+```pascaligo
+
+function set_ (var m: moveset) : moveset is
+block {
+m[("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address)] := (4,9);
+} with m
+```
+
+<!--Cameligo-->
+
+We can update a map in CameLIGO using the `Map.update` built-in:
+
+```cameligo
+
+let updated_map: moveset = Map.update ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) (Some (4,9)) moves
+```
+
+<!--Reasonligo-->
+
+We can update a map in ReasonLIGO using the `Map.update` built-in:
+
+```reasonligo
+
+let updated_map: moveset = Map.update(("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address), Some((4,9)), moves);
+```
+
+<!--END_DOCUSAURUS_CODE_TABS-->
+
+
 ### Iteration over the contents of a map

 There are three kinds of iteration on LIGO maps, `iter`, `map` and `fold`. `iter`
@@ -132,24 +172,24 @@ otherwise.
 <!--DOCUSAURUS_CODE_TABS-->
 <!--Pascaligo-->
 ```pascaligo
-function iter_op (const m : ledger) : unit is
+function iter_op (const m : moveset) : unit is
 block {
-function aggregate (const i : address ; const j : tez) : unit is block
-{ if (j > 100mutez) then skip else failwith("fail") } with unit ;
+function aggregate (const i : address ; const j : move) : unit is block
+{ if (j.1 > 1) then skip else failwith("fail") } with unit ;
 } with map_iter(aggregate, m) ;
 ```

 <!--CameLIGO-->
 ```cameligo
-let iter_op (m : ledger) : unit =
-let assert_eq = fun (i: address) (j: tez) -> assert (j > 100tz)
+let iter_op (m : moveset) : unit =
+let assert_eq = fun (i: address) (j: move) -> assert (j.0 > 1)
 in Map.iter assert_eq m
 ```

 <!--ReasonLIGO-->
 ```reasonligo
-let iter_op = (m: ledger): unit => {
-let assert_eq = (i: address, j: tez) => assert(j > 100mutez);
+let iter_op = (m: moveset): unit => {
+let assert_eq = (i: address, j: move) => assert(j[0] > 1);
 Map.iter(assert_eq, m);
 };
 ```
@@ -160,23 +200,23 @@ let iter_op = (m: ledger): unit => {
 <!--DOCUSAURUS_CODE_TABS-->
 <!--Pascaligo-->
 ```pascaligo
-function map_op (const m : ledger) : ledger is
+function map_op (const m : moveset) : moveset is
 block {
-function increment (const i : address ; const j : tez) : tez is block { skip } with j + 1mutez ;
+function increment (const i : address ; const j : move) : move is block { skip } with (j.0, j.1 + 1) ;
 } with map_map(increment, m) ;
 ```

 <!--CameLIGO-->
 ```cameligo
-let map_op (m : ledger) : ledger =
-let increment = fun (_: address) (j: tez) -> j + 1tz
+let map_op (m : moveset) : moveset =
+let increment = fun (_: address) (j: move) -> (j.0, j.1 + 1)
 in Map.map increment m
 ```

 <!--ReasonLIGO-->
 ```reasonligo
-let map_op = (m: ledger): ledger => {
-let increment = (ignore: address, j: tez) => j + 1tz;
+let map_op = (m: moveset): moveset => {
+let increment = (ignore: address, j: move) => (j[0], j[1] + 1);
 Map.map(increment, m);
 };
 ```
@@ -194,30 +234,191 @@ It eventually returns the result of combining all the elements.
 <!--DOCUSAURUS_CODE_TABS-->
 <!--Pascaligo-->
 ```pascaligo
-function fold_op (const m : ledger) : tez is
+function fold_op (const m : moveset) : int is
 block {
-function aggregate (const j : tez ; const cur : (address * tez)) : tez is j + cur.1 ;
-} with map_fold(aggregate, m , 10mutez)
+function aggregate (const j : int ; const cur : (address * (int * int))) : int is j + cur.1.1 ;
+} with map_fold(aggregate, m , 5)
 ```

 <!--CameLIGO-->
 ```cameligo
-let fold_op (m : ledger) : ledger =
-let aggregate = fun (j: tez) (cur: address * tez) -> j + cur.1 in
-Map.fold aggregate m 10tz
+let fold_op (m : moveset) : moveset =
+let aggregate = fun (j: int) (cur: address * (int * int)) -> j + cur.1.1 in
+Map.fold aggregate m 5
 ```

 <!--ReasonLIGO-->
 ```reasonligo
-let fold_op = (m: ledger): ledger => {
-let aggregate = (j: tez, cur: (address, tez)) => j + cur[1];
-Map.fold(aggregate, m, 10tz);
+let fold_op = (m: moveset): moveset => {
+let aggregate = (j: int, cur: (address, (int,int))) => j + cur[1][1];
+Map.fold(aggregate, m, 5);
 };

 ```

 <!--END_DOCUSAURUS_CODE_TABS-->

+## Big Maps
+
+Ordinary maps are fine for contracts with a finite lifespan or a bounded number
+of users. For many contracts however, the intention is to have a map hold *many*
+entries, potentially millions or billions. The cost of loading these entries into
+the environment each time a user executes the contract would eventually become
+too expensive were it not for big maps. Big maps are a data structure offered by
+Tezos which handles the scaling concerns for us. In LIGO, the interface for big
+maps is analogous to the one used for ordinary maps.
+
+Here's how we define a big map:
+
+<!--DOCUSAURUS_CODE_TABS-->
+<!--Pascaligo-->
+```pascaligo
+type move is (int * int);
+type moveset is big_map(address, move);
+```
+
+<!--CameLIGO-->
+```cameligo
+type move = int * int
+type moveset = (address, move) big_map
+```
+
+<!--ReasonLIGO-->
+```reasonligo
+type move = (int, int);
+type moveset = big_map(address, move);
+```
+
+<!--END_DOCUSAURUS_CODE_TABS-->
+
+And here's how a map value is populated:
+
+<!--DOCUSAURUS_CODE_TABS-->
+<!--Pascaligo-->
+
+```pascaligo
+const moves: moveset = big_map
+("tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx": address) -> (1, 2);
+("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) -> (0, 3);
+end
+```
+> Notice the `->` between the key and its value and `;` to separate individual map entries.
+>
+> `("<string value>": address)` means that we type-cast a string into an address.
+
+<!--CameLIGO-->
+
+```cameligo
+let moves: moveset = Big_map.literal
+[ (("tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx": address), (1, 2)) ;
+(("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address), (0, 3)) ;
+]
+```
+> Big_map.literal constructs the map from a list of key-value pair tuples, `(<key>, <value>)`.
+> Note also the `;` to separate individual map entries.
+>
+> `("<string value>": address)` means that we type-cast a string into an address.
+
+<!--ReasonLIGO-->
+
+```reasonligo
+let moves: moveset =
+Big_map.literal([
+("tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx": address, (1, 2)),
+("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address, (0, 3)),
+]);
+```
+> Big_map.literal constructs the map from a list of key-value pair tuples, `(<key>, <value>)`.
+>
+> `("<string value>": address)` means that we type-cast a string into an address.
+
+<!--END_DOCUSAURUS_CODE_TABS-->
+
+### Accessing map values by key
+
+If we want to access a move from our moveset above, we can use the `[]` operator/accessor to read the associated `move` value. However, the value we'll get will be wrapped as an optional; in our case `option(move)`. Here's an example:
+
+<!--DOCUSAURUS_CODE_TABS-->
+<!--Pascaligo-->
+```pascaligo
+const balance: option(move) = moves[("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address)];
+```
+
+<!--CameLIGO-->
+
+```cameligo
+let balance: move option = Big_map.find_opt ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) moves
+```
+
+<!--ReasonLIGO-->
+
+```reasonligo
+let balance: option(move) =
+Big_map.find_opt("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address, moves);
+```
+<!--END_DOCUSAURUS_CODE_TABS-->
+
+#### Obtaining a map value forcefully
+
+Accessing a value in a map yields an option, however you can also get the value directly:
+
+<!--DOCUSAURUS_CODE_TABS-->
+<!--Pascaligo-->
+```pascaligo
+const balance: move = get_force(("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address), moves);
+```
+
+<!--CameLIGO-->
+
+```cameligo
+let balance: move = Big_map.find ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) moves
+```
+
+<!--ReasonLIGO-->
+
+```reasonligo
+let balance: move = Big_map.find("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address, moves);
+```
+
+<!--END_DOCUSAURUS_CODE_TABS-->
+
+### Updating the contents of a big map
+
+<!--DOCUSAURUS_CODE_TABS-->
+
+<!--Pascaligo-->
+
+The values of a PascaLIGO big map can be updated using the ordinary assignment syntax:
+
+```pascaligo
+
+function set_ (var m: moveset) : moveset is
+block {
+m[("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address)] := (4,9);
+} with m
+```
+
+<!--Cameligo-->
+
+We can update a big map in CameLIGO using the `Big_map.update` built-in:
+
+```cameligo
+
+let updated_map: moveset =
+Big_map.update ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) (Some (4,9)) moves
+```
+
+<!--Reasonligo-->
+
+We can update a big map in ReasonLIGO using the `Big_map.update` built-in:
+
+```reasonligo
+let updated_map: moveset =
+Big_map.update(("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address), Some((4,9)), moves);
+```
+
+<!--END_DOCUSAURUS_CODE_TABS-->
+
 ## Records

 Records are a construct introduced in LIGO, and are not natively available in Michelson. The LIGO compiler translates records into Michelson `Pairs`.
@@ -90,7 +90,7 @@ let predecessor_timestamp =
 let open Arg in
 let info =
 let docv = "PREDECESSOR_TIMESTAMP" in
-let doc = "$(docv) is the pedecessor_timestamp (now value) the michelson interpreter will use (e.g. '2000-01-01T10:10:10Z')" in
+let doc = "$(docv) is the pedecessor_timestamp (now value minus one minute) the michelson interpreter will use (e.g. '2000-01-01T10:10:10Z')" in
 info ~docv ~doc ["predecessor-timestamp"] in
 value @@ opt (some string) None info

@@ -939,8 +939,16 @@ let%expect_test _ =

 let%expect_test _ =
 run_ligo_bad [ "compile-contract" ; contract "bad_address_format.religo" ; "main" ] ;
-[%expect {| ligo: in file "bad_address_format.religo", line 2, characters 25-47. Badly formatted address "KT1badaddr": {"location":"in file \"bad_address_format.religo\", line 2, characters 25-47"} |}]
+[%expect {| ligo: in file "bad_address_format.religo", line 2, characters 25-47. Badly formatted literal: address "KT1badaddr" {"location":"in file \"bad_address_format.religo\", line 2, characters 25-47"} |}]

 let%expect_test _ =
 run_ligo_bad [ "compile-contract" ; contract "bad_timestamp.ligo" ; "main" ] ;
 [%expect {| ligo: in file "bad_timestamp.ligo", line 5, characters 29-43. Badly formatted timestamp "badtimestamp": {"location":"in file \"bad_timestamp.ligo\", line 5, characters 29-43"} |}]
+
+let%expect_test _ =
+run_ligo_good [ "dry-run" ; contract "redeclaration.ligo" ; "main" ; "unit" ; "0" ] ;
+[%expect {|( [] , 0 ) |}]
+
+let%expect_test _ =
+run_ligo_good [ "dry-run" ; contract "double_main.ligo" ; "main" ; "unit" ; "0" ] ;
+[%expect {|( [] , 2 ) |}]
@@ -198,8 +198,9 @@ let%expect_test _ =
 are 'text' (default), 'json' and 'hex'.

 --predecessor-timestamp=PREDECESSOR_TIMESTAMP
-PREDECESSOR_TIMESTAMP is the pedecessor_timestamp (now value) the
-michelson interpreter will use (e.g. '2000-01-01T10:10:10Z')
+PREDECESSOR_TIMESTAMP is the pedecessor_timestamp (now value minus
+one minute) the michelson interpreter will use (e.g.
+'2000-01-01T10:10:10Z')

 -s SYNTAX, --syntax=SYNTAX (absent=auto)
 SYNTAX is the syntax that will be used. Currently supported
@@ -263,8 +264,9 @@ let%expect_test _ =
 are 'text' (default), 'json' and 'hex'.

 --predecessor-timestamp=PREDECESSOR_TIMESTAMP
-PREDECESSOR_TIMESTAMP is the pedecessor_timestamp (now value) the
-michelson interpreter will use (e.g. '2000-01-01T10:10:10Z')
+PREDECESSOR_TIMESTAMP is the pedecessor_timestamp (now value minus
+one minute) the michelson interpreter will use (e.g.
+'2000-01-01T10:10:10Z')

 -s SYNTAX, --syntax=SYNTAX (absent=auto)
 SYNTAX is the syntax that will be used. Currently supported
@@ -324,8 +326,9 @@ let%expect_test _ =
 `plain' whenever the TERM env var is `dumb' or undefined.

 --predecessor-timestamp=PREDECESSOR_TIMESTAMP
-PREDECESSOR_TIMESTAMP is the pedecessor_timestamp (now value) the
-michelson interpreter will use (e.g. '2000-01-01T10:10:10Z')
+PREDECESSOR_TIMESTAMP is the pedecessor_timestamp (now value minus
+one minute) the michelson interpreter will use (e.g.
+'2000-01-01T10:10:10Z')

 -s SYNTAX, --syntax=SYNTAX (absent=auto)
 SYNTAX is the syntax that will be used. Currently supported
@@ -382,8 +385,9 @@ let%expect_test _ =
 `plain' whenever the TERM env var is `dumb' or undefined.

 --predecessor-timestamp=PREDECESSOR_TIMESTAMP
-PREDECESSOR_TIMESTAMP is the pedecessor_timestamp (now value) the
-michelson interpreter will use (e.g. '2000-01-01T10:10:10Z')
+PREDECESSOR_TIMESTAMP is the pedecessor_timestamp (now value minus
+one minute) the michelson interpreter will use (e.g.
+'2000-01-01T10:10:10Z')

 -s SYNTAX, --syntax=SYNTAX (absent=auto)
 SYNTAX is the syntax that will be used. Currently supported
@@ -435,8 +439,9 @@ let%expect_test _ =
 `plain' whenever the TERM env var is `dumb' or undefined.

 --predecessor-timestamp=PREDECESSOR_TIMESTAMP
-PREDECESSOR_TIMESTAMP is the pedecessor_timestamp (now value) the
-michelson interpreter will use (e.g. '2000-01-01T10:10:10Z')
+PREDECESSOR_TIMESTAMP is the pedecessor_timestamp (now value minus
+one minute) the michelson interpreter will use (e.g.
+'2000-01-01T10:10:10Z')

 -s SYNTAX, --syntax=SYNTAX (absent=auto)
 SYNTAX is the syntax that will be used. Currently supported
@@ -20,6 +20,6 @@ let%expect_test _ =
 [%expect {| ligo: in file "error_typer_6.mligo", line 1, characters 30-64. different type constructors: Expected these two constant type constructors to be the same, but they're different {"a":"string","b":"bool"} |} ] ;

 run_ligo_bad [ "compile-contract" ; "../../test/contracts/negative/error_typer_7.mligo" ; "main" ] ;
-[%expect {| ligo: in file "error_typer_7.mligo", line 4, characters 17-56. records have different sizes: Expected these two types to be the same, but they're different (both are records, but with a different number of arguments) {"a":"record[b -> string , a -> int]","b":"record[c -> bool , b -> string , a -> int]"} |} ] ;
+[%expect {| ligo: in file "error_typer_7.mligo", line 4, characters 18-48. records have different sizes: Expected these two types to be the same, but they're different (both are records, but with a different number of arguments) {"a":"record[b -> string , a -> int]","b":"record[c -> bool , b -> string , a -> int]"} |} ] ;


@@ -25,9 +25,10 @@ let aggregate_and_compile = fun program form ->
 | ContractForm _ -> compile_contract aggregated'
 | ExpressionForm _ -> compile_expression aggregated'

-let aggregate_and_compile_contract = fun program name ->
-let%bind (exp, _) = get_entry program name in
-aggregate_and_compile program (ContractForm exp)
+let aggregate_and_compile_contract = fun (program : Types.program) name ->
+let%bind (exp, idx) = get_entry program name in
+let program' = List.take idx program in
+aggregate_and_compile program' (ContractForm exp)

 let aggregate_and_compile_expression = fun program exp ->
 aggregate_and_compile program (ExpressionForm exp)
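For readers following the `List.take idx program` change just above: the snippet below is a toy model (the declaration names and the inline `take` are illustrative, not the compiler's actual types), showing what truncating the program at the entry point's index appears to accomplish, consistent with the new `redeclaration.ligo` expect-test earlier in this diff.

```ocaml
(* Toy model, not the compiler's code: a "program" is a list of declaration
   names and idx is the position of the entry point returned by get_entry. *)
let rec take n = function            (* mirrors the take added to x_list.ml *)
  | [] -> []
  | _ when n = 0 -> []
  | hd :: tl -> hd :: take (n - 1) tl

let () =
  let program = ["foo (returns 0)"; "main"; "foo (returns 1)"] in
  let idx = 1 in                     (* index of "main" *)
  (* only declarations that precede the entry point are kept, so a later
     redeclaration of foo is dropped -- matching the ( [] , 0 ) expectation *)
  assert (take idx program = ["foo (returns 0)"])
```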
@@ -34,12 +34,12 @@ let rec error_pp ?(dev = false) out (e : error) =
 | x -> [ x ] in
 let location =
 let opt = e |> member "data" |> member "location" |> string in
-let aux prec cur =
+let aux cur prec =
 match prec with
 | None -> cur |> member "data" |> member "location" |> string
 | Some s -> Some s
 in
-match List.fold_left aux opt infos with
+match List.fold_right aux infos opt with
 | None -> ""
 | Some s -> s ^ ". "
 in
@@ -6,6 +6,73 @@ module ParserLog = Parser_cameligo.ParserLog
 module LexToken = Parser_cameligo.LexToken
 module Lexer = Lexer.Make(LexToken)

+module Errors = struct
+
+let lexer_error (e: Lexer.error AST.reg) =
+let title () = "lexer error" in
+let message () = Lexer.error_to_string e.value in
+let data = [
+("parser_loc",
+fun () -> Format.asprintf "%a" Location.pp_lift @@ e.region
+)
+] in
+error ~data title message
+
+let parser_error start end_ =
+let title () = "parser error" in
+let message () = "" in
+let loc = Region.make
+~start:(Pos.from_byte start)
+~stop:(Pos.from_byte end_)
+in
+let data = [
+("parser_loc",
+fun () -> Format.asprintf "%a" Location.pp_lift @@ loc
+)
+] in
+error ~data title message
+
+let unrecognized_error start end_ =
+let title () = "unrecognized error" in
+let message () = "" in
+let loc = Region.make
+~start:(Pos.from_byte start)
+~stop:(Pos.from_byte end_)
+in
+let data = [
+("unrecognized_loc",
+fun () -> Format.asprintf "%a" Location.pp_lift @@ loc
+)
+] in
+error ~data title message
+
+end
+
+open Errors
+
+type 'a parser = (Lexing.lexbuf -> LexToken.token) -> Lexing.lexbuf -> 'a
+
+let parse (parser: 'a parser) lexbuf =
+let Lexer.{read ; close ; _} = Lexer.open_token_stream None in
+let result =
+try
+ok (parser read lexbuf)
+with
+| Parser.Error ->
+let start = Lexing.lexeme_start_p lexbuf in
+let end_ = Lexing.lexeme_end_p lexbuf in
+fail @@ (parser_error start end_)
+| Lexer.Error e ->
+fail @@ (lexer_error e)
+| _ ->
+let _ = Printexc.print_backtrace Pervasives.stdout in
+let start = Lexing.lexeme_start_p lexbuf in
+let end_ = Lexing.lexeme_end_p lexbuf in
+fail @@ (unrecognized_error start end_)
+in
+close ();
+result
+
 let parse_file (source: string) : AST.t result =
 let pp_input =
 let prefix = Filename.(source |> basename |> remove_extension)
@@ -20,93 +87,12 @@ let parse_file (source: string) : AST.t result =
 generic_try (simple_error "error opening file") @@
 (fun () -> open_in pp_input) in
 let lexbuf = Lexing.from_channel channel in
-let Lexer.{read ; close ; _} =
-Lexer.open_token_stream None in
-specific_try (function
-| Parser.Error -> (
-let start = Lexing.lexeme_start_p lexbuf in
-let end_ = Lexing.lexeme_end_p lexbuf in
-let str = Format.sprintf
-"Parse error at \"%s\" from (%d, %d) to (%d, %d). In file \"%s|%s\"\n"
-(Lexing.lexeme lexbuf)
-start.pos_lnum (start.pos_cnum - start.pos_bol)
-end_.pos_lnum (end_.pos_cnum - end_.pos_bol)
-start.pos_fname source
-in
-simple_error str
-)
-| exn ->
-let start = Lexing.lexeme_start_p lexbuf in
-let end_ = Lexing.lexeme_end_p lexbuf in
-let str = Format.sprintf
-"Unrecognized error (%s) at \"%s\" from (%d, %d) to (%d, %d). In file \"%s|%s\"\n"
-(Printexc.to_string exn)
-(Lexing.lexeme lexbuf)
-start.pos_lnum (start.pos_cnum - start.pos_bol)
-end_.pos_lnum (end_.pos_cnum - end_.pos_bol)
-start.pos_fname source
-in
-simple_error str
-) @@ (fun () ->
-let raw = Parser.contract read lexbuf in
-close () ;
-raw
-) >>? fun raw ->
-ok raw
+parse (Parser.contract) lexbuf

 let parse_string (s:string) : AST.t result =
 let lexbuf = Lexing.from_string s in
-let Lexer.{read ; close ; _} =
-Lexer.open_token_stream None in
-specific_try (function
-| Parser.Error -> (
-let start = Lexing.lexeme_start_p lexbuf in
-let end_ = Lexing.lexeme_end_p lexbuf in
-let str = Format.sprintf
-"Parse error at \"%s\" from (%d, %d) to (%d, %d)\n"
-(Lexing.lexeme lexbuf)
-start.pos_lnum (start.pos_cnum - start.pos_bol)
-end_.pos_lnum (end_.pos_cnum - end_.pos_bol) in
-simple_error str
-)
-| _ -> simple_error "unrecognized parse_ error"
-) @@ (fun () ->
-let raw = Parser.contract read lexbuf in
-close () ;
-raw
-) >>? fun raw ->
-ok raw
+parse (Parser.contract) lexbuf

 let parse_expression (s:string) : AST.expr result =
 let lexbuf = Lexing.from_string s in
-let Lexer.{read ; close; _} =
-Lexer.open_token_stream None in
-specific_try (function
-| Parser.Error -> (
-let start = Lexing.lexeme_start_p lexbuf in
-let end_ = Lexing.lexeme_end_p lexbuf in
-let str = Format.sprintf
-"Parse error at \"%s\" from (%d, %d) to (%d, %d)\n"
-(Lexing.lexeme lexbuf)
-start.pos_lnum (start.pos_cnum - start.pos_bol)
-end_.pos_lnum (end_.pos_cnum - end_.pos_bol) in
-simple_error str
-)
-| exn ->
-let start = Lexing.lexeme_start_p lexbuf in
-let end_ = Lexing.lexeme_end_p lexbuf in
-let str = Format.sprintf
-"Unrecognized error (%s) at \"%s\" from (%d, %d) to (%d, %d). In expression \"%s|%s\"\n"
-(Printexc.to_string exn)
-(Lexing.lexeme lexbuf)
-start.pos_lnum (start.pos_cnum - start.pos_bol)
-end_.pos_lnum (end_.pos_cnum - end_.pos_bol)
-start.pos_fname s
-in
-simple_error str
-) @@ (fun () ->
-let raw = Parser.interactive_expr read lexbuf in
-close () ;
-raw
-) >>? fun raw ->
-ok raw
+parse (Parser.interactive_expr) lexbuf
@@ -40,4 +40,6 @@
 (executable
 (name Unlexer)
 (libraries str)
+(preprocess
+(pps bisect_ppx --conditional))
 (modules Unlexer))
@@ -6,6 +6,73 @@ module ParserLog = Parser_pascaligo.ParserLog
 module LexToken = Parser_pascaligo.LexToken
 module Lexer = Lexer.Make(LexToken)

+module Errors = struct
+
+let lexer_error (e: Lexer.error AST.reg) =
+let title () = "lexer error" in
+let message () = Lexer.error_to_string e.value in
+let data = [
+("parser_loc",
+fun () -> Format.asprintf "%a" Location.pp_lift @@ e.region
+)
+] in
+error ~data title message
+
+let parser_error start end_ =
+let title () = "parser error" in
+let message () = "" in
+let loc = Region.make
+~start:(Pos.from_byte start)
+~stop:(Pos.from_byte end_)
+in
+let data = [
+("parser_loc",
+fun () -> Format.asprintf "%a" Location.pp_lift @@ loc
+)
+] in
+error ~data title message
+
+let unrecognized_error start end_ =
+let title () = "unrecognized error" in
+let message () = "" in
+let loc = Region.make
+~start:(Pos.from_byte start)
+~stop:(Pos.from_byte end_)
+in
+let data = [
+("unrecognized_loc",
+fun () -> Format.asprintf "%a" Location.pp_lift @@ loc
+)
+] in
+error ~data title message
+
+end
+
+open Errors
+
+type 'a parser = (Lexing.lexbuf -> LexToken.token) -> Lexing.lexbuf -> 'a
+
+let parse (parser: 'a parser) lexbuf =
+let Lexer.{read ; close ; _} = Lexer.open_token_stream None in
+let result =
+try
+ok (parser read lexbuf)
+with
+| Parser.Error ->
+let start = Lexing.lexeme_start_p lexbuf in
+let end_ = Lexing.lexeme_end_p lexbuf in
+fail @@ (parser_error start end_)
+| Lexer.Error e ->
+fail @@ (lexer_error e)
+| _ ->
+let _ = Printexc.print_backtrace Pervasives.stdout in
+let start = Lexing.lexeme_start_p lexbuf in
+let end_ = Lexing.lexeme_end_p lexbuf in
+fail @@ (unrecognized_error start end_)
+in
+close ();
+result
+
 let parse_file (source: string) : AST.t result =
 let pp_input =
 let prefix = Filename.(source |> basename |> remove_extension)
@@ -20,93 +87,12 @@ let parse_file (source: string) : AST.t result =
 generic_try (simple_error "error opening file") @@
 (fun () -> open_in pp_input) in
 let lexbuf = Lexing.from_channel channel in
-let Lexer.{read ; close ; _} =
-Lexer.open_token_stream None in
-specific_try (function
-| Parser.Error -> (
-let start = Lexing.lexeme_start_p lexbuf in
-let end_ = Lexing.lexeme_end_p lexbuf in
-let str = Format.sprintf
-"Parse error at \"%s\" from (%d, %d) to (%d, %d). In file \"%s|%s\"\n"
-(Lexing.lexeme lexbuf)
-start.pos_lnum (start.pos_cnum - start.pos_bol)
-end_.pos_lnum (end_.pos_cnum - end_.pos_bol)
-start.pos_fname source
-in
-simple_error str
-)
-| exn ->
-let start = Lexing.lexeme_start_p lexbuf in
-let end_ = Lexing.lexeme_end_p lexbuf in
-let str = Format.sprintf
-"Unrecognized error (%s) at \"%s\" from (%d, %d) to (%d, %d). In file \"%s|%s\"\n"
-(Printexc.to_string exn)
-(Lexing.lexeme lexbuf)
-start.pos_lnum (start.pos_cnum - start.pos_bol)
-end_.pos_lnum (end_.pos_cnum - end_.pos_bol)
-start.pos_fname source
-in
-simple_error str
-) @@ (fun () ->
-let raw = Parser.contract read lexbuf in
-close () ;
-raw
-) >>? fun raw ->
-ok raw
+parse (Parser.contract) lexbuf

 let parse_string (s:string) : AST.t result =
 let lexbuf = Lexing.from_string s in
-let Lexer.{read ; close ; _} =
-Lexer.open_token_stream None in
-specific_try (function
-| Parser.Error -> (
-let start = Lexing.lexeme_start_p lexbuf in
-let end_ = Lexing.lexeme_end_p lexbuf in
-let str = Format.sprintf
-"Parse error at \"%s\" from (%d, %d) to (%d, %d)\n"
-(Lexing.lexeme lexbuf)
-start.pos_lnum (start.pos_cnum - start.pos_bol)
-end_.pos_lnum (end_.pos_cnum - end_.pos_bol) in
-simple_error str
-)
-| _ -> simple_error "unrecognized parse_ error"
-) @@ (fun () ->
-let raw = Parser.contract read lexbuf in
-close () ;
-raw
-) >>? fun raw ->
-ok raw
+parse (Parser.contract) lexbuf

 let parse_expression (s:string) : AST.expr result =
 let lexbuf = Lexing.from_string s in
-let Lexer.{read ; close; _} =
-Lexer.open_token_stream None in
-specific_try (function
-| Parser.Error -> (
-let start = Lexing.lexeme_start_p lexbuf in
-let end_ = Lexing.lexeme_end_p lexbuf in
-let str = Format.sprintf
-"Parse error at \"%s\" from (%d, %d) to (%d, %d)\n"
-(Lexing.lexeme lexbuf)
-start.pos_lnum (start.pos_cnum - start.pos_bol)
-end_.pos_lnum (end_.pos_cnum - end_.pos_bol) in
-simple_error str
-)
-| exn ->
-let start = Lexing.lexeme_start_p lexbuf in
-let end_ = Lexing.lexeme_end_p lexbuf in
-let str = Format.sprintf
-"Unrecognized error (%s) at \"%s\" from (%d, %d) to (%d, %d). In expression \"%s|%s\"\n"
-(Printexc.to_string exn)
-(Lexing.lexeme lexbuf)
-start.pos_lnum (start.pos_cnum - start.pos_bol)
-end_.pos_lnum (end_.pos_cnum - end_.pos_bol)
-start.pos_fname s
-in
-simple_error str
-) @@ (fun () ->
-let raw = Parser.interactive_expr read lexbuf in
-close () ;
-raw
-) >>? fun raw ->
-ok raw
+parse (Parser.interactive_expr) lexbuf
@@ -24,7 +24,7 @@ module Errors = struct
 let message () = "" in
 let expression_loc = AST.expr_to_region expr in
 let data = [
-("expression_loc",
+("location",
 fun () -> Format.asprintf "%a" Location.pp_lift @@ expression_loc)
 ] in
 error ~data title message
@@ -37,7 +37,7 @@ module Errors = struct
 ~stop:(Pos.from_byte end_)
 in
 let data = [
-("parser_loc",
+("location",
 fun () -> Format.asprintf "%a" Location.pp_lift @@ loc
 )
 ] in
@@ -51,7 +51,7 @@ module Errors = struct
 ~stop:(Pos.from_byte end_)
 in
 let data = [
-("unrecognized_loc",
+("location",
 fun () -> Format.asprintf "%a" Location.pp_lift @@ loc
 )
 ] in
@@ -44,4 +44,6 @@
 (executable
 (name Unlexer)
 (libraries str)
+(preprocess
+(pps bisect_ppx --conditional))
 (modules Unlexer))
@@ -32,7 +32,7 @@ module Errors = struct
 in
 let data = [
 ("expected", fun () -> expected_name);
-("actual_loc" , fun () -> Format.asprintf "%a" Location.pp_lift @@ Raw.pattern_to_region actual)
+("location" , fun () -> Format.asprintf "%a" Location.pp_lift @@ Raw.pattern_to_region actual)
 ] in
 error ~data title message

@@ -43,7 +43,7 @@ module Errors = struct
 List.fold_left (fun a p -> Region.cover a (Raw.pattern_to_region p))
 Region.ghost patterns in
 let data = [
-("loc", fun () -> Format.asprintf "%a" Location.pp_lift @@ patterns_loc)
+("location", fun () -> Format.asprintf "%a" Location.pp_lift @@ patterns_loc)
 ] in
 error ~data title message

@@ -52,7 +52,7 @@ module Errors = struct
 let message () =
 Format.asprintf "unknown predefined type \"%s\"" name.Region.value in
 let data = [
-("typename_loc",
+("location",
 fun () -> Format.asprintf "%a" Location.pp_lift @@ name.Region.region)
 ] in
 error ~data title message
@@ -63,7 +63,7 @@ module Errors = struct
 Format.asprintf "untyped function parameters are not supported yet" in
 let param_loc = var.Region.region in
 let data = [
-("param_loc",
+("location",
 fun () -> Format.asprintf "%a" Location.pp_lift @@ param_loc)
 ] in
 error ~data title message
@@ -74,7 +74,7 @@ module Errors = struct
 Format.asprintf "tuple patterns are not supported yet" in
 let pattern_loc = Raw.pattern_to_region p in
 let data = [
-("pattern_loc",
+("location",
 fun () -> Format.asprintf "%a" Location.pp_lift @@ pattern_loc)
 ] in
 error ~data title message
@@ -85,7 +85,7 @@ module Errors = struct
 Format.asprintf "constant constructors are not supported yet" in
 let pattern_loc = Raw.pattern_to_region p in
 let data = [
-("pattern_loc",
+("location",
 fun () -> Format.asprintf "%a" Location.pp_lift @@ pattern_loc)
 ] in
 error ~data title message
@@ -97,7 +97,7 @@ module Errors = struct
 are not supported yet" in
 let pattern_loc = Raw.pattern_to_region p in
 let data = [
-("pattern_loc",
+("location",
 fun () -> Format.asprintf "%a" Location.pp_lift @@ pattern_loc)
 ] in
 error ~data title message
@@ -119,7 +119,7 @@ module Errors = struct
 Format.asprintf "currently, only constructors are supported in patterns" in
 let pattern_loc = Raw.pattern_to_region p in
 let data = [
-("pattern_loc",
+("location",
 fun () -> Format.asprintf "%a" Location.pp_lift @@ pattern_loc)
 ] in
 error ~data title message
@@ -130,7 +130,7 @@ module Errors = struct
 Format.asprintf "currently, only empty lists and constructors (::) \
 are supported in patterns" in
 let data = [
-("pattern_loc",
+("location",
 fun () -> Format.asprintf "%a" Location.pp_lift @@ region)
 ] in
 error ~data title message
@@ -72,7 +72,7 @@ module Errors = struct
 Format.asprintf "constant constructors are not supported yet" in
 let pattern_loc = Raw.pattern_to_region p in
 let data = [
-("pattern_loc",
+("location",
 fun () -> Format.asprintf "%a" Location.pp_lift @@ pattern_loc)
 ] in
 error ~data title message
@@ -104,7 +104,7 @@ module Errors = struct
 let message () =
 Format.asprintf "unknown predefined type \"%s\"" name.Region.value in
 let data = [
-("typename_loc",
+("location",
 fun () -> Format.asprintf "%a" Location.pp_lift @@ name.Region.region)
 ] in
 error ~data title message
@@ -116,7 +116,7 @@ module Errors = struct
 are not supported yet" in
 let pattern_loc = Raw.pattern_to_region p in
 let data = [
-("pattern_loc",
+("location",
 fun () -> Format.asprintf "%a" Location.pp_lift @@ pattern_loc)
 ] in
 error ~data title message
@@ -127,7 +127,7 @@ module Errors = struct
 Format.asprintf "currently, only constructors are supported in patterns" in
 let pattern_loc = Raw.pattern_to_region p in
 let data = [
-("pattern_loc",
+("location",
 fun () -> Format.asprintf "%a" Location.pp_lift @@ pattern_loc)
 ] in
 error ~data title message
@@ -138,7 +138,7 @@ module Errors = struct
 Format.asprintf "tuple patterns are not supported yet" in
 let pattern_loc = Raw.pattern_to_region p in
 let data = [
-("pattern_loc",
+("location",
 fun () -> Format.asprintf "%a" Location.pp_lift @@ pattern_loc) ;
 (** TODO: The labelled arguments should be flowing from the CLI. *)
 ("pattern",
@@ -154,7 +154,7 @@ module Errors = struct
 in patterns are supported" in
 let pattern_loc = Raw.pattern_to_region pattern in
 let data = [
-("pattern_loc",
+("location",
 fun () -> Format.asprintf "%a" Location.pp_lift @@ pattern_loc)
 ] in
 error ~data title message
@@ -165,7 +165,7 @@ module Errors = struct
 Format.asprintf "currently, only empty lists and x::y \
 are supported in patterns" in
 let data = [
-("pattern_loc",
+("location",
 fun () -> Format.asprintf "%a" Location.pp_lift @@ cons.Region.region)
 ] in
 error ~data title message
@@ -174,7 +174,7 @@ module Errors = struct
 let title () = "unexpected anonymous function" in
 let message () = "you provided a function declaration without name" in
 let data = [
-("loc" , fun () -> Format.asprintf "%a" Location.pp @@ loc)
+("location" , fun () -> Format.asprintf "%a" Location.pp @@ loc)
 ] in
 error ~data title message

@@ -182,7 +182,7 @@ module Errors = struct
 let title () = "unexpected named function" in
 let message () = "you provided a function expression with a name (remove it)" in
 let data = [
-("loc" , fun () -> Format.asprintf "%a" Location.pp @@ loc)
+("location" , fun () -> Format.asprintf "%a" Location.pp @@ loc)
 ] in
 error ~data title message

@@ -3,23 +3,32 @@ open Trace
 open Proto_alpha_utils

 module Errors = struct
-let bad_literal_address s_addr loc () =
-let title = (thunk ("Badly formatted address \""^s_addr^"\"")) in
-let message () = "" in
+let bad_format e () =
+let title = (thunk ("Badly formatted literal")) in
+let message () = Format.asprintf "%a" Ast_simplified.PP.expression e in
 let data = [
-("location" , fun () -> Format.asprintf "%a" Location.pp loc)
+("location" , fun () -> Format.asprintf "%a" Location.pp e.location)
 ] in
 error ~data title message ()

 end
 open Errors

 let peephole_expression : expression -> expression result = fun e ->
 let return expression = ok { e with expression } in
 match e.expression with
+| E_literal (Literal_key_hash s) as l -> (
+let open Tezos_crypto in
+let%bind (_pkh:Crypto.Signature.public_key_hash) =
+Trace.trace_tzresult (bad_format e) @@
+Signature.Public_key_hash.of_b58check s in
+return l
+)
 | E_literal (Literal_address s) as l -> (
 let open Memory_proto_alpha in
 let%bind (_contract:Protocol.Alpha_context.Contract.t) =
-Trace.trace_alpha_tzresult (bad_literal_address s e.location) @@
+Trace.trace_alpha_tzresult (bad_format e) @@
 Protocol.Alpha_context.Contract.of_b58check s in
 return l
 )
@@ -17,6 +17,7 @@ let peephole_expression : expression -> expression result = fun e ->
 match e.expression with
 | E_ascription (e' , t) as e -> (
 match (e'.expression , t.type_expression') with
+| (E_literal (Literal_string s) , T_constant (TC_key_hash)) -> return @@ E_literal (Literal_key_hash s)
 | (E_literal (Literal_int i) , T_constant (TC_timestamp)) -> return @@ E_literal (Literal_timestamp i)
 | (E_literal (Literal_string str) , T_constant (TC_timestamp)) ->
 let%bind time =
@@ -15,6 +15,7 @@ module Ty = struct
 let tez_k = Mutez_key None
 let int_k = Int_key None
 let string_k = String_key None
+let key_hash_k = Key_hash_key None
 let address_k = Address_key None
 let timestamp_k = Timestamp_key None
 let bytes_k = Bytes_key None
@@ -72,7 +73,7 @@ module Ty = struct
 | Base_operation -> fail (not_comparable "operation")
 | Base_signature -> fail (not_comparable "signature")
 | Base_key -> fail (not_comparable "key")
-| Base_key_hash -> fail (not_comparable "key_hash")
+| Base_key_hash -> return key_hash_k
 | Base_chain_id -> fail (not_comparable "chain_id")

 let comparable_type : type_value -> ex_comparable_ty result = fun tv ->
@@ -190,12 +190,12 @@ let literal ppf (l:literal) = match l with
 | Literal_mutez n -> fprintf ppf "%dmutez" n
 | Literal_string s -> fprintf ppf "%S" s
 | Literal_bytes b -> fprintf ppf "0x%a" Hex.pp (Hex.of_bytes b)
-| Literal_address s -> fprintf ppf "@%S" s
+| Literal_address s -> fprintf ppf "address %S" s
 | Literal_operation _ -> fprintf ppf "Operation(...bytes)"
 | Literal_key s -> fprintf ppf "key %s" s
 | Literal_key_hash s -> fprintf ppf "key_hash %s" s
-| Literal_signature s -> fprintf ppf "Signature %s" s
-| Literal_chain_id s -> fprintf ppf "Chain_id %s" s
+| Literal_signature s -> fprintf ppf "signature %s" s
+| Literal_chain_id s -> fprintf ppf "chain_id %s" s

 let%expect_test _ =
 Format.printf "%a" literal (Literal_bytes (Bytes.of_string "foo")) ;
@@ -129,14 +129,14 @@ let get_entry (lst : program) (name : string) : (expression * int) result =
 then Some decl_expr
 else None
 in
-List.find_map aux lst
+List.find_map aux (List.rev lst)
 in
 let entry_index =
 let aux x =
 let (((decl_name , _) , _)) = x in
 Var.equal decl_name (Var.of_name name)
 in
-List.find_index aux lst
+(List.length lst) - (List.find_index aux (List.rev lst)) - 1
 in
 ok (entry_expression , entry_index)

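As a sanity check on the index arithmetic introduced above (a self-contained sketch over a plain string list, not the compiler's `program` type, with a hypothetical `find_index` helper standing in for the one the compiler uses): searching the reversed list selects the last declaration with a matching name, and `length - position - 1` maps that position back to an index into the original list.

```ocaml
(* Standalone check of the "last declaration wins" index arithmetic; the
   string list stands in for a program (illustrative only). *)
let find_index p lst =
  (* position of the first element satisfying p, assumed to exist *)
  let rec go i = function
    | [] -> raise Not_found
    | x :: _ when p x -> i
    | _ :: tl -> go (i + 1) tl
  in go 0 lst

let () =
  let program = [ "foo"; "main"; "foo" ] in
  let is_foo x = String.equal x "foo" in
  (* the first "foo" is at index 0, but the reversed search picks the last one *)
  let idx = List.length program - find_index is_foo (List.rev program) - 1 in
  assert (idx = 2)
```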
src/test/contracts/double_main.ligo (new file, 8 additions)
@@ -0,0 +1,8 @@
+function main(const p : unit; const s : int) : list(operation) * int is
+((list end : list(operation)), s + 1)
+
+function main(const p : unit; const s : int) : list(operation) * int is
+begin
+const ret : list(operation) * int = main(p, s)
+end
+with (ret.0, ret.1 + 1)
src/test/contracts/redeclaration.ligo (new file, 6 additions)
@@ -0,0 +1,6 @@
+function foo(const p : unit) : int is 0
+
+function main(const p : unit; const s : int) : list(operation) * int is
+((list end : list(operation)), foo(unit))
+
+function foo(const p : unit) : int is 1
vendors/ligo-utils/simple-utils/x_list.ml (vendored, 5 additions)
@@ -5,6 +5,11 @@ let rec remove n = function
 | _ :: tl when n = 0 -> tl
 | hd :: tl -> hd :: remove (n - 1) tl

+let rec take n = function
+| [] -> []
+| _ when n = 0 -> []
+| hd :: tl -> hd :: take (n - 1) tl
+
 let map ?(acc = []) f lst =
 let rec aux acc f = function
 | [] -> acc
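A brief usage note on the new `take` helper (illustrative only, not part of the commit): it returns the first `n` elements of a list and simply returns the whole list when `n` exceeds its length.

```ocaml
(* Copy of the definition added above, so this snippet runs on its own. *)
let rec take n = function
  | [] -> []
  | _ when n = 0 -> []
  | hd :: tl -> hd :: take (n - 1) tl

let () =
  assert (take 2 [1; 2; 3; 4] = [1; 2]);
  assert (take 0 [1; 2; 3] = []);
  assert (take 5 [1; 2] = [1; 2])  (* n larger than the list is not an error *)
```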