Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Using local variables in subroutines through frame pointers #606

Merged
merged 24 commits into from
Dec 23, 2022
Merged
Show file tree
Hide file tree
Changes from 10 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

# Added
* Added frame pointer support for subroutine arguments, replacing the previous usage of scratch. ([#562](https://github.com/algorand/pyteal/pull/562))
* Added frame pointer support for local ABI variables in subroutines. ([#606](https://github.com/algorand/pyteal/pull/606))

# Fixed
* Allowing the `MethodCall` and `ExecuteMethodCall` to be passed `None` as app_id argument in the case of an app create transaction ([#592](https://github.com/algorand/pyteal/pull/592))
Expand Down
48 changes: 36 additions & 12 deletions pyteal/ast/abi/type.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@

from pyteal.ast.expr import Expr
from pyteal.ast.abstractvar import AbstractVar
from pyteal.ast.frame import FrameVar, MAX_FRAME_LOCAL_VARS
from pyteal.ast.scratchvar import ScratchVar
from pyteal.ast.seq import Seq
from pyteal.errors import TealInputError
Expand Down Expand Up @@ -76,9 +77,26 @@ class BaseType(ABC):

def __init__(self, spec: TypeSpec) -> None:
    """Create a new BaseType.

    If evaluation is currently happening inside a frame-pointer-based
    subroutine (``SubroutineEval._current_proto`` is set) and the frame
    still has room, this value is backed by a ``FrameVar`` slot appended
    to the proto's local-variable layout; otherwise it falls back to a
    ``ScratchVar``.
    """
    # Local import avoids a circular dependency with pyteal.ast.subroutine.
    from pyteal.ast.subroutine import SubroutineEval

    super().__init__()
    self._type_spec: Final[TypeSpec] = spec
    self._stored_value: AbstractVar

    proto = SubroutineEval._current_proto
    if proto:
        locals_layout = proto.mem_layout.local_stack_types
        # A frame can hold at most MAX_FRAME_LOCAL_VARS (128) locals;
        # only claim a new frame slot while one more still fits.
        if len(locals_layout) < MAX_FRAME_LOCAL_VARS:
            locals_layout.append(spec.storage_type())
            self._stored_value = FrameVar(proto, len(locals_layout) - 1)
            return

    # Outside a frame-pointer context (or when the frame is full),
    # store the value in a scratch slot as before.
    self._stored_value = ScratchVar(spec.storage_type())

def type_spec(self) -> TypeSpec:
"""Get the TypeSpec for this ABI type instance."""
Expand Down Expand Up @@ -221,17 +239,23 @@ def store_into(self, output: BaseType) -> Expr:
f"expected type_spec {self.produced_type_spec()} but get {output.type_spec()}"
)

declaration = self.computation.subroutine.get_declaration()

if declaration.deferred_expr is None:
raise TealInputError(
"ABI return subroutine must have deferred_expr to be not-None."
)
if declaration.deferred_expr.type_of() != output.type_spec().storage_type():
raise TealInputError(
f"ABI return subroutine deferred_expr is expected to be typed {output.type_spec().storage_type()}, "
f"but has type {declaration.deferred_expr.type_of()}."
)
# HANG NOTE! This get_declaration check applies only for pre frame pointer case
# the post frame pointer case should not apply
# need to somehow expose the context of evaluation
ahangsu marked this conversation as resolved.
Show resolved Hide resolved
try:
declaration = self.computation.subroutine.get_declaration()
ahangsu marked this conversation as resolved.
Show resolved Hide resolved

if declaration.deferred_expr is None:
raise TealInputError(
"ABI return subroutine must have deferred_expr to be not-None."
)
if declaration.deferred_expr.type_of() != output.type_spec().storage_type():
raise TealInputError(
f"ABI return subroutine deferred_expr is expected to be typed {output.type_spec().storage_type()}, "
f"but has type {declaration.deferred_expr.type_of()}."
)
except Exception:
pass
michaeldiamant marked this conversation as resolved.
Show resolved Hide resolved

return output._stored_value.store(self.computation)

Expand Down
32 changes: 20 additions & 12 deletions pyteal/ast/frame.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
from itertools import groupby
from typing import TYPE_CHECKING, Optional
from typing import TYPE_CHECKING, Optional, Final

from pyteal.ast.expr import Expr
from pyteal.ast.int import Int
Expand All @@ -13,6 +13,9 @@
from pyteal.compiler import CompileOptions


MAX_FRAME_LOCAL_VARS: Final[int] = 128


class LocalTypeSegment(Expr):
"""An expression that allocates stack space for local variables.

Expand Down Expand Up @@ -104,6 +107,14 @@ def __getitem__(self, index: int) -> TealType:
def __str__(self) -> str:
    """Render the layout's argument and local type lists for debugging."""
    arg_types = self.arg_stack_types
    local_types = self.local_stack_types
    return f"(ProtoStackLayout: (args: {arg_types}) (locals: {local_types}))"

@classmethod
def from_proto(cls, proto: "Proto") -> "ProtoStackLayout":
    """Build a default layout for *proto*.

    All argument and return slots are typed ``anytype``, and every
    return slot counts as a return allocation.
    """
    default_args = [TealType.anytype] * proto.num_args
    default_returns = [TealType.anytype] * proto.num_returns
    return cls(default_args, default_returns, proto.num_returns)

def has_return(self) -> bool:
    """This expression never terminates the program; always False."""
    return False

Expand Down Expand Up @@ -157,6 +168,9 @@ def __init__(
raise TealInputError(
f"The number of returns provided to Proto must be >= 0 but {num_returns=}."
)
self.num_args = num_args
self.num_returns = num_returns

if mem_layout:
if mem_layout.num_return_allocs > num_returns:
raise TealInternalError(
Expand All @@ -168,10 +182,10 @@ def __init__(
f"The number of arguments {num_args} should match with "
f"memory layout's number of arguments {len(mem_layout.arg_stack_types)}"
)
else:
mem_layout = ProtoStackLayout.from_proto(self)

self.num_args = num_args
self.num_returns = num_returns
self.mem_layout: Optional[ProtoStackLayout] = mem_layout
self.mem_layout: ProtoStackLayout = mem_layout

def __teal__(self, options: "CompileOptions") -> tuple[TealBlock, TealSimpleBlock]:
verifyProgramVersion(
Expand All @@ -181,8 +195,6 @@ def __teal__(self, options: "CompileOptions") -> tuple[TealBlock, TealSimpleBloc
)
op = TealOp(self, Op.proto, self.num_args, self.num_returns)
proto_srt, proto_end = TealBlock.FromOp(options, op)
if not self.mem_layout:
return proto_srt, proto_end
local_srt, local_end = self.mem_layout.__teal__(options)
proto_end.setNextBlock(local_srt)
return proto_srt, local_end
Expand Down Expand Up @@ -250,7 +262,7 @@ def __init__(
):
super().__init__()

target_type = inferred_type if inferred_type else TealType.anytype
target_type = inferred_type if inferred_type is not None else TealType.anytype
require_type(value, target_type)

self.value = value
Expand Down Expand Up @@ -291,11 +303,7 @@ def __init__(self, under_proto: Proto, frame_index: int) -> None:
super().__init__()
self.proto = under_proto
self.frame_index = frame_index
self.stack_type = (
self.proto.mem_layout[frame_index]
if self.proto.mem_layout
else TealType.anytype
)
self.stack_type = self.proto.mem_layout[frame_index]

def storage_type(self) -> TealType:
    """Return the stack type of the frame slot backing this variable.

    The type is resolved from ``proto.mem_layout[frame_index]`` at
    construction time.
    """
    return self.stack_type
Expand Down
11 changes: 9 additions & 2 deletions pyteal/ast/frame_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,12 +20,19 @@ def test_proto(input_num: int, output_num: int):
assert not expr.has_return()
assert expr.type_of() == pt.TealType.none

expected = pt.TealSimpleBlock([pt.TealOp(expr, pt.Op.proto, input_num, output_num)])
block = [pt.TealOp(expr, pt.Op.proto, input_num, output_num)]
if output_num > 0:
block.append(pt.TealOp(None, pt.Op.int, 0))
if output_num > 1:
block.append(pt.TealOp(None, pt.Op.dupn, output_num - 1))

expected = pt.TealSimpleBlock(block)
actual, _ = expr.__teal__(avm8Options)
actual.addIncoming()
actual = pt.TealBlock.NormalizeBlocks(actual)

assert actual == expected
with pt.TealComponent.Context.ignoreExprEquality():
assert actual == expected


def test_proto_invalid():
Expand Down
32 changes: 29 additions & 3 deletions pyteal/ast/subroutine.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,16 @@
from contextlib import contextmanager
from dataclasses import dataclass
from docstring_parser import parse as parse_docstring
from inspect import isclass, Parameter, signature, get_annotations
from types import MappingProxyType, NoneType
from typing import Any, Callable, Final, Optional, TYPE_CHECKING, cast
from typing import Any, Callable, Final, Optional, TYPE_CHECKING, cast, ClassVar
import algosdk.abi as sdk_abi

from pyteal.ast import abi
from pyteal.ast.expr import Expr
from pyteal.ast.seq import Seq
from pyteal.ast.scratchvar import DynamicScratchVar, ScratchVar, ScratchSlot
from pyteal.ast.frame import Proto, FrameVar, ProtoStackLayout
from pyteal.ast.frame import FrameBury, Proto, FrameVar, ProtoStackLayout
from pyteal.errors import TealInputError, TealInternalError, verifyProgramVersion
from pyteal.ir import TealOp, Op, TealBlock
from pyteal.types import TealType
Expand Down Expand Up @@ -815,6 +816,13 @@ def __call__(self, fn_implementation: Callable[..., Expr]) -> SubroutineFnWrappe
Subroutine.__module__ = "pyteal"


@contextmanager
def _frame_pointer_context(proto: Proto):
    """Temporarily set ``SubroutineEval._current_proto`` to *proto*.

    While the context is active, newly constructed ABI ``BaseType``
    instances allocate frame-pointer slots against *proto* instead of
    scratch slots. The previous value is restored on exit.

    Args:
        proto: the Proto expression of the subroutine being evaluated.

    Yields:
        The same *proto* that was passed in.
    """
    previous = SubroutineEval._current_proto
    SubroutineEval._current_proto = proto
    # try/finally guarantees the previous proto is restored even when the
    # subroutine body raises while being evaluated; without it a failed
    # compilation would leak _current_proto into later compilations.
    try:
        yield proto
    finally:
        SubroutineEval._current_proto = previous


@dataclass
class SubroutineEval:
"""
Expand Down Expand Up @@ -889,6 +897,7 @@ class SubroutineEval:
tuple[Optional[ScratchVar], ScratchVar | abi.BaseType | Expr],
]
use_frame_pt: bool = False
_current_proto: ClassVar[Optional[Proto]] = None

@staticmethod
def var_n_loaded_scratch(
Expand Down Expand Up @@ -1006,7 +1015,17 @@ def __call__(self, subroutine: SubroutineDefinition) -> SubroutineDeclaration:
abi_output_kwargs[output_kwarg_info.name] = output_carrying_abi

# Arg usage "B" supplied to build an AST from the user-defined PyTEAL function:
subroutine_body = subroutine.implementation(*loaded_args, **abi_output_kwargs)
subroutine_body: Expr
if not self.use_frame_pt:
subroutine_body = subroutine.implementation(
*loaded_args, **abi_output_kwargs
)
else:
with _frame_pointer_context(proto):
subroutine_body = subroutine.implementation(
*loaded_args, **abi_output_kwargs
)

if not isinstance(subroutine_body, Expr):
raise TealInputError(
f"Subroutine function does not return a PyTeal expression. Got type {type(subroutine_body)}."
Expand All @@ -1025,6 +1044,13 @@ def __call__(self, subroutine: SubroutineDefinition) -> SubroutineDeclaration:
if not self.use_frame_pt:
deferred_expr = output_carrying_abi._stored_value.load()

if self.use_frame_pt:
depth = len(proto.mem_layout.local_stack_types)
# Only when there is exactly one return value alongside other local
# variables do we use `bury` to move the result to index 0 relative to the frame pointer.
ahangsu marked this conversation as resolved.
Show resolved Hide resolved
if not abi_output_kwargs and 0 < proto.num_returns < depth:
deferred_expr = FrameBury(Seq(), 0, inferred_type=TealType.none)

# Arg usage "A" to be pick up and store in scratch parameters that have been placed on the stack
# need to reverse order of argumentVars because the last argument will be on top of the stack

Expand Down
15 changes: 10 additions & 5 deletions tests/integration/teal/roundtrip/app_roundtrip_()_v8.teal
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,8 @@ return
tuplecomplement_0:
proto 1 1
byte ""
int 0
dupn 1
byte ""
frame_bury 0
retsub
Expand All @@ -23,16 +25,19 @@ retsub
roundtripper_1:
proto 1 1
byte ""
dupn 2
int 0
dupn 1
frame_dig -1
callsub tuplecomplement_0
store 2
load 2
frame_bury 1
frame_dig 1
callsub tuplecomplement_0
store 3
frame_bury 2
frame_dig -1
load 2
frame_dig 1
concat
load 3
frame_dig 2
concat
frame_bury 0
retsub
Loading