Using local variable in subroutine through frame pointer #606

Merged Dec 23, 2022 · 24 commits (diff below shows changes from 23 commits)
CHANGELOG.md: 1 addition, 0 deletions

@@ -2,6 +2,7 @@

# Added
* Added frame pointer support for subroutine arguments, replacing the previous usage of scratch. ([#562](https://github.com/algorand/pyteal/pull/562))
+* Added frame pointer support for local ABI variables in subroutines. ([#606](https://github.com/algorand/pyteal/pull/606))
* Added `frame_pointers` property in `OptimizeOptions` to optimize away scratch slots during subroutine calls. This defaults to frame pointer usage when not specified. ([#613](https://github.com/algorand/pyteal/pull/613))

# Fixed
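The frame pointer entries above are user-visible through `OptimizeOptions`. A minimal sketch of how a caller might opt in, assuming the `frame_pointers` keyword described in the [#613] entry (the keyword is taken from that entry, not from this diff):

```python
import pyteal as pt

@pt.Subroutine(pt.TealType.uint64)
def double(x: pt.Expr) -> pt.Expr:
    return x * pt.Int(2)

program = pt.Seq(pt.Pop(double(pt.Int(21))), pt.Approve())

# frame_pointers=True asks the compiler to place subroutine locals on the
# call frame (AVM 8+ proto/frame_dig/frame_bury) instead of scratch slots.
teal = pt.compileTeal(
    program,
    mode=pt.Mode.Application,
    version=8,
    optimize=pt.OptimizeOptions(frame_pointers=True),
)
```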
pyteal/ast/abi/tuple.py: 3 additions, 3 deletions

@@ -23,7 +23,7 @@
from pyteal.ast.unaryexpr import Len
from pyteal.ast.binaryexpr import ExtractUint16
from pyteal.ast.naryexpr import Concat
-from pyteal.ast.scratchvar import ScratchVar
+from pyteal.ast.abstractvar import alloc_abstract_var

from pyteal.ast.abi.type import TypeSpec, BaseType, ComputedValue
from pyteal.ast.abi.bool import (
@@ -72,8 +72,8 @@ def _encode_tuple(values: Sequence[BaseType]) -> Expr:

    tail_offset = Uint16()
    tail_offset_accumulator = Uint16()
-    tail_holder = ScratchVar(TealType.bytes)
-    encoded_tail = ScratchVar(TealType.bytes)
+    tail_holder = alloc_abstract_var(TealType.bytes)
+    encoded_tail = alloc_abstract_var(TealType.bytes)

    firstDynamicTail = True
    for i, elem in enumerate(values):
pyteal/ast/abi/type.py: 25 additions, 15 deletions

@@ -2,10 +2,9 @@
from abc import ABC, abstractmethod

from pyteal.ast.expr import Expr
-from pyteal.ast.abstractvar import AbstractVar
-from pyteal.ast.scratchvar import ScratchVar
+from pyteal.ast.abstractvar import AbstractVar, alloc_abstract_var
from pyteal.ast.seq import Seq
-from pyteal.errors import TealInputError
+from pyteal.errors import TealInputError, TealTypeError
from pyteal.types import TealType


@@ -78,7 +77,7 @@ def __init__(self, spec: TypeSpec) -> None:
"""Create a new BaseType."""
super().__init__()
self._type_spec: Final[TypeSpec] = spec
self._stored_value: AbstractVar = ScratchVar(spec.storage_type())
self._stored_value: AbstractVar = alloc_abstract_var(spec.storage_type())

def type_spec(self) -> TypeSpec:
"""Get the TypeSpec for this ABI type instance."""
@@ -216,22 +215,33 @@ def produced_type_spec(self) -> TypeSpec:
        return self.type_spec

    def store_into(self, output: BaseType) -> Expr:
+        from pyteal.ast.subroutine import SubroutineDeclaration
+
        if output.type_spec() != self.produced_type_spec():
            raise TealInputError(
                f"expected type_spec {self.produced_type_spec()} but get {output.type_spec()}"
            )

-        declaration = self.computation.subroutine.get_declaration()
-
-        if declaration.deferred_expr is None:
-            raise TealInputError(
-                "ABI return subroutine must have deferred_expr to be not-None."
-            )
-        if declaration.deferred_expr.type_of() != output.type_spec().storage_type():
-            raise TealInputError(
-                f"ABI return subroutine deferred_expr is expected to be typed {output.type_spec().storage_type()}, "
-                f"but has type {declaration.deferred_expr.type_of()}."
-            )
+        # HANG NOTE! This get_declaration check applies only in the pre-frame-pointer
+        # case; it should not apply in the post-frame-pointer case. We need to somehow
+        # expose the context of evaluation.
+        declaration: SubroutineDeclaration | None = None
+        try:
+            declaration = self.computation.subroutine.get_declaration_by_option(False)
+        except Exception:
+            pass
+
+        if declaration is not None:
+            if declaration.deferred_expr is None:
+                raise TealInputError(
+                    "ABI return subroutine must have deferred_expr to be not-None."
+                )
+            if declaration.deferred_expr.type_of() != output.type_spec().storage_type():
+                raise TealTypeError(
+                    declaration.deferred_expr.type_of(),
+                    output.type_spec().storage_type(),
+                )

        return output._stored_value.store(self.computation)

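For orientation, `store_into` is the path exercised when a caller consumes an ABI subroutine's returned value. A minimal sketch using the public `ABIReturnSubroutine` API (the subroutine and names here are illustrative, not from this diff):

```python
import pyteal as pt

@pt.ABIReturnSubroutine
def add(a: pt.abi.Uint64, b: pt.abi.Uint64, *, output: pt.abi.Uint64) -> pt.Expr:
    return output.set(a.get() + b.get())

result = pt.abi.Uint64()
program = pt.Seq(
    # add(...) yields a ComputedValue; store_into runs the deferred_expr
    # checks above before writing into result's underlying AbstractVar.
    add(pt.Int(1), pt.Int(2)).store_into(result),
    pt.Approve(),
)
```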
pyteal/ast/abstractvar.py: 29 additions, 0 deletions

@@ -38,3 +38,32 @@ def storage_type(self) -> TealType:


AbstractVar.__module__ = "pyteal"
+
+
+def alloc_abstract_var(stack_type: TealType) -> AbstractVar:
+    """Allocate an abstract var on the stack, or in scratch.
+
+    This unexported function takes a TealType representing the value's type on
+    the stack (or in scratch) and generates an AbstractVar instance.
+    It infers the proto currently in use in the context of subroutine evaluation,
+    and swaps to FrameVar to save the use of scratch slots.
+
+    Args:
+        stack_type: TealType that represents the stack type.
+    """
+
+    from pyteal.ast import ScratchVar
+    from pyteal.ast.subroutine import SubroutineEval
+    from pyteal.ast.frame import FrameVar, MAX_FRAME_LOCAL_VARS
+
+    if SubroutineEval._current_proto:
+        local_types = SubroutineEval._current_proto.mem_layout.local_stack_types
+
+        # NOTE: a subroutine can use at most 128 frame-local variables;
+        # len(local_types) + 1 is the length after appending, which must be <= 128.
+        if len(local_types) + 1 <= MAX_FRAME_LOCAL_VARS:
+            local_types.append(stack_type)
+            return FrameVar(SubroutineEval._current_proto, len(local_types) - 1)
+
+    return ScratchVar(stack_type)
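Allocation is thus context-sensitive. A sketch of the observable behavior, assuming the internal entry points shown in this diff:

```python
from pyteal.ast.abstractvar import alloc_abstract_var
from pyteal.types import TealType

# Outside of subroutine evaluation no proto is active, so allocation
# falls back to a scratch slot.
var = alloc_abstract_var(TealType.uint64)  # ScratchVar

# Inside SubroutineEval with use_frame_pt=True, _frame_pointer_context (see
# pyteal/ast/subroutine.py below) sets SubroutineEval._current_proto, so the
# same call appends to the proto's local_stack_types and returns a FrameVar,
# until the MAX_FRAME_LOCAL_VARS (128) limit is reached.
```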
pyteal/ast/frame.py: 20 additions, 12 deletions

@@ -1,5 +1,5 @@
from itertools import groupby
-from typing import TYPE_CHECKING, Optional
+from typing import TYPE_CHECKING, Optional, Final

from pyteal.ast.expr import Expr
from pyteal.ast.int import Int
@@ -13,6 +13,9 @@
from pyteal.compiler import CompileOptions


+MAX_FRAME_LOCAL_VARS: Final[int] = 128


class LocalTypeSegment(Expr):
    """An expression that allocates stack space for local variables.

@@ -104,6 +107,14 @@ def __getitem__(self, index: int) -> TealType:
    def __str__(self) -> str:
        return f"(ProtoStackLayout: (args: {self.arg_stack_types}) (locals: {self.local_stack_types}))"

+    @classmethod
+    def from_proto(cls, proto: "Proto") -> "ProtoStackLayout":
+        return cls(
+            [TealType.anytype] * proto.num_args,
+            [TealType.anytype] * proto.num_returns,
+            proto.num_returns,
+        )
+
    def has_return(self) -> bool:
        return False

@@ -157,6 +168,9 @@ def __init__(
            raise TealInputError(
                f"The number of returns provided to Proto must be >= 0 but {num_returns=}."
            )
+        self.num_args = num_args
+        self.num_returns = num_returns
+
        if mem_layout:
            if mem_layout.num_return_allocs > num_returns:
                raise TealInternalError(
@@ -168,10 +182,10 @@
f"The number of arguments {num_args} should match with "
f"memory layout's number of arguments {len(mem_layout.arg_stack_types)}"
)
else:
mem_layout = ProtoStackLayout.from_proto(self)

self.num_args = num_args
self.num_returns = num_returns
self.mem_layout: Optional[ProtoStackLayout] = mem_layout
self.mem_layout: ProtoStackLayout = mem_layout

    def __teal__(self, options: "CompileOptions") -> tuple[TealBlock, TealSimpleBlock]:
        verifyProgramVersion(

@@ -181,8 +195,6 @@ def __teal__(self, options: "CompileOptions") -> tuple[TealBlock, TealSimpleBlock]:

        )
        op = TealOp(self, Op.proto, self.num_args, self.num_returns)
        proto_srt, proto_end = TealBlock.FromOp(options, op)
-        if not self.mem_layout:
-            return proto_srt, proto_end
        local_srt, local_end = self.mem_layout.__teal__(options)
        proto_end.setNextBlock(local_srt)
        return proto_srt, local_end
@@ -250,7 +262,7 @@ def __init__(
    ):
        super().__init__()

-        target_type = inferred_type if inferred_type else TealType.anytype
+        target_type = inferred_type if inferred_type is not None else TealType.anytype
        require_type(value, target_type)

        self.value = value
@@ -291,11 +303,7 @@ def __init__(self, under_proto: Proto, frame_index: int) -> None:
        super().__init__()
        self.proto = under_proto
        self.frame_index = frame_index
-        self.stack_type = (
-            self.proto.mem_layout[frame_index]
-            if self.proto.mem_layout
-            else TealType.anytype
-        )
+        self.stack_type = self.proto.mem_layout[frame_index]

    def storage_type(self) -> TealType:
        return self.stack_type
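With the `None` fallback removed, every `Proto` now carries a concrete layout. A small sketch of the new invariant, using the internal constructors from this file (keyword names assumed from the diff):

```python
from pyteal.ast.frame import Proto, FrameVar
from pyteal.types import TealType

# No mem_layout passed: Proto now builds a default ProtoStackLayout of
# anytype slots via ProtoStackLayout.from_proto instead of storing None.
proto = Proto(num_args=2, num_returns=1)

# FrameVar can therefore always read its type from the layout.
assert FrameVar(proto, 0).storage_type() == TealType.anytype
```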
pyteal/ast/frame_test.py: 9 additions, 2 deletions

@@ -20,12 +20,19 @@ def test_proto(input_num: int, output_num: int):
    assert not expr.has_return()
    assert expr.type_of() == pt.TealType.none

-    expected = pt.TealSimpleBlock([pt.TealOp(expr, pt.Op.proto, input_num, output_num)])
+    block = [pt.TealOp(expr, pt.Op.proto, input_num, output_num)]
+    if output_num > 0:
+        block.append(pt.TealOp(None, pt.Op.int, 0))
+    if output_num > 1:
+        block.append(pt.TealOp(None, pt.Op.dupn, output_num - 1))
+
+    expected = pt.TealSimpleBlock(block)
    actual, _ = expr.__teal__(avm8Options)
    actual.addIncoming()
    actual = pt.TealBlock.NormalizeBlocks(actual)

-    assert actual == expected
+    with pt.TealComponent.Context.ignoreExprEquality():
+        assert actual == expected


def test_proto_invalid():
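The updated expectation encodes that `Proto` now pre-allocates its return slots. For example, with `input_num=2, output_num=3` the test now expects the op sequence sketched here:

```python
# proto 2 3   emitted by the Proto op itself
# int 0       pushes the first return slot (output_num > 0)
# dupn 2      duplicates it for the remaining output_num - 1 slots
```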
pyteal/ast/subroutine.py: 41 additions, 5 deletions

@@ -1,15 +1,18 @@
+from contextlib import contextmanager
+import algosdk.abi as sdk_abi
+import warnings

from dataclasses import dataclass
from docstring_parser import parse as parse_docstring
from inspect import isclass, Parameter, signature, get_annotations
from types import MappingProxyType, NoneType
-from typing import Any, Callable, Final, Optional, TYPE_CHECKING, cast
-import algosdk.abi as sdk_abi
+from typing import Any, Callable, Final, Optional, TYPE_CHECKING, cast, ClassVar

from pyteal.ast import abi
from pyteal.ast.expr import Expr
from pyteal.ast.seq import Seq
from pyteal.ast.scratchvar import DynamicScratchVar, ScratchVar, ScratchSlot
-from pyteal.ast.frame import Proto, FrameVar, ProtoStackLayout
+from pyteal.ast.frame import FrameBury, Proto, FrameVar, ProtoStackLayout
from pyteal.errors import TealInputError, TealInternalError, verifyProgramVersion
from pyteal.ir import TealOp, Op, TealBlock
from pyteal.types import TealType
@@ -33,6 +36,10 @@ def __init__(self, subroutine_def: "SubroutineDefinition") -> None:
        self.type_of: Optional[TealType] = None

    def get_declaration(self) -> "SubroutineDeclaration":
+        warnings.warn(
+            "`get_declaration` is being deprecated: Please use `get_declaration_by_option` instead.",
+            DeprecationWarning,
+        )
        return self.get_declaration_by_option(False)

    def get_declaration_by_option(
@@ -304,7 +311,11 @@ def _validate_annotation(
        )

    def get_declaration(self) -> "SubroutineDeclaration":
-        return self.declarations.get_declaration()
+        warnings.warn(
+            "`get_declaration` is being deprecated: Please use `get_declaration_by_option` instead.",
+            DeprecationWarning,
+        )
+        return self.declarations.get_declaration_by_option(False)

    def get_declaration_by_option(
        self,

@@ -815,6 +826,13 @@ def __call__(self, fn_implementation: Callable[..., Expr]) -> SubroutineFnWrapper:
Subroutine.__module__ = "pyteal"


+@contextmanager
+def _frame_pointer_context(proto: Proto):
+    tmp, SubroutineEval._current_proto = SubroutineEval._current_proto, proto
+    yield proto
+    SubroutineEval._current_proto = tmp
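The context manager swaps the class-level `_current_proto` in and restores the previous value on exit; an illustrative sketch of the invariant (hypothetical values, not from this diff):

```python
proto = Proto(num_args=1, num_returns=1)
assert SubroutineEval._current_proto is None
with _frame_pointer_context(proto):
    # alloc_abstract_var now resolves to FrameVar against this proto
    assert SubroutineEval._current_proto is proto
assert SubroutineEval._current_proto is None  # previous value restored
```

Note that because the restore is not wrapped in `try/finally`, it only runs on normal exit; an exception raised in the body would abort compilation anyway.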


@dataclass
class SubroutineEval:
"""
@@ -889,6 +907,7 @@ class SubroutineEval:
        tuple[Optional[ScratchVar], ScratchVar | abi.BaseType | Expr],
    ]
    use_frame_pt: bool = False
+    _current_proto: ClassVar[Optional[Proto]] = None

    @staticmethod
    def var_n_loaded_scratch(

@@ -1006,7 +1025,17 @@ def __call__(self, subroutine: SubroutineDefinition) -> SubroutineDeclaration:
            abi_output_kwargs[output_kwarg_info.name] = output_carrying_abi

        # Arg usage "B" supplied to build an AST from the user-defined PyTEAL function:
-        subroutine_body = subroutine.implementation(*loaded_args, **abi_output_kwargs)
+        subroutine_body: Expr
+        if not self.use_frame_pt:
+            subroutine_body = subroutine.implementation(
+                *loaded_args, **abi_output_kwargs
+            )
+        else:
+            with _frame_pointer_context(proto):
+                subroutine_body = subroutine.implementation(
+                    *loaded_args, **abi_output_kwargs
+                )

        if not isinstance(subroutine_body, Expr):
            raise TealInputError(
                f"Subroutine function does not return a PyTeal expression. Got type {type(subroutine_body)}."
@@ -1025,6 +1054,13 @@ def __call__(self, subroutine: SubroutineDefinition) -> SubroutineDeclaration:
        if not self.use_frame_pt:
            deferred_expr = output_carrying_abi._stored_value.load()

+        if self.use_frame_pt:
+            local_size = len(proto.mem_layout.local_stack_types)
+            # Only when there is a single return value and other local variables
+            # exist do we bury the result down to index 0 against the frame pointer.
+            if not abi_output_kwargs and proto.num_returns > 0 and local_size > 0:
+                deferred_expr = FrameBury(Seq(), 0, inferred_type=TealType.none)

        # Arg usage "A" to be picked up and stored in scratch parameters that have been placed on the stack
        # need to reverse the order of argumentVars because the last argument will be on top of the stack
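A sketch of why the deferred bury is needed, under the frame layout implied by this diff (indices illustrative, not actual PyTeal API):

```python
# Conceptual frame layout for a frame-pointer subroutine with one return
# value and two locals:
#
#   frame index 0: return slot  (pre-allocated by `proto` via int 0 / dupn)
#   frame index 1: local a
#   frame index 2: local b
#   top of stack:  computed result
#
# The deferred FrameBury(Seq(), 0, ...) runs at each exit point and moves the
# top-of-stack result down into frame index 0, so the result survives frame
# teardown on retsub.
```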