Merge pull request #3308 from ethereum/usereturndatacopy

Use returndatacopy for retrieving dynamically sized outputs.
chriseth, 2018-03-21 18:12:41 +01:00 (committed by GitHub)
commit 85b0cfea9a
12 changed files with 363 additions and 144 deletions
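
For context, the pattern this change enables looks roughly like the following (a minimal sketch in the spirit of the new end-to-end tests further down; it requires a post-Byzantium EVM target):

contract C {
    function dyn() public returns (bytes) {
        return "dynamically sized result";
    }
    function f() public returns (bytes) {
        // Post-Byzantium, the compiler copies the callee's actual return data
        // with RETURNDATACOPY/RETURNDATASIZE and ABI-decodes it, so dynamically
        // sized return values from external calls become accessible.
        return this.dyn();
    }
}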

View File

@ -1,6 +1,7 @@
### 0.4.22 (unreleased)
Features:
* General: Support accessing dynamic return data in post-byzantium EVMs.
Bugfixes:
* Code Generator: Allow ``block.blockhash`` without being called.

View File

@ -1551,16 +1551,22 @@ bool TypeChecker::visit(FunctionCall const& _functionCall)
_functionCall.expression().annotation().isPure &&
functionType->isPure();
bool allowDynamicTypes = m_evmVersion.supportsReturndata();
if (!functionType)
{
m_errorReporter.typeError(_functionCall.location(), "Type is not callable");
_functionCall.annotation().type = make_shared<TupleType>();
return false;
}
else if (functionType->returnParameterTypes().size() == 1)
_functionCall.annotation().type = functionType->returnParameterTypes().front();
auto returnTypes =
allowDynamicTypes ?
functionType->returnParameterTypes() :
functionType->returnParameterTypesWithoutDynamicTypes();
if (returnTypes.size() == 1)
_functionCall.annotation().type = returnTypes.front();
else
_functionCall.annotation().type = make_shared<TupleType>(functionType->returnParameterTypes());
_functionCall.annotation().type = make_shared<TupleType>(returnTypes);
if (auto functionName = dynamic_cast<Identifier const*>(&_functionCall.expression()))
{

View File

@ -2311,6 +2311,18 @@ vector<string> FunctionType::parameterNames() const
return vector<string>(m_parameterNames.cbegin() + 1, m_parameterNames.cend());
}
TypePointers FunctionType::returnParameterTypesWithoutDynamicTypes() const
{
TypePointers returnParameterTypes = m_returnParameterTypes;
if (m_kind == Kind::External || m_kind == Kind::CallCode || m_kind == Kind::DelegateCall)
for (auto& param: returnParameterTypes)
if (param->isDynamicallySized() && !param->dataStoredIn(DataLocation::Storage))
param = make_shared<InaccessibleDynamicType>();
return returnParameterTypes;
}
TypePointers FunctionType::parameterTypes() const
{
if (!bound())
@ -2772,18 +2784,9 @@ FunctionTypePointer FunctionType::asMemberFunction(bool _inLibrary, bool _bound)
kind = Kind::DelegateCall;
}
TypePointers returnParameterTypes = m_returnParameterTypes;
if (kind != Kind::Internal)
{
// Alter dynamic types to be non-accessible.
for (auto& param: returnParameterTypes)
if (param->isDynamicallySized())
param = make_shared<InaccessibleDynamicType>();
}
return make_shared<FunctionType>(
parameterTypes,
returnParameterTypes,
m_returnParameterTypes,
m_parameterNames,
m_returnParameterNames,
kind,

View File

@ -973,6 +973,9 @@ public:
TypePointers parameterTypes() const;
std::vector<std::string> parameterNames() const;
TypePointers const& returnParameterTypes() const { return m_returnParameterTypes; }
/// @returns the list of return parameter types. All dynamically-sized types (this excludes
/// storage pointers) are replaced by InaccessibleDynamicType instances.
TypePointers returnParameterTypesWithoutDynamicTypes() const;
std::vector<std::string> const& returnParameterNames() const { return m_returnParameterNames; }
/// @returns the "self" parameter type for a bound function
TypePointer const& selfType() const;
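
As a rough illustration of the new accessor (hypothetical signature, not part of this diff): for an external function type such as

    function info() external returns (bytes tag, uint256 id);

returnParameterTypes() reports (bytes memory, uint256), while returnParameterTypesWithoutDynamicTypes() reports (InaccessibleDynamicType, uint256), because the dynamically sized, non-storage return value cannot be retrieved without RETURNDATACOPY.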

View File

@ -253,6 +253,9 @@ string ABIFunctions::cleanupFunction(Type const& _type, bool _revertOnFailure)
templ("body", w.render());
break;
}
case Type::Category::InaccessibleDynamic:
templ("body", "cleaned := 0");
break;
default:
solAssert(false, "Cleanup of type " + _type.identifier() + " requested.");
}

View File

@ -22,11 +22,14 @@
#include <libsolidity/codegen/CompilerUtils.h>
#include <libsolidity/ast/AST.h>
#include <libevmasm/Instruction.h>
#include <libsolidity/codegen/ArrayUtils.h>
#include <libsolidity/codegen/LValue.h>
#include <libsolidity/codegen/ABIFunctions.h>
#include <libevmasm/Instruction.h>
#include <libdevcore/Whiskers.h>
using namespace std;
namespace dev
@ -159,6 +162,163 @@ void CompilerUtils::storeInMemoryDynamic(Type const& _type, bool _padToWordBound
}
}
void CompilerUtils::abiDecode(TypePointers const& _typeParameters, bool _fromMemory, bool _revertOnOutOfBounds)
{
/// Stack: <source_offset> <length>
if (m_context.experimentalFeatureActive(ExperimentalFeature::ABIEncoderV2))
{
// Use the new JULIA-based decoding function
auto stackHeightBefore = m_context.stackHeight();
abiDecodeV2(_typeParameters, _fromMemory);
solAssert(m_context.stackHeight() - stackHeightBefore == sizeOnStack(_typeParameters) - 2, "");
return;
}
//@todo this does not yet support nested dynamic arrays
if (_revertOnOutOfBounds)
{
size_t encodedSize = 0;
for (auto const& t: _typeParameters)
encodedSize += t->decodingType()->calldataEncodedSize(true);
m_context.appendInlineAssembly("{ if lt(len, " + to_string(encodedSize) + ") { revert(0, 0) } }", {"len"});
}
m_context << Instruction::DUP2 << Instruction::ADD;
m_context << Instruction::SWAP1;
/// Stack: <input_end> <source_offset>
// Retain the offset pointer as base_offset, the point from which the data offsets are computed.
m_context << Instruction::DUP1;
for (TypePointer const& parameterType: _typeParameters)
{
// stack: v1 v2 ... v(k-1) input_end base_offset current_offset
TypePointer type = parameterType->decodingType();
solUnimplementedAssert(type, "No decoding type found.");
if (type->category() == Type::Category::Array)
{
auto const& arrayType = dynamic_cast<ArrayType const&>(*type);
solUnimplementedAssert(!arrayType.baseType()->isDynamicallyEncoded(), "Nested arrays not yet implemented.");
if (_fromMemory)
{
solUnimplementedAssert(
arrayType.baseType()->isValueType(),
"Nested memory arrays not yet implemented here."
);
// @todo If base type is an array or struct, it is still calldata-style encoded, so
// we would have to convert it like below.
solAssert(arrayType.location() == DataLocation::Memory, "");
if (arrayType.isDynamicallySized())
{
// compute data pointer
m_context << Instruction::DUP1 << Instruction::MLOAD;
if (_revertOnOutOfBounds)
{
// Check that the data pointer is valid and that length times
// item size is still inside the range.
Whiskers templ(R"({
if gt(ptr, 0x100000000) { revert(0, 0) }
ptr := add(ptr, base_offset)
let array_data_start := add(ptr, 0x20)
if gt(array_data_start, input_end) { revert(0, 0) }
let array_length := mload(ptr)
if or(
gt(array_length, 0x100000000),
gt(add(array_data_start, mul(array_length, <item_size>)), input_end)
) { revert(0, 0) }
})");
templ("item_size", to_string(arrayType.isByteArray() ? 1 : arrayType.baseType()->calldataEncodedSize(true)));
m_context.appendInlineAssembly(templ.render(), {"input_end", "base_offset", "offset", "ptr"});
}
else
m_context << Instruction::DUP3 << Instruction::ADD;
// stack: v1 v2 ... v(k-1) input_end base_offset current_offset v(k)
moveIntoStack(3);
m_context << u256(0x20) << Instruction::ADD;
}
else
{
// Size has already been checked for this one.
moveIntoStack(2);
m_context << Instruction::DUP3;
m_context << u256(arrayType.calldataEncodedSize(true)) << Instruction::ADD;
}
}
else
{
// first load from calldata and potentially convert to memory if arrayType is memory
TypePointer calldataType = arrayType.copyForLocation(DataLocation::CallData, false);
if (calldataType->isDynamicallySized())
{
// put on stack: data_pointer length
loadFromMemoryDynamic(IntegerType(256), !_fromMemory);
m_context << Instruction::SWAP1;
// stack: input_end base_offset next_pointer data_offset
if (_revertOnOutOfBounds)
m_context.appendInlineAssembly("{ if gt(data_offset, 0x100000000) { revert(0, 0) } }", {"data_offset"});
m_context << Instruction::DUP3 << Instruction::ADD;
// stack: input_end base_offset next_pointer array_head_ptr
if (_revertOnOutOfBounds)
m_context.appendInlineAssembly(
"{ if gt(add(array_head_ptr, 0x20), input_end) { revert(0, 0) } }",
{"input_end", "base_offset", "next_ptr", "array_head_ptr"}
);
// retrieve length
loadFromMemoryDynamic(IntegerType(256), !_fromMemory, true);
// stack: input_end base_offset next_pointer array_length data_pointer
m_context << Instruction::SWAP2;
// stack: input_end base_offset data_pointer array_length next_pointer
if (_revertOnOutOfBounds)
{
unsigned itemSize = arrayType.isByteArray() ? 1 : arrayType.baseType()->calldataEncodedSize(true);
m_context.appendInlineAssembly(R"({
if or(
gt(array_length, 0x100000000),
gt(add(data_ptr, mul(array_length, )" + to_string(itemSize) + R"()), input_end)
) { revert(0, 0) }
})", {"input_end", "base_offset", "data_ptr", "array_length", "next_ptr"});
}
}
else
{
// size has already been checked
// stack: input_end base_offset data_offset
m_context << Instruction::DUP1;
m_context << u256(calldataType->calldataEncodedSize()) << Instruction::ADD;
}
if (arrayType.location() == DataLocation::Memory)
{
// stack: input_end base_offset calldata_ref [length] next_calldata
// copy to memory
// move calldata type up again
moveIntoStack(calldataType->sizeOnStack());
convertType(*calldataType, arrayType, false, false, true);
// fetch next pointer again
moveToStackTop(arrayType.sizeOnStack());
}
// move input_end up
// stack: input_end base_offset calldata_ref [length] next_calldata
moveToStackTop(2 + arrayType.sizeOnStack());
m_context << Instruction::SWAP1;
// stack: base_offset calldata_ref [length] input_end next_calldata
moveToStackTop(2 + arrayType.sizeOnStack());
m_context << Instruction::SWAP1;
// stack: calldata_ref [length] input_end base_offset next_calldata
}
}
else
{
solAssert(!type->isDynamicallyEncoded(), "Unknown dynamically sized type: " + type->toString());
loadFromMemoryDynamic(*type, !_fromMemory, true);
// stack: v1 v2 ... v(k-1) input_end base_offset v(k) mem_offset
moveToStackTop(1, type->sizeOnStack());
moveIntoStack(3, type->sizeOnStack());
}
// stack: v1 v2 ... v(k-1) v(k) input_end base_offset next_offset
}
popStackSlots(3);
}
void CompilerUtils::encodeToMemory(
TypePointers const& _givenTypes,
TypePointers const& _targetTypes,
@ -321,15 +481,13 @@ void CompilerUtils::abiEncodeV2(
void CompilerUtils::abiDecodeV2(TypePointers const& _parameterTypes, bool _fromMemory)
{
// stack: <source_offset>
// stack: <source_offset> <length> [stack top]
auto ret = m_context.pushNewTag();
moveIntoStack(2);
// stack: <return tag> <source_offset> <length> [stack top]
m_context << Instruction::DUP2 << Instruction::ADD;
m_context << Instruction::SWAP1;
if (_fromMemory)
// TODO pass correct size for the memory case
m_context << (u256(1) << 63);
else
m_context << Instruction::CALLDATASIZE;
m_context << Instruction::SWAP1;
// stack: <return tag> <end> <start>
string decoderName = m_context.abiFunctions().tupleDecoder(_parameterTypes, _fromMemory);
m_context.appendJumpTo(m_context.namedTag(decoderName));
m_context.adjustStackOffset(int(sizeOnStack(_parameterTypes)) - 3);

View File

@ -88,6 +88,15 @@ public:
/// Stack post: (memory_offset+length)
void storeInMemoryDynamic(Type const& _type, bool _padToWords = true);
/// Creates code that unpacks the arguments according to their types specified by a vector of TypePointers.
/// From memory if @a _fromMemory is true, otherwise from call data.
/// Calls revert if @a _revertOnOutOfBounds is true and the supplied size is shorter
/// than the static data requirements or if dynamic data pointers reach outside of the
/// area. Also has a hard cap of 0x100000000 for any given length/offset field.
/// Stack pre: <source_offset> <length>
/// Stack post: <value0> <value1> ... <valuen>
void abiDecode(TypePointers const& _typeParameters, bool _fromMemory = false, bool _revertOnOutOfBounds = false);
/// Copies values (of types @a _givenTypes) given on the stack to a location in memory given
/// at the stack top, encoding them according to the ABI as the given types @a _targetTypes.
/// Removes the values from the stack and leaves the updated memory pointer.
@ -149,7 +158,7 @@ public:
/// Decodes data from ABI encoding into internal encoding. If @a _fromMemory is set to true,
/// the data is taken from memory instead of from calldata.
/// Can allocate memory.
/// Stack pre: <source_offset>
/// Stack pre: <source_offset> <length>
/// Stack post: <value0> <value1> ... <valuen>
void abiDecodeV2(TypePointers const& _parameterTypes, bool _fromMemory = false);

View File

@ -278,9 +278,10 @@ void ContractCompiler::appendConstructor(FunctionDefinition const& _constructor)
m_context.appendProgramSize();
m_context << Instruction::DUP4 << Instruction::CODECOPY;
m_context << Instruction::DUP2 << Instruction::ADD;
m_context << Instruction::DUP1;
CompilerUtils(m_context).storeFreeMemoryPointer();
// stack: <memptr>
appendCalldataUnpacker(FunctionType(_constructor).parameterTypes(), true);
CompilerUtils(m_context).abiDecode(FunctionType(_constructor).parameterTypes(), true);
}
_constructor.accept(*this);
}
@ -367,7 +368,8 @@ void ContractCompiler::appendFunctionSelector(ContractDefinition const& _contrac
{
// Parameter for calldataUnpacker
m_context << CompilerUtils::dataStartOffset;
appendCalldataUnpacker(functionType->parameterTypes());
m_context << Instruction::DUP1 << Instruction::CALLDATASIZE << Instruction::SUB;
CompilerUtils(m_context).abiDecode(functionType->parameterTypes());
}
m_context.appendJumpTo(m_context.functionEntryLabel(functionType->declaration()));
m_context << returnTag;
@ -382,105 +384,6 @@ void ContractCompiler::appendFunctionSelector(ContractDefinition const& _contrac
}
}
void ContractCompiler::appendCalldataUnpacker(TypePointers const& _typeParameters, bool _fromMemory)
{
// We do not check the calldata size, everything is zero-padded
if (m_context.experimentalFeatureActive(ExperimentalFeature::ABIEncoderV2))
{
// Use the new JULIA-based decoding function
auto stackHeightBefore = m_context.stackHeight();
CompilerUtils(m_context).abiDecodeV2(_typeParameters, _fromMemory);
solAssert(m_context.stackHeight() - stackHeightBefore == CompilerUtils(m_context).sizeOnStack(_typeParameters) - 1, "");
return;
}
//@todo this does not yet support nested dynamic arrays
// Retain the offset pointer as base_offset, the point from which the data offsets are computed.
m_context << Instruction::DUP1;
for (TypePointer const& parameterType: _typeParameters)
{
// stack: v1 v2 ... v(k-1) base_offset current_offset
TypePointer type = parameterType->decodingType();
solUnimplementedAssert(type, "No decoding type found.");
if (type->category() == Type::Category::Array)
{
auto const& arrayType = dynamic_cast<ArrayType const&>(*type);
solUnimplementedAssert(!arrayType.baseType()->isDynamicallySized(), "Nested arrays not yet implemented.");
if (_fromMemory)
{
solUnimplementedAssert(
arrayType.baseType()->isValueType(),
"Nested memory arrays not yet implemented here."
);
// @todo If base type is an array or struct, it is still calldata-style encoded, so
// we would have to convert it like below.
solAssert(arrayType.location() == DataLocation::Memory, "");
if (arrayType.isDynamicallySized())
{
// compute data pointer
m_context << Instruction::DUP1 << Instruction::MLOAD;
m_context << Instruction::DUP3 << Instruction::ADD;
m_context << Instruction::SWAP2 << Instruction::SWAP1;
m_context << u256(0x20) << Instruction::ADD;
}
else
{
m_context << Instruction::SWAP1 << Instruction::DUP2;
m_context << u256(arrayType.calldataEncodedSize(true)) << Instruction::ADD;
}
}
else
{
// first load from calldata and potentially convert to memory if arrayType is memory
TypePointer calldataType = arrayType.copyForLocation(DataLocation::CallData, false);
if (calldataType->isDynamicallySized())
{
// put on stack: data_pointer length
CompilerUtils(m_context).loadFromMemoryDynamic(IntegerType(256), !_fromMemory);
// stack: base_offset data_offset next_pointer
m_context << Instruction::SWAP1 << Instruction::DUP3 << Instruction::ADD;
// stack: base_offset next_pointer data_pointer
// retrieve length
CompilerUtils(m_context).loadFromMemoryDynamic(IntegerType(256), !_fromMemory, true);
// stack: base_offset next_pointer length data_pointer
m_context << Instruction::SWAP2;
// stack: base_offset data_pointer length next_pointer
}
else
{
// leave the pointer on the stack
m_context << Instruction::DUP1;
m_context << u256(calldataType->calldataEncodedSize()) << Instruction::ADD;
}
if (arrayType.location() == DataLocation::Memory)
{
// stack: base_offset calldata_ref [length] next_calldata
// copy to memory
// move calldata type up again
CompilerUtils(m_context).moveIntoStack(calldataType->sizeOnStack());
CompilerUtils(m_context).convertType(*calldataType, arrayType, false, false, true);
// fetch next pointer again
CompilerUtils(m_context).moveToStackTop(arrayType.sizeOnStack());
}
// move base_offset up
CompilerUtils(m_context).moveToStackTop(1 + arrayType.sizeOnStack());
m_context << Instruction::SWAP1;
}
}
else
{
solAssert(!type->isDynamicallySized(), "Unknown dynamically sized type: " + type->toString());
CompilerUtils(m_context).loadFromMemoryDynamic(*type, !_fromMemory, true);
CompilerUtils(m_context).moveToStackTop(1 + type->sizeOnStack());
m_context << Instruction::SWAP1;
}
// stack: v1 v2 ... v(k-1) v(k) base_offset mem_offset
}
m_context << Instruction::POP << Instruction::POP;
}
void ContractCompiler::appendReturnValuePacker(TypePointers const& _typeParameters, bool _isLibrary)
{
CompilerUtils utils(m_context);

View File

@ -90,10 +90,6 @@ private:
void appendDelegatecallCheck();
void appendFunctionSelector(ContractDefinition const& _contract);
void appendCallValueCheck();
/// Creates code that unpacks the arguments for the given function represented by a vector of TypePointers.
/// From memory if @a _fromMemory is true, otherwise from call data.
/// Expects source offset on the stack, which is removed.
void appendCalldataUnpacker(TypePointers const& _typeParameters, bool _fromMemory = false);
void appendReturnValuePacker(TypePointers const& _typeParameters, bool _isLibrary);
void registerStateVariables(ContractDefinition const& _contract);

View File

@ -139,8 +139,8 @@ void ExpressionCompiler::appendStateVariableAccessor(VariableDeclaration const&
utils().popStackSlots(paramTypes.size() - 1);
}
unsigned retSizeOnStack = 0;
solAssert(accessorType.returnParameterTypes().size() >= 1, "");
auto const& returnTypes = accessorType.returnParameterTypes();
auto returnTypes = accessorType.returnParameterTypes();
solAssert(returnTypes.size() >= 1, "");
if (StructType const* structType = dynamic_cast<StructType const*>(returnType.get()))
{
// remove offset
@ -1618,15 +1618,27 @@ void ExpressionCompiler::appendExternalFunctionCall(
m_context.experimentalFeatureActive(ExperimentalFeature::V050) &&
m_context.evmVersion().hasStaticCall();
bool haveReturndatacopy = m_context.evmVersion().supportsReturndata();
unsigned retSize = 0;
TypePointers returnTypes;
if (returnSuccessCondition)
retSize = 0; // return value actually is success condition
else if (haveReturndatacopy)
returnTypes = _functionType.returnParameterTypes();
else
for (auto const& retType: _functionType.returnParameterTypes())
returnTypes = _functionType.returnParameterTypesWithoutDynamicTypes();
bool dynamicReturnSize = false;
for (auto const& retType: returnTypes)
if (retType->isDynamicallyEncoded())
{
solAssert(!retType->isDynamicallySized(), "Unable to return dynamic type from external call.");
retSize += retType->calldataEncodedSize();
solAssert(haveReturndatacopy, "");
dynamicReturnSize = true;
retSize = 0;
break;
}
else
retSize += retType->calldataEncodedSize();
// Evaluate arguments.
TypePointers argumentTypes;
@ -1824,20 +1836,42 @@ void ExpressionCompiler::appendExternalFunctionCall(
utils().fetchFreeMemoryPointer();
m_context << Instruction::SUB << Instruction::MLOAD;
}
else if (!_functionType.returnParameterTypes().empty())
else if (!returnTypes.empty())
{
utils().fetchFreeMemoryPointer();
bool memoryNeeded = false;
for (auto const& retType: _functionType.returnParameterTypes())
{
utils().loadFromMemoryDynamic(*retType, false, true, true);
if (dynamic_cast<ReferenceType const*>(retType.get()))
memoryNeeded = true;
}
if (memoryNeeded)
utils().storeFreeMemoryPointer();
// Stack: return_data_start
// The old decoder did not allocate any memory (i.e. did not touch the free
// memory pointer), but kept references to the return data for
// (statically-sized) arrays
bool needToUpdateFreeMemoryPtr = false;
if (dynamicReturnSize || m_context.experimentalFeatureActive(ExperimentalFeature::ABIEncoderV2))
needToUpdateFreeMemoryPtr = true;
else
m_context << Instruction::POP;
for (auto const& retType: returnTypes)
if (dynamic_cast<ReferenceType const*>(retType.get()))
needToUpdateFreeMemoryPtr = true;
// Stack: return_data_start
if (dynamicReturnSize)
{
solAssert(haveReturndatacopy, "");
m_context.appendInlineAssembly("{ returndatacopy(return_data_start, 0, returndatasize()) }", {"return_data_start"});
}
else
solAssert(retSize > 0, "");
// Always use the actual return length, and not our calculated expected length, if returndatacopy is supported.
// This ensures it can catch badly formatted input from external calls.
m_context << (haveReturndatacopy ? eth::AssemblyItem(Instruction::RETURNDATASIZE) : u256(retSize));
// Stack: return_data_start return_data_size
if (needToUpdateFreeMemoryPtr)
m_context.appendInlineAssembly(R"({
// round size to the next multiple of 32
let newMem := add(start, and(add(size, 0x1f), not(0x1f)))
mstore(0x40, newMem)
})", {"start", "size"});
utils().abiDecode(returnTypes, true, true);
}
}
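
For a dynamically sized return value, the emitted sequence corresponds roughly to the following hand-written inline assembly (an illustrative sketch of the pattern, not the compiler's literal output):

assembly {
    let start := mload(0x40)                    // return_data_start: current free memory pointer
    returndatacopy(start, 0, returndatasize())  // copy the callee's actual return data
    let size := returndatasize()                // use the real length, not the precomputed retSize
    // reserve the copied region, rounded up to a multiple of 32 bytes
    mstore(0x40, add(start, and(add(size, 0x1f), not(0x1f))))
    // <start, size> is then handed to abiDecode(returnTypes, true, true)
}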

View File

@ -786,6 +786,89 @@ BOOST_AUTO_TEST_CASE(complex_struct)
}
BOOST_AUTO_TEST_CASE(return_dynamic_types_cross_call_simple)
{
if (m_evmVersion == EVMVersion::homestead())
return;
string sourceCode = R"(
contract C {
function dyn() public returns (bytes) {
return "1234567890123456789012345678901234567890";
}
function f() public returns (bytes) {
return this.dyn();
}
}
)";
BOTH_ENCODERS(
compileAndRun(sourceCode, 0, "C");
ABI_CHECK(callContractFunction("f()"), encodeArgs(0x20, 40, string("1234567890123456789012345678901234567890")));
)
}
BOOST_AUTO_TEST_CASE(return_dynamic_types_cross_call_advanced)
{
if (m_evmVersion == EVMVersion::homestead())
return;
string sourceCode = R"(
contract C {
function dyn() public returns (bytes a, uint b, bytes20[] c, uint d) {
a = "1234567890123456789012345678901234567890";
b = uint(-1);
c = new bytes20[](4);
c[0] = bytes20(1234);
c[3] = bytes20(6789);
d = 0x1234;
}
function f() public returns (bytes, uint, bytes20[], uint) {
return this.dyn();
}
}
)";
BOTH_ENCODERS(
compileAndRun(sourceCode, 0, "C");
ABI_CHECK(callContractFunction("f()"), encodeArgs(
0x80, u256(-1), 0xe0, 0x1234,
40, string("1234567890123456789012345678901234567890"),
4, u256(1234) << (8 * (32 - 20)), 0, 0, u256(6789) << (8 * (32 - 20))
));
)
}
BOOST_AUTO_TEST_CASE(return_dynamic_types_cross_call_out_of_range)
{
string sourceCode = R"(
contract C {
function dyn(uint x) public returns (bytes a) {
assembly {
mstore(0, 0x20)
mstore(0x20, 0x21)
return(0, x)
}
}
function f(uint x) public returns (bool) {
this.dyn(x);
return true;
}
}
)";
BOTH_ENCODERS(
compileAndRun(sourceCode, 0, "C");
if (m_evmVersion == EVMVersion::homestead())
{
ABI_CHECK(callContractFunction("f(uint256)", 0x60), encodeArgs(true));
ABI_CHECK(callContractFunction("f(uint256)", 0x7f), encodeArgs(true));
}
else
{
ABI_CHECK(callContractFunction("f(uint256)", 0x60), encodeArgs());
ABI_CHECK(callContractFunction("f(uint256)", 0x61), encodeArgs(true));
}
ABI_CHECK(callContractFunction("f(uint256)", 0x80), encodeArgs(true));
)
}
BOOST_AUTO_TEST_SUITE_END()

View File

@ -3372,7 +3372,10 @@ BOOST_AUTO_TEST_CASE(dynamic_return_types_not_possible)
}
}
)";
if (dev::test::Options::get().evmVersion() == EVMVersion::homestead())
CHECK_ERROR(sourceCode, TypeError, "Explicit type conversion not allowed from \"inaccessible dynamic type\" to \"bytes storage pointer\".");
else
CHECK_WARNING(sourceCode, "Use of the \"var\" keyword is deprecated");
}
BOOST_AUTO_TEST_CASE(memory_arrays_not_resizeable)
@ -6387,6 +6390,23 @@ BOOST_AUTO_TEST_CASE(return_structs)
CHECK_SUCCESS(text);
}
BOOST_AUTO_TEST_CASE(read_returned_struct)
{
char const* text = R"(
pragma experimental ABIEncoderV2;
contract A {
struct T {
int x;
int y;
}
function g() public returns (T) {
return this.g();
}
}
)";
CHECK_WARNING(text, "Experimental features");
}
BOOST_AUTO_TEST_CASE(return_recursive_structs)
{
char const* text = R"(