Merge pull request #12994 from ethereum/lsp-include-path

LSP to resolve custom include paths (e.g. Hardhat)
Christian Parpart 2022-07-13 14:41:41 +02:00 committed by GitHub
commit f686294374
12 changed files with 211 additions and 56 deletions
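
For context: the "include-paths" setting added in this diff can be supplied by an LSP client either as initializationOptions of the initialize request or later through a workspace/didChangeConfiguration notification (see the LanguageServer.cpp hunks and the Python test below). The following is a minimal sketch of such a notification; the "settings" / "include-paths" layout is taken from the test case in this diff, while the project directory is purely illustrative.

import json

# Editor's sketch (not part of the commit): a workspace/didChangeConfiguration
# notification carrying custom include paths. The "settings" -> "include-paths"
# layout mirrors the Python test added below; the directory path is illustrative.
notification = {
    "jsonrpc": "2.0",
    "method": "workspace/didChangeConfiguration",
    "params": {
        "settings": {
            "include-paths": ["/home/user/my-project/other-include-dir"]
        }
    },
}
print(json.dumps(notification, indent=2))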

View File

@@ -30,7 +30,8 @@ Compiler Features:
* LSP: Add rudimentary support for semantic highlighting.
* Type Checker: Warn about assignments involving multiple pushes to storage ``bytes`` that may invalidate references.
* Yul Optimizer: Improve inlining heuristics for via IR code generation and pure Yul compilation.
* Language Server: Always add ``{project_root}/node_modules`` to include search paths.
* Language Server: Add support for an ``include-paths`` array in the JSON settings object passed during the LSP configuration stage.
Bugfixes:
* ABI Encoder: When encoding an empty string coming from storage do not add a superfluous empty slot for data.

View File

@@ -22,11 +22,14 @@
#include <libsolutil/StringUtils.h>
#include <libsolutil/CommonIO.h>
#include <range/v3/algorithm/none_of.hpp>
#include <range/v3/range/conversion.hpp>
#include <range/v3/view/transform.hpp>
#include <regex>
#include <boost/algorithm/string/predicate.hpp>
using namespace std;
using namespace solidity;
using namespace solidity::lsp;
@@ -34,25 +37,49 @@ using namespace solidity::frontend;
using solidity::util::readFileAsString;
using solidity::util::joinHumanReadable;
using solidity::util::Result;
FileRepository::FileRepository(boost::filesystem::path _basePath): m_basePath(std::move(_basePath))
FileRepository::FileRepository(boost::filesystem::path _basePath, std::vector<boost::filesystem::path> _includePaths):
m_basePath(std::move(_basePath)),
m_includePaths(std::move(_includePaths))
{
}
void FileRepository::setIncludePaths(std::vector<boost::filesystem::path> _paths)
{
m_includePaths = std::move(_paths);
}
string FileRepository::sourceUnitNameToUri(string const& _sourceUnitName) const
{
regex const windowsDriveLetterPath("^[a-zA-Z]:/");
auto const ensurePathIsUnixLike = [&](string inputPath) -> string {
if (!regex_search(inputPath, windowsDriveLetterPath))
return inputPath;
else
return "/" + move(inputPath);
};
if (m_sourceUnitNamesToUri.count(_sourceUnitName))
{
solAssert(boost::starts_with(m_sourceUnitNamesToUri.at(_sourceUnitName), "file://"), "");
return m_sourceUnitNamesToUri.at(_sourceUnitName);
}
else if (_sourceUnitName.find("file://") == 0)
return _sourceUnitName;
else if (regex_search(_sourceUnitName, windowsDriveLetterPath))
return "file:///" + _sourceUnitName;
else if (_sourceUnitName.find("/") == 0)
return "file://" + _sourceUnitName;
else
else if (
auto const resolvedPath = tryResolvePath(_sourceUnitName);
resolvedPath.message().empty()
)
return "file://" + ensurePathIsUnixLike(resolvedPath.get().generic_string());
else if (m_basePath.generic_string() != "/")
return "file://" + m_basePath.generic_string() + "/" + _sourceUnitName;
else
// Avoid a double '/' in case the base path itself is simply the UNIX filesystem root.
return "file:///" + _sourceUnitName;
}
string FileRepository::uriToSourceUnitName(string const& _path) const
@@ -69,6 +96,52 @@ void FileRepository::setSourceByUri(string const& _uri, string _source)
m_sourceCodes[sourceUnitName] = std::move(_source);
}
Result<boost::filesystem::path> FileRepository::tryResolvePath(std::string const& _strippedSourceUnitName) const
{
if (
boost::filesystem::path(_strippedSourceUnitName).has_root_path() &&
boost::filesystem::exists(_strippedSourceUnitName)
)
return boost::filesystem::path(_strippedSourceUnitName);
vector<boost::filesystem::path> candidates;
vector<reference_wrapper<boost::filesystem::path const>> prefixes = {m_basePath};
prefixes += (m_includePaths | ranges::to<vector<reference_wrapper<boost::filesystem::path const>>>);
auto const defaultInclude = m_basePath / "node_modules";
if (m_includePaths.empty())
prefixes.emplace_back(defaultInclude);
auto const pathToQuotedString = [](boost::filesystem::path const& _path) { return "\"" + _path.string() + "\""; };
for (auto const& prefix: prefixes)
{
boost::filesystem::path canonicalPath = boost::filesystem::path(prefix) / boost::filesystem::path(_strippedSourceUnitName);
if (boost::filesystem::exists(canonicalPath))
candidates.push_back(move(canonicalPath));
}
if (candidates.empty())
return Result<boost::filesystem::path>::err(
"File not found. Searched the following locations: " +
joinHumanReadable(prefixes | ranges::views::transform(pathToQuotedString), ", ") +
"."
);
if (candidates.size() >= 2)
return Result<boost::filesystem::path>::err(
"Ambiguous import. "
"Multiple matching files found inside base path and/or include paths: " +
joinHumanReadable(candidates | ranges::views::transform(pathToQuotedString), ", ") +
"."
);
if (!boost::filesystem::is_regular_file(candidates[0]))
return Result<boost::filesystem::path>::err("Not a valid file.");
return candidates[0];
}
frontend::ReadCallback::Result FileRepository::readFile(string const& _kind, string const& _sourceUnitName)
{
solAssert(
@@ -83,53 +156,11 @@ frontend::ReadCallback::Result FileRepository::readFile(string const& _kind, str
return ReadCallback::Result{true, m_sourceCodes.at(_sourceUnitName)};
string const strippedSourceUnitName = stripFileUriSchemePrefix(_sourceUnitName);
Result<boost::filesystem::path> const resolvedPath = tryResolvePath(strippedSourceUnitName);
if (!resolvedPath.message().empty())
return ReadCallback::Result{false, resolvedPath.message()};
if (
boost::filesystem::path(strippedSourceUnitName).has_root_path() &&
boost::filesystem::exists(strippedSourceUnitName)
)
{
auto contents = readFileAsString(strippedSourceUnitName);
solAssert(m_sourceCodes.count(_sourceUnitName) == 0, "");
m_sourceCodes[_sourceUnitName] = contents;
return ReadCallback::Result{true, move(contents)};
}
vector<boost::filesystem::path> candidates;
vector<reference_wrapper<boost::filesystem::path>> prefixes = {m_basePath};
prefixes += (m_includePaths | ranges::to<vector<reference_wrapper<boost::filesystem::path>>>);
auto const pathToQuotedString = [](boost::filesystem::path const& _path) { return "\"" + _path.string() + "\""; };
for (auto const& prefix: prefixes)
{
boost::filesystem::path canonicalPath = boost::filesystem::path(prefix) / boost::filesystem::path(strippedSourceUnitName);
if (boost::filesystem::exists(canonicalPath))
candidates.push_back(move(canonicalPath));
}
if (candidates.empty())
return ReadCallback::Result{
false,
"File not found. Searched the following locations: " +
joinHumanReadable(prefixes | ranges::views::transform(pathToQuotedString), ", ") +
"."
};
if (candidates.size() >= 2)
return ReadCallback::Result{
false,
"Ambiguous import. "
"Multiple matching files found inside base path and/or include paths: " +
joinHumanReadable(candidates | ranges::views::transform(pathToQuotedString), ", ") +
"."
};
if (!boost::filesystem::is_regular_file(candidates[0]))
return ReadCallback::Result{false, "Not a valid file."};
auto contents = readFileAsString(candidates[0]);
auto contents = readFileAsString(resolvedPath.get());
solAssert(m_sourceCodes.count(_sourceUnitName) == 0, "");
m_sourceCodes[_sourceUnitName] = contents;
return ReadCallback::Result{true, move(contents)};
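
To summarize the lookup order implemented by FileRepository::tryResolvePath above: an absolute source unit name that exists on disk is used as-is; otherwise the name is resolved against the base path and every configured include path, with {base path}/node_modules used as the only extra prefix when no include paths are configured; no match yields a "file not found" error and two or more matches an "ambiguous import" error. A simplified Python sketch of that behaviour (an editor's illustration, not the actual implementation):

from pathlib import Path

def try_resolve_path(name, base_path, include_paths):
    # Simplified model of FileRepository::tryResolvePath, for illustration only.
    p = Path(name)
    if p.is_absolute() and p.exists():
        return p
    # Base path first, then include paths; fall back to <base>/node_modules
    # only when no include paths were configured.
    prefixes = [Path(base_path)] + ([Path(ip) for ip in include_paths] or [Path(base_path) / "node_modules"])
    candidates = [prefix / name for prefix in prefixes if (prefix / name).exists()]
    if not candidates:
        raise FileNotFoundError("File not found. Searched: " + ", ".join(map(str, prefixes)) + ".")
    if len(candidates) >= 2:
        raise RuntimeError("Ambiguous import. Matches: " + ", ".join(map(str, candidates)) + ".")
    if not candidates[0].is_file():
        raise RuntimeError("Not a valid file.")
    return candidates[0]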

View File

@@ -18,6 +18,7 @@
#pragma once
#include <libsolidity/interface/FileReader.h>
#include <libsolutil/Result.h>
#include <string>
#include <map>
@@ -28,7 +29,10 @@ namespace solidity::lsp
class FileRepository
{
public:
explicit FileRepository(boost::filesystem::path _basePath);
FileRepository(boost::filesystem::path _basePath, std::vector<boost::filesystem::path> _includePaths);
std::vector<boost::filesystem::path> const& includePaths() const noexcept { return m_includePaths; }
void setIncludePaths(std::vector<boost::filesystem::path> _paths);
boost::filesystem::path const& basePath() const { return m_basePath; }
@@ -51,6 +55,8 @@ public:
return [this](std::string const& _kind, std::string const& _path) { return readFile(_kind, _path); };
}
util::Result<boost::filesystem::path> tryResolvePath(std::string const& _sourceUnitName) const;
private:
/// Base path without URI scheme.
boost::filesystem::path m_basePath;

View File

@@ -130,7 +130,7 @@ LanguageServer::LanguageServer(Transport& _transport):
{"textDocument/semanticTokens/full", bind(&LanguageServer::semanticTokensFull, this, _1, _2)},
{"workspace/didChangeConfiguration", bind(&LanguageServer::handleWorkspaceDidChangeConfiguration, this, _2)},
},
m_fileRepository("/" /* basePath */),
m_fileRepository("/" /* basePath */, {} /* no search paths */),
m_compilerStack{m_fileRepository.reader()}
{
}
@@ -148,6 +148,26 @@ Json::Value LanguageServer::toJson(SourceLocation const& _location)
void LanguageServer::changeConfiguration(Json::Value const& _settings)
{
m_settingsObject = _settings;
Json::Value jsonIncludePaths = _settings["include-paths"];
int typeFailureCount = 0;
if (jsonIncludePaths && jsonIncludePaths.isArray())
{
vector<boost::filesystem::path> includePaths;
for (Json::Value const& jsonPath: jsonIncludePaths)
{
if (jsonPath.isString())
includePaths.emplace_back(boost::filesystem::path(jsonPath.asString()));
else
typeFailureCount++;
}
m_fileRepository.setIncludePaths(move(includePaths));
}
else
++typeFailureCount;
if (typeFailureCount)
m_client.trace("Invalid JSON configuration passed. \"include-paths\" must be an array of strings.");
}
void LanguageServer::compile()
@@ -155,7 +175,7 @@ void LanguageServer::compile()
// For files that are not open, we have to take changes on disk into account,
// so we just remove all non-open files.
FileRepository oldRepository(m_fileRepository.basePath());
FileRepository oldRepository(m_fileRepository.basePath(), m_fileRepository.includePaths());
swap(oldRepository, m_fileRepository);
for (string const& fileName: m_openFiles)
@@ -302,7 +322,7 @@ void LanguageServer::handleInitialize(MessageID _id, Json::Value const& _args)
else if (Json::Value rootPath = _args["rootPath"])
rootPath = rootPath.asString();
m_fileRepository = FileRepository(rootPath);
m_fileRepository = FileRepository(rootPath, {});
if (_args["initializationOptions"].isObject())
changeConfiguration(_args["initializationOptions"]);
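
As the handleInitialize change above shows, the same settings object is also accepted as initializationOptions of the initialize request, so include paths can be configured before the first compile. A sketch of such request params, using the rootPath field visible in the hunk above; the paths themselves are illustrative:

# Editor's sketch: initialize params carrying the same configuration up front.
initialize_params = {
    "rootPath": "/home/user/my-project",
    "initializationOptions": {
        "include-paths": ["/home/user/my-project/other-include-dir"]
    },
    "capabilities": {},
}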

View File

@@ -0,0 +1,14 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity >=0.8.0;
import "my-module/test.sol";
contract MyContract
{
function f(uint a, uint b) public pure returns (uint)
{
return MyModule.add(a, b);
}
}
// ----
// test:

View File

@@ -0,0 +1,10 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity >=0.8.0;
import "rootlib.sol";
contract MyContract
{
}
// ----
// rootlib:

View File

@@ -0,0 +1,11 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity >=0.8.0;
import "test.sol";
// ^^^^^^^^^^^^^^^^^^ @IncludeLocation
contract SomeContract
{
}
// ----
// file_not_found_in_searchpath: @IncludeLocation 6275

View File

@@ -0,0 +1,11 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity >=0.8.0;
import "otherlib/otherlib.sol";
// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @NotFound
contract MyContract
{
}
// ----
// using-custom-includes: @NotFound 6275

test/libsolidity/lsp/node_modules/my-module/test.sol generated vendored Normal file
View File

@@ -0,0 +1,10 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity >=0.8.0;
library MyModule
{
function add(uint a, uint b) public pure returns (uint)
{
return a + b;
}
}

test/libsolidity/lsp/node_modules/rootlib.sol generated vendored Normal file
View File

@@ -0,0 +1,6 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity >=0.8.0;
library MyRootLib
{
}

View File

@@ -0,0 +1,7 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity >=0.8.0;
library OtherLib
{
function f(uint n) public returns (uint) { return n + 1; }
}

View File

@@ -1381,6 +1381,34 @@ class SolidityLSPTestSuite: # {{{
return markers
def test_custom_includes(self, solc: JsonRpcProcess) -> None:
self.setup_lsp(solc, expose_project_root=False)
solc.send_notification(
'workspace/didChangeConfiguration', {
'settings': {
'include-paths': [
f"{self.project_root_dir}/other-include-dir"
]
}
}
)
published_diagnostics = self.open_file_and_wait_for_diagnostics(solc, 'include-paths/using-custom-includes')
self.expect_equal(len(published_diagnostics), 2, "Diagnostic reports for 2 files")
# test file
report = published_diagnostics[0]
self.expect_equal(report['uri'], self.get_test_file_uri('using-custom-includes', 'include-paths'))
diagnostics = report['diagnostics']
self.expect_equal(len(diagnostics), 0, "no diagnostics")
# imported file
report = published_diagnostics[1]
self.expect_equal(report['uri'], f"{self.project_root_uri}/other-include-dir/otherlib/otherlib.sol")
diagnostics = report['diagnostics']
self.expect_equal(len(diagnostics), 1, "one diagnostic")
self.expect_diagnostic(diagnostics[0], code=2018, lineNo=5, startEndColumns=(4, 62))
def test_didChange_in_A_causing_error_in_B(self, solc: JsonRpcProcess) -> None:
# Reusing another test but now change some file that generates an error in the other.
self.test_textDocument_didOpen_with_relative_import(solc)
@@ -1450,7 +1478,7 @@ class SolidityLSPTestSuite: # {{{
self.setup_lsp(solc)
sub_dirs = filter(
lambda filepath: filepath.is_dir(),
lambda filepath: filepath.is_dir() and filepath.name != 'node_modules',
os.scandir(self.project_root_dir)
)