Fix pylint warnings about encoding not being specified in open() calls

This commit is contained in:
Kamil Śliwak 2021-08-23 18:22:51 +02:00
parent 941919e8ab
commit a6683e3226
7 changed files with 15 additions and 15 deletions

View File

@@ -69,7 +69,7 @@ copyright = '2016-2021, Ethereum'
# built documents. # built documents.
# #
# The short X.Y version. # The short X.Y version.
with open('../CMakeLists.txt', 'r') as f: with open('../CMakeLists.txt', 'r', encoding='utf8') as f:
version = re.search('PROJECT_VERSION "([^"]+)"', f.read()).group(1) version = re.search('PROJECT_VERSION "([^"]+)"', f.read()).group(1)
# The full version, including alpha/beta/rc tags. # The full version, including alpha/beta/rc tags.
if os.path.isfile('../prerelease.txt') != True or os.path.getsize('../prerelease.txt') == 0: if os.path.isfile('../prerelease.txt') != True or os.path.getsize('../prerelease.txt') == 0:

View File

@@ -14,14 +14,14 @@ def render_html_extra_templates(app):
if not os.path.isabs(template_config['target']): if not os.path.isabs(template_config['target']):
raise RuntimeError(f"Template target path is not absolute: {template_config['target']}") raise RuntimeError(f"Template target path is not absolute: {template_config['target']}")
with open(input_path, 'r') as input_file: with open(input_path, 'r', encoding='utf8') as input_file:
# This runs Jinja2, which supports rendering {{ }} tags among other things. # This runs Jinja2, which supports rendering {{ }} tags among other things.
rendered_template = app.builder.templates.render_string( rendered_template = app.builder.templates.render_string(
input_file.read(), input_file.read(),
template_config['context'], template_config['context'],
) )
with open(template_config['target'], 'w') as target_file: with open(template_config['target'], 'w', encoding='utf8') as target_file:
target_file.write(rendered_template) target_file.write(rendered_template)
app.config.html_extra_path.append(template_config['target']) app.config.html_extra_path.append(template_config['target'])

View File

@@ -61,7 +61,7 @@ def get_checks(content, sol_file_path):
constructors.append(line) constructors.append(line)
if line.startswith("ABI_CHECK") or line.startswith("BOOST_REQUIRE"): if line.startswith("ABI_CHECK") or line.startswith("BOOST_REQUIRE"):
checks.append(line) checks.append(line)
with open(sol_file_path, "r") as sol_file: with open(sol_file_path, "r", encoding='utf8') as sol_file:
sol_constructors = [] sol_constructors = []
sol_checks = [] sol_checks = []
inside_expectations = False inside_expectations = False
@@ -118,7 +118,7 @@ def get_tests(e2e_path):
def process_input_file(e2e_path, input_file, interactive): def process_input_file(e2e_path, input_file, interactive):
tests = get_tests(e2e_path) tests = get_tests(e2e_path)
with open(input_file, "r") as cpp_file: with open(input_file, "r", encoding='utf8') as cpp_file:
inside_test = False inside_test = False
test_name = "" test_name = ""
inside_extracted_test = False inside_extracted_test = False

View File

@@ -7,7 +7,7 @@
# ./soltest --color_output=false --log_level=test_suite -t semanticTests/extracted/ -- --no-smt # ./soltest --color_output=false --log_level=test_suite -t semanticTests/extracted/ -- --no-smt
# --evmonepath /Users/alex/evmone/lib/libevmone.dylib --show-messages > semanticTests.trace # --evmonepath /Users/alex/evmone/lib/libevmone.dylib --show-messages > semanticTests.trace
# #
# verify-testcases.py will compare both traces. If these traces are identical, the extracted tests where # verify-testcases.py will compare both traces. If these traces are identical, the extracted tests were
# identical with the tests specified in SolidityEndToEndTest.cpp. # identical with the tests specified in SolidityEndToEndTest.cpp.
# #
# pylint: disable=too-many-instance-attributes # pylint: disable=too-many-instance-attributes
@@ -75,7 +75,7 @@ class TraceAnalyser:
self.ready = False self.ready = False
def analyse(self): def analyse(self):
with open(self.file, "r") as trace_file: with open(self.file, "r", encoding='utf8') as trace_file:
trace = None trace = None
test_case = None test_case = None
for line in trace_file.readlines(): for line in trace_file.readlines():

View File

@@ -67,7 +67,7 @@ class regressor():
if not env: if not env:
env = os.environ.copy() env = os.environ.copy()
with open(logfile, 'w') as logfh: with open(logfile, 'w', encoding='utf8') as logfh:
with subprocess.Popen(command, shell=True, executable='/bin/bash', with subprocess.Popen(command, shell=True, executable='/bin/bash',
env=env, stdout=logfh, env=env, stdout=logfh,
stderr=subprocess.STDOUT) as proc: stderr=subprocess.STDOUT) as proc:
@@ -88,7 +88,7 @@ class regressor():
## Log may contain non ASCII characters, so we simply stringify them ## Log may contain non ASCII characters, so we simply stringify them
## since they don't matter for regular expression matching ## since they don't matter for regular expression matching
with open(logfile, 'rb') as f: with open(logfile, 'rb', encoding=None) as f:
rawtext = str(f.read()) rawtext = str(f.read())
return not re.search(self._re_sanitizer_log, rawtext) return not re.search(self._re_sanitizer_log, rawtext)

View File

@@ -15,11 +15,11 @@ def comp(version_string):
return [int(c) for c in version_string.split('.')] return [int(c) for c in version_string.split('.')]
path = os.path.dirname(os.path.realpath(__file__)) path = os.path.dirname(os.path.realpath(__file__))
with open(path + '/../docs/bugs.json') as bugsFile: with open(path + '/../docs/bugs.json', encoding='utf8') as bugsFile:
bugs = json.load(bugsFile) bugs = json.load(bugsFile)
versions = {} versions = {}
with open(path + '/../Changelog.md') as changelog: with open(path + '/../Changelog.md', encoding='utf8') as changelog:
for line in changelog: for line in changelog:
m = re.search(r'^### (\S+) \((\d+-\d+-\d+)\)$', line) m = re.search(r'^### (\S+) \((\d+-\d+-\d+)\)$', line)
if m: if m:
@@ -36,8 +36,8 @@ for key, value in versions.items():
value['bugs'] += [bug['name']] value['bugs'] += [bug['name']]
new_contents = json.dumps(versions, sort_keys=True, indent=4, separators=(',', ': ')) new_contents = json.dumps(versions, sort_keys=True, indent=4, separators=(',', ': '))
with open(path + '/../docs/bugs_by_version.json', 'r') as bugs_by_version: with open(path + '/../docs/bugs_by_version.json', 'r', encoding='utf8') as bugs_by_version:
old_contents = bugs_by_version.read() old_contents = bugs_by_version.read()
with open(path + '/../docs/bugs_by_version.json', 'w') as bugs_by_version: with open(path + '/../docs/bugs_by_version.json', 'w', encoding='utf8') as bugs_by_version:
bugs_by_version.write(new_contents) bugs_by_version.write(new_contents)
sys.exit(old_contents != new_contents) sys.exit(old_contents != new_contents)

View File

@@ -36,7 +36,7 @@ def extract_test_cases(path):
def extract_and_write(f, path): def extract_and_write(f, path):
if f.endswith('.sol'): if f.endswith('.sol'):
with open(path, 'r') as _f: with open(path, 'r', encoding='utf8') as _f:
cases = [_f.read()] cases = [_f.read()]
else: else:
cases = extract_test_cases(path) cases = extract_test_cases(path)
@@ -46,7 +46,7 @@ def write_cases(f, tests):
cleaned_filename = f.replace(".","_").replace("-","_").replace(" ","_").lower() cleaned_filename = f.replace(".","_").replace("-","_").replace(" ","_").lower()
for test in tests: for test in tests:
remainder = re.sub(r'^ {4}', '', test, 0, re.MULTILINE) remainder = re.sub(r'^ {4}', '', test, 0, re.MULTILINE)
with open('test_%s_%s.sol' % (hashlib.sha256(test).hexdigest(), cleaned_filename), 'w') as _f: with open('test_%s_%s.sol' % (hashlib.sha256(test).hexdigest(), cleaned_filename), 'w', encoding='utf8') as _f:
_f.write(remainder) _f.write(remainder)