Mirror of https://github.com/ethereum/solidity (synced 2023-10-03 13:03:40 +00:00)
Fix pylint issues

commit 0ef7d27dbd
parent f5d659da0c
@@ -155,8 +155,8 @@ class TraceAnalyser:
         print(len(intersection), "test-cases - ", len(mismatches), " mismatche(s)")

     def check_traces(self, test_name, left, right, mismatches):
-        for trace_id in range(0, len(left.traces)):
-            left_trace = left.traces[trace_id]
+        for trace_id, trace in enumerate(left.traces):
+            left_trace = trace
             right_trace = right.traces[trace_id]
             assert (left_trace.kind == right_trace.kind)
             if str(left_trace) != str(right_trace):
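This hunk swaps an index-based loop for enumerate, which is the rewrite pylint suggests for its consider-using-enumerate check. A minimal sketch of the same pattern on plain lists (the names below are made up, not the trace objects from the script):

    left = ['a', 'b', 'c']
    right = ['a', 'x', 'c']

    # index-based loop: the style pylint flags
    for i in range(0, len(left)):
        if left[i] != right[i]:
            print('mismatch at', i)

    # enumerate yields the index and the element together, and the index
    # can still be used to address the paired list
    for i, item in enumerate(left):
        if item != right[i]:
            print('mismatch at', i)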
@@ -10,7 +10,8 @@ import sys
 import re

 def extract_test_cases(_path):
-    lines = open(_path, mode='rb', encoding='utf8').read().splitlines()
+    with open(_path, mode='rb', encoding='utf8') as f:
+        lines = f.read().splitlines()

     inside = False
     delimiter = ''
@@ -22,7 +23,8 @@ def extract_test_cases(_path):
     for l in lines:
         if inside:
             if l.strip().endswith(')' + delimiter + '";'):
-                open('%03d_%s.sol' % (ctr, test_name), mode='wb', encoding='utf8').write(test)
+                with open('%03d_%s.sol' % (ctr, test_name), mode='wb', encoding='utf8') as f:
+                    f.write(test)
                 ctr += 1
                 inside = False
                 test = ''
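These hunks replace bare open(...).read() and open(...).write() chains with with-blocks, so the file handle is closed as soon as the block finishes instead of whenever the garbage collector gets to it. A standalone sketch of the reading pattern, with a hypothetical file name:

    # old style: the handle returned by open() is never explicitly closed
    lines = open('example.txt', encoding='utf8').read().splitlines()

    # new style: the with-statement closes the file when the block exits,
    # even if read() raises
    with open('example.txt', encoding='utf8') as f:
        lines = f.read().splitlines()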
@@ -13,7 +13,8 @@ import hashlib
 from os.path import join, isfile, split

 def extract_test_cases(path):
-    lines = open(path, encoding="utf8", errors='ignore', mode='r', newline='').read().splitlines()
+    with open(path, encoding="utf8", errors='ignore', mode='r', newline='') as file:
+        lines = file.read().splitlines()

     inside = False
     delimiter = ''
@@ -45,7 +46,10 @@ def extract_docs_cases(path):
     tests = []

     # Collect all snippets of indented blocks
-    for l in open(path, mode='r', errors='ignore', encoding='utf8', newline='').read().splitlines():
+
+    with open(path, mode='r', errors='ignore', encoding='utf8', newline='') as f:
+        lines = f.read().splitlines()
+    for l in lines:
         if l != '':
             if not insideBlock and l.startswith(' '):
                 # start new test
@@ -87,14 +91,16 @@ def write_cases(f, tests):
         # so before checking remove 4 spaces from each line.
         remainder = re.sub(r'^ {4}', '', test, 0, re.MULTILINE)
         sol_filename = 'test_%s_%s.sol' % (hashlib.sha256(test.encode("utf-8")).hexdigest(), cleaned_filename)
-        open(sol_filename, mode='w', encoding='utf8', newline='').write(remainder)
+        with open(sol_filename, mode='w', encoding='utf8', newline='') as fi:
+            fi.write(remainder)

 def extract_and_write(f, path):
     if docs:
         cases = extract_docs_cases(path)
     else:
         if f.endswith('.sol'):
-            cases = [open(path, mode='r', encoding='utf8', newline='').read()]
+            with open(path, mode='r', encoding='utf8', newline='') as _f:
+                cases = [_f.read()]
         else:
             cases = extract_test_cases(path)
     write_cases(f, cases)
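The same rewrite is applied on the writing side, where the with-block also guarantees the output is flushed and closed before the file name is handed to anything else. A small sketch in that style (the helper itself is hypothetical, modelled on write_cases above):

    import hashlib

    def write_case(test, cleaned_filename):
        # name the file after a hash of the test body, as write_cases does,
        # and let the with-block flush and close it
        sol_filename = 'test_%s_%s.sol' % (
            hashlib.sha256(test.encode('utf-8')).hexdigest(), cleaned_filename)
        with open(sol_filename, mode='w', encoding='utf8', newline='') as fi:
            fi.write(test)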
@@ -88,7 +88,8 @@ class regressor():

         ## Log may contain non ASCII characters, so we simply stringify them
         ## since they don't matter for regular expression matching
-        rawtext = str(open(logfile, 'rb').read())
+        with open(logfile, 'rb') as f:
+            rawtext = str(f.read())
         return not re.search(self._re_sanitizer_log, rawtext)

     def run(self):
@@ -63,7 +63,8 @@ if __name__ == '__main__':

     try:
         # decide if file has multiple sources
-        lines = open(filePath, mode='r', encoding='utf8', newline='').read().splitlines()
+        with open(filePath, mode='r', encoding='utf8', newline='') as f:
+            lines = f.read().splitlines()
         if lines[0][:12] == "==== Source:":
             hasMultipleSources = True
             writeSourceToFile(lines)
@@ -26,14 +26,14 @@ with open(path + '/../Changelog.md') as changelog:
         versions[m.group(1)] = {}
         versions[m.group(1)]['released'] = m.group(2)

-for v in versions:
-    versions[v]['bugs'] = []
+for key, value in versions.items():
+    value['bugs'] = []
     for bug in bugs:
-        if 'introduced' in bug and comp(bug['introduced']) > comp(v):
+        if 'introduced' in bug and comp(bug['introduced']) > comp(key):
             continue
-        if comp(bug['fixed']) <= comp(v):
+        if comp(bug['fixed']) <= comp(key):
             continue
-        versions[v]['bugs'] += [bug['name']]
+        value['bugs'] += [bug['name']]

 new_contents = json.dumps(versions, sort_keys=True, indent=4, separators=(',', ': '))
 with open(path + '/../docs/bugs_by_version.json', 'r') as bugs_by_version:
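Here the loop over the versions dict is changed to iterate .items(), which yields each key together with its value and avoids the repeated versions[v] lookups of the old code. A quick sketch with made-up data:

    versions = {'0.4.0': {'released': '2018-01-01'}, '0.5.0': {'released': '2018-02-02'}}

    # old style: iterate keys and look the value up again on every access
    for v in versions:
        versions[v]['bugs'] = []

    # new style: items() hands back (key, value) pairs, so the nested dict
    # can be updated directly
    for key, value in versions.items():
        value['bugs'] = []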
@@ -8,7 +8,8 @@ from os.path import join, isfile


 def extract_test_cases(path):
-    lines = open(path, encoding="utf8", errors='ignore', mode='rb').read().splitlines()
+    with open(path, encoding="utf8", errors='ignore', mode='rb') as f:
+        lines = f.read().splitlines()

     inside = False
     delimiter = ''
@@ -32,7 +33,8 @@ def extract_test_cases(path):

 def extract_and_write(f, path):
     if f.endswith('.sol'):
-        cases = [open(path, 'r').read()]
+        with open(path, 'r') as _f:
+            cases = [_f.read()]
     else:
         cases = extract_test_cases(path)
     write_cases(f, cases)
@@ -41,7 +43,8 @@ def write_cases(f, tests):
     cleaned_filename = f.replace(".","_").replace("-","_").replace(" ","_").lower()
     for test in tests:
         remainder = re.sub(r'^ {4}', '', test, 0, re.MULTILINE)
-        open('test_%s_%s.sol' % (hashlib.sha256(test).hexdigest(), cleaned_filename), 'w').write(remainder)
+        with open('test_%s_%s.sol' % (hashlib.sha256(test).hexdigest(), cleaned_filename), 'w') as _f:
+            _f.write(remainder)


 if __name__ == '__main__':
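Both write_cases variants in this commit derive the output file name from a SHA-256 of the test body; one encodes the string first while this one passes the data to hashlib.sha256 directly. hashlib only accepts bytes, so which form is needed depends on whether the text was read in text or binary mode. A quick illustration:

    import hashlib

    test_text = 'contract C {}'    # str, as produced by text-mode reads
    test_bytes = b'contract C {}'  # bytes, as produced by binary-mode reads

    print(hashlib.sha256(test_text.encode('utf-8')).hexdigest())  # str must be encoded first
    print(hashlib.sha256(test_bytes).hexdigest())                 # bytes hash directly
    # hashlib.sha256(test_text) would raise a TypeError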