mirror of https://github.com/RRZE-HPC/OSACA.git
synced 2025-12-16 09:00:05 +01:00

making flake8 happy
@@ -7,9 +7,8 @@
 import os
 import sys
-

 sys.path.insert(0, os.path.abspath("."))
-from version_from_src import get_version
+from version_from_src import get_version  # noqa: E402

 # -- Project information -----------------------------------------------------

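For readers who do not have the flake8 rule in mind: E402 ("module level import not at top of file") fires whenever a module-level import follows executable code, which is exactly what a Sphinx conf.py does when it extends sys.path before importing a local helper. A minimal, self-contained sketch of the pattern, where the json import merely stands in for the repository's version helper:

    import os
    import sys

    # The sys.path tweak has to run before the project-local import it enables,
    # so that import cannot sit at the very top of the file; flake8 would flag
    # it with E402 without the inline waiver.
    sys.path.insert(0, os.path.abspath("."))

    # json only stands in here for the project's version helper module.
    import json  # noqa: E402

    print(json.dumps({"version_helper_found": True}))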
@@ -769,11 +769,12 @@ def get_description(arch, rhs_comment=None):
     description = descriptions[arch]

     if rhs_comment is not None:
-        max_length = max([len(l) for l in descriptions[arch].split("\n")])
+        max_length = max([len(line) for line in descriptions[arch].split("\n")])

         commented_description = ""
-        for l in descriptions[arch].split("\n"):
-            commented_description += ("{:<" + str(max_length) + "} # {}\n").format(l, rhs_comment)
+        for line in descriptions[arch].split("\n"):
+            commented_description += ("{:<" + str(max_length) + "} # {}\n").format(
+                line, rhs_comment)
         description = commented_description

     return description
@@ -239,11 +239,11 @@ def extract_model(tree, arch, skip_mem=True):


 def rhs_comment(uncommented_string, comment):
-    max_length = max([len(l) for l in uncommented_string.split("\n")])
+    max_length = max([len(line) for line in uncommented_string.split("\n")])

     commented_string = ""
-    for l in uncommented_string.split("\n"):
-        commented_string += ("{:<" + str(max_length) + "} # {}\n").format(l, comment)
+    for line in uncommented_string.split("\n"):
+        commented_string += ("{:<" + str(max_length) + "} # {}\n").format(line, comment)
     return commented_string


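To make the behaviour of this helper concrete, here is a standalone copy of the new version from the hunk above plus a made-up invocation (the instruction text and the comment string are illustrative only): every line is padded to the width of the longest line, so the appended right-hand-side comments line up in one column.

    def rhs_comment(uncommented_string, comment):
        # Pad each line to the longest line's width, then append the comment,
        # so all right-hand-side comments start in the same column.
        max_length = max([len(line) for line in uncommented_string.split("\n")])
        commented_string = ""
        for line in uncommented_string.split("\n"):
            commented_string += ("{:<" + str(max_length) + "} # {}\n").format(line, comment)
        return commented_string

    print(rhs_comment("add x0, x0, x1\nldr x2, [x3]", "uops.info"), end="")
    # add x0, x0, x1 # uops.info
    # ldr x2, [x3]   # uops.info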
@@ -21,9 +21,11 @@ def sanity_check(arch: str, verbose=False, internet_check=False, output_file=sys
     :type arch: str
     :param verbose: verbose output flag, defaults to `False`
     :type verbose: bool, optional
-    :param internet_check: indicates if OSACA should try to look up the src/dst distribution in the internet, defaults to False
+    :param internet_check: indicates if OSACA should try to look up the src/dst distribution in the
+        internet, defaults to False
     :type internet_check: boolean, optional
-    :param output_file: output stream specifying where to write output, defaults to :class:`sys.stdout`
+    :param output_file: output stream specifying where to write output,
+        defaults to :class:`sys.stdout`
     :type output_file: stream, optional

     :return: True if everything checked out
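For orientation, the (truncated) signature in the hunk header suggests a call like the following sketch. The import path matches the one shown in a later hunk of this commit; the architecture string "csx" is assumed to be one of the supported identifiers and is not taken from this diff.

    import sys

    from osaca.db_interface import sanity_check

    # Check the database for the assumed micro-architecture key "csx"; the
    # report goes to stdout and a boolean result is returned.
    ok = sanity_check("csx", verbose=True, internet_check=False, output_file=sys.stdout)
    print("sanity check passed:", ok)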
@@ -178,7 +178,8 @@ class Frontend(object):
         :type ignore_unknown: boolean, optional
         :param print_arch_warning: flag for additional user warning to specify micro-arch
         :type print_arch_warning: boolean, optional
-        :param print_length_warning: flag for additional user warning to specify kernel length with --lines
+        :param print_length_warning: flag for additional user warning to specify kernel length with
+            --lines
         :type print_length_warning: boolean, optional
         :param verbose: flag for verbosity level, defaults to False
         :type verbose: boolean, optional

@@ -5,7 +5,6 @@ import io
 import os
 import re
 import sys
-import traceback

 from osaca.db_interface import import_benchmark_output, sanity_check
 from osaca.frontend import Frontend
@@ -200,7 +199,8 @@ def import_data(benchmark_type, arch, filepath, output_file=sys.stdout):
     :type arch: str
     :param filepath: filepath of the output file"
     :type filepath: str
-    :param output_file: output stream specifying where to write output, defaults to :class:`sys.stdout`
+    :param output_file: output stream specifying where to write output,
+        defaults to :class:`sys.stdout`
     :type output_file: stream, optional
     """
     if benchmark_type.lower() == "ibench":
@@ -376,14 +376,14 @@ def get_line_range(line_str):
     line_str = line_str.replace(":", "-")
     lines = line_str.split(",")
     lines_int = []
-    for l in lines:
-        if "-" in l:
-            start = int(l.split("-")[0])
-            end = int(l.split("-")[1])
+    for line in lines:
+        if "-" in line:
+            start = int(line.split("-")[0])
+            end = int(line.split("-")[1])
             rnge = list(range(start, end + 1))
             lines_int += rnge
         else:
-            lines_int.append(int(l))
+            lines_int.append(int(line))
     return lines_int


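To make the renamed loop's behaviour concrete, here is a standalone re-sketch of get_line_range with the new variable names (same logic as the hunk, lightly condensed; the example input is made up): it expands comma-separated line specifiers, where ranges may be written with either "-" or ":".

    def get_line_range(line_str):
        # Accept both "-" and ":" as range separators, then expand every
        # comma-separated entry into individual line numbers.
        line_str = line_str.replace(":", "-")
        lines = line_str.split(",")
        lines_int = []
        for line in lines:
            if "-" in line:
                start = int(line.split("-")[0])
                end = int(line.split("-")[1])
                lines_int += list(range(start, end + 1))
            else:
                lines_int.append(int(line))
        return lines_int

    print(get_line_range("3,7:9,12-13"))  # [3, 7, 8, 9, 12, 13]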
@@ -2,7 +2,6 @@
 """Semantics opbject responsible for architecture specific semantic operations"""

 import warnings
-from functools import reduce
 from itertools import chain
 from operator import itemgetter

@@ -293,7 +292,8 @@ class ArchSemantics(ISASemantics):
                 # and self._isa == 'aarch64'
                 # and any(
                 #     [
-                #         'post_indexed' in op['memory'] or 'pre_indexed' in op['memory']
+                #         'post_indexed' in op['memory'] or
+                #         'pre_indexed' in op['memory']
                 #         for op in instruction_form['operands']
                 #         if 'memory' in op
                 #     ]
@@ -48,7 +48,7 @@ class KernelDG(nx.DiGraph):
                     instruction_form["line_number"],
                     latency=instruction_form["latency"] - instruction_form["latency_wo_load"],
                 )
-            for dep in self.find_depending(instruction_form, kernel[i + 1 :]):
+            for dep in self.find_depending(instruction_form, kernel[i + 1:]):
                 edge_weight = (
                     instruction_form["latency"]
                     if "latency_wo_load" not in instruction_form
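The only change in this hunk is whitespace inside a slice: `kernel[i + 1 :]` and `kernel[i + 1:]` select exactly the same elements, but flake8's E203 check ("whitespace before ':'") rejects the spaced form. A trivial demonstration, with a made-up list standing in for the kernel:

    kernel = ["instr_0", "instr_1", "instr_2", "instr_3"]  # made-up stand-in
    i = 1

    # Both spellings denote exactly the same slice; only the second one
    # passes flake8's E203 check.
    assert kernel[i + 1 :] == kernel[i + 1:] == ["instr_2", "instr_3"]
    print(kernel[i + 1:])  # ['instr_2', 'instr_3']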
@@ -174,9 +174,11 @@ class KernelDG(nx.DiGraph):

         :param dict instruction_form: instruction form to check for dependencies
         :param list kernel: kernel containing the instructions to check
-        :param include_write: indicating if instruction ending the dependency chain should be included, defaults to `False`
+        :param include_write: indicating if instruction ending the dependency chain should be
+            included, defaults to `False`
         :type include_write: boolean, optional
-        :param flag_dependencies: indicating if dependencies of flags should be considered, defaults to `False`
+        :param flag_dependencies: indicating if dependencies of flags should be considered,
+            defaults to `False`
         :type flag_dependencies: boolean, optional
         :returns: iterator if all directly dependent instruction forms
         """

@@ -184,7 +184,7 @@ def match_bytes(lines, index, byte_list):
         line_count += 1
         extracted_bytes += lines[index].directive.parameters
         index += 1
-    if extracted_bytes[0 : len(byte_list)] == byte_list:
+    if extracted_bytes[0:len(byte_list)] == byte_list:
         return True, line_count
     return False, -1

@@ -218,14 +218,14 @@ def find_jump_labels(lines):
     for label in list(labels):
         if all(
             [
-                l["instruction"].startswith(".")
-                for l in lines[labels[label][0] : labels[label][1]]
-                if l["instruction"] is not None
+                line["instruction"].startswith(".")
+                for line in lines[labels[label][0]:labels[label][1]]
+                if line["instruction"] is not None
             ]
         ):
             del labels[label]

-    return OrderedDict([(l, v[0]) for l, v in labels.items()])
+    return OrderedDict([(label, v[0]) for label, v in labels.items()])


 def find_basic_blocks(lines):
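The renamed comprehension above drops every label whose block consists only of assembler directives (instructions starting with "."). A self-contained sketch of that filtering step, using a made-up `lines`/`labels` layout that mirrors the dict accesses in the hunk; the real parser objects carry more fields than shown here.

    from collections import OrderedDict

    # Each parsed line exposes at least an "instruction" field; labels maps a
    # label name to the (start, end) index range of the lines following it.
    lines = [
        {"instruction": ".p2align"},  # block of ".L1": only directives -> dropped
        {"instruction": None},
        {"instruction": "addl"},      # block of ".L2": real instructions -> kept
        {"instruction": "jb"},
    ]
    labels = OrderedDict([(".L1", (0, 2)), (".L2", (2, 4))])

    for label in list(labels):
        if all(
            [
                line["instruction"].startswith(".")
                for line in lines[labels[label][0]:labels[label][1]]
                if line["instruction"] is not None
            ]
        ):
            del labels[label]

    print(OrderedDict([(label, v[0]) for label, v in labels.items()]))
    # keeps only '.L2', mapped to its starting index 2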
@@ -243,7 +243,7 @@ def find_basic_blocks(lines):
     blocks = OrderedDict()
     for label, label_line_idx in valid_jump_labels.items():
         blocks[label] = [lines[label_line_idx]]
-        for line in lines[label_line_idx + 1 :]:
+        for line in lines[label_line_idx + 1:]:
             terminate = False
             blocks[label].append(line)
             # Find end of block by searching for references to valid jump labels
@@ -272,7 +272,7 @@ def find_basic_loop_bodies(lines):
     loop_bodies = OrderedDict()
     for label, label_line_idx in valid_jump_labels.items():
         current_block = [lines[label_line_idx]]
-        for line in lines[label_line_idx + 1 :]:
+        for line in lines[label_line_idx + 1:]:
             terminate = False
             current_block.append(line)
             # Find end of block by searching for references to valid jump labels
@@ -1,8 +1,8 @@
-[pep8]
+[pycodestyle]
-max-line-length=99
+max-line-length=100
 [flake8]
-max-line-length=99
+max-line-length=100

 [metadata]
 license-file=LICENSE

setup.py
@@ -106,7 +106,7 @@ setup(
     # your project is installed. For an analysis of "install_requires" vs pip's
     # requirements files see:
     # https://packaging.python.org/en/latest/requirements.html
-    install_requires=["networkx", "pyparsing>=2.3.1", "ruamel.yaml>=0.15.71",],
+    install_requires=["networkx", "pyparsing>=2.3.1", "ruamel.yaml>=0.15.71"],
     python_requires=">=3.5",
     # List additional groups of dependencies here (e.g. development
     # dependencies). You can install these using the following syntax,
@@ -128,7 +128,7 @@ setup(
     # To provide executable scripts, use entry points in preference to the
     # "scripts" keyword. Entry points provide cross-platform support and allow
     # pip to create the appropriate form of executable for the target platform.
-    entry_points={"console_scripts": ["osaca=osaca.osaca:main",],},
+    entry_points={"console_scripts": ["osaca=osaca.osaca:main"]},
     # Overwriting install and sdist to enforce cache distribution with package
     cmdclass={"install": install, "sdist": sdist},
 )

@@ -153,7 +153,6 @@ class TestCLI(unittest.TestCase):
         osaca.run(args, output_file=output)
-
     def test_architectures_sanity(self):
         parser = osaca.create_parser()
         # Run sanity check for all architectures
         archs = osaca.SUPPORTED_ARCHS
         for arch in archs:
@@ -6,7 +6,6 @@ Unit tests for Semantic Analysis
 import os
 import unittest
 from copy import deepcopy
-from subprocess import call

 import networkx as nx
