diff --git a/changelog b/changelog index f8196e1c40..1ad6370b0e 100644 --- a/changelog +++ b/changelog @@ -130,6 +130,21 @@ 47) PR #1890 for #1889. Fix test to avoid temporary files being left after pytest run. + 48) PR #1850 towards #1799. Initial PSyIR node.datatype implementation + to query the resulting datatype of a PSyIR subtree. + + 49) PR #1869 for #1868. Extend PSyAD to make use of the + Reference2ArrayRangeTrans. + + 50) PR #1880 for #1865. Improves support for function calls in the + PSyIR by ensuring the associated RoutineSymbol has the correct + datatype. + + 51) PR #1894 for #1893. Fix bug in is_upper/lower_bound for UnknownTypes. + + 52) PR #1853 for #1829. Add OMP teams distribute parallel do directive + and refactor OMPLoopTrans. + release 2.3.1 17th of June 2022 1) PR #1747 for #1720. Adds support for If blocks to PSyAD. diff --git a/doc/user_guide/transformations.rst b/doc/user_guide/transformations.rst index 8d4d3dbdca..a10e89de87 100644 --- a/doc/user_guide/transformations.rst +++ b/doc/user_guide/transformations.rst @@ -313,8 +313,8 @@ can be found in the API-specific sections). #### -.. autoclass:: psyclone.transformations.OMPLoopTrans - :members: apply, omp_schedule, omp_worksharing +.. autoclass:: psyclone.psyir.transformations.OMPLoopTrans + :members: apply, omp_schedule, omp_directive :noindex: #### diff --git a/examples/nemo/eg1/openmp_gpu_trans.py b/examples/nemo/eg1/openmp_gpu_trans.py index 00e6939876..423df6e9ea 100755 --- a/examples/nemo/eg1/openmp_gpu_trans.py +++ b/examples/nemo/eg1/openmp_gpu_trans.py @@ -39,8 +39,8 @@ from psyclone.psyir.nodes import Loop, Assignment from psyclone.domain.nemo.transformations import NemoAllArrayRange2LoopTrans -from psyclone.psyir.transformations.omp_target_trans import OMPTargetTrans -from psyclone.transformations import TransformationError, OMPLoopTrans +from psyclone.psyir.transformations import OMPTargetTrans, OMPLoopTrans +from psyclone.transformations import TransformationError USE_GPU = True # Enable for generating OpenMP target directives @@ -57,9 +57,7 @@ def trans(psy): ''' omp_target_trans = OMPTargetTrans() omp_loop_trans = OMPLoopTrans() - # Disabling worksharing will produce the 'loop' directive which is better - # suited to map the work into the GPU - omp_loop_trans.omp_worksharing = False + omp_loop_trans.omp_directive = "loop" print("Invokes found:") for invoke in psy.invokes.invoke_list: diff --git a/psyclone.pdf b/psyclone.pdf index 8164808333..1f21a9711f 100644 Binary files a/psyclone.pdf and b/psyclone.pdf differ diff --git a/src/psyclone/psyad/transformations/preprocess.py b/src/psyclone/psyad/transformations/preprocess.py index d25f301c99..39d80be228 100644 --- a/src/psyclone/psyad/transformations/preprocess.py +++ b/src/psyclone/psyad/transformations/preprocess.py @@ -40,9 +40,11 @@ ''' from psyclone.core import SymbolicMaths from psyclone.psyad.utils import node_is_active, node_is_passive -from psyclone.psyir.nodes import BinaryOperation, Assignment, Range +from psyclone.psyir.nodes import BinaryOperation, Assignment, Range, \ + Reference from psyclone.psyir.transformations import DotProduct2CodeTrans, \ - Matmul2CodeTrans, ArrayRange2LoopTrans, TransformationError + Matmul2CodeTrans, ArrayRange2LoopTrans, TransformationError, \ + Reference2ArrayRangeTrans def preprocess_trans(kernel_psyir, active_variable_names): @@ -63,6 +65,14 @@ def preprocess_trans(kernel_psyir, active_variable_names): dot_product_trans = DotProduct2CodeTrans() matmul_trans = Matmul2CodeTrans() arrayrange2loop_trans = 
ArrayRange2LoopTrans() + reference2arrayrange_trans = Reference2ArrayRangeTrans() + + # Replace references to arrays (array notation) with array-ranges + for reference in kernel_psyir.walk(Reference): + try: + reference2arrayrange_trans.apply(reference) + except TransformationError: + pass # Replace array-ranges with explicit loops for assignment in kernel_psyir.walk(Assignment): diff --git a/src/psyclone/psyir/frontend/fparser2.py b/src/psyclone/psyir/frontend/fparser2.py index 3115cff9e5..f77560d3d4 100644 --- a/src/psyclone/psyir/frontend/fparser2.py +++ b/src/psyclone/psyir/frontend/fparser2.py @@ -3523,7 +3523,7 @@ def _name_handler(self, node, parent): ''' symbol = _find_or_create_imported_symbol(parent, node.string) - return Reference(symbol, parent) + return Reference(symbol, parent=parent) def _parenthesis_handler(self, node, parent): ''' @@ -3554,11 +3554,12 @@ def _part_ref_handler(self, node, parent): :param parent: Parent node of the PSyIR node we are constructing. :type parent: :py:class:`psyclone.psyir.nodes.Node` - :raises NotImplementedError: If the fparser node represents \ + :raises NotImplementedError: if the fparser node represents \ unsupported PSyIR features and should be placed in a CodeBlock. - :returns: PSyIR representation of node - :rtype: :py:class:`psyclone.psyir.nodes.ArrayReference` + :returns: the PSyIR node. + :rtype: :py:class:`psyclone.psyir.nodes.ArrayReference` or \ + :py:class:`psyclone.psyir.nodes.Call` ''' reference_name = node.items[0].string.lower() @@ -3567,9 +3568,12 @@ def _part_ref_handler(self, node, parent): # part-references instead of function-references. symbol = _find_or_create_imported_symbol(parent, reference_name) - array = ArrayReference(symbol, parent) - self.process_nodes(parent=array, nodes=node.items[1].items) - return array + if isinstance(symbol, RoutineSymbol): + call_or_array = Call(symbol, parent=parent) + else: + call_or_array = ArrayReference(symbol, parent=parent) + self.process_nodes(parent=call_or_array, nodes=node.items[1].items) + return call_or_array def _subscript_triplet_handler(self, node, parent): ''' @@ -3907,6 +3911,21 @@ def _subroutine_handler(self, node, parent): # attempt to recreate the prefix. We have to set shadowing to # True as there is likely to be a RoutineSymbol for this # function in any enclosing Container. + + # First, update the existing RoutineSymbol with the + # return datatype specified in the function + # declaration. + + # Lookup with the routine name as return_name may be + # declared with its own local name. Be wary that this + # function may not be referenced so there might not be + # a RoutineSymbol. 
+ try: + routine_symbol = routine.symbol_table.lookup(routine.name) + routine_symbol.datatype = base_type + except KeyError: + pass + routine.symbol_table.new_symbol(return_name, tag=keep_tag, symbol_type=DataSymbol, diff --git a/src/psyclone/psyir/nodes/__init__.py b/src/psyclone/psyir/nodes/__init__.py index dd75999f53..3bbbc5aa31 100644 --- a/src/psyclone/psyir/nodes/__init__.py +++ b/src/psyclone/psyir/nodes/__init__.py @@ -83,7 +83,8 @@ OMPParallelDirective, OMPParallelDoDirective, OMPSingleDirective, \ OMPMasterDirective, OMPSerialDirective, OMPTaskloopDirective, \ OMPTaskwaitDirective, OMPStandaloneDirective, OMPRegionDirective, \ - OMPTargetDirective, OMPLoopDirective, OMPDeclareTargetDirective + OMPTargetDirective, OMPLoopDirective, OMPDeclareTargetDirective, \ + OMPTeamsDistributeParallelDoDirective from psyclone.psyir.nodes.clause import Clause, OperandClause from psyclone.psyir.nodes.omp_clauses import OMPGrainsizeClause, \ OMPNogroupClause, OMPNowaitClause, OMPNumTasksClause, OMPPrivateClause, \ @@ -161,6 +162,7 @@ 'OMPTargetDirective', 'OMPLoopDirective', 'OMPDeclareTargetDirective', + 'OMPTeamsDistributeParallelDoDirective', # OMP Clause Nodes 'OMPGrainsizeClause', 'OMPNogroupClause', diff --git a/src/psyclone/psyir/nodes/array_mixin.py b/src/psyclone/psyir/nodes/array_mixin.py index 2022e54148..11c1e0f0cc 100644 --- a/src/psyclone/psyir/nodes/array_mixin.py +++ b/src/psyclone/psyir/nodes/array_mixin.py @@ -39,21 +39,22 @@ ''' This module contains the implementation of the abstract ArrayMixin. ''' import abc -import six from psyclone.errors import InternalError +from psyclone.psyir.nodes.call import Call +from psyclone.psyir.nodes.codeblock import CodeBlock from psyclone.psyir.nodes.datanode import DataNode from psyclone.psyir.nodes.literal import Literal from psyclone.psyir.nodes.member import Member -from psyclone.psyir.nodes.operation import BinaryOperation +from psyclone.psyir.nodes.operation import Operation, BinaryOperation from psyclone.psyir.nodes.ranges import Range from psyclone.psyir.nodes.reference import Reference from psyclone.psyir.symbols import SymbolError -from psyclone.psyir.symbols.datatypes import ScalarType, ArrayType +from psyclone.psyir.symbols.datatypes import (ScalarType, ArrayType, + INTEGER_TYPE) -@six.add_metaclass(abc.ABCMeta) -class ArrayMixin(object): +class ArrayMixin(metaclass=abc.ABCMeta): ''' Abstract class used to add functionality common to Nodes that represent Array accesses. @@ -95,7 +96,7 @@ def get_signature_and_indices(self): :rtype: tuple(:py:class:`psyclone.core.Signature`, list of \ lists of indices) ''' - sig, _ = super(ArrayMixin, self).get_signature_and_indices() + sig, _ = super().get_signature_and_indices() return (sig, [self.indices[:]]) def _validate_index(self, index): @@ -151,6 +152,10 @@ def is_lower_bound(self, index): try: symbol = self.scope.symbol_table.lookup(self.name) datatype = symbol.datatype + # Check that the symbol is of ArrayType. (It may be of + # UnknownFortranType if the symbol is of e.g. character type.) + if not isinstance(datatype, ArrayType): + return False shape = datatype.shape array_bounds = shape[index] if (isinstance(array_bounds, ArrayType.ArrayBounds) @@ -211,6 +216,10 @@ def is_upper_bound(self, index): try: symbol = self.scope.symbol_table.lookup(self.name) datatype = symbol.datatype + # Check that the symbol is of ArrayType. (It may be of + # UnknownFortranType if the symbol is of e.g. character type.) 
+ if not isinstance(datatype, ArrayType): + return False shape = datatype.shape array_bounds = shape[index] if (isinstance(array_bounds, ArrayType.ArrayBounds) and @@ -355,6 +364,76 @@ def indices(self): f"expression but found '{type(child).__name__}'") return self.children + def _get_effective_shape(self): + ''' + :returns: the shape of the array access represented by this node. + :rtype: List[:py:class:`psyclone.psyir.nodes.DataNode`] + + :raises NotImplementedError: if any of the array-indices involve a + function call or an expression. + ''' + def _num_elements(expr): + ''' + Create PSyIR for the number of elements in this range. It + is given by (stop - start)/step + 1. + + :param expr: the range for which to compute the number of elements. + :type expr: :py:class:`psyclone.psyir.nodes.Range` or \ + :py:class:`psyclone.psyir.symbols.ArrayType.ArrayBounds` + + :returns: the PSyIR expression for the number of elements in the \ + supplied range. + :rtype: :py:class:`psyclone.psyir.nodes.BinaryOperation` + + ''' + if isinstance(expr, Range): + start = expr.start + stop = expr.stop + step = expr.step + elif isinstance(expr, ArrayType.ArrayBounds): + start = expr.lower + stop = expr.upper + step = Literal("1", INTEGER_TYPE) + minus = BinaryOperation.create(BinaryOperation.Operator.SUB, + stop.copy(), start.copy()) + div = BinaryOperation.create(BinaryOperation.Operator.DIV, + minus, step.copy()) + plus = BinaryOperation.create(BinaryOperation.Operator.ADD, + div, Literal("1", INTEGER_TYPE)) + return plus + + shape = [] + for idx_expr in self.indices: + if isinstance(idx_expr, Range): + shape.append(_num_elements(idx_expr)) + + elif isinstance(idx_expr, Reference): + dtype = idx_expr.datatype + if dtype.shape: + # An array slice can be defined by a 1D slice of another + # array, e.g. `a(b(1:4))`. + if len(dtype.shape) > 1: + raise InternalError( + f"An array defining a slice of a dimension of " + f"another array must be 1D but '{idx_expr.name}' " + f"used to index into '{self.name}' has " + f"{len(dtype.shape)} dimensions.") + shape.append(_num_elements(dtype.shape[0])) + elif isinstance(idx_expr, (Call, Operation, CodeBlock)): + # We can't yet straightforwardly query the type of a function + # call or Operation - TODO #1799. + # pylint: disable=import-outside-toplevel + from psyclone.psyir.backend.fortran import FortranWriter + # TODO #1887 - get type of writer to use from Config object? + fvisitor = FortranWriter() + raise NotImplementedError( + f"The array index expressions for access " + f"'{fvisitor(self)}' include a function call or " + f"expression. Querying the return type of " + f"such things is yet to be implemented.") + + return shape + def get_outer_range_index(self): ''' Return the index of the child that represents the outermost array dimension with a Range construct. diff --git a/src/psyclone/psyir/nodes/array_of_structures_reference.py b/src/psyclone/psyir/nodes/array_of_structures_reference.py index f2512e69f5..497a1bccde 100644 --- a/src/psyclone/psyir/nodes/array_of_structures_reference.py +++ b/src/psyclone/psyir/nodes/array_of_structures_reference.py @@ -1,7 +1,7 @@ # ----------------------------------------------------------------------------- # BSD 3-Clause License # -# Copyright (c) 2020-2021, Science and Technology Facilities Council. +# Copyright (c) 2020-2022, Science and Technology Facilities Council. # All rights reserved. 
# # Redistribution and use in source and binary forms, with or without @@ -37,13 +37,11 @@ ''' This module contains the implementation of the ArrayOfStructuresReference node. ''' -from __future__ import absolute_import - # Circular import if only '...nodes' is used: -from psyclone.psyir.nodes.structure_reference import StructureReference from psyclone.psyir import symbols -from psyclone.psyir.nodes.array_of_structures_mixin import \ - ArrayOfStructuresMixin +from psyclone.psyir.nodes.array_of_structures_mixin import ( + ArrayOfStructuresMixin) +from psyclone.psyir.nodes.structure_reference import StructureReference class ArrayOfStructuresReference(ArrayOfStructuresMixin, StructureReference): diff --git a/src/psyclone/psyir/nodes/array_reference.py b/src/psyclone/psyir/nodes/array_reference.py index 134538c2aa..1c77156f0c 100644 --- a/src/psyclone/psyir/nodes/array_reference.py +++ b/src/psyclone/psyir/nodes/array_reference.py @@ -38,10 +38,10 @@ ''' This module contains the implementation of the ArrayReference node. ''' -from __future__ import absolute_import from psyclone.psyir.nodes.array_mixin import ArrayMixin from psyclone.psyir.nodes.reference import Reference -from psyclone.psyir.symbols import DataSymbol, DeferredType, UnknownType +from psyclone.psyir.symbols import (DataSymbol, DeferredType, UnknownType, + ScalarType, ArrayType) from psyclone.errors import GenerationError @@ -100,11 +100,26 @@ def create(symbol, indices): return array def __str__(self): - result = super(ArrayReference, self).__str__() + "\n" + result = super().__str__() + "\n" for entity in self._children: result += str(entity) + "\n" return result + @property + def datatype(self): + ''' + :returns: the datatype of the accessed array element(s). + :rtype: :py:class:`psyclone.psyir.symbols.DataType` + ''' + shape = self._get_effective_shape() + if shape: + return ArrayType(self.symbol.datatype, shape) + # TODO #1857: Really we should just be able to return + # self.symbol.datatype here but currently arrays of scalars are + # handled in a different way to all other types of array. + return ScalarType(self.symbol.datatype.intrinsic, + self.symbol.datatype.precision) + # For AutoAPI documentation generation __all__ = ['ArrayReference'] diff --git a/src/psyclone/psyir/nodes/omp_directives.py b/src/psyclone/psyir/nodes/omp_directives.py index 59b264ae84..b53f84e68a 100644 --- a/src/psyclone/psyir/nodes/omp_directives.py +++ b/src/psyclone/psyir/nodes/omp_directives.py @@ -42,9 +42,7 @@ nodes.''' -from __future__ import absolute_import import abc -import six from psyclone.configuration import Config from psyclone.core import AccessType, VariablesAccessInfo @@ -84,8 +82,7 @@ class OMPDirective(metaclass=abc.ABCMeta): _PREFIX = "OMP" -@six.add_metaclass(abc.ABCMeta) -class OMPRegionDirective(OMPDirective, RegionDirective): +class OMPRegionDirective(OMPDirective, RegionDirective, metaclass=abc.ABCMeta): ''' Base class for all OpenMP region-related directives. @@ -116,12 +113,9 @@ def _get_reductions_list(self, reduction_type): return result -@six.add_metaclass(abc.ABCMeta) -class OMPStandaloneDirective(OMPDirective, StandaloneDirective): - ''' - Base class for all OpenMP-related standalone directives - - ''' +class OMPStandaloneDirective(OMPDirective, StandaloneDirective, + metaclass=abc.ABCMeta): + ''' Base class for all OpenMP-related standalone directives. 
''' class OMPDeclareTargetDirective(OMPStandaloneDirective): @@ -199,7 +193,7 @@ def validate_global_constraints(self): "OMPTaskwaitDirective must be inside an OMP parallel region " "but could not find an ancestor OMPParallelDirective node") - super(OMPTaskwaitDirective, self).validate_global_constraints() + super().validate_global_constraints() def gen_code(self, parent): '''Generate the fortran OMP Taskwait Directive and any associated @@ -229,8 +223,7 @@ def begin_string(self): return "omp taskwait" -@six.add_metaclass(abc.ABCMeta) -class OMPSerialDirective(OMPRegionDirective): +class OMPSerialDirective(OMPRegionDirective, metaclass=abc.ABCMeta): ''' Abstract class representing OpenMP serial regions, e.g. OpenMP SINGLE or OpenMP Master. @@ -261,40 +254,37 @@ def validate_global_constraints(self): if not self.ancestor(OMPParallelDirective, excluding=OMPParallelDoDirective): raise GenerationError( - "{} must be inside an OMP parallel region but " - "could not find an ancestor OMPParallelDirective node".format( - self._text_name)) + f"{self._text_name} must be inside an OMP parallel region but " + f"could not find an ancestor OMPParallelDirective node") if self.ancestor(OMPSerialDirective): raise GenerationError( - "{} must not be inside another OpenMP " - "serial region".format(self._text_name)) + f"{self._text_name} must not be inside another OpenMP " + f"serial region") - super(OMPSerialDirective, self).validate_global_constraints() + super().validate_global_constraints() class OMPSingleDirective(OMPSerialDirective): ''' Class representing an OpenMP SINGLE directive in the PSyIR. - :param list children: List of Nodes that are children of this Node. - :param parent: The Node in the AST that has this directive as a child. - :type parent: :py:class:`psyclone.psyir.nodes.Node` :param bool nowait: Argument describing whether this single should have \ a nowait clause applied. Default value is False. + :param kwargs: additional keyword arguments provided to the PSyIR node. + :type kwargs: unwrapped dict. ''' _children_valid_format = "Schedule, [OMPNowaitClause]" # Textual description of the node _text_name = "OMPSingleDirective" - def __init__(self, children=None, parent=None, nowait=False): + def __init__(self, nowait=False, **kwargs): self._nowait = nowait # Call the init method of the base class once we've stored # the nowait requirement - super(OMPSingleDirective, self).__init__(children=children, - parent=parent) + super().__init__(**kwargs) if self._nowait: self.children.append(OMPNowaitClause()) @@ -506,6 +496,7 @@ def private_clause(self): def gen_code(self, parent): '''Generate the fortran OMP Parallel Directive and any associated code''' + # pylint: disable=import-outside-toplevel from psyclone.psyGen import zero_reduction_variables # We're not doing nested parallelism so make sure that this @@ -644,6 +635,7 @@ def _get_private_clause(self): :raises InternalError: if a Kernel has local variable(s) but they \ aren't named. ''' + # pylint: disable=import-outside-toplevel from psyclone.psyGen import InvokeSchedule if (self.default_clause.clause_type != @@ -749,9 +741,6 @@ class OMPTaskloopDirective(OMPRegionDirective): ''' Class representing an OpenMP TASKLOOP directive in the PSyIR. - :param list children: list of Nodes that are children of this Node. - :param parent: the Node in the AST that has this directive as a child. - :type parent: :py:class:`psyclone.psyir.nodes.Node` :param grainsize: The grainsize value used to specify the grainsize \ clause on this OpenMP directive. 
If this is None \ the grainsize clause is not applied. Default \ @@ -765,6 +754,8 @@ class OMPTaskloopDirective(OMPRegionDirective): :param nogroup: Whether the nogroup clause should be used for this node. \ Default value is False :type nogroup: bool + :param kwargs: additional keyword arguments provided to the PSyIR node. + :type kwargs: unwrapped dict. :raises GenerationError: if this OMPTaskloopDirective has both \ a grainsize and num_tasks value \ @@ -777,9 +768,8 @@ class OMPTaskloopDirective(OMPRegionDirective): _children_valid_format = ("Schedule, [OMPGrainsizeClause | " "OMPNumTasksClause], [OMPNogroupClause]") - # pylint: disable=too-many-arguments - def __init__(self, children=None, parent=None, grainsize=None, - num_tasks=None, nogroup=False): + def __init__(self, grainsize=None, num_tasks=None, nogroup=False, + **kwargs): # These remain primarily for the gen_code interface self._grainsize = grainsize self._num_tasks = num_tasks @@ -788,8 +778,7 @@ def __init__(self, children=None, parent=None, grainsize=None, raise GenerationError( "OMPTaskloopDirective must not have both grainsize and " "numtasks clauses specified.") - super(OMPTaskloopDirective, self).__init__(children=children, - parent=parent) + super().__init__(**kwargs) if self._grainsize is not None: child = [Literal(f"{grainsize}", INTEGER_TYPE)] self._children.append(OMPGrainsizeClause(children=child)) @@ -863,7 +852,7 @@ def validate_global_constraints(self): "OMPTaskloopDirective has two Nogroup clauses as children " "which is not allowed.") - super(OMPTaskloopDirective, self).validate_global_constraints() + super().validate_global_constraints() def gen_code(self, parent): ''' @@ -931,28 +920,32 @@ class OMPDoDirective(OMPRegionDirective): ''' Class representing an OpenMP DO directive in the PSyIR. - :param list children: list of Nodes that are children of this Node. - :param parent: the Node in the AST that has this directive as a child. - :type parent: :py:class:`psyclone.psyir.nodes.Node` - :param str omp_schedule: the OpenMP schedule to use. - :param bool reprod: whether or not to generate code for run-reproducible \ - OpenMP reductions. + :param str omp_schedule: the OpenMP schedule to use (defaults to "auto"). + :param Optional[int] collapse: optional number of nested loops to \ + collapse into a single iteration space to parallelise. Defaults to \ + None. + :param Optional[bool] reprod: whether or not to generate code for \ + run-reproducible OpenMP reductions (if not specified the value is \ + provided by the PSyclone Config file). + :param kwargs: additional keyword arguments provided to the PSyIR node. + :type kwargs: unwrapped dict. 
''' - def __init__(self, children=None, parent=None, omp_schedule="static", - reprod=None): + VALID_OMP_SCHEDULES = ["runtime", "static", "dynamic", "guided", "auto"] + _directive_string = "do" + + def __init__(self, omp_schedule="auto", collapse=None, reprod=None, + **kwargs): + super().__init__(**kwargs) if reprod is None: self._reprod = Config.get().reproducible_reductions else: self._reprod = reprod self._omp_schedule = omp_schedule - - # Call the init method of the base class once we've stored - # the OpenMP schedule - super(OMPDoDirective, self).__init__(children=children, - parent=parent) + self._collapse = None + self.collapse = collapse # Use setter with error checking def __eq__(self, other): ''' @@ -968,9 +961,47 @@ def __eq__(self, other): is_eq = super().__eq__(other) is_eq = is_eq and self.omp_schedule == other.omp_schedule is_eq = is_eq and self.reprod == other.reprod + is_eq = is_eq and self.collapse == other.collapse return is_eq + @property + def collapse(self): + ''' + :returns: the value of the collapse clause. + :rtype: int or NoneType + ''' + return self._collapse + + @collapse.setter + def collapse(self, value): + ''' + TODO #1648: Note that gen_code ignores the collapse clause but the + generated code is still valid. Since gen_code is going to be removed + and it is only used for LFRic (which does not support GPU offloading + that gets improved with the collapse clause) it will not be supported. + + :param value: optional number of nested loop to collapse into a \ + single iteration space to parallelise. Defaults to None. + :type value: int or NoneType. + + :raises TypeError: if the collapse value given is not an integer \ + or NoneType. + :raises ValueError: if the collapse integer given is not positive. + + ''' + if value is not None and not isinstance(value, int): + raise TypeError( + f"The {type(self).__name__} collapse clause must be a positive" + f" integer or None, but value '{value}' has been given.") + + if value is not None and value <= 0: + raise ValueError( + f"The {type(self).__name__} collapse clause must be a positive" + f" integer or None, but value '{value}' has been given.") + + self._collapse = value + def node_str(self, colour=True): ''' Returns the name of this node with (optional) control codes @@ -981,11 +1012,12 @@ def node_str(self, colour=True): :returns: description of this node, possibly coloured. :rtype: str ''' + val = f"{self.coloured_name(colour)}[omp_schedule={self.omp_schedule}" if self.reductions(): - reprod = f"reprod={self._reprod}" - else: - reprod = "" - return f"{self.coloured_name(colour)}[{reprod}]" + val += f",reprod={self._reprod}" + if self._collapse and self._collapse > 1: + val += f",collapse={self._collapse}" + return val + "]" def _reduction_string(self): ''' Return the OMP reduction information as a string ''' @@ -1006,11 +1038,38 @@ def omp_schedule(self): ''' return self._omp_schedule + @omp_schedule.setter + def omp_schedule(self, value): + ''' + :param str value: the omp_schedule for this object. + + :raises TypeError: if the provided omp_schedule is not a valid \ + schedule string. 
+ ''' + if not isinstance(value, str): + raise TypeError( + f"{type(self).__name__} omp_schedule should be a str " + f"but found '{type(value).__name__}'.") + if value.split(',')[0].lower() not in self.VALID_OMP_SCHEDULES: + raise TypeError( + f"{type(self).__name__} omp_schedule should be one of " + f"{self.VALID_OMP_SCHEDULES} but found '{value}'.") + self._omp_schedule = value + @property def reprod(self): - ''' returns whether reprod has been set for this object or not ''' + ''' + :returns: whether reprod has been set for this object or not. + ''' return self._reprod + @reprod.setter + def reprod(self, value): + ''' + :param bool value: enable or disable reproducible loop parallelism. + ''' + self._reprod = value + def validate_global_constraints(self): ''' Perform validation checks that can only be done at code-generation @@ -1031,8 +1090,30 @@ def validate_global_constraints(self): "could not find an ancestor OMPParallelDirective node") self._validate_single_loop() + self._validate_collapse_value() - super(OMPDoDirective, self).validate_global_constraints() + super().validate_global_constraints() + + def _validate_collapse_value(self): + ''' + Checks that if there is a collapse clause, there must be as many + immediately nested loops as the collapse value. + + :raises GenerationError: if this OMPLoopDirective has a collapse \ + clause but it doesn't have the expected number of nested Loops. + ''' + if self._collapse: + cursor = self.dir_body.children[0] + for depth in range(self._collapse): + if (len(cursor.parent.children) != 1 or + not isinstance(cursor, Loop)): + raise GenerationError( + f"{type(self).__name__} must have as many immediately " + f"nested loops as the collapse clause specifies but " + f"'{self}' has a collapse={self._collapse} and the " + f"nested body at depth {depth} cannot be " + f"collapsed.") + cursor = cursor.loop_body.children[0] def _validate_single_loop(self): ''' @@ -1059,6 +1140,11 @@ def gen_code(self, parent): Generate the f2pygen AST entries in the Schedule for this OpenMP do directive. + TODO #1648: Note that gen_code ignores the collapse clause but the + generated code is still valid. Since gen_code is going to be removed + and it is only used for LFRic (which does not support GPU offloading + that gets improved with the collapse clause) it will not be supported. + :param parent: the parent Node in the Schedule to which to add our \ content. :type parent: sub-class of :py:class:`psyclone.f2pygen.BaseGen` @@ -1096,7 +1182,10 @@ def begin_string(self): :rtype: str ''' - return f"omp do schedule({self._omp_schedule})" + string = f"omp {self._directive_string} schedule({self._omp_schedule})" + if self._collapse: + string += f" collapse({self._collapse})" + return string def end_string(self): '''Returns the end (or closing) statement of this directive, i.e. @@ -1107,24 +1196,25 @@ def end_string(self): :rtype: str ''' - # pylint: disable=no-self-use - return "omp end do" + return f"omp end {self._directive_string}" class OMPParallelDoDirective(OMPParallelDirective, OMPDoDirective): ''' Class for the !$OMP PARALLEL DO directive. This inherits from both OMPParallelDirective (because it creates a new OpenMP thread-parallel region) and OMPDoDirective (because it - causes a loop to be parallelised). ''' + causes a loop to be parallelised). + + :param kwargs: additional keyword arguments provided to the PSyIR node. + :type kwargs: unwrapped dict. 
+ ''' _children_valid_format = ("Schedule, OMPDefaultClause, OMPPrivateClause, " "OMPScheduleClause, [OMPReductionClause]*") + _directive_string = "parallel do" - def __init__(self, children=[], parent=None, omp_schedule="static"): - OMPDoDirective.__init__(self, - children=children, - parent=parent, - omp_schedule=omp_schedule) + def __init__(self, **kwargs): + OMPDoDirective.__init__(self, **kwargs) self.addchild(OMPDefaultClause( clause_type=OMPDefaultClause.DefaultClauseTypes.SHARED)) @@ -1152,9 +1242,23 @@ def _validate_child(position, child): return False def gen_code(self, parent): + ''' + Generate the f2pygen AST entries in the Schedule for this OpenMP + directive. + + TODO #1648: Note that gen_code ignores the collapse clause but the + generated code is still valid. Since gen_code is going to be removed + and it is only used for LFRic (which does not support GPU offloading + that gets improved with the collapse clause) it will not be supported. + + :param parent: the parent Node in the Schedule to which to add our \ + content. + :type parent: sub-class of :py:class:`psyclone.f2pygen.BaseGen` + ''' # We're not doing nested parallelism so make sure that this # omp parallel do is not already within some parallel region + # pylint: disable=import-outside-toplevel from psyclone.psyGen import zero_reduction_variables self.validate_global_constraints() @@ -1220,15 +1324,22 @@ def begin_string(self): :rtype: str ''' - return ("omp parallel do" + self._reduction_string()) + string = f"omp {self._directive_string}" + if self._collapse: + string += f" collapse({self._collapse})" + string += self._reduction_string() + return string def end_string(self): - ''' - :returns: the closing statement for this directive. + '''Returns the end (or closing) statement of this directive, i.e. + "omp end do". The visitor is responsible for adding the + correct directive beginning (e.g. "!$"). + + :returns: the end statement for this directive. :rtype: str + ''' - # pylint: disable=no-self-use - return "omp end parallel do" + return f"omp end {self._directive_string}" def validate_global_constraints(self): ''' @@ -1236,9 +1347,15 @@ def validate_global_constraints(self): time. ''' - super(OMPParallelDoDirective, self).validate_global_constraints() + OMPParallelDirective.validate_global_constraints(self) self._validate_single_loop() + self._validate_collapse_value() + + +class OMPTeamsDistributeParallelDoDirective(OMPParallelDoDirective): + ''' Class representing the OMP teams distribute parallel do directive. ''' + _directive_string = "teams distribute parallel do" class OMPTargetDirective(OMPRegionDirective): @@ -1293,12 +1410,15 @@ class OMPLoopDirective(OMPRegionDirective): ''' Class for the !$OMP LOOP directive that specifies that the iterations of the associated loops may execute concurrently. - :param int collapse: optional number of nested loops to collapse into a \ - single iteration space to parallelise. Defaults to None. + :param Optional[int] collapse: optional number of nested loops to \ + collapse into a single iteration space to parallelise. Defaults \ + to None. + :param kwargs: additional keyword arguments provided to the PSyIR node. + :type kwargs: unwrapped dict. 
''' def __init__(self, collapse=None, **kwargs): - super(OMPLoopDirective, self).__init__(**kwargs) + super().__init__(**kwargs) self._collapse = None self.collapse = collapse # Use setter with error checking @@ -1329,6 +1449,11 @@ def collapse(self): @collapse.setter def collapse(self, value): ''' + TODO #1648: Note that gen_code ignores the collapse clause but the + generated code is still valid. Since gen_code is going to be removed + and it is only used for LFRic (which does not support GPU offloading + that gets improved with the collapse clause) it will not be supported. + :param value: optional number of nested loop to collapse into a \ single iteration space to parallelise. Defaults to None. :type value: int or NoneType. @@ -1437,7 +1562,7 @@ def validate_global_constraints(self): f"{type(cursor).__name__} rather than a Loop.") cursor = cursor.loop_body.children[0] - super(OMPLoopDirective, self).validate_global_constraints() + super().validate_global_constraints() # For automatic API documentation generation diff --git a/src/psyclone/psyir/nodes/reference.py b/src/psyclone/psyir/nodes/reference.py index 60c7f19a19..f7d1e7a354 100644 --- a/src/psyclone/psyir/nodes/reference.py +++ b/src/psyclone/psyir/nodes/reference.py @@ -39,10 +39,9 @@ ''' This module contains the implementation of the Reference node.''' -from __future__ import absolute_import +from psyclone.core import AccessType, Signature from psyclone.psyir.nodes.datanode import DataNode from psyclone.psyir.nodes.operation import Operation, BinaryOperation -from psyclone.core import AccessType, Signature from psyclone.psyir.symbols import Symbol @@ -52,8 +51,8 @@ class Reference(DataNode): :param symbol: the symbol being referenced. :type symbol: :py:class:`psyclone.psyir.symbols.Symbol` - :param parent: the parent node of this Reference in the PSyIR. - :type parent: :py:class:`psyclone.psyir.nodes.Node` or NoneType + :param kwargs: additional keyword arguments provided to the super class. + :type kwargs: unwrapped dict. ''' # Textual description of the node. @@ -61,8 +60,8 @@ class Reference(DataNode): _text_name = "Reference" _colour = "yellow" - def __init__(self, symbol, parent=None): - super(Reference, self).__init__(parent=parent) + def __init__(self, symbol, **kwargs): + super().__init__(**kwargs) self.symbol = symbol def __eq__(self, other): @@ -174,6 +173,14 @@ def reference_accesses(self, var_accesses): index.reference_accesses(var_accesses) var_accesses.add_access(sig, AccessType.READ, self, all_indices) + @property + def datatype(self): + ''' + :returns: the datatype of this reference. + :rtype: :py:class:`psyclone.psyir.symbols.DataType` + ''' + return self.symbol.datatype + # For AutoAPI documentation generation __all__ = ['Reference'] diff --git a/src/psyclone/psyir/nodes/structure_reference.py b/src/psyclone/psyir/nodes/structure_reference.py index b3ec2b3450..beaebbc237 100644 --- a/src/psyclone/psyir/nodes/structure_reference.py +++ b/src/psyclone/psyir/nodes/structure_reference.py @@ -1,7 +1,7 @@ # ----------------------------------------------------------------------------- # BSD 3-Clause License # -# Copyright (c) 2020-2021, Science and Technology Facilities Council. +# Copyright (c) 2020-2022, Science and Technology Facilities Council. # All rights reserved. # # Redistribution and use in source and binary forms, with or without @@ -37,18 +37,17 @@ ''' This module contains the implementation of the StructureReference node. 
''' -from __future__ import absolute_import -import six - from psyclone.core import Signature from psyclone.psyir.nodes.reference import Reference from psyclone.psyir.nodes.member import Member from psyclone.psyir.nodes.array_member import ArrayMember +from psyclone.psyir.nodes.array_mixin import ArrayMixin from psyclone.psyir.nodes.array_of_structures_member import \ ArrayOfStructuresMember from psyclone.psyir.nodes.structure_member import StructureMember -from psyclone.psyir.symbols import DataSymbol, DataTypeSymbol, StructureType, \ - DeferredType, UnknownType +from psyclone.psyir.symbols import (DataSymbol, DataTypeSymbol, StructureType, + ArrayType, DeferredType, ScalarType, + UnknownType) from psyclone.errors import InternalError @@ -168,7 +167,7 @@ def _create(cls, symbol, symbol_type, members, parent=None): if isinstance(members[-1], tuple): # An access to one or more array elements subref = ArrayMember.create(members[-1][0], members[-1][1]) - elif isinstance(members[-1], six.string_types): + elif isinstance(members[-1], str): # A member access subref = Member(members[-1]) else: @@ -188,7 +187,7 @@ def _create(cls, symbol, symbol_type, members, parent=None): # This is an array access so we have an ArrayOfStructuresMember subref = ArrayOfStructuresMember.create( component[0], component[1], subref) - elif isinstance(component, six.string_types): + elif isinstance(component, str): # No array access so just a StructureMember subref = StructureMember.create(component, subref) else: @@ -203,7 +202,7 @@ def _create(cls, symbol, symbol_type, members, parent=None): return ref def __str__(self): - result = super(StructureReference, self).__str__() + result = super().__str__() for entity in self._children: result += "\n" + str(entity) return result @@ -233,13 +232,112 @@ def get_signature_and_indices(self): ''' # Get the signature of self: - my_sig, my_index = \ - super(StructureReference, self).get_signature_and_indices() + my_sig, my_index = super().get_signature_and_indices() # Then the sub-signature of the member, and indices used: sub_sig, indices = self.children[0].get_signature_and_indices() # Combine signature and indices return (Signature(my_sig, sub_sig), my_index + indices) + @property + def datatype(self): + ''' + Walks down the list of members making up this reference to determine + the type that it refers to. + + In order to minimise code duplication, this method also supports + ArrayOfStructuresReference by simply allowing for the case where + the starting reference is to an Array. + + :returns: the datatype of this reference. + :rtype: :py:class:`psyclone.psyir.symbols.DataType` + + :raises NotImplementedError: if the structure reference represents \ + an array of arrays. + ''' + dtype = self.symbol.datatype + + if isinstance(dtype, ArrayType): + dtype = dtype.intrinsic + + if isinstance(dtype, DataTypeSymbol): + dtype = dtype.datatype + + if isinstance(dtype, (DeferredType, UnknownType)): + # We don't know the type of the symbol that defines the type + # of this structure. + return DeferredType() + + # We do have the definition of this structure - walk down it. + cursor = self + cursor_type = dtype + + # The next four lines are required when this method is called for an + # ArrayOfStructuresReference. + if isinstance(cursor, ArrayMixin): + # pylint: disable=protected-access + shape = cursor._get_effective_shape() + else: + shape = [] + + # Walk down the structure, collecting information on any array slices + # as we go. 
+ while hasattr(cursor, "member"): + cursor = cursor.member + cursor_type = cursor_type.components[cursor.name].datatype + if isinstance(cursor_type, (UnknownType, DeferredType)): + return DeferredType() + if isinstance(cursor, ArrayMixin): + # pylint: disable=protected-access + shape.extend(cursor._get_effective_shape()) + + # We've reached the ultimate member of the structure access. + if shape: + if isinstance(cursor_type, ArrayType): + # It's of array type but does it represent a single element, + # a slice or a whole array? (We use `children` rather than + # `indices` so as to avoid having to check that `cursor` is + # an `ArrayMember`.) + if cursor.children: + # It has indices so could be a single element or a slice. + # pylint: disable=protected-access + cursor_shape = cursor._get_effective_shape() + else: + # No indices so it is an access to a whole array. + cursor_shape = cursor_type.shape + if cursor_shape and shape != cursor_shape: + # This ultimate access is an array but we've already + # encountered one or more slices earlier in the access + # expression. + # TODO #1887. Allow the writer to be used in error messages + # to be set in the Config object? + # pylint: disable=import-outside-toplevel + from psyclone.psyir.backend.fortran import FortranWriter + fwriter = FortranWriter() + raise NotImplementedError( + f"Array of arrays not supported: the ultimate member " + f"'{cursor.name}' of the StructureAccess represents " + f"an array but other array notation is present in the " + f"full access expression: '{fwriter(self)}'") + return ArrayType(cursor_type.intrinsic, shape) + + return ArrayType(cursor_type, shape) + + # We don't have an explicit array access (because `shape` is Falsey) + # but is the ultimate member itself an array? + if isinstance(cursor_type, ArrayType): + if not cursor.children: + # It is and there are no index expressions so we return the + # ArrayType. + return cursor_type + # We have an access to a single element of the array. + # Currently arrays of scalars are handled in a + # different way to all other types of array. Issue #1857 will + # fix this anomaly. + if isinstance(cursor_type.intrinsic, ScalarType.Intrinsic): + return ScalarType(cursor_type.intrinsic, cursor_type.precision) + return cursor_type.intrinsic + return cursor_type + # For AutoAPI documentation generation __all__ = ['StructureReference'] diff --git a/src/psyclone/psyir/symbols/datatypes.py b/src/psyclone/psyir/symbols/datatypes.py index ccb1162e57..0a3095f9a2 100644 --- a/src/psyclone/psyir/symbols/datatypes.py +++ b/src/psyclone/psyir/symbols/datatypes.py @@ -39,15 +39,14 @@ import abc from collections import OrderedDict, namedtuple from enum import Enum -import six + from psyclone.errors import InternalError from psyclone.psyir.symbols.data_type_symbol import DataTypeSymbol from psyclone.psyir.symbols.datasymbol import DataSymbol from psyclone.psyir.symbols.symbol import Symbol -@six.add_metaclass(abc.ABCMeta) -class DataType(): +class DataType(metaclass=abc.ABCMeta): '''Abstract base class from which all types are derived.''' @abc.abstractmethod @@ -58,6 +57,15 @@ def __str__(self): ''' + def __eq__(self, other): + ''' + :param Any other: the object to check equality to. + + :returns: whether this type is equal to the 'other' type. 
+ :rtype: bool + ''' + return type(other) is type(self) + class DeferredType(DataType): '''Indicates that the type is unknown at this point.''' @@ -79,8 +87,7 @@ def __str__(self): return "NoType" -@six.add_metaclass(abc.ABCMeta) -class UnknownType(DataType): +class UnknownType(DataType, metaclass=abc.ABCMeta): ''' Indicates that a variable declaration is not supported by the PSyIR. This class is abstract and must be subclassed for each language @@ -131,13 +138,82 @@ class UnknownFortranType(UnknownType): :param str declaration_txt: string containing the original variable \ declaration. - - :raises TypeError: if the supplied declaration_txt is not a str. - ''' + def __init__(self, declaration_txt): + super().__init__(declaration_txt) + # This will hold the Fortran type specification (as opposed to + # the whole declaration). + self._type_text = "" + def __str__(self): return f"UnknownFortranType('{self._declaration}')" + @property + def declaration(self): + ''' + This useless routine is required so that we can override the associated + setter method below. + ''' + return super().declaration + + @declaration.setter + def declaration(self, value): + ''' + Sets the original declaration that this instance represents and + removes any cached type text. + + :param str value: the original declaration. + + ''' + self._declaration = value[:] + self._type_text = "" + + @property + def type_text(self): + ''' + Parses the original Fortran declaration and uses the resulting + parse tree to extract the type information. This is returned in + text form and also cached. + + TODO #1419 - alter Unknown(Fortran)Type so that it is only the + type information that is stored as a string. i.e. remove the name + of the variable being declared. Once that is done this method + won't be required. + + :returns: the Fortran code specifying the type. + :rtype: str + ''' + if self._type_text: + return self._type_text + + # Encapsulate fparser2 functionality here. + # pylint:disable=import-outside-toplevel + from fparser.common.readfortran import FortranStringReader + from fparser.common.sourceinfo import FortranFormat + from fparser.two import Fortran2008 + from fparser.two.parser import ParserFactory + string_reader = FortranStringReader(self._declaration) + # Set reader to free format. + string_reader.set_format(FortranFormat(True, False)) + ParserFactory().create(std="f2008") + ptree = Fortran2008.Declaration_Construct( + string_reader) + self._type_text = str(ptree.children[0]) + + return self._type_text + + def __eq__(self, other): + ''' + :param Any other: the object to check equality to. + + :returns: whether this type is equal to the 'other' type. + :rtype: bool + ''' + if not super().__eq__(other): + return False + + return other.type_text == self.type_text + class ScalarType(DataType): '''Describes a scalar datatype (and its precision). @@ -232,14 +308,13 @@ def __str__(self): def __eq__(self, other): ''' :param Any other: the object to check equality to. - :returns: whether this scalar type is equal to the 'other' scalar type. + + :returns: whether this type is equal to the 'other' type. :rtype: bool ''' - # A ScalarType is not equal to e.g. an ArrayType. - if not type(other) is type(self): + if not super().__eq__(other): return False - # TODO #1799 - this method needs implementing for the other Types as - # currently we're not consistent. 
+ return (self.precision == other.precision and self.intrinsic == other.intrinsic) @@ -317,6 +392,9 @@ def _node_from_int(var): if not isinstance(datatype, UnknownType): self._intrinsic = datatype.intrinsic self._precision = datatype.precision + else: + self._intrinsic = datatype + self._precision = None elif isinstance(datatype, DataTypeSymbol): self._intrinsic = datatype self._precision = None @@ -418,7 +496,7 @@ def _validate_data_node(dim_node): :raises TypeError: if the DataNode is not valid in this context. ''' - # When issue #685 is addressed then check that the + # When issue #1799 is addressed then check that the # datatype returned is an int (or is unknown). For the # moment, just check that if the DataNode is a # Reference then the associated symbol is a scalar @@ -426,16 +504,19 @@ def _validate_data_node(dim_node): if isinstance(dim_node, Reference): # Check the DataSymbol instance is a scalar # integer or is unknown - symbol = dim_node.symbol - if not ((symbol.is_scalar and symbol.datatype.intrinsic == - ScalarType.Intrinsic.INTEGER) or - isinstance(symbol.datatype, - (UnknownFortranType, DeferredType))): + dtype = dim_node.datatype + if isinstance(dtype, ArrayType) and dtype.shape: + raise TypeError( + f"If a DataSymbol is referenced in a dimension " + f"declaration then it should be a scalar but " + f"'{dim_node}' is not.") + if not (isinstance(dtype, (UnknownType, DeferredType)) or + dtype.intrinsic == ScalarType.Intrinsic.INTEGER): raise TypeError( f"If a DataSymbol is referenced in a dimension " - f"declaration then it should be a scalar integer or " + f"declaration then it should be an integer or " f"of UnknownType or DeferredType, but " - f"'{symbol.name}' is a '{symbol.datatype}'.") + f"'{dim_node.name}' is a '{dtype}'.") # TODO #1089 - add check that any References are not to a # local datasymbol that is not constant (as this would have # no value). @@ -507,6 +588,34 @@ def __str__(self): f"'{type(dimension).__name__}'.") return f"Array<{self._datatype}, shape=[{', '.join(dims)}]>" + def __eq__(self, other): + ''' + :param Any other: the object to check equality to. + + :returns: whether this ArrayType is equal to the 'other' ArrayType. + :rtype: bool + ''' + if not super().__eq__(other): + return False + + if (self.intrinsic != other.intrinsic or + self.precision != other.precision): + return False + + if len(self.shape) != len(other.shape): + return False + + # TODO #1799 - this implementation currently has some limitations. + # e.g. a(1:10) and b(2:11) have the same datatype (an array of 1 + # dimension and 10 elements) but we will currently return false. + # One improvement could be to use the SymbolicMath to do the comparison + # but this won't resolve all cases as shape can be references. + for this_dim, other_dim in zip(self.shape, other.shape): + if this_dim != other_dim: + return False + + return True + class StructureType(DataType): ''' @@ -607,6 +716,24 @@ def lookup(self, name): ''' return self._components[name] + def __eq__(self, other): + ''' + :param Any other: the object to check equality to. + + :returns: whether this StructureType is equal to the 'other' type. 
+ :rtype: bool + ''' + if not super().__eq__(other): + return False + + if len(self.components) != len(other.components): + return False + + if self.components != other.components: + return False + + return True + # Create common scalar datatypes REAL_TYPE = ScalarType(ScalarType.Intrinsic.REAL, diff --git a/src/psyclone/psyir/transformations/__init__.py b/src/psyclone/psyir/transformations/__init__.py index e2a8811304..6a44112f3f 100644 --- a/src/psyclone/psyir/transformations/__init__.py +++ b/src/psyclone/psyir/transformations/__init__.py @@ -72,6 +72,7 @@ import LoopTiling2DTrans from psyclone.psyir.transformations.loop_trans import LoopTrans from psyclone.psyir.transformations.nan_test_trans import NanTestTrans +from psyclone.psyir.transformations.omp_loop_trans import OMPLoopTrans from psyclone.psyir.transformations.omp_target_trans import OMPTargetTrans from psyclone.psyir.transformations.omp_taskwait_trans import OMPTaskwaitTrans from psyclone.psyir.transformations.parallel_loop_trans import \ @@ -111,6 +112,7 @@ 'LoopTiling2DTrans', 'LoopTrans', 'NanTestTrans', + 'OMPLoopTrans', 'OMPTaskwaitTrans', 'OMPTargetTrans', 'ParallelLoopTrans', diff --git a/src/psyclone/psyir/transformations/hoist_loop_bound_expr_trans.py b/src/psyclone/psyir/transformations/hoist_loop_bound_expr_trans.py index 620f7d4704..addfd4ff14 100644 --- a/src/psyclone/psyir/transformations/hoist_loop_bound_expr_trans.py +++ b/src/psyclone/psyir/transformations/hoist_loop_bound_expr_trans.py @@ -151,7 +151,8 @@ def validate(self, node, options=None): raise TransformationError( f"The loop provided to HoistLoopBoundExprTrans must not be " f"directly inside a Directive as its Schedule does not support" - f" multiple statements, but found '{node.parent.parent}'.") + f" multiple statements, but found " + f"'{node.parent.parent.coloured_name(False)}'.") def __str__(self): return ("Hoist complex loop bound expressions outside the loop " diff --git a/src/psyclone/psyir/transformations/omp_loop_trans.py b/src/psyclone/psyir/transformations/omp_loop_trans.py new file mode 100644 index 0000000000..bccf473e5e --- /dev/null +++ b/src/psyclone/psyir/transformations/omp_loop_trans.py @@ -0,0 +1,263 @@ +# ----------------------------------------------------------------------------- +# BSD 3-Clause License +# +# Copyright (c) 2017-2022, Science and Technology Facilities Council. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# ----------------------------------------------------------------------------- +# Author: S. Siso, STFC Daresbury Lab + +''' Transformation to insert OpenMP directives to parallelise PSyIR Loops. ''' + +from psyclone.configuration import Config +from psyclone.psyir.nodes import Routine, OMPDoDirective, OMPLoopDirective, \ + OMPParallelDoDirective, OMPTeamsDistributeParallelDoDirective +from psyclone.psyir.symbols import DataSymbol, INTEGER_TYPE +from psyclone.psyir.transformations.parallel_loop_trans import \ + ParallelLoopTrans + +MAP_STR_TO_LOOP_DIRECTIVES = { + "do": OMPDoDirective, + "paralleldo": OMPParallelDoDirective, + "teamsdistributeparalleldo": OMPTeamsDistributeParallelDoDirective, + "loop": OMPLoopDirective +} +VALID_OMP_DIRECTIVES = list(MAP_STR_TO_LOOP_DIRECTIVES.keys()) + + +class OMPLoopTrans(ParallelLoopTrans): + ''' + Adds an OpenMP directive to parallelise this loop. It can insert different + directives such as "omp do/for", "omp parallel do/for", "omp teams + distribute parallel do/for" or "omp loop" depending on the provided + parameters. + The OpenMP schedule to use can also be specified, but this will be ignored + in case of the "omp loop" (as the 'schedule' clause is not valid for this + specific directive). The configuration-defined 'reprod' parameter + also specifies whether a manual reproducible reproduction is to be used. + Note, reproducible in this case means obtaining the same results with the + same number of OpenMP threads, not for different numbers of OpenMP threads. + + :param str omp_schedule: the OpenMP schedule to use. Defaults to 'auto'. + :param str omp_directive: choose which OpenMP loop directive to use. \ + Defaults to "omp do" + + For example: + + >>> from psyclone.psyir.frontend.fortran import FortranReader + >>> from psyclone.psyir.backend.fortran import FortranWriter + >>> from psyclone.psyir.nodes import Loop + >>> from psyclone.transformations import OMPLoopTrans, OMPParallelTrans + >>> + >>> psyir = FortranReader().psyir_from_source(""" + ... subroutine my_subroutine() + ... integer, dimension(10, 10) :: A + ... integer :: i + ... integer :: j + ... do i = 1, 10 + ... do j = 1, 10 + ... A(i, j) = 0 + ... end do + ... end do + ... end subroutine + ... """) + >>> loop = psyir.walk(Loop)[0] + >>> omplooptrans1 = OMPLoopTrans(omp_schedule="dynamic", + ... omp_directive="paralleldo") + >>> omplooptrans1.apply(loop) + >>> print(FortranWriter()(psyir)) + subroutine my_subroutine() + integer, dimension(10,10) :: a + integer :: i + integer :: j + + !$omp parallel do default(shared), private(i,j), schedule(dynamic) + do i = 1, 10, 1 + do j = 1, 10, 1 + a(i,j) = 0 + enddo + enddo + !$omp end parallel do + + end subroutine my_subroutine + + + ''' + def __init__(self, omp_directive="do", omp_schedule="auto"): + super().__init__() + # Whether or not to generate code for (run-to-run on n threads) + # reproducible OpenMP reductions. This setting can be overridden + # via the `reprod` argument to the apply() method. 
+ self._reprod = Config.get().reproducible_reductions + + # Use setters to set up attributes + self._omp_schedule = "" + self.omp_schedule = omp_schedule + + self._omp_directive = "" + self.omp_directive = omp_directive + + def __str__(self): + return "Adds an OpenMP directive to parallelise the target loop" + + @property + def omp_directive(self): + ''' + :returns: the type of OMP directive that this transformation will \ + insert. + :rtype: str + ''' + return self._omp_directive + + @omp_directive.setter + def omp_directive(self, value): + ''' + :param str value: the type of OMP directive to add. + + :raises TypeError: if the provided value is not a valid str. + ''' + if not isinstance(value, str) or value not in VALID_OMP_DIRECTIVES: + raise TypeError( + f"The {type(self).__name__}.omp_directive property must be " + f"a str with the value of {VALID_OMP_DIRECTIVES}" + f" but found a '{type(value).__name__}' with value '{value}'.") + self._omp_directive = value + + @property + def omp_schedule(self): + ''' + :returns: the OpenMP schedule that will be specified by \ + this transformation. + :rtype: str + + ''' + return self._omp_schedule + + @omp_schedule.setter + def omp_schedule(self, value): + ''' + :param str value: Sets the OpenMP schedule value that will be \ + specified by this transformation, unless adding an OMP Loop \ + directive (in which case it is not applicable). + + :raises TypeError: if the provided value is not a string. + :raises ValueError: if the provided string is not a valid OpenMP \ + schedule format. + ''' + + if not isinstance(value, str): + raise TypeError( + f"The OMPLoopTrans.omp_schedule property must be a 'str'" + f" but found a '{type(value).__name__}'.") + + # Some schedules have an optional chunk size following a ',' + value_parts = value.split(',') + if value_parts[0].lower() not in OMPDoDirective.VALID_OMP_SCHEDULES: + raise ValueError(f"Valid OpenMP schedules are " + f"{OMPDoDirective.VALID_OMP_SCHEDULES} but got " + f"'{value_parts[0]}'.") + + if len(value_parts) > 1: + if value_parts[0] == "auto": + raise ValueError("Cannot specify a chunk size when using an " + "OpenMP schedule of 'auto'.") + try: + int(value_parts[1].strip()) + except ValueError as err: + raise ValueError(f"Supplied OpenMP schedule '{value}' has an " + f"invalid chunk-size.") from err + + self._omp_schedule = value + + def _directive(self, children, collapse=None): + ''' Creates the type of directive needed for this sub-class of + transformation. + + :param children: list of Nodes that will be the children of \ + the created directive. + :type children: List[:py:class:`psyclone.psyir.nodes.Node`] + :param int collapse: number of nested loops to collapse or None if \ + no collapse attribute is required. + + :returns: the new node representing the directive in the AST + :rtype: :py:class:`psyclone.psyir.nodes.OMPDoDirective` | \ + :py:class:`psyclone.psyir.nodes.OMPParallelDoDirective` | \ + :py:class:`psyclone.psyir.nodes. 
\ + OMPTeamsDistributeParallelDoDirective` | \ + :py:class:`psyclone.psyir.nodes.OMPLoopDirective` + ''' + node = MAP_STR_TO_LOOP_DIRECTIVES[self._omp_directive]( + children=children, + collapse=collapse) + # OMP loop does not support 'schedule' or 'reprod' attributes, so we do + # not attempt to set these properties for this specific directive + if self._omp_directive != "loop": + node.omp_schedule = self._omp_schedule + node.reprod = self._reprod + return node + + def apply(self, node, options=None): + '''Apply the OMPLoopTrans transformation to the specified PSyIR Loop. + + :param node: the supplied node to which we will apply the \ + OMPLoopTrans transformation + :type node: :py:class:`psyclone.psyir.nodes.Node` + :param options: a dictionary with options for transformations\ + and validation. + :type options: dictionary of string:values or None + :param bool options["reprod"]: + indicating whether reproducible reductions should be used. \ + By default the value from the config file will be used. + + ''' + if not options: + options = {} + self._reprod = options.get("reprod", + Config.get().reproducible_reductions) + + if self._reprod: + # When reprod is True, the variables th_idx and nthreads are + # expected to be declared in the scope. + root = node.ancestor(Routine) + + symtab = root.symbol_table + try: + symtab.lookup_with_tag("omp_thread_index") + except KeyError: + symtab.new_symbol( + "th_idx", tag="omp_thread_index", + symbol_type=DataSymbol, datatype=INTEGER_TYPE) + try: + symtab.lookup_with_tag("omp_num_threads") + except KeyError: + symtab.new_symbol( + "nthreads", tag="omp_num_threads", + symbol_type=DataSymbol, datatype=INTEGER_TYPE) + + super().apply(node, options) diff --git a/src/psyclone/psyir/transformations/reference2arrayrange_trans.py b/src/psyclone/psyir/transformations/reference2arrayrange_trans.py index de0b43e2e9..1f6b10fe4e 100644 --- a/src/psyclone/psyir/transformations/reference2arrayrange_trans.py +++ b/src/psyclone/psyir/transformations/reference2arrayrange_trans.py @@ -41,8 +41,8 @@ ''' from psyclone.psyGen import Transformation -from psyclone.psyir.nodes import Range, Reference, ArrayReference, Literal, \ - BinaryOperation +from psyclone.psyir.nodes import (Range, Reference, ArrayReference, Literal, + BinaryOperation) from psyclone.psyir.symbols import INTEGER_TYPE, ArrayType from psyclone.psyir.transformations.transformation_error \ import TransformationError @@ -134,6 +134,8 @@ def validate(self, node, options=None): :raises TransformationError: if the node is not a Reference \ node or the Reference node not does not reference an array \ symbol. + :raises TransformationError: if the Reference node is \ + within an LBOUND, UBOUND or SIZE binaryoperator. ''' # TODO issue #1858. Add support for structures containing arrays. 
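A short sketch of how this validation guard interacts with typical use of Reference2ArrayRangeTrans, mirroring the walk-and-skip pattern used by preprocess_trans earlier in this patch (the program source is illustrative):

from psyclone.psyir.frontend.fortran import FortranReader
from psyclone.psyir.backend.fortran import FortranWriter
from psyclone.psyir.nodes import Reference
from psyclone.psyir.transformations import (Reference2ArrayRangeTrans,
                                            TransformationError)

psyir = FortranReader().psyir_from_source(
    "program demo\n"
    "  real, dimension(10) :: a, b\n"
    "  integer :: n\n"
    "  n = size(a, 1)\n"
    "  a = b\n"
    "end program demo\n")
trans = Reference2ArrayRangeTrans()
for ref in psyir.walk(Reference):
    try:
        trans.apply(ref)
    except TransformationError:
        pass  # e.g. the 'a' inside SIZE is rejected by validate() and skipped
# Expect 'n = SIZE(a, 1)' to be left unchanged and 'a(:) = b(:)' in the output.
print(FortranWriter()(psyir))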
@@ -146,6 +148,14 @@ def validate(self, node, options=None): raise TransformationError( f"The supplied node should be a Reference to a symbol " f"that is an array, but '{node.symbol.name}' is not.") + if (isinstance(node.parent, BinaryOperation) and + node.parent.operator in [ + BinaryOperation.Operator.LBOUND, + BinaryOperation.Operator.UBOUND, + BinaryOperation.Operator.SIZE]): + raise TransformationError( + "References to arrays within LBOUND, UBOUND or SIZE " + "operators should not be transformed.") def apply(self, node, options=None): '''Apply the Reference2ArrayRangeTrans transformation to the specified diff --git a/src/psyclone/tests/domain/lfric/transformations/dynamo0p3_transformations_test.py b/src/psyclone/tests/domain/lfric/transformations/dynamo0p3_transformations_test.py index dcf3af2fd8..ffbb62c528 100644 --- a/src/psyclone/tests/domain/lfric/transformations/dynamo0p3_transformations_test.py +++ b/src/psyclone/tests/domain/lfric/transformations/dynamo0p3_transformations_test.py @@ -3665,7 +3665,7 @@ def test_reprod_view(monkeypatch, annexed, dist_mem): isched + "[invoke='invoke_0', dm=True]\n" + indent + "0: " + ompparallel + "[]\n" + 2*indent + sched + "[]\n" + - 3*indent + "0: " + ompdo + "[reprod=True]\n" + + 3*indent + "0: " + ompdo + "[omp_schedule=static,reprod=True]\n" + 4*indent + sched + "[]\n" + 5*indent + "0: " + loop + "[type='dof', " "field_space='any_space_1', it_space='dof', " @@ -3680,7 +3680,7 @@ def test_reprod_view(monkeypatch, annexed, dist_mem): indent + "1: " + gsum + "[scalar='asum']\n" + indent + "2: " + ompparallel + "[]\n" + 2*indent + sched + "[]\n" + - 3*indent + "0: " + ompdo + "[]\n" + + 3*indent + "0: " + ompdo + "[omp_schedule=static]\n" + 4*indent + sched + "[]\n" + 5*indent + "0: " + loop + "[type='dof', " "field_space='any_space_1', it_space='dof', " @@ -3694,7 +3694,7 @@ def test_reprod_view(monkeypatch, annexed, dist_mem): 2*indent + ompprivate + "[]\n" + indent + "3: " + ompparallel + "[]\n" + 2*indent + sched + "[]\n" + - 3*indent + "0: " + ompdo + "[reprod=True]\n" + + 3*indent + "0: " + ompdo + "[omp_schedule=static,reprod=True]\n" + 4*indent + sched + "[]\n" + 5*indent + "0: " + loop + "[type='dof', " "field_space='any_space_1', it_space='dof', " @@ -3714,7 +3714,7 @@ def test_reprod_view(monkeypatch, annexed, dist_mem): isched + "[invoke='invoke_0', dm=False]\n" + indent + "0: " + ompparallel + "[]\n" + 2*indent + sched + "[]\n" + - 3*indent + "0: " + ompdo + "[reprod=True]\n" + + 3*indent + "0: " + ompdo + "[omp_schedule=static,reprod=True]\n" + 4*indent + sched + "[]\n" + 5*indent + "0: " + loop + "[type='dof', " "field_space='any_space_1', it_space='dof', " @@ -3728,7 +3728,7 @@ def test_reprod_view(monkeypatch, annexed, dist_mem): 2*indent + ompprivate + "[]\n" + indent + "1: " + ompparallel + "[]\n" + 2*indent + sched + "[]\n" + - 3*indent + "0: " + ompdo + "[]\n" + + 3*indent + "0: " + ompdo + "[omp_schedule=static]\n" + 4*indent + sched + "[]\n" + 5*indent + "0: " + loop + "[type='dof', " "field_space='any_space_1', it_space='dof', " @@ -3742,7 +3742,7 @@ def test_reprod_view(monkeypatch, annexed, dist_mem): 2*indent + ompprivate + "[]\n" + indent + "2: " + ompparallel + "[]\n" + 2*indent + sched + "[]\n" + - 3*indent + "0: " + ompdo + "[reprod=True]\n" + + 3*indent + "0: " + ompdo + "[omp_schedule=static,reprod=True]\n" + 4*indent + sched + "[]\n" + 5*indent + "0: " + loop + "[type='dof', " "field_space='any_space_1', it_space='dof', " diff --git a/src/psyclone/tests/nemo/transformations/openmp/openmp_test.py 
b/src/psyclone/tests/nemo/transformations/openmp/openmp_test.py index 08bca3f742..ef9844ef33 100644 --- a/src/psyclone/tests/nemo/transformations/openmp/openmp_test.py +++ b/src/psyclone/tests/nemo/transformations/openmp/openmp_test.py @@ -77,7 +77,7 @@ def test_omp_explicit_gen(): " real, dimension(jpi,jpj,jpk) :: umask\n" "\n" " !$omp parallel do default(shared), private(ji,jj,jk), " - "schedule(static)\n" + "schedule(auto)\n" " do jk = 1, jpk, 1\n" " do jj = 1, jpj, 1\n" " do ji = 1, jpi, 1\n" @@ -206,7 +206,7 @@ def test_omp_do_code_gen(): .else_body[0].else_body[0].dir_body[0]) gen_code = str(psy.gen).lower() correct = ''' !$omp parallel default(shared), private(ji,jj) - !$omp do schedule(static) + !$omp do schedule(auto) do jj = 1, jpj, 1 do ji = 1, jpi, 1 zdkt(ji,jj) = (ptb(ji,jj,jk - 1,jn) - ptb(ji,jj,jk,jn)) * \ @@ -233,7 +233,7 @@ def test_omp_do_within_if(): expected = ( " else\n" " !$omp parallel do default(shared), private(ji,jj), " - "schedule(static)\n" + "schedule(auto)\n" " do jj = 1, jpj, 1\n" " do ji = 1, jpi, 1\n" " zdkt(ji,jj) = (ptb(ji,jj,jk - 1,jn) - " diff --git a/src/psyclone/tests/nemo/transformations/profiling/nemo_profile_test.py b/src/psyclone/tests/nemo/transformations/profiling/nemo_profile_test.py index e545738ce5..f1fd620fc4 100644 --- a/src/psyclone/tests/nemo/transformations/profiling/nemo_profile_test.py +++ b/src/psyclone/tests/nemo/transformations/profiling/nemo_profile_test.py @@ -560,7 +560,7 @@ def test_profile_nemo_openmp(parser): "\n" " call profile_psy_data % prestart(\"do_loop\", \"r0\", 0, 0)\n" " !$omp parallel do default(shared), private(ji,jj), " - "schedule(static)\n" + "schedule(auto)\n" " do jj = 1, jpj, 1" in code) diff --git a/src/psyclone/tests/psyad/transformations/test_preprocess.py b/src/psyclone/tests/psyad/transformations/test_preprocess.py index 3aa612b8bb..1f556d371f 100644 --- a/src/psyclone/tests/psyad/transformations/test_preprocess.py +++ b/src/psyclone/tests/psyad/transformations/test_preprocess.py @@ -64,6 +64,54 @@ def test_preprocess_no_change(): assert result == code +def test_preprocess_reference2arrayrange(tmpdir, fortran_reader, + fortran_writer): + '''Test that the preprocess script replaces assignments that contain + arrays that use array notation with arrays using range notation + (for example, in Fortran, from a = b * c to a(:) = b(:) * c(:)) or + with equivalent code that uses explicit loops. Also test that + arrays in LBOUND and UBOUND intrinsics do not get modified. 
+ + ''' + code = ( + "program test\n" + "real, dimension(10,10) :: a,b,c,e,f\n" + "real, dimension(10) :: d\n" + "integer :: i\n" + "a = b * c\n" + "do i = lbound(d,1), ubound(d,1)\n" + " d(i) = 0.0\n" + "end do\n" + "e = f\n" + "end program test\n") + expected = ( + "program test\n" + " real, dimension(10,10) :: a\n" + " real, dimension(10,10) :: b\n" + " real, dimension(10,10) :: c\n" + " real, dimension(10,10) :: e\n" + " real, dimension(10,10) :: f\n" + " real, dimension(10) :: d\n" + " integer :: i\n" + " integer :: idx\n" + " integer :: idx_1\n\n" + " do idx = 1, 10, 1\n" + " do idx_1 = 1, 10, 1\n" + " a(idx_1,idx) = b(idx_1,idx) * c(idx_1,idx)\n" + " enddo\n" + " enddo\n" + " do i = LBOUND(d, 1), UBOUND(d, 1), 1\n" + " d(i) = 0.0\n" + " enddo\n" + " e(:,:) = f(:,:)\n\n" + "end program test\n") + psyir = fortran_reader.psyir_from_source(code) + preprocess_trans(psyir, ["a", "c"]) + result = fortran_writer(psyir) + assert result == expected + assert Compile(tmpdir).string_compiles(result) + + def test_preprocess_dotproduct(tmpdir, fortran_reader, fortran_writer): '''Test that the preprocess script replaces a dotproduct with equivalent code. diff --git a/src/psyclone/tests/psyir/backend/psyir_openmp_test.py b/src/psyclone/tests/psyir/backend/psyir_openmp_test.py index 6cef799222..a970fe6db9 100644 --- a/src/psyclone/tests/psyir/backend/psyir_openmp_test.py +++ b/src/psyclone/tests/psyir/backend/psyir_openmp_test.py @@ -190,7 +190,7 @@ def test_nemo_omp_do(fortran_reader): # Disable checks on global constraints to remove need for parallel region fvisitor = FortranWriter(check_global_constraints=False) result = fvisitor(schedule) - correct = ''' !$omp do schedule(static) + correct = ''' !$omp do schedule(auto) do i = 1, 20, 2 a = 2 * i b(i) = b(i) + a @@ -200,7 +200,7 @@ def test_nemo_omp_do(fortran_reader): cvisitor = CWriter(check_global_constraints=False) result = cvisitor(schedule[0]) - correct = '''#pragma omp do schedule(static) + correct = '''#pragma omp do schedule(auto) { for(i=1; i<=20; i+=2) { @@ -235,14 +235,14 @@ def test_gocean_omp_do(): # GOInvokeSchedule is not yet supported, so start with # the OMP node: result = fvisitor(invoke.schedule[0]) - correct = '''!$omp do schedule(static) + correct = '''!$omp do schedule(auto) a = b !$omp end do''' assert correct in result cvisitor = CWriter(check_global_constraints=False) result = cvisitor(invoke.schedule[0]) - correct = '''#pragma omp do schedule(static) + correct = '''#pragma omp do schedule(auto) { a = b; }''' diff --git a/src/psyclone/tests/psyir/frontend/fparser2_part_ref_test.py b/src/psyclone/tests/psyir/frontend/fparser2_part_ref_test.py new file mode 100644 index 0000000000..cf1e2d3692 --- /dev/null +++ b/src/psyclone/tests/psyir/frontend/fparser2_part_ref_test.py @@ -0,0 +1,152 @@ +# ----------------------------------------------------------------------------- +# BSD 3-Clause License +# +# Copyright (c) 2022, Science and Technology Facilities Council. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. 
+# +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# ----------------------------------------------------------------------------- +# Author R. W. Ford, STFC Daresbury Lab + +'''Performs pytest tests on the parsing of Part_Ref in the fparser2 + PSyIR front-end. + +''' +import pytest + +from fparser.common.readfortran import FortranStringReader +from fparser.two.Fortran2003 import Execution_Part + +from psyclone.psyir.frontend.fparser2 import Fparser2Reader +from psyclone.psyir.nodes import KernelSchedule, Routine, Call, ArrayReference +from psyclone.psyir.symbols import SymbolError, DataSymbol, ScalarType, \ + INTEGER_TYPE, RoutineSymbol, ArrayType + + +@pytest.mark.usefixtures("f2008_parser") +def test_handling_part_ref_error(): + '''Test that the expected exception is raised when the parsed symbol + is not found in the symbol table. + + ''' + reader = FortranStringReader("x(2)=1") + fparser2part_ref = Execution_Part.match(reader)[0][0] + + fake_parent = KernelSchedule('kernel') + processor = Fparser2Reader() + + # If one of the ancestors has a symbol table then process_nodes() + # checks that the symbol is declared. + with pytest.raises(SymbolError) as error: + processor.process_nodes(fake_parent, [fparser2part_ref]) + assert "No Symbol found for name 'x'." in str(error.value) + + +@pytest.mark.usefixtures("f2008_parser") +def test_handling_part_ref(): + '''Test that fparser2 Part_Ref is converted to the expected PSyIR + tree structure. + + ''' + reader = FortranStringReader("x(2)=1") + fparser2part_ref = Execution_Part.match(reader)[0][0] + + fake_parent = KernelSchedule('kernel') + fake_parent.symbol_table.add(DataSymbol('x', INTEGER_TYPE)) + + processor = Fparser2Reader() + processor.process_nodes(fake_parent, [fparser2part_ref]) + + assert len(fake_parent.children) == 1 + assignment = fake_parent.children[0] + assert len(assignment.children) == 2 + new_node = assignment.children[0] + assert isinstance(new_node, ArrayReference) + assert new_node.name == "x" + assert len(new_node.children) == 1 # Array dimensions + + +@pytest.mark.usefixtures("f2008_parser") +def test_handling_part_ref_expression(): + '''Test that fparser2 Part_Ref is converted to the expected PSyIR + tree structure when there is a complex expression. 
+ + ''' + # Parse a complex array expression + reader = FortranStringReader("x(i+3,j-4,(z*5)+1)=1") + fparser2part_ref = Execution_Part.match(reader)[0][0] + + fake_parent = KernelSchedule('assign') + array_type = ArrayType(INTEGER_TYPE, [10, 10, 10]) + fake_parent.symbol_table.add(DataSymbol('x', array_type)) + fake_parent.symbol_table.add(DataSymbol('i', INTEGER_TYPE)) + fake_parent.symbol_table.add(DataSymbol('j', INTEGER_TYPE)) + fake_parent.symbol_table.add(DataSymbol('z', INTEGER_TYPE)) + + processor = Fparser2Reader() + processor.process_nodes(fake_parent, [fparser2part_ref]) + # Check a new node was generated and connected to parent + assert len(fake_parent.children) == 1 + new_node = fake_parent[0].lhs + assert isinstance(new_node, ArrayReference) + assert new_node.name == "x" + assert len(new_node.children) == 3 # Array dimensions + + +def test_handling_part_ref_function(fortran_reader): + '''Test that fparser2 Part_Ref is converted to the expected PSyIR + tree structure when there is a function. The function will be a + part_ref but will have a RoutineSymbol. + + ''' + code = ( + "module test_mod\n" + "contains\n" + " subroutine test_sub()\n" + " integer :: a\n" + " integer :: i\n" + " a = test_func(i)\n" + " end subroutine\n" + " integer function test_func(i)\n" + " integer :: i\n" + " test_func = i\n" + " end function test_func\n" + "end module\n") + psyir = fortran_reader.psyir_from_source(code) + + function = psyir.children[0].children[1] + assert isinstance(function, Routine) + assert isinstance(function.return_symbol, DataSymbol) + assert isinstance(function.return_symbol.datatype, ScalarType) + call = psyir.children[0].children[0].children[0].rhs + assert isinstance(call, Call) + assert call.routine.name == "test_func" + symbol = call.scope.symbol_table.lookup("test_func") + assert isinstance(symbol, RoutineSymbol) + assert isinstance(symbol.datatype, ScalarType) + assert symbol.datatype is function.return_symbol.datatype diff --git a/src/psyclone/tests/psyir/frontend/fparser2_test.py b/src/psyclone/tests/psyir/frontend/fparser2_test.py index de81893fb5..ab0150c7f6 100644 --- a/src/psyclone/tests/psyir/frontend/fparser2_test.py +++ b/src/psyclone/tests/psyir/frontend/fparser2_test.py @@ -37,7 +37,6 @@ ''' Performs py.test tests on the fparser2 PSyIR front-end ''' -from __future__ import absolute_import import pytest import fparser @@ -61,7 +60,7 @@ UnaryOperation, BinaryOperation, NaryOperation, IfBlock, Reference, ArrayReference, Container, Literal, Range, KernelSchedule, RegionDirective, StandaloneDirective, StructureReference, - ArrayOfStructuresReference) + ArrayOfStructuresReference, Call, Routine) from psyclone.psyir.symbols import ( DataSymbol, ContainerSymbol, SymbolTable, RoutineSymbol, ArgumentInterface, SymbolError, ScalarType, ArrayType, INTEGER_TYPE, REAL_TYPE, @@ -421,7 +420,7 @@ def test_array_notation_rank(): # An array with no dimensions raises an exception array_type = ArrayType(REAL_TYPE, [10]) symbol = DataSymbol("a", array_type) - array = ArrayReference(symbol, []) + array = ArrayReference(symbol) with pytest.raises(InternalError) as excinfo: Fparser2Reader._array_notation_rank(array) assert ("ArrayReference malformed or incomplete: must have one or more " @@ -1784,52 +1783,6 @@ def test_handling_parenthesis(): assert isinstance(new_node, BinaryOperation) -@pytest.mark.usefixtures("f2008_parser") -def test_handling_part_ref(): - ''' Test that fparser2 Part_Ref is converted to the expected PSyIR - tree structure. 
- ''' - reader = FortranStringReader("x(2)=1") - fparser2part_ref = Execution_Part.match(reader)[0][0] - - fake_parent = KernelSchedule('kernel') - processor = Fparser2Reader() - - # If one of the ancestors has a symbol table then process_nodes() - # checks that the symbol is declared. - with pytest.raises(SymbolError) as error: - processor.process_nodes(fake_parent, [fparser2part_ref]) - assert "No Symbol found for name 'x'." in str(error.value) - - fake_parent.symbol_table.add(DataSymbol('x', INTEGER_TYPE)) - processor.process_nodes(fake_parent, [fparser2part_ref]) - assert len(fake_parent.children) == 1 - assignment = fake_parent.children[0] - assert len(assignment.children) == 2 - new_node = assignment.children[0] - assert isinstance(new_node, ArrayReference) - assert new_node.name == "x" - assert len(new_node.children) == 1 # Array dimensions - - # Parse a complex array expression - reader = FortranStringReader("x(i+3,j-4,(z*5)+1)=1") - fparser2part_ref = Execution_Part.match(reader)[0][0] - - fake_parent = KernelSchedule('assign') - array_type = ArrayType(INTEGER_TYPE, [10, 10, 10]) - fake_parent.symbol_table.add(DataSymbol('x', array_type)) - fake_parent.symbol_table.add(DataSymbol('i', INTEGER_TYPE)) - fake_parent.symbol_table.add(DataSymbol('j', INTEGER_TYPE)) - fake_parent.symbol_table.add(DataSymbol('z', INTEGER_TYPE)) - processor.process_nodes(fake_parent, [fparser2part_ref]) - # Check a new node was generated and connected to parent - assert len(fake_parent.children) == 1 - new_node = fake_parent[0].lhs - assert isinstance(new_node, ArrayReference) - assert new_node.name == "x" - assert len(new_node.children) == 3 # Array dimensions - - @pytest.fixture(scope="function", name="symbol_table") def make_symbol_table(): ''' diff --git a/src/psyclone/tests/psyir/nodes/array_mixin_test.py b/src/psyclone/tests/psyir/nodes/array_mixin_test.py index a9eaee7d3a..5460dea354 100644 --- a/src/psyclone/tests/psyir/nodes/array_mixin_test.py +++ b/src/psyclone/tests/psyir/nodes/array_mixin_test.py @@ -38,8 +38,9 @@ ''' Performs py.test tests of the ArrayMixin PSyIR nodes trait. ''' import pytest +from psyclone.errors import InternalError from psyclone.psyir.nodes import ArrayReference, ArrayOfStructuresReference, \ - Range, Literal + BinaryOperation, Range, Literal, Routine, Assignment from psyclone.psyir.symbols import DataSymbol, DeferredType, ArrayType, \ INTEGER_TYPE @@ -80,13 +81,16 @@ def test_is_upper_lower_bound(fortran_reader): code = ( "subroutine test()\n" "real a(10)\n" + "character(10) my_str\n" "a(1:10) = 0.0\n" + "my_str(2:2) = 'b'\n" "end subroutine\n") # Return True as the literal values or the declaration and array # reference match. psyir = fortran_reader.psyir_from_source(code) - array_ref = psyir.children[0].children[0].lhs + assigns = psyir.walk(Assignment) + array_ref = assigns[0].lhs assert array_ref.is_lower_bound(0) assert array_ref.is_upper_bound(0) @@ -105,3 +109,54 @@ def test_is_upper_lower_bound(fortran_reader): array_ref = psyir.children[0].children[0].lhs assert not array_ref.is_lower_bound(0) assert not array_ref.is_upper_bound(0) + + # Return False if the symbol being referenced is of UnknownFortranType. 
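A minimal sketch of the bounds queries exercised by this test (illustrative source; the behaviour shown matches the assertions above, while a symbol of UnknownFortranType, such as the character substring case, returns False for both):

from psyclone.psyir.frontend.fortran import FortranReader
from psyclone.psyir.nodes import Assignment

psyir = FortranReader().psyir_from_source(
    "subroutine demo()\n"
    "  real :: a(10)\n"
    "  a(1:10) = 0.0\n"
    "end subroutine demo\n")
lhs = psyir.walk(Assignment)[0].lhs
# The slice 1:10 matches the declared bounds of 'a', so both queries hold.
print(lhs.is_lower_bound(0), lhs.is_upper_bound(0))  # True True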
+ array_ref = assigns[1].lhs + assert not array_ref.is_lower_bound(0) + assert not array_ref.is_upper_bound(0) + + +def test_get_effective_shape(fortran_reader, fortran_writer): + '''Tests for the _get_effective_shape() method.''' + code = ( + "subroutine test()\n" + " use some_mod\n" + " integer :: indices(8,3)\n" + " real a(10), b(10,10)\n" + " a(1:10) = 0.0\n" + " b(indices(2:3,1), 2:5) = 2.0\n" + " b(indices(2:3,1:2), 2:5) = 2.0\n" + " a(f()) = 2.0\n" + " a(2+3) = 1.0\n" + "end subroutine\n") + psyir = fortran_reader.psyir_from_source(code) + routine = psyir.walk(Routine)[0] + # Direct array slice. + shape = routine.children[0].lhs._get_effective_shape() + assert len(shape) == 1 + assert isinstance(shape[0], BinaryOperation) + # Indirect array slice. + shape = routine.children[1].lhs._get_effective_shape() + assert len(shape) == 2 + assert isinstance(shape[0], BinaryOperation) + code = fortran_writer(shape[0]) + # An ArrayType does not store the number of elements, just lower and upper + # bounds. Therefore, we end up recursively computing the no. of elements. + # The answer is still "2" though! + assert code == "((3 - 2) / 1 + 1 - 1) / 1 + 1" + code = fortran_writer(shape[1]) + assert code == "(5 - 2) / 1 + 1" + # An indirect array slice can only be 1D. + with pytest.raises(InternalError) as err: + _ = routine.children[2].lhs._get_effective_shape() + assert ("array defining a slice of a dimension of another array must be " + "1D but 'indices' used to index into 'b' has 2 dimensions" in + str(err.value)) + # Indirect array access using function call. + with pytest.raises(NotImplementedError) as err: + _ = routine.children[3].lhs._get_effective_shape() + assert "include a function call or expression" in str(err.value) + # Array access with expression in indices. + with pytest.raises(NotImplementedError) as err: + _ = routine.children[4].lhs._get_effective_shape() + assert "include a function call or expression" in str(err.value) diff --git a/src/psyclone/tests/psyir/nodes/array_of_structures_reference_test.py b/src/psyclone/tests/psyir/nodes/array_of_structures_reference_test.py index 8de5333f6f..b55caaaaab 100644 --- a/src/psyclone/tests/psyir/nodes/array_of_structures_reference_test.py +++ b/src/psyclone/tests/psyir/nodes/array_of_structures_reference_test.py @@ -38,7 +38,6 @@ ''' This module contains pytest tests for the ArrayOfStructuresReference class. ''' -from __future__ import absolute_import import pytest from psyclone.tests.utilities import check_links from psyclone.psyir import symbols, nodes @@ -167,3 +166,43 @@ def test_ast_is_array(): asref = nodes.ArrayOfStructuresReference.create( ssym, [nodes.Literal("2", symbols.INTEGER_TYPE)], ["nx"]) assert asref.is_array + + +def test_asr_datatype(): + '''Test that the datatype property works correctly for + ArrayOfStructuresReference. 
(The actual implementation is in + StructureReference.)''' + one = nodes.Literal("1", symbols.INTEGER_TYPE) + two = nodes.Literal("2", symbols.INTEGER_TYPE) + + ndofs = symbols.DataSymbol("ndofs", symbols.INTEGER_TYPE) + atype = symbols.ArrayType(symbols.REAL_TYPE, + [nodes.Reference(ndofs), nodes.Reference(ndofs)]) + grid_type = symbols.StructureType.create([ + ("nx", symbols.INTEGER_TYPE, symbols.Symbol.Visibility.PUBLIC), + ("data", atype, symbols.Symbol.Visibility.PUBLIC)]) + grid_type_symbol = symbols.DataTypeSymbol("grid_type", grid_type) + grid_array_type = symbols.ArrayType(grid_type_symbol, [5]) + ssym = symbols.DataSymbol("grid", grid_array_type) + # Reference to a single member of the array of structures and to the "nx" + # member of it. + asref = nodes.ArrayOfStructuresReference.create( + ssym, [two.copy()], ["nx"]) + assert asref.datatype == symbols.INTEGER_TYPE + # Reference to a range of members of the array of structures and to the + # "nx" member of each. + my_range = nodes.Range.create(two.copy(), + nodes.Literal("3", symbols.INTEGER_TYPE)) + asref2 = nodes.ArrayOfStructuresReference.create( + ssym, [my_range], ["nx"]) + assert isinstance(asref2.datatype, symbols.ArrayType) + assert asref2.datatype.intrinsic == symbols.ScalarType.Intrinsic.INTEGER + assert len(asref2.datatype.shape) == 1 + assert asref2.datatype.shape[0].lower == one + assert isinstance(asref2.datatype.shape[0].upper, nodes.BinaryOperation) + # Reference to a single member of the array of structures and to the "data" + # member of it which is itself an array. + asref3 = nodes.ArrayOfStructuresReference.create( + ssym, [one.copy()], ["data"]) + assert isinstance(asref3.datatype, symbols.ArrayType) + assert len(asref3.datatype.shape) == 2 diff --git a/src/psyclone/tests/psyir/nodes/array_reference_test.py b/src/psyclone/tests/psyir/nodes/array_reference_test.py index e4de58deea..d98dc9ae1f 100644 --- a/src/psyclone/tests/psyir/nodes/array_reference_test.py +++ b/src/psyclone/tests/psyir/nodes/array_reference_test.py @@ -38,15 +38,14 @@ ''' Performs py.test tests of the ArrayReference PSyIR node. ''' -from __future__ import absolute_import import pytest +from psyclone.errors import GenerationError, InternalError +from psyclone.psyir.backend.fortran import FortranWriter from psyclone.psyir.nodes.node import colored from psyclone.psyir.nodes import Reference, ArrayReference, Assignment, \ Literal, BinaryOperation, Range, KernelSchedule -from psyclone.psyir.symbols import DataSymbol, ArrayType, \ +from psyclone.psyir.symbols import DataSymbol, ArrayType, ScalarType, \ REAL_SINGLE_TYPE, INTEGER_SINGLE_TYPE, REAL_TYPE, INTEGER_TYPE -from psyclone.errors import GenerationError, InternalError -from psyclone.psyir.backend.fortran import FortranWriter from psyclone.tests.utilities import check_links @@ -65,11 +64,14 @@ def test_array_can_be_printed(): '''Test that an ArrayReference instance can always be printed (i.e. 
is initialised fully)''' kschedule = KernelSchedule("kname") - symbol = DataSymbol("aname", INTEGER_SINGLE_TYPE) + symbol = DataSymbol("aname", ArrayType(INTEGER_SINGLE_TYPE, [10])) kschedule.symbol_table.add(symbol) assignment = Assignment() - array = ArrayReference(symbol, assignment) + array = ArrayReference(symbol, parent=assignment) assert "ArrayReference[name:'aname']\n" in str(array) + array2 = ArrayReference.create(symbol, [Literal("1", INTEGER_TYPE)]) + assert ("ArrayReference[name:'aname']\nLiteral[value:'1', " + "Scalar]" in str(array2)) def test_array_create(): @@ -482,3 +484,29 @@ def test_array_same_array(): # A Reference to the array symbol should also match bare_array = Reference(test_sym) assert array.is_same_array(bare_array) is True + + +def test_array_datatype(fortran_writer): + '''Test the datatype() method for an ArrayReference.''' + test_sym = DataSymbol("test", ArrayType(REAL_TYPE, [10])) + one = Literal("1", INTEGER_TYPE) + two = Literal("2", INTEGER_TYPE) + four = Literal("4", INTEGER_TYPE) + # Reference to a single element of an array. + aref = ArrayReference.create(test_sym, [one]) + assert aref.datatype == REAL_TYPE + # Reference to a 1D sub-array of a 2D array. + test_sym2d = DataSymbol("test", ArrayType(REAL_TYPE, [10, 8])) + bref = ArrayReference.create(test_sym2d, [two.copy(), + Range.create(two.copy(), + four.copy())]) + assert isinstance(bref.datatype, ArrayType) + assert bref.datatype.intrinsic == ScalarType.Intrinsic.REAL + assert len(bref.datatype.shape) == 1 + # The sub-array will have a lower bound of one. + assert bref.datatype.shape[0].lower == one + upper = bref.datatype.shape[0].upper + assert isinstance(upper, BinaryOperation) + # The easiest way to check the expression is to convert it to Fortran + code = fortran_writer(upper) + assert code == "(4 - 2) / 1 + 1" diff --git a/src/psyclone/tests/psyir/nodes/assignment_test.py b/src/psyclone/tests/psyir/nodes/assignment_test.py index 21c4f441ba..878d8684db 100644 --- a/src/psyclone/tests/psyir/nodes/assignment_test.py +++ b/src/psyclone/tests/psyir/nodes/assignment_test.py @@ -1,7 +1,7 @@ # ----------------------------------------------------------------------------- # BSD 3-Clause License # -# Copyright (c) 2019-2021, Science and Technology Facilities Council. +# Copyright (c) 2019-2022, Science and Technology Facilities Council. # All rights reserved. # # Redistribution and use in source and binary forms, with or without @@ -38,7 +38,6 @@ ''' Performs py.test tests on the Assignment PSyIR node. ''' -from __future__ import absolute_import import pytest from psyclone.errors import InternalError, GenerationError from psyclone.f2pygen import ModuleGen @@ -78,7 +77,7 @@ def test_assignment_semantic_navigation(): assert "' malformed or incomplete. It needs at least 1 child to have " \ "a lhs." 
in str(err.value) - ref = Reference(DataSymbol("a", REAL_SINGLE_TYPE), assignment) + ref = Reference(DataSymbol("a", REAL_SINGLE_TYPE), parent=assignment) assignment.addchild(ref) # rhs should fail if second child is not present diff --git a/src/psyclone/tests/psyir/nodes/omp_directives_test.py b/src/psyclone/tests/psyir/nodes/omp_directives_test.py index 4a7879d1c8..bb6204ac62 100644 --- a/src/psyclone/tests/psyir/nodes/omp_directives_test.py +++ b/src/psyclone/tests/psyir/nodes/omp_directives_test.py @@ -50,8 +50,9 @@ OMPTaskwaitDirective, OMPTargetDirective, OMPLoopDirective, Schedule, \ Return, OMPSingleDirective, Loop, Literal, Routine, Assignment, \ Reference, OMPDeclareTargetDirective, OMPNowaitClause, \ - OMPGrainsizeClause, OMPNumTasksClause, OMPNogroupClause, CodeBlock, \ - OMPPrivateClause, OMPDefaultClause, OMPReductionClause, OMPScheduleClause + OMPGrainsizeClause, OMPNumTasksClause, OMPNogroupClause, \ + OMPPrivateClause, OMPDefaultClause, OMPReductionClause, \ + OMPScheduleClause, OMPTeamsDistributeParallelDoDirective, CodeBlock from psyclone.psyir.symbols import DataSymbol, INTEGER_TYPE, SymbolTable, \ REAL_SINGLE_TYPE, INTEGER_SINGLE_TYPE from psyclone.errors import InternalError, GenerationError @@ -232,6 +233,32 @@ def test_omp_parallel_do_changes_begin_str(fortran_reader): assert isinstance(pdir.children[3], OMPScheduleClause) +def test_omp_teams_distribute_parallel_do_strings( + fortran_reader, fortran_writer): + ''' Check that the beginning and ending directive strings that the + backend uses are the expected ones.''' + code = ''' + subroutine my_subroutine() + integer, dimension(321, 10) :: A + integer, dimension(32, 10) :: B + integer :: i, ii + integer :: j + + do i = 1, 320, 32 + A(i, 1) = B(i, 1) + 1 + end do + end subroutine + ''' + tree = fortran_reader.psyir_from_source(code) + loop = tree.walk(Loop)[0] + new_directive = OMPTeamsDistributeParallelDoDirective() + loop.replace_with(new_directive) + new_directive.dir_body.addchild(loop) + output = fortran_writer(tree) + assert "!$omp teams distribute parallel do" in output + assert "!$omp end teams distribute parallel do" in output + + def test_ompdo_constructor(): ''' Check that we can make an OMPDoDirective with and without children ''' @@ -256,6 +283,128 @@ def test_ompdo_constructor(): ompdo = OMPDoDirective(parent=schedule, children=[child]) assert len(ompdo.dir_body.children) == 1 + # Constructor with non-default parameters + ompdo = OMPDoDirective(omp_schedule="dynamic", collapse=4, reprod=True) + assert ompdo.omp_schedule == "dynamic" + assert ompdo.collapse == 4 + assert ompdo.reprod + assert str(ompdo) == "OMPDoDirective[omp_schedule=dynamic,collapse=4]" + + +def test_omp_do_directive_collapse_getter_and_setter(): + ''' Test the OMPDODirective collapse property setter and getter.''' + target = OMPDoDirective() + assert target.collapse is None + + with pytest.raises(ValueError) as err: + target.collapse = 0 + assert ("The OMPDoDirective collapse clause must be a positive integer " + "or None, but value '0' has been given." in str(err.value)) + + with pytest.raises(TypeError) as err: + target.collapse = 'a' + assert ("The OMPDoDirective collapse clause must be a positive integer " + "or None, but value 'a' has been given." 
in str(err.value)) + + # Set valid collapse values + target.collapse = 2 + assert target.collapse == 2 + assert target.begin_string() == "omp do schedule(auto) collapse(2)" + target.collapse = None + assert target.collapse is None + assert target.begin_string() == "omp do schedule(auto)" + + +def test_omp_do_directive_omp_schedule_getter_and_setter(): + ''' Test the OMPDODirective omp_schedule property setter and getter.''' + directive = OMPDoDirective() + # By default is auto + assert directive.omp_schedule == "auto" + + # By valid omp_schedules are accepted + directive.omp_schedule = "static" + assert directive.omp_schedule == "static" + directive.omp_schedule = "dynamic,3" + assert directive.omp_schedule == "dynamic,3" + + # Invalid omp_schedules raise a TypeError + with pytest.raises(TypeError) as err: + directive.omp_schedule = 3 + assert ("OMPDoDirective omp_schedule should be a str but found 'int'." + in str(err.value)) + + with pytest.raises(TypeError) as err: + directive.omp_schedule = "invalid,3" + assert ("OMPDoDirective omp_schedule should be one of ['runtime', " + "'static', 'dynamic', 'guided', 'auto'] but found 'invalid,3'." + in str(err.value)) + + +def test_omp_do_directive_validate_global_constraints(fortran_reader, + fortran_writer): + ''' Test the OMPDoDirective with a collapse value is only valid if + it has enough perfectly nested loops inside.''' + + code = ''' + subroutine my_subroutine() + integer, dimension(10, 10) :: A + integer, dimension(10, 10) :: B + integer :: i, j, val + + do i = 1, 10 + val = 1 + do j = 1, 10 + A(i, j) = B(i, j) + 1 + end do + end do + do i = 1, 10 + do j = 1, 10 + A(i, j) = B(i, j) + 1 + end do + val = 1 + end do + do i = 1, 10 + do j = 1, 10 + A(i, j) = B(i, j) + 1 + end do + end do + end subroutine + ''' + tree = fortran_reader.psyir_from_source(code) + loops = tree.walk(Loop, stop_type=Loop) + for loop in loops: + parent = loop.parent + position = loop.position + directive = OMPParallelDoDirective(children=[loop.detach()], + collapse=2) + parent.addchild(directive, position) + + directive = tree.walk(OMPParallelDoDirective) + + # The first and second loop nests will fail the validation + for test_directive in directive[0:2]: + with pytest.raises(GenerationError) as err: + _ = fortran_writer(test_directive) + assert ("OMPParallelDoDirective must have as many immediately nested " + "loops as the collapse clause specifies but 'OMPParallelDo" + "Directive[omp_schedule=auto,collapse=2]' has a collapse=2 " + "and the nested body at depth 1 cannot be collapsed." + in str(err.value)) + + # The third loop nest will succeed + code = fortran_writer(directive[2]) + assert "collapse(2)" in code + + # but it will also fail if trying to collapse more loops than available + directive[2].collapse = 3 + with pytest.raises(GenerationError) as err: + _ = fortran_writer(directive[2]) + assert ("OMPParallelDoDirective must have as many immediately nested " + "loops as the collapse clause specifies but 'OMPParallelDo" + "Directive[omp_schedule=auto,collapse=3]' has a collapse=3 and " + "the nested body at depth 2 cannot be collapsed." 
+ in str(err.value)) + def test_omp_pdo_validate_child(): ''' Test the _validate_child method for OMPParallelDoDirective''' diff --git a/src/psyclone/tests/psyir/nodes/reference_test.py b/src/psyclone/tests/psyir/nodes/reference_test.py index e2f3bc7c54..7354ac7542 100644 --- a/src/psyclone/tests/psyir/nodes/reference_test.py +++ b/src/psyclone/tests/psyir/nodes/reference_test.py @@ -39,15 +39,16 @@ ''' Performs py.test tests on the Reference PSyIR node. ''' -from __future__ import absolute_import import pytest -from psyclone.psyir.nodes import Reference, ArrayReference, Assignment, \ - Literal, BinaryOperation, Range, KernelSchedule -from psyclone.psyir.symbols import DataSymbol, ArrayType, \ - REAL_SINGLE_TYPE, INTEGER_SINGLE_TYPE, REAL_TYPE, INTEGER_TYPE -from psyclone.psyGen import GenerationError from psyclone.core.access_info import VariablesAccessInfo +from psyclone.psyGen import GenerationError +from psyclone.psyir.nodes import ( + Reference, ArrayReference, Assignment, + Literal, BinaryOperation, Range, KernelSchedule) from psyclone.psyir.nodes.node import colored +from psyclone.psyir.symbols import ( + DataSymbol, ArrayType, ScalarType, + REAL_SINGLE_TYPE, INTEGER_SINGLE_TYPE, REAL_TYPE, INTEGER_TYPE) def test_reference_bad_init(): @@ -91,7 +92,7 @@ def test_reference_node_str(): symbol = DataSymbol("rname", INTEGER_SINGLE_TYPE) kschedule.symbol_table.add(symbol) assignment = Assignment() - ref = Reference(symbol, assignment) + ref = Reference(symbol, parent=assignment) coloredtext = colored("Reference", Reference._colour) assert coloredtext+"[name:'rname']" in ref.node_str() @@ -103,7 +104,7 @@ def test_reference_can_be_printed(): symbol = DataSymbol("rname", INTEGER_SINGLE_TYPE) kschedule.symbol_table.add(symbol) assignment = Assignment() - ref = Reference(symbol, assignment) + ref = Reference(symbol, parent=assignment) assert "Reference[name:'rname']" in str(ref) @@ -135,6 +136,15 @@ def test_reference_is_array(): assert reference.is_array is False +def test_reference_datatype(): + '''Test the datatype property. + + ''' + reference = Reference(DataSymbol("test", REAL_TYPE)) + assert isinstance(reference.datatype, ScalarType) + assert reference.datatype.intrinsic == ScalarType.Intrinsic.REAL + + def test_reference_accesses(): '''Test that the reference_accesses method behaves as expected in the usual case (see the next test for the unusual case). diff --git a/src/psyclone/tests/psyir/nodes/structure_reference_test.py b/src/psyclone/tests/psyir/nodes/structure_reference_test.py index ddae1e9321..e95f9734fc 100644 --- a/src/psyclone/tests/psyir/nodes/structure_reference_test.py +++ b/src/psyclone/tests/psyir/nodes/structure_reference_test.py @@ -1,7 +1,7 @@ # ----------------------------------------------------------------------------- # BSD 3-Clause License # -# Copyright (c) 2020-2021, Science and Technology Facilities Council. +# Copyright (c) 2020-2022, Science and Technology Facilities Council. # All rights reserved. # # Redistribution and use in source and binary forms, with or without @@ -38,8 +38,6 @@ ''' Module containing pytest tests for the StructureReference class. ''' -from __future__ import absolute_import - import pytest from psyclone.core import Signature, VariablesAccessInfo @@ -204,3 +202,123 @@ def test_struc_ref_semantic_nav(): assert ("StructureReference malformed or incomplete. 
It must have a " "single child that must be a (sub-class of) Member, but " "found: ['broken']" in str(err.value)) + + +def test_struc_ref_datatype(): + '''Test the datatype() method of StructureReference.''' + atype = symbols.ArrayType(symbols.REAL_TYPE, [10, 8]) + rtype = symbols.StructureType.create([ + ("gibber", symbols.BOOLEAN_TYPE, symbols.Symbol.Visibility.PUBLIC)]) + # TODO #1031. Currently cannot create an array of StructureTypes + # directly - have to have a DataTypeSymbol. + rtype_sym = symbols.DataTypeSymbol("gibber_type", rtype) + artype = symbols.ArrayType(rtype_sym, [10, 3]) + grid_type = symbols.StructureType.create([ + ("nx", symbols.INTEGER_TYPE, symbols.Symbol.Visibility.PUBLIC), + ("data", atype, symbols.Symbol.Visibility.PRIVATE), + ("roger", rtype, symbols.Symbol.Visibility.PUBLIC), + ("titty", artype, symbols.Symbol.Visibility.PUBLIC)]) + # Symbol with type defined by StructureType + ssym0 = symbols.DataSymbol("grid", grid_type) + # Reference to scalar member of structure + sref0 = nodes.StructureReference.create(ssym0, ["nx"]) + assert sref0.datatype == symbols.INTEGER_TYPE + + # Symbol with type defined by DataTypeSymbol + grid_type_symbol = symbols.DataTypeSymbol("grid_type", grid_type) + ssym = symbols.DataSymbol("grid", grid_type_symbol) + # Reference to scalar member of structure + sref = nodes.StructureReference.create(ssym, ["nx"]) + assert sref.datatype == symbols.INTEGER_TYPE + one = nodes.Literal("1", symbols.INTEGER_TYPE) + two = nodes.Literal("2", symbols.INTEGER_TYPE) + sref2 = nodes.StructureReference.create(ssym, [("data", [one, two])]) + assert sref2.datatype == symbols.REAL_TYPE + + # Reference to scalar member of structure member + gref = nodes.StructureReference.create(ssym, ["roger", "gibber"]) + assert gref.datatype == symbols.BOOLEAN_TYPE + + # Reference to structure member of structure + rref = nodes.StructureReference.create(ssym, ["roger"]) + assert rref.datatype == rtype + + # Reference to single element of array of structures within a structure + singleref = nodes.StructureReference.create( + ssym, [("titty", [one.copy(), two.copy()])]) + assert singleref.datatype == rtype_sym + + # Reference to sub-array of structure members of structure + myrange = nodes.Range.create(two.copy(), + nodes.Literal("4", symbols.INTEGER_TYPE)) + arref = nodes.StructureReference.create( + ssym, [("titty", [nodes.Literal("3", symbols.INTEGER_TYPE), myrange])]) + dtype = arref.datatype + assert isinstance(dtype, symbols.ArrayType) + assert dtype.intrinsic == rtype_sym + assert len(dtype.shape) == 1 + assert dtype.shape[0].lower == one + assert isinstance(dtype.shape[0].upper, nodes.BinaryOperation) + + # Reference to whole array of structures that are a member of a structure + fullref = nodes.StructureReference.create(ssym, ["titty"]) + dtype = fullref.datatype + assert dtype == artype + + +def test_structure_reference_deferred_type(): + ''' + Check that the datatype() method behaves as expected when it + encounters members of DeferredType or UnknownType. 
+ + ''' + atype = symbols.ArrayType( + symbols.UnknownFortranType( + "type(atype), dimension(10,8), pointer :: aptr"), [10, 8]) + grid_type = symbols.StructureType.create([ + ("mesh", symbols.DeferredType(), symbols.Symbol.Visibility.PUBLIC), + ("aptr", atype, symbols.Symbol.Visibility.PUBLIC)]) + grid_type_symbol = symbols.DataTypeSymbol("grid_type", grid_type) + ssym = symbols.DataSymbol("grid", grid_type_symbol) + # Structure of deferred type + deft_sym = symbols.DataSymbol("john", symbols.DeferredType()) + jref = nodes.StructureReference.create(deft_sym, ["value"]) + assert jref.datatype == symbols.DeferredType() + # Structure of UnknownType + ut_sym = symbols.DataSymbol("teasel", + symbols.UnknownFortranType("some type decln")) + tref = nodes.StructureReference.create(ut_sym, ["yard"]) + assert tref.datatype == symbols.DeferredType() + # Structure with type given by DataTypeSymbol that is of DeferredType + utypesym = symbols.DataTypeSymbol("my_type", symbols.DeferredType()) + mysym = symbols.DataSymbol("my_sym", utypesym) + myref = nodes.StructureReference.create(mysym, ["flag"]) + assert myref.datatype == symbols.DeferredType() + # Member of structure that is of deferred type + meshref = nodes.StructureReference.create(ssym, ["mesh", "polly"]) + assert meshref.datatype == symbols.DeferredType() + # Member of structure that is an array of unknown type. + two = nodes.Literal("2", symbols.INTEGER_TYPE) + four = nodes.Literal("4", symbols.INTEGER_TYPE) + myrange = nodes.Range.create(two.copy(), four.copy()) + aref = nodes.StructureReference.create(ssym, [("aptr", + [two.copy(), myrange])]) + assert len(aref.datatype.shape) == 1 + assert isinstance(aref.datatype, symbols.ArrayType) + assert isinstance(aref.datatype.intrinsic, symbols.UnknownFortranType) + # An array made of individual elements of a member array. + # my_sym(:)%aptr(2,2) + array_grid_type = symbols.ArrayType(grid_type_symbol, [four.copy()]) + array_sym = symbols.DataSymbol("thing", array_grid_type) + aref2 = nodes.ArrayOfStructuresReference.create( + array_sym, [myrange.copy()], + [("aptr", [two.copy(), two.copy()])]) + assert len(aref2.datatype.shape) == 1 + assert isinstance(aref2.datatype.intrinsic, symbols.UnknownFortranType) + # An array of arrays - not supported. 
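For contrast with the deferred-type cases in this test, a compact sketch of the datatype query when the structure's types are fully known (the grid/nx names mirror the fixtures used in these tests):

from psyclone.psyir import nodes, symbols

grid_type = symbols.StructureType.create([
    ("nx", symbols.INTEGER_TYPE, symbols.Symbol.Visibility.PUBLIC)])
grid_sym = symbols.DataSymbol(
    "grid", symbols.DataTypeSymbol("grid_type", grid_type))
ref = nodes.StructureReference.create(grid_sym, ["nx"])
# A reference to a scalar member resolves to that member's declared type,
# whereas members of DeferredType/UnknownType resolve to DeferredType.
assert ref.datatype == symbols.INTEGER_TYPE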
+ # my_sym(2:4)%aptr + aref3 = nodes.ArrayOfStructuresReference.create( + array_sym, [myrange.copy()], ["aptr"]) + with pytest.raises(NotImplementedError) as err: + _ = aref3.datatype + assert "Array of arrays not supported: " in str(err.value) diff --git a/src/psyclone/tests/psyir/symbols/datatype_test.py b/src/psyclone/tests/psyir/symbols/datatype_test.py index 082154f0cb..aa56731bcc 100644 --- a/src/psyclone/tests/psyir/symbols/datatype_test.py +++ b/src/psyclone/tests/psyir/symbols/datatype_test.py @@ -74,6 +74,13 @@ def test_deferredtype_str(): assert str(data_type) == "DeferredType" +def test_deferredtype_eq(): + '''Test the equality operator of DeferredType.''' + data_type1 = DeferredType() + assert data_type1 == DeferredType() + assert data_type1 != NoType() + + # NoType class def test_notype(): @@ -84,6 +91,15 @@ def test_notype(): assert str(data_type) == "NoType" +def test_notype_eq(): + '''Test the equality operator of NoType.''' + notype1 = NoType() + assert notype1 == NoType() + assert notype1 != DeferredType() + assert notype1 != ScalarType(ScalarType.Intrinsic.INTEGER, + ScalarType.Precision.SINGLE) + + # ScalarType class @pytest.mark.parametrize("precision", [ScalarType.Precision.SINGLE, @@ -173,7 +189,7 @@ def test_scalartype_not_equal(): assert scalar_type4 != scalar_type # A ScalarType is not equal to an ArrayType atype = ArrayType(scalar_type4, [10]) - assert atype != scalar_type4 + assert scalar_type4 != atype def test_scalartype_invalid_intrinsic_type(): @@ -352,7 +368,7 @@ def test_arraytype_invalid_shape_dimension_1(): _ = ArrayType(scalar_type, [Reference(symbol)]) assert ( "If a DataSymbol is referenced in a dimension declaration then it " - "should be a scalar integer or of UnknownType or DeferredType, but " + "should be an integer or of UnknownType or DeferredType, but " "'fred' is a 'Scalar'." in str(excinfo.value)) @@ -394,7 +410,7 @@ def test_arraytype_invalid_shape_dimension_3(): def test_arraytype_invalid_shape_bounds(): ''' Check that the ArrayType class raises the expected exception when one of the dimensions of the shape list is a tuple that does not contain - either an int or a DataNode.''' + either an int or a DataNode or is not a scalar.''' scalar_type = ScalarType(ScalarType.Intrinsic.REAL, 4) with pytest.raises(TypeError) as excinfo: _ = ArrayType(scalar_type, [(1, 4, 1)]) @@ -422,8 +438,15 @@ def test_arraytype_invalid_shape_bounds(): _ = ArrayType(scalar_type, [(1, Reference(symbol))]) assert ( "If a DataSymbol is referenced in a dimension declaration then it " - "should be a scalar integer or of UnknownType or DeferredType, but " + "should be an integer or of UnknownType or DeferredType, but " "'fred' is a 'Scalar'." in str(excinfo.value)) + array_type = ArrayType(INTEGER_TYPE, [10]) + symbol = DataSymbol("jim", array_type) + with pytest.raises(TypeError) as excinfo: + _ = ArrayType(scalar_type, [(1, Reference(symbol))]) + assert ("If a DataSymbol is referenced in a dimension declaration then it " + "should be a scalar but 'Reference[name:'jim']' is not." 
in + str(excinfo.value)) def test_arraytype_shape_dim_from_parent_scope(): @@ -483,6 +506,30 @@ def test_arraytype_immutable(): data_type.shape = [] +def test_arraytype_eq(): + '''Test the equality operator for ArrayType.''' + scalar_type = ScalarType(ScalarType.Intrinsic.REAL, 4) + data_type1 = ArrayType(scalar_type, [10, 10]) + assert data_type1 == ArrayType(scalar_type, [10, 10]) + assert data_type1 != scalar_type + assert data_type1 == ArrayType(scalar_type, [10, + Literal("10", INTEGER_TYPE)]) + # Same type but different shape. + assert data_type1 != ArrayType(scalar_type, [10]) + assert data_type1 != ArrayType(scalar_type, [10, 10, 5]) + assert data_type1 != ArrayType(scalar_type, [10, 5]) + assert data_type1 != ArrayType(scalar_type, [10, 5]) + sym = DataSymbol("nx", INTEGER_TYPE) + assert data_type1 != ArrayType(scalar_type, [10, Reference(sym)]) + # Same shape but different type. + dscalar_type = ScalarType(ScalarType.Intrinsic.REAL, 8) + assert data_type1 != ArrayType(dscalar_type, [10, 10]) + iscalar_type = ScalarType(ScalarType.Intrinsic.INTEGER, 4) + assert data_type1 != ArrayType(iscalar_type, [10, 10]) + + +# UnknownFortranType tests + def test_unknown_fortran_type(): ''' Check the constructor and 'declaration' property of the UnknownFortranType class. ''' @@ -496,6 +543,34 @@ def test_unknown_fortran_type(): assert utype.declaration == decl +def test_unknown_fortran_type_text(): + ''' + Check that the 'type_text' property returns the expected string and + that the result is cached. + ''' + decl = "type(some_type) :: var" + utype = UnknownFortranType(decl) + text = utype.type_text + assert text == "TYPE(some_type)" + # Calling it a second time should just return the previously cached + # result. + assert utype.type_text is text + # Changing the declaration text should wipe the cache + utype.declaration = decl + assert utype.type_text is not text + + +def test_unknown_fortran_type_eq(): + '''Test the equality operator for UnknownFortranType.''' + decl = "type(some_type) :: var" + utype = UnknownFortranType(decl) + assert utype == UnknownFortranType(decl) + assert utype != NoType() + # Type is the same even if the variable name is different. + assert utype == UnknownFortranType("type(some_type) :: var1") + assert utype != UnknownFortranType("type(other_type) :: var") + + # StructureType tests def test_structure_type(): @@ -552,3 +627,32 @@ def test_create_structuretype(): assert ("Each component must be specified using a 3-tuple of (name, " "type, visibility) but found a tuple with 2 members: (" "'george', " in str(err.value)) + + +def test_structuretype_eq(): + '''Test the equality operator of StructureType.''' + stype = StructureType.create([ + ("nancy", INTEGER_TYPE, Symbol.Visibility.PUBLIC), + ("peggy", REAL_TYPE, Symbol.Visibility.PRIVATE)]) + assert stype == StructureType.create([ + ("nancy", INTEGER_TYPE, Symbol.Visibility.PUBLIC), + ("peggy", REAL_TYPE, Symbol.Visibility.PRIVATE)]) + # Something that is not a StructureType + assert stype != NoType() + # Component with a different name. + assert stype != StructureType.create([ + ("nancy", INTEGER_TYPE, Symbol.Visibility.PUBLIC), + ("roger", REAL_TYPE, Symbol.Visibility.PRIVATE)]) + # Component with a different type. + assert stype != StructureType.create([ + ("nancy", INTEGER_TYPE, Symbol.Visibility.PUBLIC), + ("peggy", INTEGER_TYPE, Symbol.Visibility.PRIVATE)]) + # Component with a different visibility. 
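A brief sketch of the equality semantics these tests pin down (all behaviour shown is asserted above; only the variable names are illustrative):

from psyclone.psyir.symbols import ArrayType, ScalarType, UnknownFortranType

real4 = ScalarType(ScalarType.Intrinsic.REAL, 4)
assert ArrayType(real4, [10, 10]) == ArrayType(real4, [10, 10])
assert ArrayType(real4, [10, 10]) != ArrayType(real4, [10])  # different rank
# UnknownFortranType compares the type text, not the declared variable name.
assert (UnknownFortranType("type(some_type) :: var") ==
        UnknownFortranType("type(some_type) :: var1"))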
+ assert stype != StructureType.create([ + ("nancy", INTEGER_TYPE, Symbol.Visibility.PUBLIC), + ("peggy", REAL_TYPE, Symbol.Visibility.PUBLIC)]) + # Different number of components. + assert stype != StructureType.create([ + ("nancy", INTEGER_TYPE, Symbol.Visibility.PUBLIC), + ("peggy", REAL_TYPE, Symbol.Visibility.PRIVATE), + ("roger", INTEGER_TYPE, Symbol.Visibility.PUBLIC)]) diff --git a/src/psyclone/tests/psyir/transformations/hoist_loop_bound_expr_trans_test.py b/src/psyclone/tests/psyir/transformations/hoist_loop_bound_expr_trans_test.py index 3f7ae7bdea..982e41205a 100644 --- a/src/psyclone/tests/psyir/transformations/hoist_loop_bound_expr_trans_test.py +++ b/src/psyclone/tests/psyir/transformations/hoist_loop_bound_expr_trans_test.py @@ -159,7 +159,7 @@ def test_validate_loop_with_directive(fortran_reader): hoist_trans.validate(loop) assert ("The loop provided to HoistLoopBoundExprTrans must not be directly" " inside a Directive as its Schedule does not support multiple " - "statements, but found 'OMPParallelDoDirective[]'." + "statements, but found 'OMPParallelDoDirective'." in str(err.value)) diff --git a/src/psyclone/tests/psyir/transformations/reference2arrayrange_trans_test.py b/src/psyclone/tests/psyir/transformations/reference2arrayrange_trans_test.py index a3190043a9..2a96939335 100644 --- a/src/psyclone/tests/psyir/transformations/reference2arrayrange_trans_test.py +++ b/src/psyclone/tests/psyir/transformations/reference2arrayrange_trans_test.py @@ -44,7 +44,6 @@ from psyclone.psyir.transformations import Reference2ArrayRangeTrans, \ TransformationError - CODE = ( "program test\n" " real, dimension(10) :: a\n" @@ -192,6 +191,32 @@ def test_multid(fortran_reader, fortran_writer): assert "a(:n,:m,:) = b(:n,:m,:) * c(:n,:m,:)\n" in result +def test_operators(fortran_reader, fortran_writer): + '''Test that references to arrays within operators are transformed to + array slice notation, using dotproduct as the example. + + ''' + code = CODE.replace("a = b", "b = dot_product(a, a(:))") + result = apply_trans(fortran_reader, fortran_writer, code) + assert "b = DOT_PRODUCT(a(:), a(:))" in result + + +def test_call(fortran_reader, fortran_writer): + '''Test that references to arrays that are arguments to a call are + transformed to array slice notation. + + ''' + code = ( + "program test\n" + " use workmod, only : work\n" + " real, dimension(10) :: a\n" + " real :: b\n\n" + " call work(a,b)\n" + "end program test\n") + result = apply_trans(fortran_reader, fortran_writer, code) + assert "call work(a(:), b)" in result + + def test_validate(): ''' Test the validate method ''' trans = Reference2ArrayRangeTrans() @@ -217,6 +242,55 @@ def test_validate_range(fortran_reader): "'ArrayReference'." in str(info.value)) +def test_validate_query(fortran_reader): + '''Test that the validate method raises an exception if the Reference + is within one of the LBOUND, UBOUND or SIZE query functions. 
+ + ''' + code = ( + "program test\n" + " real :: a(10),b(10)\n" + " integer :: i,c\n" + " do i = lbound(a,1), ubound(a,1)\n" + " a(i) = 0.0\n" + " end do\n" + " b(:) = 0.0\n" + " c = size(b,1)\n" + "end program test\n") + psyir = fortran_reader.psyir_from_source(code) + trans = Reference2ArrayRangeTrans() + + # Check the references to 'a' in lbound and ubound do not get modified + loop = psyir.children[0].children[0] + locations = [loop.start_expr, loop.stop_expr] + for location in locations: + for reference in location.walk(Reference): + with pytest.raises(TransformationError) as info: + trans.validate(reference) + assert ("References to arrays within LBOUND, UBOUND or SIZE " + "operators should not be transformed." in str(info.value)) + + # Check the references to 'b' in the hidden lbound and ubound + # operators within 'b(:)' do not get modified. + assignment = psyir.children[0].children[1] + for reference in assignment.walk(Reference): + # We want to avoid subclasses such as ArrayReference + # pylint: disable=unidiomatic-typecheck + if type(reference) == Reference: + with pytest.raises(TransformationError) as info: + trans.validate(reference) + assert ("References to arrays within LBOUND, UBOUND or SIZE " + "operators should not be transformed." in str(info.value)) + + # Check the reference to 'b' in the size operator does not get modified + assignment = psyir.children[0].children[2] + reference = assignment.children[1].children[0] + with pytest.raises(TransformationError) as info: + trans.validate(reference) + assert ("References to arrays within LBOUND, UBOUND or SIZE " + "operators should not be transformed." in str(info.value)) + + def test_validate_structure(fortran_reader): '''Test that a StructureReference raises an exception. This limitation will be removed once issue #1858 is addressed. diff --git a/src/psyclone/tests/psyir/transformations/transformations_test.py b/src/psyclone/tests/psyir/transformations/transformations_test.py index 2f1a578a92..2aa4fdaab8 100644 --- a/src/psyclone/tests/psyir/transformations/transformations_test.py +++ b/src/psyclone/tests/psyir/transformations/transformations_test.py @@ -320,26 +320,28 @@ def test_omplooptrans_properties(): # Check default values omplooptrans = OMPLoopTrans() - assert omplooptrans.omp_schedule == "static" - assert omplooptrans.omp_worksharing is True + assert omplooptrans.omp_schedule == "auto" + assert omplooptrans.omp_directive == "do" # Use setters with valid values omplooptrans.omp_schedule = "dynamic,2" - omplooptrans.omp_worksharing = False + omplooptrans.omp_directive = "paralleldo" assert omplooptrans.omp_schedule == "dynamic,2" - assert omplooptrans.omp_worksharing is False + assert omplooptrans.omp_directive == "paralleldo" # Setting things at the constructor also works omplooptrans = OMPLoopTrans(omp_schedule="dynamic,2", - omp_worksharing=False) + omp_directive="loop") assert omplooptrans.omp_schedule == "dynamic,2" - assert omplooptrans.omp_worksharing is False + assert omplooptrans.omp_directive == "loop" # Use setters with invalid values with pytest.raises(TypeError) as err: - omplooptrans.omp_worksharing = "invalid" - assert ("The OMPLoopTrans.omp_worksharing property must be a boolean but" - " found a 'str'." in str(err.value)) + omplooptrans.omp_directive = "invalid" + assert ("The OMPLoopTrans.omp_directive property must be a str with " + "the value of ['do', 'paralleldo', 'teamsdistributeparalleldo', " + "'loop'] but found a 'str' with value 'invalid'." 
+            in str(err.value))
 
     with pytest.raises(TypeError) as err:
         omplooptrans.omp_schedule = 3
@@ -452,7 +454,7 @@ def test_omplooptrans_apply(sample_psyir, fortran_writer):
     omplooptrans.apply(tree.walk(Loop)[0])
     assert isinstance(tree.walk(Loop)[0].parent, Schedule)
     assert isinstance(tree.walk(Loop)[0].parent.parent, OMPDoDirective)
-    assert tree.walk(Loop)[0].parent.parent._omp_schedule == 'static'
+    assert tree.walk(Loop)[0].parent.parent._omp_schedule == 'auto'
 
     # The omp_schedule can be changed
     omplooptrans = OMPLoopTrans(omp_schedule="dynamic,2")
@@ -465,8 +467,8 @@ def test_omplooptrans_apply(sample_psyir, fortran_writer):
     assert loop1.parent.parent._omp_schedule == 'dynamic,2'
     ompparalleltrans.apply(loop1.parent.parent)  # Needed for generation
 
-    # If omp_worksharing is False, it adds a OMPLoopDirective instead
-    omplooptrans = OMPLoopTrans(omp_worksharing=False)
+    # The omp_directive can be changed
+    omplooptrans = OMPLoopTrans(omp_directive="loop")
     loop2 = tree.walk(Loop, stop_type=Loop)[1]
     omplooptrans.apply(loop2, {'collapse': 2})
     assert isinstance(loop2.parent, Schedule)
diff --git a/src/psyclone/transformations.py b/src/psyclone/transformations.py
index bf12a8d30b..36945a3585 100644
--- a/src/psyclone/transformations.py
+++ b/src/psyclone/transformations.py
@@ -59,13 +59,14 @@
     ACCEnterDataDirective, ACCKernelsDirective, ACCLoopDirective, \
     ACCParallelDirective, ACCRoutineDirective, Assignment, CodeBlock, \
     Directive, Loop, Node, OMPDeclareTargetDirective, \
-    OMPDirective, OMPDoDirective, OMPLoopDirective, OMPMasterDirective, \
+    OMPDirective, OMPMasterDirective, \
     OMPParallelDirective, OMPParallelDoDirective, OMPSerialDirective, \
     OMPSingleDirective, OMPTaskloopDirective, PSyDataNode, Reference, \
     Return, Routine, Schedule
 from psyclone.psyir.symbols import ArgumentInterface, DataSymbol, \
     DeferredType, INTEGER_TYPE, ScalarType, Symbol, SymbolError
 from psyclone.psyir.transformations.loop_trans import LoopTrans
+from psyclone.psyir.transformations.omp_loop_trans import OMPLoopTrans
 from psyclone.psyir.transformations.parallel_loop_trans import \
     ParallelLoopTrans
 from psyclone.psyir.transformations.region_trans import RegionTrans
@@ -73,9 +74,6 @@
     TransformationError
 
 
-VALID_OMP_SCHEDULES = ["runtime", "static", "dynamic", "guided", "auto"]
-
-
 def check_intergrid(node):
     '''
     Utility function to check that the supplied node does not have
@@ -458,252 +456,6 @@ def validate(self, node, options=None):
                 f"kernel for execution on an OpenMP target.")
 
 
-class OMPLoopTrans(ParallelLoopTrans):
-    '''
-    Adds an OpenMP directive to a loop. This can be the loop worksharing
-    OpenMP Do/For directive to distribute the iterations of the enclosed
-    loop or a descriptive OpenMP loop directive to let the compiler decide
-    the best implementation. The OpenMP schedule used for the worksharing
-    directive can also be specified, but this will be ignored in case of the
-    descriptive OpenMP loop. The configuration-defined 'reprod' parameter
-    also specifies whether a manual reproducible reproduction is to be used.
-
-    :param str omp_schedule: the OpenMP schedule to use. Defaults to 'static'.
-    :param bool omp_worksharing: whether to generate OpenMP loop worksharing \
-        directives (e.g. omp do/for) or an OpenMP loop directive. Defaults to \
-        True.
-
-    For example:
-
-    >>> from psyclone.psyir.frontend.fortran import FortranReader
-    >>> from psyclone.psyir.nodes import Routine
-    >>> from psyclone.transformations import OMPLoopTrans, OMPParallelTrans
-    >>>
-    >>> tree = FortranReader().psyir_from_source("""
-    ...     subroutine my_subroutine()
-    ...         integer, dimension(10, 10) :: A
-    ...         integer :: i
-    ...         integer :: j
-    ...         do i = 1, 10
-    ...             do j = 1, 10
-    ...                 A(i, j) = 0
-    ...             end do
-    ...         end do
-    ...         do i = 1, 10
-    ...             do j = 1, 10
-    ...                 A(i, j) = 0
-    ...             end do
-    ...         end do
-    ...     end subroutine
-    ...     """
-    >>> routine.walk(Routine)
-    >>> ompparalleltrans = OMPParallelTrans()  # Necessary in loop worksharing
-    >>> omplooptrans1 = OMPLoopTrans(omp_schedule="auto")
-    >>> omplooptrans2 = OMPLoopTrans(omp_worksharing=False)
-    >>> omplooptrans1.apply(routine.children[0])
-    >>> ompparalleltrans.apply(routine.children[0])
-    >>> omplooptrans2.apply(routine.children[1])
-
-    will generate:
-
-    .. code-block:: fortran
-
-        subroutine my_subroutine()
-            integer, dimension(10, 10) :: A
-            integer :: i
-            integer :: j
-            !$omp parallel
-            !$omp do schedule(auto)
-            do i = 1, 10
-                do j = 1, 10
-                    A(i, j) = 0
-                end do
-            end do
-            !$omp end do
-            !$omp end parallel
-            !$omp loop
-            do i = 1, 10
-                do j = 1, 10
-                    A(i, j) = 0
-                end do
-            end do
-            !$omp end loop
-        end subroutine
-
-    '''
-    def __init__(self, omp_schedule="static", omp_worksharing=True):
-        # Whether or not to generate code for (run-to-run on n threads)
-        # reproducible OpenMP reductions. This setting can be overridden
-        # via the `reprod` argument to the apply() method.
-        self._reprod = Config.get().reproducible_reductions
-
-        # Declare the attributes but use the property setter for proper
-        # error checking
-        self._omp_worksharing = None
-        self.omp_worksharing = omp_worksharing
-
-        self._omp_schedule = ""
-        self.omp_schedule = omp_schedule
-
-        super().__init__()
-
-    def __str__(self):
-        return "Adds an 'OpenMP DO' directive to a loop"
-
-    @property
-    def omp_worksharing(self):
-        '''
-        :returns: the value of the omp_worksharing attribute.
-        :rtype: bool
-        '''
-        return self._omp_worksharing
-
-    @omp_worksharing.setter
-    def omp_worksharing(self, value):
-        '''
-        :param bool value: new value of the omp_worksharing attribute.
-
-        :raises TypeError: if the provided value is not a boolean.
-        '''
-        if not isinstance(value, bool):
-            raise TypeError(
-                f"The OMPLoopTrans.omp_worksharing property must be a boolean"
-                f" but found a '{type(value).__name__}'.")
-        self._omp_worksharing = value
-
-    @property
-    def omp_schedule(self):
-        '''
-        :returns: the OpenMP schedule that will be specified by \
-            this transformation. The default schedule is 'static'.
-        :rtype: str
-
-        '''
-        return self._omp_schedule
-
-    @omp_schedule.setter
-    def omp_schedule(self, value):
-        '''
-        :param str value: Sets the OpenMP schedule value that will be \
-            specified by this transformation.
-
-        :raises TypeError: if the provided value is not a string.
-        :raises ValueError: if the provided string is not a valid OpenMP \
-            schedule format.
-        '''
-
-        if not isinstance(value, str):
-            raise TypeError(
-                f"The OMPLoopTrans.omp_schedule property must be a 'str'"
-                f" but found a '{type(value).__name__}'.")
-
-        # Some schedules have an optional chunk size following a ','
-        value_parts = value.split(',')
-        if value_parts[0].lower() not in VALID_OMP_SCHEDULES:
-            raise ValueError(f"Valid OpenMP schedules are "
-                             f"{VALID_OMP_SCHEDULES} but got "
-                             f"'{value_parts[0]}'.")
-
-        if len(value_parts) > 1:
-            if value_parts[0] == "auto":
-                raise ValueError("Cannot specify a chunk size when using an "
-                                 "OpenMP schedule of 'auto'.")
-            try:
-                int(value_parts[1].strip())
-            except ValueError as err:
-                raise ValueError(f"Supplied OpenMP schedule '{value}' has an "
-                                 f"invalid chunk-size.") from err
-
-        self._omp_schedule = value
-
-    def _directive(self, children, collapse=None):
-        '''
-        Creates the type of directive needed for this sub-class of
-        transformation.
-
-        :param children: list of Nodes that will be the children of \
-            the created directive.
-        :type children: list of :py:class:`psyclone.psyir.nodes.Node`
-        :param int collapse: number of nested loops to collapse or None if \
-            no collapse attribute is required.
-
-        :returns: the new node representing the directive in the AST
-        :rtype: :py:class:`psyclone.psyir.nodes.OMPDoDirective` or \
-            :py:class:`psyclone.psyir.nodes.OMPLoopDirective`
-
-        '''
-        if self._omp_worksharing:
-            # TODO 1370: OpenMP Do Directive don't support collapse yet.
-            _directive = OMPDoDirective(children=children,
-                                        omp_schedule=self.omp_schedule,
-                                        reprod=self._reprod)
-        else:
-            _directive = OMPLoopDirective(children=children,
-                                          collapse=collapse)
-
-        return _directive
-
-    def apply(self, node, options=None):
-        '''Apply the OMPLoopTrans transformation to the specified node in a
-        Schedule. This node must be a Loop since this transformation
-        corresponds to wrapping the generated code with directives like so:
-
-        .. code-block:: fortran
-
-          !$OMP DO
-          do ...
-             ...
-          end do
-          !$OMP END DO
-
-        At code-generation time (when
-        :py:meth:`OMPLoopDirective.gen_code` is called), this node must be
-        within (i.e. a child of) an OpenMP PARALLEL region.
-
-        If the keyword "reprod" is specified in the options, it will cause a
-        reproducible reduction to be generated if it is set to True, otherwise
-        the default value (as read from the psyclone.cfg file) will be used.
-        Note, reproducible in this case means obtaining the same results
-        with the same number of OpenMP threads, not for different
-        numbers of OpenMP threads.
-
-        :param node: the supplied node to which we will apply the \
-            OMPLoopTrans transformation
-        :type node: :py:class:`psyclone.psyir.nodes.Node`
-        :param options: a dictionary with options for transformations\
-            and validation.
-        :type options: dictionary of string:values or None
-        :param bool options["reprod"]:
-            indicating whether reproducible reductions should be used. \
-            By default the value from the config file will be used.
-
-        '''
-        if not options:
-            options = {}
-        self._reprod = options.get("reprod",
-                                   Config.get().reproducible_reductions)
-
-        # Add variable names for OMP functions into the InvokeSchedule
-        # (a Routine) symboltable if they don't already exist
-        root = node.ancestor(Routine)
-
-        symtab = root.symbol_table
-        try:
-            symtab.lookup_with_tag("omp_thread_index")
-        except KeyError:
-            symtab.new_symbol(
-                "th_idx", tag="omp_thread_index",
-                symbol_type=DataSymbol, datatype=INTEGER_TYPE)
-        try:
-            symtab.lookup_with_tag("omp_num_threads")
-        except KeyError:
-            symtab.new_symbol(
-                "nthreads", tag="omp_num_threads",
-                symbol_type=DataSymbol, datatype=INTEGER_TYPE)
-
-        super().apply(node, options)
-
-
 class ACCLoopTrans(ParallelLoopTrans):
     '''
     Adds an OpenACC loop directive to a loop. This directive must be within
@@ -891,7 +643,17 @@ class DynamoOMPParallelLoopTrans(OMPParallelLoopTrans):
     validity checks. Actual transformation is done by the
     :py:class:`base class `.
 
+    :param str omp_directive: choose which OpenMP loop directive to use. \
+        Defaults to "do".
+    :param str omp_schedule: the OpenMP schedule to use. Must be one of \
+        'runtime', 'static', 'dynamic', 'guided' or 'auto'. Defaults to \
+        'static'.
+
     '''
+    def __init__(self, omp_directive="do", omp_schedule="static"):
+        super().__init__(omp_directive=omp_directive,
+                         omp_schedule=omp_schedule)
+
     def __str__(self):
         return "Add an OpenMP Parallel Do directive to a Dynamo loop"
 
@@ -934,10 +696,17 @@ class GOceanOMPParallelLoopTrans(OMPParallelLoopTrans):
     loop). Actual transformation is done by
     :py:class:`base class `.
 
-    :param omp_schedule: the omp schedule to be created. Must be one of \
-        'runtime', 'static', 'dynamic', 'guided' or 'auto'.
+    :param str omp_directive: choose which OpenMP loop directive to use. \
+        Defaults to "do".
+    :param str omp_schedule: the OpenMP schedule to use. Must be one of \
+        'runtime', 'static', 'dynamic', 'guided' or 'auto'. Defaults to \
+        'static'.
 
     '''
+    def __init__(self, omp_directive="do", omp_schedule="static"):
+        super().__init__(omp_directive=omp_directive,
+                         omp_schedule=omp_schedule)
+
     def __str__(self):
         return "Add an OpenMP Parallel Do directive to a GOcean loop"
 
@@ -969,10 +738,16 @@ def apply(self, node, options=None):
 class Dynamo0p3OMPLoopTrans(OMPLoopTrans):
 
     ''' Dynamo 0.3 specific orphan OpenMP loop transformation. Adds
-    Dynamo-specific validity checks. Actual transformation is done by
-    :py:class:`base class `.
+    Dynamo-specific validity checks.
+
+    :param str omp_schedule: the OpenMP schedule to use. Must be one of \
+        'runtime', 'static', 'dynamic', 'guided' or 'auto'. Defaults to \
+        'static'.
 
     '''
+    def __init__(self, omp_schedule="static"):
+        super().__init__(omp_directive="do", omp_schedule=omp_schedule)
+
     def __str__(self):
         return "Add an OpenMP DO directive to a Dynamo 0.3 loop"
 
@@ -1018,20 +793,26 @@ class GOceanOMPLoopTrans(OMPLoopTrans):
 
     ''' GOcean-specific orphan OpenMP loop transformation. Adds GOcean
     specific validity checks (that the node is either an inner or outer
-    Loop). Actual transformation is done by
-    :py:class:`base class `.
+    Loop).
 
-    :param omp_schedule: the omp schedule to be created. Must be one of
-        'runtime', 'static', 'dynamic', 'guided' or 'auto'.
+    :param str omp_directive: choose which OpenMP loop directive to use. \
+        Defaults to "do".
+    :param str omp_schedule: the OpenMP schedule to use. Must be one of \
+        'runtime', 'static', 'dynamic', 'guided' or 'auto'. Defaults to \
+        'static'.
     '''
+    def __init__(self, omp_directive="do", omp_schedule="static"):
+        super().__init__(omp_directive=omp_directive,
+                         omp_schedule=omp_schedule)
+
     def __str__(self):
-        return "Add an OpenMP DO directive to a GOcean loop"
+        return "Add the selected OpenMP loop directive to a GOcean loop"
 
     def validate(self, node, options=None):
         '''
         Checks that the supplied node is a valid target for parallelisation
-        using OMP Do.
+        using OMP directives.
 
         :param node: the candidate loop for parallelising using OMP Do.
         :type node: :py:class:`psyclone.psyir.nodes.Loop`