Skip to content

Commit

Permalink
Merge pull request #462 from ecmwf-ifs/nams-pipeline-plan-duplicate-r…
Browse files Browse the repository at this point in the history
…emove

Pipeline-plan duplicate/remove transformation changing dependencies
  • Loading branch information
reuterbal authored Jan 10, 2025
2 parents b0a2344 + 534e575 commit 4ed8534
Show file tree
Hide file tree
Showing 14 changed files with 1,564 additions and 579 deletions.
1 change: 1 addition & 0 deletions loki/batch/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@

from loki.batch.configure import * # noqa
from loki.batch.item import * # noqa
from loki.batch.item_factory import * # noqa
from loki.batch.pipeline import * # noqa
from loki.batch.scheduler import * # noqa
from loki.batch.sfilter import * # noqa
Expand Down
613 changes: 44 additions & 569 deletions loki/batch/item.py

Large diffs are not rendered by default.

639 changes: 639 additions & 0 deletions loki/batch/item_factory.py

Large diffs are not rendered by default.

24 changes: 18 additions & 6 deletions loki/batch/scheduler.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,9 @@
from loki.batch.configure import SchedulerConfig
from loki.batch.item import (
FileItem, ModuleItem, ProcedureItem, ProcedureBindingItem,
InterfaceItem, TypeDefItem, ExternalItem, ItemFactory
InterfaceItem, TypeDefItem, ExternalItem
)
from loki.batch.item_factory import ItemFactory
from loki.batch.pipeline import Pipeline
from loki.batch.sfilter import SFilter
from loki.batch.sgraph import SGraph
Expand Down Expand Up @@ -534,24 +535,35 @@ def _get_definition_items(_item, sgraph_items):
include_external=self.config.default.get('strict', True)
)

# Collect common transformation arguments
kwargs = {
'depths': graph.depths,
'build_args': self.build_args,
'plan_mode': proc_strategy == ProcessingStrategy.PLAN,
}

if transformation.renames_items or transformation.creates_items:
kwargs['item_factory'] = self.item_factory
kwargs['scheduler_config'] = self.config

for _item in traversal:
if isinstance(_item, ExternalItem):
raise RuntimeError(f'Cannot apply {trafo_name} to {_item.name}: Item is marked as external.')

transformation.apply(
_item.scope_ir, role=_item.role, mode=_item.mode,
item=_item, targets=_item.targets, items=_get_definition_items(_item, sgraph_items),
_item.scope_ir, item=_item, items=_get_definition_items(_item, sgraph_items),
successors=graph.successors(_item, item_filter=item_filter),
depths=graph.depths, build_args=self.build_args,
plan_mode=proc_strategy == ProcessingStrategy.PLAN
role=_item.role, mode=_item.mode, targets=_item.targets,
**kwargs
)

if transformation.renames_items:
self.rekey_item_cache()

if transformation.creates_items:
self._discover()
self._parse_items()
if self.full_parse:
self._parse_items()

def callgraph(self, path, with_file_graph=False, with_legend=False):
"""
Expand Down
70 changes: 70 additions & 0 deletions loki/batch/tests/test_batch.py
Original file line number Diff line number Diff line change
Expand Up @@ -671,6 +671,76 @@ def test_procedure_item_external_item(tmp_path, enable_imports, default_config):
assert [it.origin_cls for it in items] == [ModuleItem, ProcedureItem]


def test_procedure_item_from_item1(testdir, default_config):
    """
    Duplicate a free-standing procedure item via the item factory and verify
    that the clone is fully independent of the original item.
    """
    proj = testdir/'sources/projBatch'

    # A file with a single subroutine definition that calls a routine via interface block
    factory = ItemFactory()
    config = SchedulerConfig.from_dict(default_config)
    file_item = factory.get_or_create_file_item_from_path(proj/'source/comp1.F90', config=config)
    item = file_item.create_definition_items(item_factory=factory, config=config)[0]
    assert isinstance(item, ProcedureItem)
    assert item.name == '#comp1'

    # Only the file item and the procedure item should be in the cache so far
    expected_keys = {str(proj/'source/comp1.F90').lower(), '#comp1'}
    assert set(factory.item_cache) == expected_keys

    # Create a new item by duplicating the existing item; this also registers
    # a new (virtual) file item for the duplicated procedure
    new_item = factory.get_or_create_item_from_item('#new_comp1', item, config=config)
    expected_keys |= {str(proj/'source/new_comp1.F90').lower(), '#new_comp1'}
    assert set(factory.item_cache) == expected_keys

    # The clone carries the new name while the original item is untouched
    assert isinstance(new_item, ProcedureItem)
    assert new_item.name == '#new_comp1'
    assert new_item.ir.name == 'new_comp1'
    assert item.ir.name == 'comp1'

    # Dependencies compare equal but must not be shared objects
    assert item.dependencies == new_item.dependencies
    assert all(dep is not new_dep for dep, new_dep in zip(item.dependencies, new_item.dependencies))


def test_procedure_item_from_item2(testdir, default_config):
    """Duplicate a module-contained procedure item into a newly created module."""
    proj = testdir/'sources/projBatch'

    # A file with a single subroutine declared in a module that calls a typebound procedure
    # where the type is imported via an import statement in the module scope
    item_factory = ItemFactory()
    scheduler_config = SchedulerConfig.from_dict(default_config)
    file_item = item_factory.get_or_create_file_item_from_path(proj/'module/other_mod.F90', config=scheduler_config)
    mod_item = file_item.create_definition_items(item_factory=item_factory, config=scheduler_config)[0]
    assert mod_item.name == 'other_mod'
    assert isinstance(mod_item, ModuleItem)
    item = mod_item.create_definition_items(item_factory=item_factory, config=scheduler_config)[0]
    assert item.name == 'other_mod#mod_proc'
    assert isinstance(item, ProcedureItem)

    # Creating the file, module and procedure items populates the cache with their keys
    expected_cache = {str(proj/'module/other_mod.F90').lower(), 'other_mod', 'other_mod#mod_proc'}
    assert set(item_factory.item_cache) == expected_cache

    # Create a new item by duplicating the existing item
    # NOTE(review): the return value is indexed with [0] here, while
    # test_procedure_item_from_item1 uses the return value directly — confirm
    # whether get_or_create_item_from_item returns a tuple when the enclosing
    # module has to be created on-the-fly
    new_item = item_factory.get_or_create_item_from_item('my_mod#new_proc', item, config=scheduler_config)[0]
    expected_cache |= {str(proj/'module/my_mod.F90').lower(), 'my_mod', 'my_mod#new_proc'}
    assert set(item_factory.item_cache) == expected_cache

    # Assert the new item differs from the existing item in the name, with the original
    # item unchanged
    assert new_item.name == 'my_mod#new_proc'
    assert isinstance(new_item, ProcedureItem)
    assert new_item.ir.name == 'new_proc'
    assert new_item.ir.parent.name == 'my_mod'
    assert item.ir.name == 'mod_proc'
    assert item.ir.parent.name == 'other_mod'

    # Make sure both items have the same dependencies but the dependency
    # objects are distinct objects
    assert item.dependencies == new_item.dependencies
    assert all(d is not new_d for d, new_d in zip(item.dependencies, new_item.dependencies))


def test_typedef_item(testdir):
proj = testdir/'sources/projBatch'

Expand Down
12 changes: 12 additions & 0 deletions loki/frontend/source.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,18 @@ def __init__(self, lines, string=None, file=None):
self.string = string
self.file = file

def clone(self, **kwargs):
    """
    Create a duplicate of this object, with any given keyword arguments
    overriding the corresponding attribute of the original.
    """
    kwargs.setdefault('lines', self.lines)
    # Only carry over optional attributes that are actually set, so that
    # explicit overrides in kwargs always win
    if 'string' not in kwargs and self.string is not None:
        kwargs['string'] = self.string
    if 'file' not in kwargs and self.file is not None:
        kwargs['file'] = self.file
    return type(self)(**kwargs)

def __repr__(self):
    # Render "line A-B" when an end line is set, otherwise just "line A"
    if self.lines[1]:
        return f'Source<line {self.lines[0]}-{self.lines[1]}>'
    return f'Source<line {self.lines[0]}>'
Expand Down
1 change: 0 additions & 1 deletion loki/ir/nodes.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,6 @@
from loki.tools import flatten, as_tuple, is_iterable, truncate_string, CaseInsensitiveDict
from loki.types import DataType, BasicType, DerivedType, SymbolAttributes


__all__ = [
# Abstract base classes
'Node', 'InternalNode', 'LeafNode', 'ScopedNode',
Expand Down
3 changes: 2 additions & 1 deletion loki/program_unit.py
Original file line number Diff line number Diff line change
Expand Up @@ -288,7 +288,7 @@ def make_complete(self, **frontend_args):
xmods = frontend_args.get('xmods')
parser_classes = frontend_args.get('parser_classes', RegexParserClass.AllClasses)
if frontend == Frontend.REGEX and self._parser_classes:
if self._parser_classes == parser_classes:
if self._parser_classes == (self._parser_classes | parser_classes):
return
parser_classes = parser_classes | self._parser_classes

Expand Down Expand Up @@ -442,6 +442,7 @@ def clone(self, **kwargs):
if self._source is not None and 'source' not in kwargs:
kwargs['source'] = self._source
kwargs.setdefault('incomplete', self._incomplete)
kwargs.setdefault('parser_classes', self._parser_classes)

# Rebuild IRs
rebuild = Transformer({}, rebuild_scopes=True)
Expand Down
30 changes: 30 additions & 0 deletions loki/sourcefile.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,36 @@ def __init__(self, path, ir=None, ast=None, source=None, incomplete=False, parse
self._incomplete = incomplete
self._parser_classes = parser_classes

def clone(self, **kwargs):
    """
    Replicate the object, with any given keyword arguments overriding
    the corresponding attribute of the original.
    """
    kwargs.setdefault('path', self.path)
    # Remember whether the IR is inherited from this object, because an
    # inherited IR must be deep-copied after construction
    ir_needs_clone = False
    if 'ir' not in kwargs and self.ir is not None:
        kwargs['ir'] = self.ir
        ir_needs_clone = True
    if 'ast' not in kwargs and self._ast is not None:
        kwargs['ast'] = self._ast
    if 'source' not in kwargs and self.source is not None:
        # Re-point the source object at the (possibly overridden) path
        kwargs['source'] = self._source.clone(file=kwargs['path'])
    kwargs.setdefault('incomplete', self._incomplete)
    if 'parser_classes' not in kwargs and self._parser_classes is not None:
        kwargs['parser_classes'] = self._parser_classes

    obj = type(self)(**kwargs)

    # When the IR has been carried over from the current sourcefile
    # we need to make sure we perform a deep copy
    if obj.ir and ir_needs_clone:
        cloned_body = []
        for node in obj.ir.body:
            if isinstance(node, ProgramUnit):
                cloned_body.append(node.clone(rescope_symbols=True))
            else:
                cloned_body.append(node.clone())
        obj.ir = obj.ir.clone(body=tuple(cloned_body))
    return obj

@classmethod
def from_file(cls, filename, definitions=None, preprocess=False,
includes=None, defines=None, omni_includes=None,
Expand Down
90 changes: 90 additions & 0 deletions loki/tests/test_sourcefile.py
Original file line number Diff line number Diff line change
Expand Up @@ -352,3 +352,93 @@ def test_sourcefile_lazy_comments(frontend):
assert '! Comment outside' in code
assert '! Comment inside' in code
assert '! Other comment outside' in code


@pytest.mark.parametrize('frontend', available_frontends(include_regex=True))
def test_sourcefile_clone(frontend, tmp_path):
    """
    Make sure cloning a source file works as expected: clones must be
    independent of the original and of each other, and incomplete/parser
    state must carry over to the clones.
    """
    fcode = """
! Comment outside
module my_mod
implicit none
contains
subroutine my_routine
implicit none
end subroutine my_routine
end module my_mod
subroutine other_routine
use my_mod, only: my_routine
implicit none
call my_routine()
end subroutine other_routine
""".strip()
    source = Sourcefile.from_source(fcode, frontend=frontend, xmods=[tmp_path])

    # Clone the source file twice
    new_source = source.clone()
    new_new_source = source.clone()

    # Apply some changes that should only be affecting each clone
    new_source['other_routine'].name = 'new_name'
    new_new_source['my_mod']['my_routine'].name = 'new_mod_routine'

    # The rename in the first clone must not leak into the original or the other clone
    assert 'other_routine' in source
    assert 'other_routine' not in new_source
    assert 'other_routine' in new_new_source

    assert 'new_name' not in source
    assert 'new_name' in new_source
    assert 'new_name' not in new_new_source

    assert 'my_mod' in source
    assert 'my_mod' in new_source
    assert 'my_mod' in new_new_source

    # Likewise, the module-procedure rename only affects the second clone
    assert 'my_routine' in source['my_mod']
    assert 'my_routine' in new_source['my_mod']
    assert 'my_routine' not in new_new_source['my_mod']

    assert 'new_mod_routine' not in source['my_mod']
    assert 'new_mod_routine' not in new_source['my_mod']
    assert 'new_mod_routine' in new_new_source['my_mod']

    if not source._incomplete:
        # Fully-parsed IR: mutating a comment node in the original must not
        # propagate to the clones (the IR body is deep-copied on clone)
        assert isinstance(source.ir.body[0], Comment)
        comment_text = source.ir.body[0].text
        new_comment_text = comment_text + ' some more text'
        source.ir.body[0]._update(text=new_comment_text)

        assert source.ir.body[0].text == new_comment_text
        assert new_source.ir.body[0].text == comment_text
        assert new_new_source.ir.body[0].text == comment_text
    else:
        # Incomplete (REGEX-parsed) source: the incomplete flag and the
        # parser classes must carry over to the clones and their program units
        assert new_source._incomplete
        assert new_new_source._incomplete

        assert source['other_routine']._incomplete
        assert new_source['new_name']._incomplete
        assert new_new_source['other_routine']._incomplete

        assert new_source['new_name']._parser_classes == source['other_routine']._parser_classes
        assert new_new_source['other_routine']._parser_classes == source['other_routine']._parser_classes

        mod = source['my_mod']
        new_mod = new_source['my_mod']
        new_new_mod = new_new_source['my_mod']

        assert mod._incomplete
        assert new_mod._incomplete
        assert new_new_mod._incomplete

        assert new_mod._parser_classes == mod._parser_classes
        assert new_new_mod._parser_classes == mod._parser_classes

        assert mod['my_routine']._incomplete
        assert new_mod['my_routine']._incomplete
        assert new_new_mod['new_mod_routine']._incomplete

        assert new_mod['my_routine']._parser_classes == mod['my_routine']._parser_classes
        assert new_new_mod['new_mod_routine']._parser_classes == mod['my_routine']._parser_classes
1 change: 1 addition & 0 deletions loki/transformations/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,3 +37,4 @@
from loki.transformations.loop_blocking import * # noqa
from loki.transformations.routine_signatures import * # noqa
from loki.transformations.parallel import * # noqa
from loki.transformations.dependency import * # noqa
13 changes: 11 additions & 2 deletions loki/transformations/build_system/plan.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,13 @@ def plan_file(self, sourcefile, **kwargs):
return

sourcepath = item.path.resolve()

# This makes sure the sourcepath does in fact exist. Combined with
# item duplication or other transformations we might end up adding
# items on-the-fly that did not exist before, with fake paths.
# There is possibly a better way of doing this, though.
source_exists = sourcepath.exists()

if self.rootpath is not None:
sourcepath = sourcepath.relative_to(self.rootpath)

Expand All @@ -88,14 +95,16 @@ def plan_file(self, sourcefile, **kwargs):
debug(f'Planning:: {item.name} (role={item.role}, mode={item.mode})')

if newsource not in self.sources_to_append:
self.sources_to_transform += [sourcepath]
if source_exists:
self.sources_to_transform += [sourcepath]
if item.replicate:
# Add new source file next to the old one
self.sources_to_append += [newsource]
else:
# Replace old source file to avoid ghosting
self.sources_to_append += [newsource]
self.sources_to_remove += [sourcepath]
if source_exists:
self.sources_to_remove += [sourcepath]

def write_plan(self, filepath):
"""
Expand Down
Loading

0 comments on commit 4ed8534

Please sign in to comment.