removing remaining references to soma-workflow
denisri committed Oct 6, 2023
1 parent c67b9e8 commit 2ebd8c1
Showing 2 changed files with 20 additions and 84 deletions.
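Both hunks below follow the same pattern: workflow construction moves from the soma-workflow based pipeline_workflow.workflow_from_pipeline(...) to CapsulWorkflow(...), and serialization through soma_workflow.client.Helper.serialize is replaced by a plain json.dump. A minimal sketch of the post-commit call sequence, assembled from the hunks below; Pipeline1 is the test's own pipeline class, and temp_dir is a stand-in for the test's tempfile-managed directory (an assumption for this sketch):

# Sketch of the new-style usage this commit switches the tests to.
import json
import os
import tempfile

from capsul.api import executable
from capsul.execution_context import CapsulWorkflow
from capsul.pipeline.test.test_custom_nodes import Pipeline1

temp_dir = tempfile.mkdtemp()  # stand-in for the test fixture directory

# Old (removed): pipeline_workflow.workflow_from_pipeline(pipeline, create_directories=False)
#                followed by soma_workflow.client.Helper.serialize(path, wf)
pipeline = executable(Pipeline1)
pipeline.main_input = os.path.join(temp_dir, 'file')
pipeline.output_directory = os.path.join(temp_dir, 'out_dir')

# New: build the workflow directly and dump it as JSON
wf = CapsulWorkflow(pipeline, create_output_dirs=False)
with open(os.path.join(temp_dir, 'custom_nodes.workflow'), 'w') as f:
    json.dump(wf, f)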
74 changes: 20 additions & 54 deletions capsul/pipeline/test/test_custom_nodes.py
@@ -3,17 +3,19 @@
import unittest
from capsul.api import Process, Pipeline, executable
from capsul.pipeline import python_export
from capsul.execution_context import CapsulWorkflow
from soma.controller import File
import os
import os.path as osp
import tempfile
import sys
import shutil
import json


class TestProcess(Process):
__test__ = False

def __init__(self, definition):
super().__init__(definition)
self.add_field('in1', File, output=False)
@@ -72,6 +74,7 @@ def execute(self, context=None):
with open(self.in2) as f:
of.write(f.read())


class CatFileProcess(Process):
def __init__(self, definition):
super().__init__(definition)
@@ -84,6 +87,7 @@ def execute(self, context=None):
with open(fname) as f:
of.write(f.read())


class Pipeline1(Pipeline):
def pipeline_definition(self):
self.add_process('train1', TrainProcess1)
@@ -111,7 +115,7 @@ def pipeline_definition(self):
'intermediate_output',
'capsul.pipeline.custom_nodes.strcat_node.StrCatNode',
parameters={'parameters': ['base', 'sep',
'subject', 'suffix'],
'subject', 'suffix'],
'concat_plug': 'out_file',
'outputs': ['base'],
'param_types': ['Directory', 'str',
@@ -498,8 +502,7 @@ def test_custom_nodes_workflow(self):
pipeline = executable(Pipeline1)
pipeline.main_input = os.path.join(self.temp_dir, 'file')
pipeline.output_directory = os.path.join(self.temp_dir, 'out_dir')
wf = pipeline_workflow.workflow_from_pipeline(pipeline,
create_directories=False)
wf = CapsulWorkflow(pipeline, create_output_dirs=False)
self.assertEqual(len(wf.jobs), 7)
self.assertEqual(len(wf.dependencies), 6)
self.assertEqual(
@@ -515,11 +518,10 @@ def _test_loo_pipeline(self, pipeline2):
pipeline2.output_directory = os.path.join(self.temp_dir, 'out_dir')
pipeline2.test_output = os.path.join(self.temp_dir, 'out_dir',
'outputs')
wf = pipeline_workflow.workflow_from_pipeline(pipeline2,
create_directories=False)
import soma_workflow.client as swc
swc.Helper.serialize(os.path.join(self.temp_dir,
'custom_nodes.workflow'), wf)
wf = CapsulWorkflow(pipeline2, create_output_dirs=False)
with open(os.path.join(self.temp_dir, 'custom_nodes.workflow'), 'w') \
as f:
json.dump(wf, f)
#print('workflow:')
#print('jobs:', wf.jobs)
#print('dependencies:', sorted([(x[0].name, x[1].name) for x in wf.dependencies]))
@@ -575,11 +577,10 @@ def _test_cv_pipeline(self, pipeline):
pipeline.subjects = ['subject%d' % i for i in range(4)]
pipeline.output_directory = os.path.join(self.temp_dir, 'out_dir')
pipeline.fold = list(range(pipeline.nfolds))
wf = pipeline_workflow.workflow_from_pipeline(pipeline,
create_directories=False)
import soma_workflow.client as swc
swc.Helper.serialize(os.path.join(self.temp_dir,
'custom_nodes.workflow'), wf)
wf = CapsulWorkflow(pipeline, create_output_dirs=False)
with open(os.path.join(self.temp_dir, 'custom_nodes.workflow'), 'w') \
as f:
json.dump(wf, f)
#print('workflow:')
#print('jobs:', wf.jobs)
#print('n deps:', len(wf.dependencies))
@@ -649,17 +650,6 @@ def test_custom_nodes_py_io(self):
self.add_py_tmpfile(pyfname)
python_export.save_py_pipeline(pipeline, pyfname)
pipeline2 = executable(pyfname)
self._test_custom_nodes(pipeline)

@unittest.skip('XML is no longer supported')
def test_custom_nodes_xml_io(self):
pipeline = executable(Pipeline1)
xml_file = tempfile.mkstemp(suffix='_capsul.xml')
xmlfname = xml_file[1]
os.close(xml_file[0])
self.temp_files.append(xmlfname)
xml.save_xml_pipeline(pipeline, xmlfname)
pipeline2 = executable(xmlfname)
self._test_custom_nodes(pipeline2)

@unittest.skip('reimplementation expected for capsul v3')
@@ -673,17 +663,6 @@ def test_loo_py_io(self):
pipeline2 = executable(pyfname)
self._test_loo_pipeline(pipeline2)

@unittest.skip('XML is no longer supported')
def test_loo_xml_io(self):
pipeline = executable(PipelineLOO)
xml_file = tempfile.mkstemp(suffix='_capsul.xml')
xmlfname = xml_file[1]
os.close(xml_file[0])
self.temp_files.append(xmlfname)
xml.save_xml_pipeline(pipeline, xmlfname)
pipeline2 = executable(xmlfname)
self._test_loo_pipeline(pipeline2)

@unittest.skip('reimplementation expected for capsul v3')
def test_mapreduce(self):
pipeline = executable(PipelineMapReduce)
@@ -695,10 +674,9 @@ def test_mapreduce(self):
pipeline.nodes['cat'].files,
[os.path.join(pipeline.output_directory,
'%s_test_output' % pipeline.subjects[0]),
os.path.join(pipeline.output_directory,
os.path.join(pipeline.output_directory,
'%s_test_output' % pipeline.subjects[1])])
wf = pipeline_workflow.workflow_from_pipeline(pipeline,
create_directories=False)
wf = CapsulWorkflow(pipeline, create_output_dirs=False)
self.assertEqual(len(wf.jobs), 19)
#print(sorted([(d[0].name, d[1].name) for d in wf.dependencies]))
self.assertEqual(len(wf.dependencies), 28)
@@ -714,17 +692,6 @@ def test_cv_py_io(self):
pipeline2 = executable(pyfname)
self._test_cv_pipeline(pipeline2)

@unittest.skip('XML is no longer supported')
def test_cv_xml_io(self):
pipeline = executable(PipelineCV)
xml_file = tempfile.mkstemp(suffix='_capsul.xml')
xmlfname = xml_file[1]
os.close(xml_file[0])
self.temp_files.append(xmlfname)
xml.save_xml_pipeline(pipeline, xmlfname)
pipeline2 = executable(xmlfname)
self._test_cv_pipeline(pipeline2)


if __name__ == '__main__':
if '-v' in sys.argv[1:] or '--verbose' in sys.argv[1:]:
Expand Down Expand Up @@ -753,11 +720,10 @@ def test_cv_xml_io(self):
pipeline2.output_directory = '/tmp/out_dir'
pipeline2.nfolds = 4
pipeline2.fold = list(range(pipeline2.nfolds))
#wf = pipeline_workflow.workflow_from_pipeline(pipeline2,
#create_directories=False)
#wf = CapsulWorkflow(pipeline2, create_output_dirs=False)
view2 = PipelineDeveloperView(pipeline2, allow_open_controller=True,
show_sub_pipelines=True,
enable_edition=True)
show_sub_pipelines=True,
enable_edition=True)
view2.show()

app.exec_()
30 changes: 0 additions & 30 deletions capsul/pipeline/test/test_proc_with_outputs.py
@@ -2,7 +2,6 @@

import unittest
import os
import os.path as osp
import sys
import tempfile

@@ -205,35 +204,6 @@ def pipeline_definition(self):
self.add_link("pipeline_a.output->node_b.input")


# def setUpModule():
# global old_home
# global temp_home_dir
# # Run tests with a temporary HOME directory so that they are isolated from
# # the user's environment
# temp_home_dir = None
# old_home = os.environ.get('HOME')
# try:
# temp_home_dir = tempfile.mkdtemp('', prefix='soma_workflow')
# os.environ['HOME'] = temp_home_dir
# swconfig.change_soma_workflow_directory(temp_home_dir)
# except BaseException: # clean up in case of interruption
# if old_home is None:
# del os.environ['HOME']
# else:
# os.environ['HOME'] = old_home
# if temp_home_dir:
# shutil.rmtree(temp_home_dir)
# raise


# def tearDownModule():
# if old_home is None:
# del os.environ['HOME']
# else:
# os.environ['HOME'] = old_home
# shutil.rmtree(temp_home_dir)


class TestPipelineContainingProcessWithOutputs(unittest.TestCase):

def setUp(self):
