 from loky import get_reusable_executor
 
 from laygo.context.types import IContextHandle
+from laygo.context.types import IContextManager
 from laygo.transformers.strategies.types import ExecutionStrategy
 from laygo.transformers.types import InternalTransformer
 
@@ -16,37 +17,33 @@ def _worker_process_chunk[In, Out](
     context_handle: IContextHandle,
     chunk: list[In],
 ) -> list[Out]:
-    """
-    Top-level function executed by each worker process.
-    It reconstructs the context proxy from the handle and runs the transformation.
-    """
+    """Top-level function executed by each worker process."""
     context_proxy = context_handle.create_proxy()
     try:
         return transformer_logic(chunk, context_proxy)
     finally:
-        # The proxy's shutdown is a no-op, but it's good practice to call it.
         context_proxy.shutdown()
 
 
 class ProcessStrategy[In, Out](ExecutionStrategy[In, Out]):
+    """Execute transformer logic using a process pool."""
+
     def __init__(self, max_workers: int = 4, ordered: bool = True):
         self.max_workers = max_workers
         self.ordered = ordered
 
-    def execute(self, transformer_logic, chunk_generator, data, context):
+    def execute(
+        self,
+        transformer_logic: InternalTransformer[In, Out],
+        chunks: Iterator[list[In]],
+        context: IContextManager,
+    ) -> Iterator[list[Out]]:
         """Execute the transformer by distributing chunks to a process pool."""
-
-        # Get the picklable handle from the context manager.
         context_handle = context.get_handle()
-
         executor = get_reusable_executor(max_workers=self.max_workers)
-        chunks_to_process = chunk_generator(data)
 
         gen_func = self._ordered_generator if self.ordered else self._unordered_generator
-
-        processed_chunks_iterator = gen_func(chunks_to_process, transformer_logic, executor, context_handle)
-        for result_chunk in processed_chunks_iterator:
-            yield from result_chunk
+        yield from gen_func(chunks, transformer_logic, executor, context_handle)
 
     def _ordered_generator(
         self,
@@ -69,22 +66,16 @@ def _ordered_generator(
 
         try:
             while futures:
-                # Get the result of the oldest task. If it failed or the pool
-                # is broken, .result() will raise an exception.
                 result = futures.popleft().result()
 
-                # If successful, submit a new task.
                 try:
                     chunk = next(chunks_iter)
                     futures.append(executor.submit(_worker_process_chunk, transformer, context_handle, chunk))
                 except StopIteration:
-                    # No more chunks to process.
                     pass
 
                 yield result
         finally:
-            # This cleanup runs if the loop finishes or if an exception occurs.
-            # It prevents orphaned processes by cancelling pending tasks.
             for future in futures:
                 future.cancel()
             if futures:
@@ -104,27 +95,18 @@ def _unordered_generator(
         }
 
         try:
-            # as_completed is ideal for this "process as they finish" pattern
             for future in as_completed(futures):
-                # Get the result. This raises an exception if the task failed,
-                # which immediately stops the loop and proceeds to finally.
                 result = future.result()
-
-                # Remove the completed future from our tracking set
                 futures.remove(future)
 
-                # Try to submit a new task to replace the one that just finished
                 try:
                     chunk = next(chunks_iter)
                     futures.add(executor.submit(_worker_process_chunk, transformer, context_handle, chunk))
                 except StopIteration:
-                    # No more chunks left to submit.
                     pass
 
                 yield result
         finally:
-            # Clean up any futures that were still running or pending when
-            # an exception occurred or the input was exhausted.
            for future in futures:
                 future.cancel()
             if futures:
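
Usage sketch for the new execute() signature: callers now pass pre-built chunks and an IContextManager directly, and receive an iterator of result chunks. Everything below that is not in the diff (the import path, _NullContext/_NullHandle/_NullProxy, double_chunk, and chunked) is an illustrative assumption, not laygo API.

from collections.abc import Iterator

from laygo.transformers.strategies.process import ProcessStrategy  # import path assumed


class _NullProxy:
    """Stand-in worker-side context; the worker only calls shutdown() on it."""

    def shutdown(self) -> None:
        pass


class _NullHandle:
    """Picklable stand-in for IContextHandle; rebuilt into a proxy in each worker."""

    def create_proxy(self) -> _NullProxy:
        return _NullProxy()


class _NullContext:
    """Duck-typed stand-in for IContextManager; only get_handle() is exercised here."""

    def get_handle(self) -> _NullHandle:
        return _NullHandle()


def double_chunk(chunk: list[int], ctx: _NullProxy) -> list[int]:
    # Plays the role of the InternalTransformer the pipeline would normally supply.
    return [x * 2 for x in chunk]


def chunked(data: list[int], size: int) -> Iterator[list[int]]:
    # The caller pre-chunks the input; execute() no longer takes a chunk_generator.
    for i in range(0, len(data), size):
        yield data[i : i + size]


if __name__ == "__main__":
    strategy = ProcessStrategy(max_workers=2, ordered=True)
    for out_chunk in strategy.execute(double_chunk, chunked(list(range(10)), 3), _NullContext()):
        print(out_chunk)  # [0, 2, 4], then [6, 8, 10], [12, 14, 16], [18]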