Wednesday, 20 March 2024

Managing processes in Python (and killing them on script exit)

'''
This example demonstrates how to manage processes in a singleton. The main
problem is that we have to kill all children of a process as well, which means
we cannot lose the reference to the process object. Also, no matter what, we
want to terminate our child processes when this script ends; we do this via
atexit. Otherwise, e.g. when we start a server, it keeps running even after
this script ends.

You can use Processes either as a tool to kill processes properly (recursively)
or as a context manager to define the lifetime of a process.
'''
import subprocess
import psutil
import atexit
import shlex
import os
import time
from threading import Lock
from typing import Optional
from pathlib import Path


class Processes:
    """A singleton class for managing subprocesses.

    Provides methods to run commands or scripts as subprocesses, with support
    for terminating individual processes or all subprocesses upon script exit
    or context block exit.
    """
    _instance = None
    _lock = Lock()

    def __new__(cls):
        """Ensure only one instance of the class is created."""
        with cls._lock:
            if cls._instance is None:
                cls._instance = super(Processes, cls).__new__(cls)
                cls._subprocesses: list[subprocess.Popen] = []
                atexit.register(cls._instance.terminate_all)
        return cls._instance

    def run(self, command: str | list[str], cwd: Optional[str] = None,
            shell: bool = False) -> subprocess.Popen:
        """Starts a subprocess with the given command.

        Args:
            command: The command to execute. If it's a string and shell is
                False, it will be split using shlex.
            cwd: The working directory to execute the command in.
            shell: Whether to execute the command through the shell.

        Returns:
            The subprocess.Popen object for the started subprocess.
        """
        if isinstance(command, str) and not shell:
            # With shell=True the command must stay a single string,
            # so only split it for direct execution.
            command = shlex.split(command)
        print(command, cwd, shell)
        process = subprocess.Popen(command, cwd=cwd, shell=shell)
        self._subprocesses.append(process)
        return process

    def run_scripts(self, path: Path | str) -> list[subprocess.Popen]:
        """Executes each script found in the specified directory path.

        Only files with execute permissions are considered.

        Args:
            path: The directory path containing scripts to execute.

        Returns:
            A list of subprocess.Popen objects for the started subprocesses.
        """
        path = Path(path)  # Ensure path is a Path object
        processes = []
        for script_path in path.iterdir():
            if script_path.is_file() and os.access(script_path, os.X_OK):
                process = self.run(str(script_path), shell=True)
                processes.append(process)
        return processes

    def terminate(self, process: subprocess.Popen) -> None:
        """Terminates a specific subprocess and all of its children.

        Args:
            process: The subprocess.Popen object to terminate.
        """
        if process.poll() is None:  # Process is still running
            try:
                parent = psutil.Process(process.pid)
                for child in parent.children(recursive=True):
                    child.terminate()
                parent.terminate()
                parent.wait(3)  # Wait up to 3 seconds for the parent to exit
            except psutil.NoSuchProcess:
                pass  # Process already terminated
        if process in self._subprocesses:
            self._subprocesses.remove(process)

    def terminate_all(self) -> None:
        """Terminates all subprocesses started by this class."""
        for process in list(self._subprocesses):  # Copy the list for iteration
            self.terminate(process)

    def __enter__(self) -> 'Processes':
        """Enables use of the class as a context manager."""
        return self

    def __exit__(self, exc_type, exc_val, exc_tb) -> None:
        """Ensures all subprocesses are terminated when exiting a context block."""
        self.terminate_all()


# Example usage as a singleton:
process_manager = Processes()
proc = process_manager.run('ls -l')
time.sleep(2)
process_manager.terminate(proc)

# Running scripts from a specified directory path and terminating one
# procs = process_manager.run_scripts('/path/to/scripts')
# if procs:
#     process_manager.terminate(procs[0])

# Example usage as a context manager:
with Processes() as pm:
    proc = pm.run('sleep 10')
    time.sleep(2)
    # pm.run('clear', shell=True)