pipetex.pipeline
Definition of a pipeline object.
The pipeline executes multiple operations to compile a TeX file to a PDF file. The object is responsible for error handling and for directing the values returned from the operations.
@author: Max Weise
created: 29.07.2022
1"""Definition of a pipeline object. 2 3The pipeline executes multiple operations to compile a tex file to a 4pdf file. The object is responsible for errorhandling and direction of values 5returned form the operations. 6 7@author: Max Weise 8created: 29.07.2022 9""" 10 11from pipetex import enums 12from pipetex import exceptions 13from pipetex import operations 14 15from collections.abc import Callable 16from typing import Any, Optional, Tuple 17 18import logging 19 20 21# === Type Def === 22Monad = Tuple[bool, Optional[exceptions.InternalException]] 23OperationStep = Callable[[str, dict[str, Any]], Monad] 24 25 26class Pipeline: 27 """Representation of a pipeline object. Runs different tasks on the file. 28 29 The pipeline keeps a reference of which operations should be run in which 30 order. Also the pipeline will comunicate errors to the user by using 31 logging stements. 32 33 Common Usage: 34 p = Pipeline(file_name, True, False, False, False) 35 p.execute(p.file_name) 36 37 Attributes: 38 file_name: Name of the file which should be processed. 39 config_dict: Contains metadata which should be shared 40 with the operations. 41 oder_of_operations: List of operations which will be run on the file. 42 """ 43 44 file_name: str 45 config_dict: dict[str, Any] 46 order_of_operations: list[OperationStep] 47 48 def __init__(self, 49 file_name: str, 50 create_bib: Optional[bool] = False, 51 create_glo: Optional[bool] = False, 52 verbose: Optional[bool] = False, 53 ) -> None: 54 """Initialize a pipeline object. 55 56 Args: 57 file_name: Name of the file which will be processed. 58 create_bib: Create a bibliography. Defaults to false. 59 create_glo: Create a glossary. Defaults to false. 60 verbose: Print console output of latex engines. Defaults to false. 61 """ 62 # Creating object logger 63 self.logger = logging.getLogger("main.pipeline") 64 65 # Create sequence of operations 66 self.order_of_operations = [ 67 operations.copy_latex_file, 68 operations.remove_draft_option, 69 operations.compile_latex_file 70 ] 71 72 if create_bib: 73 self.order_of_operations.append(operations.create_bibliograpyh) 74 75 if create_glo: 76 self.order_of_operations.append(operations.create_glossary) 77 78 self.order_of_operations.append(operations.compile_latex_file) 79 self.order_of_operations.append(operations.clean_working_dir) 80 81 # For some reason, the linter doesnt let me assign the dict as 82 # an instance variable of pipeline 83 self.config_dict = { # type: ignore 84 enums.ConfigDictKeys.VERBOSE.value: verbose, 85 enums.ConfigDictKeys.FILE_PREFIX.value: "[piped]" 86 } 87 88 self.file_name = file_name 89 90 def _set_error( 91 self, 92 current_error: Optional[exceptions.InternalException], 93 new_error: exceptions.InternalException 94 ) -> exceptions.InternalException: 95 """Compares two errors and returns the one with higher severity_level. 96 97 If the errors are equal in severity, the current_error will be kept, as 98 it may be the root cause for any further errors. 99 100 Args: 101 current_error: The error which is currently most important. 102 new_error: The error which is compared. 103 104 returns 105 InternalException: The error with higher severity_level. 106 """ 107 rv = new_error 108 if current_error and new_error < current_error: 109 rv = current_error 110 111 return rv 112 113 # TODO: Refactor this method to make it physically smaller. 114 def execute(self, file_name) -> Monad: 115 """Executes the operations defined by the constructor. 
116 117 Args: 118 file_name: The file which is processed by the operations. 119 120 Returns: 121 Monad: Tuple which holds a value indicating the success of the 122 pipeline and an error value if success is false. 123 """ 124 rv_success: bool = True 125 rv_error: Optional[exceptions.InternalException] = None 126 local_file_name = file_name 127 128 for operation in self.order_of_operations: 129 self.logger.debug(f"Now executing: {operation}") 130 success, error = operation(local_file_name, self.config_dict) 131 132 try: 133 local_file_name = self.config_dict[ 134 enums.ConfigDictKeys.NEW_NAME.value 135 ] 136 except KeyError: 137 # log this exception 138 # for now, eat it 139 pass 140 141 if error: 142 match error.severity_level: 143 case enums.SeverityLevels.LOW: 144 self.logger.warning( 145 "There has been a minor issue during the " 146 f"execution of {operation} which did not affect " 147 "the flow of the pipeline. For more information " 148 "please see the logfiles." 149 ) 150 151 self.logger.warning( 152 f""" Operation {operation} 153 154 SeverityLevel: {error.severity_level} 155 Error Message: {error.message} 156 Error Tpye: {error.error_tpye}""" 157 ) 158 159 rv_error = self._set_error(rv_error, error) 160 161 case enums.SeverityLevels.HIGH: 162 self.logger.warning( 163 "There has been an issue during the " 164 f"execution of {operation} which did not affect " 165 "the flow of the pipeline but my produce an " 166 "incorrect PDF file. For more information " 167 "please see the logfiles." 168 ) 169 170 self.logger.debug( 171 f""" Operation {operation} 172 173 SeverityLevel: {error.severity_level} 174 Error Message: {error.message} 175 Error Tpye: {error.error_tpye}""" 176 ) 177 178 rv_error = self._set_error(rv_error, error) 179 rv_success = False 180 181 case enums.SeverityLevels.CRITICAL: 182 self.logger.warning( 183 "There has been an issue during the " 184 f"execution of {operation} which caused the " 185 "pipeline to stop its execution. Please see the " 186 "logfiles to for more information." 187 ) 188 189 self.logger.critical( 190 f""" Operation {operation} 191 192 SeverityLevel: {error.severity_level} 193 Error Message: {error.message} 194 Error Tpye: {error.error_tpye}""" 195 ) 196 197 # Preemtive exit 198 return False, error 199 200 case _: 201 self.logger.warning( 202 "No recognized severity level to handle" 203 ) 204 pass 205 206 return rv_success, rv_error
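The two type aliases define the contract between the pipeline and its operations: an OperationStep receives the current file name and the shared config dictionary and returns a Monad, i.e. a success flag paired with an optional InternalException. The following is a minimal sketch of a custom operation that satisfies this signature; the function itself and the InternalException constructor arguments are illustrative assumptions and not part of pipetex.

import os
from typing import Any, Optional, Tuple

from pipetex import enums, exceptions

Monad = Tuple[bool, Optional[exceptions.InternalException]]


def check_source_exists(file_name: str, config_dict: dict[str, Any]) -> Monad:
    """Hypothetical OperationStep: verify that the .tex source file exists."""
    if not os.path.isfile(f"{file_name}.tex"):
        # Assumed InternalException signature (message, severity_level);
        # adjust to the real constructor if it differs.
        error = exceptions.InternalException(
            f"Source file {file_name}.tex not found",
            enums.SeverityLevels.CRITICAL,
        )
        return False, error

    return True, None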
class Pipeline:
Representation of a pipeline object. Runs different tasks on the file.
The pipeline keeps a reference of which operations should be run in which order. It also communicates errors to the user via logging statements.
Common Usage
p = Pipeline(file_name, True, False, False)
p.execute(p.file_name)
Attributes
- file_name: Name of the file which should be processed.
- config_dict: Contains metadata which should be shared with the operations.
- order_of_operations: List of operations which will be run on the file.
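A construction sketch based on the signature below, with the boolean flags spelled out as keyword arguments; "my_thesis" is a placeholder file name.

from pipetex.pipeline import Pipeline

# "my_thesis" is a placeholder; pass your own file name.
p = Pipeline("my_thesis", create_bib=True, create_glo=False, verbose=False)

# The constructor has assembled the operation sequence:
# copy_latex_file, remove_draft_option, compile_latex_file,
# create_bibliograpyh, compile_latex_file, clean_working_dir.
print(p.file_name)  # "my_thesis"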
Pipeline(file_name: str, create_bib: Optional[bool] = False, create_glo: Optional[bool] = False, verbose: Optional[bool] = False)
Initialize a pipeline object.
Args
- file_name: Name of the file which will be processed.
- create_bib: Create a bibliography. Defaults to False.
- create_glo: Create a glossary. Defaults to False.
- verbose: Print the console output of the LaTeX engines. Defaults to False.
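The flags only decide which steps are appended to order_of_operations; the values shared with the operations at run time live in config_dict, which the constructor seeds with the VERBOSE and FILE_PREFIX keys shown in the listing above. A small inspection sketch, using a placeholder file name:

from pipetex import enums
from pipetex.pipeline import Pipeline

p = Pipeline("report", create_glo=True, verbose=True)

print(p.config_dict[enums.ConfigDictKeys.VERBOSE.value])      # True
print(p.config_dict[enums.ConfigDictKeys.FILE_PREFIX.value])  # "[piped]"
print(len(p.order_of_operations))  # 6 steps: the glossary step is included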
def execute(self, file_name) -> Monad:
Executes the operations defined by the constructor.
Args
- file_name: The file which is processed by the operations.
Returns
Monad: A tuple holding a flag that indicates whether the pipeline succeeded, and an error value when success is False.
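Since execute returns a Monad rather than raising, callers are expected to inspect the tuple themselves. A handling sketch, assuming the error object exposes the message and severity_level attributes used in the source listing; the file name is a placeholder.

from pipetex import enums
from pipetex.pipeline import Pipeline

p = Pipeline("my_thesis", create_bib=True)  # placeholder file name
success, error = p.execute(p.file_name)

if success:
    print("PDF built successfully.")
    if error is not None:
        # Low-severity issues leave success == True but still report an error.
        print(f"Warning during build: {error.message}")
else:
    print(f"Pipeline failed: {error.message}")
    if error.severity_level == enums.SeverityLevels.CRITICAL:
        print("A critical step stopped the pipeline; see the logfiles.")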