cengal.performance_test_lib.versions.v_0.performance_test_lib
#!/usr/bin/env python
# coding=utf-8

# Copyright © 2012-2024 ButenkoMS. All rights reserved. Contacts: <gtalk@butenkoms.space>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import copy
import gc
from cengal.code_flow_control.gc import DisableGC
from contextlib import contextmanager
from cengal.code_flow_control.smart_values.versions.v_2 import ValueExistence
from cengal.parallel_execution.coroutines.coro_standard_services.lazy_print.versions.v_0.lazy_print import lprint
from cengal.time_management.cpu_clock_cycles import perf_counter
from cengal.time_management.repeat_for_a_time import Tracer, ClockType
from typing import Generator, Any

"""
Module Docstring
Docstrings: http://www.python.org/dev/peps/pep-0257/
"""

__author__ = "ButenkoMS <gtalk@butenkoms.space>"
__copyright__ = "Copyright © 2012-2024 ButenkoMS. All rights reserved. Contacts: <gtalk@butenkoms.space>"
__credits__ = ["ButenkoMS <gtalk@butenkoms.space>", ]
__license__ = "Apache License, Version 2.0"
__version__ = "4.4.1"
__maintainer__ = "ButenkoMS <gtalk@butenkoms.space>"
__email__ = "gtalk@butenkoms.space"
# __status__ = "Prototype"
__status__ = "Development"
# __status__ = "Production"


class PerformanceTestResult(Exception):
    def __init__(self, result):
        super(PerformanceTestResult, self).__init__()
        self.result = result


@contextmanager
def test_run_time(test_name: str, number_of_iterations: int, throw_result: bool = False, throw_result_anyway: bool = True, ignore_index=False) -> Generator[ValueExistence[int], Any, Any]:
    index = ValueExistence(True, copy.copy(number_of_iterations))
    start_time = perf_counter()
    exception_occures = False
    try:
        yield index
    except:
        if not throw_result_anyway:
            exception_occures = True
        raise
    finally:
        if not ignore_index:
            number_of_iterations -= index.value

        end_time = perf_counter()
        result_time = end_time - start_time
        if result_time > 0:
            text_result = f'>>> "{test_name}"\n\tIt was used {result_time} seconds to process {number_of_iterations} iterations.\n\tThere is {number_of_iterations / result_time} iterations per second\n'
        else:
            text_result = f'>>> "{test_name}"\n\tIt was used {result_time} seconds to process {number_of_iterations} iterations.\n'

        lprint(text_result)

        if (not exception_occures) and throw_result:
            result_data = dict()
            result_data['test_name'] = test_name
            result_data['result_time'] = result_time
            if result_time > 0:
                result_data['iterations_per_time_unit'] = number_of_iterations / result_time
            else:
                result_data['iterations_per_time_unit'] = None
            raise PerformanceTestResult(result_data)


def test_function_run_time(testable_function):
    """
    Use 'performance_test_lib__iterations_qnt=1000000' parameter to pass number of iterations
    :param testable_function: function
    :return:
    """
    def run_time_test_func(*args, **kwargs):

        test_name = ''
        if 'performance_test_lib__test_name' in kwargs:
            test_name = str(kwargs['performance_test_lib__test_name'])
            del kwargs['performance_test_lib__test_name']
        test_name = '{}: {}'.format(str(testable_function), test_name)

        number_of_iterations = 1
        if 'performance_test_lib__iterations_qnt' in kwargs:
            number_of_iterations = int(kwargs['performance_test_lib__iterations_qnt'])
            del kwargs['performance_test_lib__iterations_qnt']

        throw_result = False
        if 'performance_test_lib__throw_result' in kwargs:
            throw_result = kwargs['performance_test_lib__throw_result']
            del kwargs['performance_test_lib__throw_result']

        with test_run_time(test_name, number_of_iterations, throw_result) as index:
            while index.value > 0:
                testable_function(*args, **kwargs)
                index.value -= 1
    return run_time_test_func


def process_performance_test_results(tracer: Tracer, test_name: str, throw_result: bool = False):
    number_of_iterations = tracer.iterations_made
    result_time = tracer.time_spent
    iterations_per_time_unit = tracer.iter_per_time_unit
    print('>>> "{}"'.format(test_name))
    print('\t' + 'It was used', result_time, 'seconds to process', number_of_iterations, 'iterations.')
    print('\t' + 'There is', iterations_per_time_unit, 'iterations per second')

    if throw_result:
        result_data = (test_name, result_time, iterations_per_time_unit)
        raise PerformanceTestResult(result_data)


@contextmanager
def test_performance(test_name: str, run_time: float, throw_result: bool = False, clock_type=ClockType.perf_counter):
    tracer = Tracer(run_time, clock_type)
    try:
        yield tracer
    except:
        raise
    finally:
        process_performance_test_results(tracer, test_name, throw_result)


def test_function_performance(testable_function):
    """
    Use 'performance_test_lib__run_time=1.5' parameter to pass number of seconds to test
    :param testable_function: function
    :return:
    """
    def run_time_test_func(*args, **kwargs):

        test_name = ''
        if 'performance_test_lib__test_name' in kwargs:
            test_name = str(kwargs['performance_test_lib__test_name'])
            del kwargs['performance_test_lib__test_name']
        test_name = '{}: {}'.format(str(testable_function), test_name)

        run_time = 0
        if 'performance_test_lib__run_time' in kwargs:
            run_time = int(kwargs['performance_test_lib__run_time'])
            del kwargs['performance_test_lib__run_time']

        throw_result = False
        if 'performance_test_lib__throw_result' in kwargs:
            throw_result = kwargs['performance_test_lib__throw_result']
            del kwargs['performance_test_lib__throw_result']

        clock_type = ClockType.perf_counter
        if 'performance_test_lib__clock_type' in kwargs:
            clock_type = kwargs['performance_test_lib__clock_type']
            del kwargs['performance_test_lib__clock_type']

        with test_performance(test_name, run_time, throw_result, clock_type) as tracer:
            while tracer.iter():
                testable_function(*args, **kwargs)

    return run_time_test_func


class PrecisePerformanceTestTracer(Tracer):
    """
    Precise tracer.
    At first you need to use it as a usual Tracer. After tracing was done - use it as a fast `for i in range(...)` block

    Example of use:

        tr = PrecisePerformanceTestTracer(10.0)
        while tr.iter():
            i = '456'
            k = int('1243' + i)

        with tr as fast_iter:
            for i in fast_iter:
                i = '456'
                k = int('1243' + i)

        print('{} iter/s; {} seconds; {} iterations'.format(tr.iter_per_time_unit, tr.time_spent, tr.iterations_made))

    """

    def __init__(self,
                 run_time: float,
                 clock_type: ClockType = ClockType.perf_counter,
                 suppress_exceptions: bool = False,
                 turn_off_gc: bool = False
                 ):
        super().__init__(run_time, clock_type)
        self.suppress_exceptions = suppress_exceptions
        self.turn_off_gc = turn_off_gc
        self.gc_was_enabled = None

    def __enter__(self):
        self._relevant_start_time = self._start_time = self._relevant_stop_time = self._end_time = self._clock()
        self._relevant_number_of_iterations_at_start = 0
        if self.turn_off_gc:
            self.gc_was_enabled = gc.isenabled()
            gc.disable()

        return range(self._last_tracked_number_of_iterations)

    def __exit__(self, exc_type, exc_val, exc_tb):
        self._relevant_stop_time = self._end_time = self._clock()
        if self.turn_off_gc and self.gc_was_enabled:
            gc.enable()

        if self.suppress_exceptions:
            return True
class PerformanceTestResult(builtins.Exception):
Raised to deliver a performance-test result; the measurement payload is available as the `result` attribute.
Inherited Members
- builtins.BaseException
  - with_traceback
  - args
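When `throw_result` is enabled, the helpers in this module deliver their measurements by raising this exception. A minimal sketch of catching it (the payload layout follows `test_run_time`, which raises a dict; `test_performance` raises a tuple instead):

    try:
        with test_run_time('dict lookup', 1_000_000, throw_result=True) as index:
            while index.value > 0:
                _ = {'a': 1}.get('a')
                index.value -= 1
    except PerformanceTestResult as outcome:
        data = outcome.result  # {'test_name': ..., 'result_time': ..., 'iterations_per_time_unit': ...}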
@contextmanager
def test_run_time(test_name: str, number_of_iterations: int, throw_result: bool = False, throw_result_anyway: bool = True, ignore_index=False) -> Generator[cengal.code_flow_control.smart_values.versions.v_2.smart_values.ValueExistence[int], Any, Any]:
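This context manager has no docstring in the source; summarizing the code in the listing above: it yields a ValueExistence counter initialised to number_of_iterations, measures elapsed time with perf_counter on exit, reports iterations per second through lprint, and, when throw_result is set, raises PerformanceTestResult with the collected numbers. A minimal usage sketch (the loop body is a placeholder workload):

    with test_run_time('string concatenation', 100_000) as index:
        while index.value > 0:
            s = 'prefix-' + str(index.value)
            index.value -= 1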
def test_function_run_time(testable_function):
Use the 'performance_test_lib__iterations_qnt=1000000' keyword argument to pass the number of iterations.

:param testable_function: function
:return:
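A minimal sketch of decorating a function; the wrapper strips the performance_test_lib__* keyword arguments before calling the wrapped function and repeats the call the requested number of times:

    @test_function_run_time
    def join_parts(a, b):
        return '{}-{}'.format(a, b)

    join_parts('left', 'right',
               performance_test_lib__test_name='string join',
               performance_test_lib__iterations_qnt=1_000_000)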
def process_performance_test_results(tracer: cengal.time_management.repeat_for_a_time.versions.v_0.repeat_for_a_time__cython.Tracer, test_name: str, throw_result: bool = False):
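A minimal sketch of feeding it a manually driven Tracer (do_work is a hypothetical placeholder for the code under test):

    tracer = Tracer(2.0, ClockType.perf_counter)
    while tracer.iter():
        do_work()
    process_performance_test_results(tracer, 'manual tracer run')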
@contextmanager
def test_performance(test_name: str, run_time: float, throw_result: bool = False, clock_type=ClockType.perf_counter):
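A minimal usage sketch: the Tracer decides when the requested run time has elapsed, and the summary is printed when the block exits:

    with test_performance('int parsing', 1.5) as tracer:
        while tracer.iter():
            int('12345')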
def test_function_performance(testable_function):
Use the 'performance_test_lib__run_time=1.5' keyword argument to pass the number of seconds to test.

:param testable_function: function
:return:
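A minimal sketch of decorating a function; note that the wrapper passes the run time through int(), so fractional values are truncated:

    @test_function_performance
    def parse_number(text):
        return int(text)

    parse_number('98765',
                 performance_test_lib__test_name='int() call',
                 performance_test_lib__run_time=2)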
class PrecisePerformanceTestTracer(cengal.time_management.repeat_for_a_time.versions.v_0.repeat_for_a_time__cython.Tracer):
Precise tracer.

First use it as a usual Tracer; once tracing is done, use it as a fast `for i in range(...)` block.
Example of use:

    tr = PrecisePerformanceTestTracer(10.0)
    while tr.iter():
        i = '456'
        k = int('1243' + i)

    with tr as fast_iter:
        for i in fast_iter:
            i = '456'
            k = int('1243' + i)

    print('{} iter/s; {} seconds; {} iterations'.format(tr.iter_per_time_unit, tr.time_spent, tr.iterations_made))
PrecisePerformanceTestTracer(run_time: float, clock_type: cengal.time_management.repeat_for_a_time.versions.v_0.repeat_for_a_time__cython.ClockType = ClockType.perf_counter, suppress_exceptions: bool = False, turn_off_gc: bool = False)
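A minimal sketch of the two extra constructor options: turn_off_gc disables the garbage collector for the duration of the with block (the fast replay phase) and re-enables it afterwards, while suppress_exceptions makes __exit__ swallow any exception raised inside that block; payload is a hypothetical workload function:

    tr = PrecisePerformanceTestTracer(5.0, turn_off_gc=True)
    while tr.iter():          # calibration pass: counts iterations for 5 seconds
        payload()

    with tr as fast_iter:     # replay the same number of iterations with GC disabled
        for _ in fast_iter:
            payload()

    print('{} iter/s; {} seconds; {} iterations'.format(tr.iter_per_time_unit, tr.time_spent, tr.iterations_made))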
Inherited Members
- cengal.time_management.repeat_for_a_time.versions.v_0.repeat_for_a_time__cython.Tracer
  - iter
  - iter_per_time_unit
- cengal.time_management.repeat_for_a_time.versions.v_0.repeat_for_a_time__cython.BaseTracer
  - iterations_made
  - total_number_of_iterations_made
  - time_spent
  - total_amount_of_time_spent
  - clock_type