SND@LHC Software
Loading...
Searching...
No Matches
benchmark_api.py
Go to the documentation of this file.
1""" This module benchmarks some of the CDB API functions. """
2import datetime
3import logging
4import time
5
6import dummydata_generator
7from ..factory import APIFactory
8
9
# ---------------------Set up logger for testing-----------------
# Append benchmark results to a dedicated log file. A default (bare)
# Formatter emits just the message text, so the timing columns written
# below line up with the 'Method_name wall_time cpu_time' header.
log_file = logging.FileHandler(filename='benchmarking_dummydata.log', mode='a', encoding='utf-8')
log_file.setFormatter(logging.Formatter())
# Use the logging manager's registry instead of instantiating Logger
# directly, so other modules can fetch the same logger via
# logging.getLogger('benchmarking').
logger = logging.getLogger(name='benchmarking')
logger.setLevel(logging.INFO)
# A directly-instantiated Logger had no parent; disable propagation so the
# benchmark lines still never reach any root handlers.
logger.propagate = False
logger.addHandler(log_file)
logger.info(msg=time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()))
logger.info(msg='Benchmarking start - Record code execution time :')
logger.info(msg='Method_name wall_time cpu_time')
19
if __name__ == "__main__":
    # Provides access to the condition database api
    factory = APIFactory()
    cdb_api = factory.construct_DB_API(
        "conditionsDatabase/tests/test_mongodb/test_mongodb_config.yml")

    def _benchmark(label, operation):
        """Run *operation* (a zero-argument callable) and log its wall-clock
        and CPU time under *label*.

        The log line format '%s%15.6f%13.6f' reproduces the original
        per-benchmark lines and matches the 'Method_name wall_time cpu_time'
        header written at import time.
        """
        wall_time_start = time.time()
        # time.clock() was removed in Python 3.8; process_time() is the
        # portable replacement for measuring CPU time.
        cpu_time_start = time.process_time()
        operation()
        wall_time_end = time.time()
        cpu_time_end = time.process_time()
        logger.info(msg='%s%15.6f%13.6f' % (
            label, wall_time_end - wall_time_start, cpu_time_end - cpu_time_start))

    # Benchmarking adding one detector to the conditions database
    _benchmark('add One detector to the database',
               lambda: cdb_api.add_detector("detector_one"))

    # Benchmarking adding 100 detectors to the conditions database
    def _add_100_detectors():
        """Add detector_0 .. detector_99 to the conditions database."""
        # range(100) really adds 100 detectors (the original range(1, 100)
        # only added 99), and the counter must be converted to str before
        # concatenation ("detector_i" + i raised TypeError).
        for i in range(100):
            cdb_api.add_detector("detector_" + str(i))

    _benchmark('add 100 detectors to the database', _add_100_detectors)

    # Benchmarking listing detectors
    _benchmark('List detectors', cdb_api.list_detectors)

    # Benchmarking adding high hierarchy of detectors, subdetectors and each
    # has one condition, to the condition database
    group_detector_parent = []
    dummydata_generator.create_multilevel_detectors(5, 5, "detector", None, group_detector_parent)

    def _create_complex_structure():
        """Insert every (detector, parent) pair plus one calibration condition each."""
        for detector_and_parent in group_detector_parent:
            detector_name = detector_and_parent[0]
            detector_parent = detector_and_parent[1]
            cdb_api.add_detector(detector_name, detector_parent)
            values = {"T853_MA_853": [814, 973, 65]}

            # Detector ids are path-like: root detectors use their bare name,
            # children are addressed as "<parent path>/<name>".
            if detector_parent is None:
                reconstructed_detector_id = detector_name
            else:
                reconstructed_detector_id = detector_parent + "/" + detector_name

            cdb_api.add_condition(reconstructed_detector_id, "daniel", "tag1", values, "calibration")

    _benchmark('create Complex Structure', _create_complex_structure)

    # Deepest detector id of the 5-level hierarchy created above; reused by
    # the remaining benchmarks.
    deepest_detector_id = ("detector1/subdetector1/subsubdetector1/"
                           "subsubsubdetector1/subsubsubsubdetector1")

    # Benchmarking adding a massive condition to a specific
    # detector/subdetector in the conditions database when it has complex
    # structure
    daniel = dummydata_generator.create_big_daniel()
    _benchmark('add One massive Condition to the deepest level',
               lambda: cdb_api.add_condition(deepest_detector_id, "daniel", "tag2",
                                             daniel, "calibration"))

    # Benchmarking retrieving a condition which is located in the deepest
    # level of the hierarchy
    _benchmark('get a massive Condition by name from the deepest level',
               lambda: cdb_api.get_condition_by_name_and_tag(deepest_detector_id,
                                                             "daniel", "tag2"))

    # Benchmarking: retrieving conditions by tag from the deepest level of
    # the hierarchy
    _benchmark('get Conditions by tag from the deepest level',
               lambda: cdb_api.get_conditions_by_tag(deepest_detector_id, "tag1"))

    # Benchmarking: updating a condition which is located at the deepest
    # level of the hierarchy
    _benchmark('update a Condition at the deepest level',
               lambda: cdb_api.update_condition_by_name_and_tag(
                   deepest_detector_id, "daniel", "tag1", "2020-01-01"))
This class creates an instance of the specified database API.
Definition factory.py:15