# See the License for the specific language governing permissions and
# limitations under the License.

+ from logging import getLogger
from typing import NamedTuple
from urllib.parse import urlunparse
import os
import socket

+ from audit.utils import audit_background_task
from django.conf import settings
from django.core.management import BaseCommand
+ from django.db.models import Q
+ from django.utils import timezone

from cm.adcm_config.config import get_adcm_config
from cm.collect_statistics.collectors import ADCMEntities, BundleCollector, RBACCollector
from cm.collect_statistics.encoders import TarFileEncoder
from cm.collect_statistics.senders import SenderSettings, StatisticSender
- from cm.collect_statistics.storages import JSONFile, TarFileWithJSONFileStorage, TarFileWithTarFileStorage
+ from cm.collect_statistics.storages import JSONFile, TarFileWithJSONFileStorage
from cm.models import ADCM

SENDER_REQUEST_TIMEOUT = 15.0
- DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
+ DATE_TIME_FORMAT = "%Y-%m-%d %H:%M:%S"
+ DATE_FORMAT = "%Y-%m-%d"
+ STATISTIC_DIR = settings.TMP_DIR / "statistics"
+ STATISTIC_DIR.mkdir(exist_ok=True)

- collect_community = BundleCollector(date_format=DATE_FORMAT, include_editions=["community"])
- collect_enterprise = BundleCollector(date_format=DATE_FORMAT, include_editions=["enterprise"])
+ logger = getLogger("background_tasks")
+
+ collect_not_enterprise = BundleCollector(date_format=DATE_TIME_FORMAT, filters=[~Q(edition="enterprise")])
+ collect_all = BundleCollector(date_format=DATE_TIME_FORMAT, filters=[])


class URLComponents(NamedTuple):
@@ -64,18 +73,34 @@ def get_statistics_url() -> str:
    return urlunparse(components=URLComponents(scheme=scheme, netloc=netloc, path=url_path))


+ def get_enabled() -> bool:
+     if os.getenv("STATISTICS_ENABLED") is not None:
+         return os.environ["STATISTICS_ENABLED"].upper() in {"1", "TRUE"}
+
+     attr, _ = get_adcm_config(section="statistics_collection")
+     return bool(attr["active"])
+
+
class Command(BaseCommand):
    help = "Collect data and send to Statistic Server"

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def add_arguments(self, parser):
-         parser.add_argument("--full", action="store_true", help="collect all data")
-         parser.add_argument("--send", action="store_true", help="send data to Statistic Server")
-         parser.add_argument("--encode", action="store_true", help="encode data")
+         parser.add_argument(
+             "--mode",
+             choices=["send", "archive-all"],
+             help=(
+                 "'send' - collect archive with only community bundles and send to Statistic Server, "
+                 "'archive-all' - collect community and enterprise bundles to archive and return path to file"
+             ),
+             default="archive-all",
+         )

-     def handle(self, *_, full: bool, send: bool, encode: bool, **__):
+     @audit_background_task(start_operation_status="launched", end_operation_status="completed")
+     def handle(self, *_, mode: str, **__):
+         logger.debug(msg="Statistics collector: started")
        statistics_data = {
            "adcm": {
                "uuid": str(ADCM.objects.values_list("uuid", flat=True).get()),
@@ -84,47 +109,63 @@ def handle(self, *_, full: bool, send: bool, encode: bool, **__):
            },
            "format_version": "0.2",
        }
-         rbac_entries_data: dict = RBACCollector(date_format=DATE_FORMAT)().model_dump()
+         logger.debug(msg="Statistics collector: RBAC data preparation")
+         rbac_entries_data: dict = RBACCollector(date_format=DATE_TIME_FORMAT)().model_dump()
+         storage = TarFileWithJSONFileStorage(date_format=DATE_FORMAT)

-         community_bundle_data: ADCMEntities = collect_community()
-         community_storage = TarFileWithJSONFileStorage()
+         match mode:
+             case "send":
+                 logger.debug(msg="Statistics collector: 'send' mode is used")

-         community_storage.add(
-             JSONFile(
-                 filename="community.json",
-                 data={**statistics_data, **rbac_entries_data, **community_bundle_data.model_dump()},
-             )
-         )
-         community_archive = community_storage.gather()
+                 if not get_enabled():
+                     logger.debug(msg="Statistics collector: disabled")
+                     return

-         final_storage = TarFileWithTarFileStorage()
-         final_storage.add(community_archive)
+                 logger.debug(
+                     msg="Statistics collector: bundles data preparation, collect everything except 'enterprise' edition"
+                 )
+                 bundle_data: ADCMEntities = collect_not_enterprise()
+                 storage.add(
+                     JSONFile(
+                         filename=f"{timezone.now().strftime(DATE_FORMAT)}_statistics.json",
+                         data={**statistics_data, **rbac_entries_data, **bundle_data.model_dump()},
+                     )
+                 )
+                 logger.debug(msg="Statistics collector: archive preparation")
+                 archive = storage.gather()
+                 sender_settings = SenderSettings(
+                     url=get_statistics_url(),
+                     adcm_uuid=statistics_data["adcm"]["uuid"],
+                     retries_limit=int(os.getenv("STATISTICS_RETRIES", 10)),
+                     retries_frequency=int(os.getenv("STATISTICS_FREQUENCY", 1 * 60 * 60)),  # in seconds
+                     request_timeout=SENDER_REQUEST_TIMEOUT,
+                 )
+                 logger.debug(msg="Statistics collector: sender preparation")
+                 sender = StatisticSender(settings=sender_settings)
+                 logger.debug(msg="Statistics collector: statistics sending has started")
+                 sender.send([archive])
+                 logger.debug(msg="Statistics collector: sending statistics completed")
+
+             case "archive-all":
+                 logger.debug(msg="Statistics collector: 'archive-all' mode is used")
+                 logger.debug(msg="Statistics collector: bundles data preparation, collect everything")
+                 bundle_data: ADCMEntities = collect_all()
+                 storage.add(
+                     JSONFile(
+                         filename=f"{timezone.now().strftime(DATE_FORMAT)}_statistics.json",
+                         data={**statistics_data, **rbac_entries_data, **bundle_data.model_dump()},
+                     )
+                 )
+                 logger.debug(msg="Statistics collector: archive preparation")
+                 archive = storage.gather()

-         if full:
-             enterprise_bundle_data: ADCMEntities = collect_enterprise()
-             enterprise_storage = TarFileWithJSONFileStorage()
+                 logger.debug(msg="Statistics collector: archive encoding")
+                 encoder = TarFileEncoder(suffix=".enc")
+                 encoded_file = encoder.encode(path_file=archive)
+                 encoded_file = encoded_file.replace(STATISTIC_DIR / encoded_file.name)

-             enterprise_storage.add(
-                 JSONFile(
-                     filename="enterprise.json",
-                     data={**statistics_data, **rbac_entries_data, **enterprise_bundle_data.model_dump()},
-                 )
-             )
-             final_storage.add(enterprise_storage.gather())
-
-         final_archive = final_storage.gather()
-
-         if encode:
-             encoder = TarFileEncoder()
-             encoder.encode(final_archive)
-
-         if send:
-             sender_settings = SenderSettings(
-                 url=get_statistics_url(),
-                 adcm_uuid=statistics_data["adcm"]["uuid"],
-                 retries_limit=int(os.getenv("STATISTICS_RETRIES", 10)),
-                 retries_frequency=int(os.getenv("STATISTICS_FREQUENCY", 1 * 60 * 60)),  # in seconds
-                 request_timeout=SENDER_REQUEST_TIMEOUT,
-             )
-             sender = StatisticSender(settings=sender_settings)
-             sender.send([community_archive])
+                 self.stdout.write(f"Data saved in: {str(encoded_file.absolute())}")
+             case _:
+                 pass
+
+         logger.debug(msg="Statistics collector: finished")
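
For reference, a minimal sketch of how the reworked command could be driven from Python. The command name "collect_statistics_data" is an assumption (the diff does not show the module's file name); the call_command API is standard Django, and the --mode and STATISTICS_ENABLED behaviour mirrors the code above.

# Sketch only: "collect_statistics_data" is an assumed command name,
# since the diff does not include the module's file name.
import os

from django.core.management import call_command

# Default mode: archive community and enterprise bundles, encode the
# archive and print the path it was saved to.
call_command("collect_statistics_data", mode="archive-all")

# "send" mode: skip 'enterprise' bundles and ship the archive to the
# Statistic Server; STATISTICS_ENABLED ("1"/"true", any case) overrides
# the "statistics_collection" switch in the ADCM config.
os.environ["STATISTICS_ENABLED"] = "true"
call_command("collect_statistics_data", mode="send")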