source: people/peter.buschman/backup_monitoring/backmon/commands/db/subcommands/update.py

Last change on this file was 976, checked in by peter, on Dec 6, 2011 at 10:19:33 AM

Raw checkin of current NetBackup / TSM parsing code.

File size: 48.4 KB
#!/usr/bin/python26
###
### backmon.commands.db.update
###

import sys
import os
import os.path
import glob
import re
import datetime # datetime.datetime is used below; not guaranteed to arrive via the wildcard imports

from optparse import OptionParser
from guppy import hpy # guppy heap profiler; imported but not used in this module

from ....lib import *

from backup_monitoring.debug import *
from backup_monitoring.math import *

from backup_monitoring.dal.builtins import *

from backup_monitoring.parsing.parsers import ontap_df
from backup_monitoring.parsing.parsers import sis_status_l
from backup_monitoring.parsing.parsers import reallocate_status_v
from backup_monitoring.parsing.parsers import bpimagelist
from backup_monitoring.parsing.parsers import bpplclients
from backup_monitoring.parsing.parsers import bppllist
from backup_monitoring.parsing.parsers import bpstulist
from backup_monitoring.parsing.parsers import bpdbjobs
from backup_monitoring.parsing.parsers import du

METRIC_LIST = [
    'filers',
    'filer_volume_tasks',
]

METRIC_HELP = {
    'filers'             : 'netapp filer metrics',
    'filer_volume_tasks' : 'netapp filer volume tasks (sis / reallocation)',
}
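
#
# NOTE: run() below also handles the policies, clients, stunits, retlevels,
# and daily_statistics metrics; only the filer metrics are advertised in
# METRIC_LIST and METRIC_HELP (and therefore in the usage text).
#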

##
## OPTIONS
##
usage = """Usage: %prog [options] -e [ENVIRONMENT] db update [metrics]

 available metrics:

""" + '\n'.join(['  %-20s %s' % (cmd, METRIC_HELP[cmd]) for cmd in METRIC_LIST])

parser = OptionParser(usage=usage)
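
#
# Example invocation (hypothetical environment name; assumes the usual
# backmon command-line wrapper):
#
#   backmon -e PROD db update filers filer_volume_tasks
#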

def run(args, kwargs):
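    """
    Update backup-monitoring database tables for the requested metrics.

    args -- metric names to update (see METRIC_LIST and the blocks below)
    kwargs -- caller-supplied globals (e.g. CONN, CONFIG, LOGGER,
    ENVIRONMENTS, TIMESTAMP_ISO); the exact set depends on the dispatcher
    and is bound into the local namespace below
    """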

    #
    # add kwargs to the local namespace
    #
    # Keys that look like module-level constants (e.g. CONN, CONFIG, LOGGER,
    # ENVIRONMENTS, SERVER_PATHS, TIMESTAMP_ISO) are bound as local variables
    # via exec; this relies on Python 2 exec semantics.
    #
    for key in kwargs.keys():

        if re.compile('^[A-Z][A-Z_]+$').match(key):
            exec(key + ' = kwargs[\'' + key + '\']')

    (options, args) = parser.parse_args(args)

    #
    # Database
    #
    conn = CONN
    curs = conn.cursor()

    #
    # Metrics
    #
    metrics = args

    INFO('Metrics to update: %s' % (', '.join(metrics)))

    for metric in metrics:

        DEBUG('metric = %s' % (metric))

        #
        # policies
        #
        if metric in ['policies', 'daily_statistics']:

            master_feeds = ['bppllist']
            media_feeds = []

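            # lookup_table() builds a {name: id} dict from a two-column result
            # set and one_value() fetches a single scalar; both arrive via the
            # wildcard import from backup_monitoring.dal.builtins (semantics
            # inferred from usage here).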
            environment_ids = lookup_table(curs, "SELECT name, environment_id FROM environments")
            max_environment_id = one_value(curs, "SELECT MAX(environment_id) FROM environments")

            for environment in ENVIRONMENTS:

                environment_name = environment.name

                INFO('environment = %s' % (environment_name))

                if environment_name not in environment_ids:
                    WARN('Environment %s does not exist in the database!' % (environment_name))
                    continue

                environment_id = environment_ids[environment_name]

                environment.load_feeds(master=master_feeds, media=media_feeds)
                environment.parse_policies()

                active_policy_ids = lookup_table(curs, "SELECT name, policy_id FROM policies WHERE environment_id = %d AND active = 'Y'" % (environment_id))
                max_policy_id = one_value(curs, "SELECT MAX(policy_id) FROM policies")

                policy_count = 0

                for policy in environment.policies:

                    policy_count += 1

                    policy_name = policy.policy_name
                    policy_type = policy.policy_type

                    if policy_name not in active_policy_ids:

                        max_policy_id += 1
                        policy_id = max_policy_id

                        source = 'bppllist'
                        active = 'Y'

                        sql = """
                        INSERT INTO policies (policy_id, environment_id, name, type, description, comment, source, commissioned, active, added, updated, seen)
                        VALUES (%d, %d, '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s')
                        """ % (policy_id, environment_id, policy_name, policy_type, '', '', source, TIMESTAMP_ISO, active, TIMESTAMP_ISO, TIMESTAMP_ISO, TIMESTAMP_ISO)

                        print sql

                        curs.execute(sql)
                        conn.commit()

                        active_policy_ids[policy_name] = policy_id

                INFO('policy_count = %d' % (policy_count))

        #
        # clients
        #
        if metric in ['clients', 'daily_statistics']:

            master_feeds = ['bpplclients']
            media_feeds = []

            environment_ids = lookup_table(curs, "SELECT name, environment_id FROM environments")
            max_environment_id = one_value(curs, "SELECT MAX(environment_id) FROM environments")

            for environment in ENVIRONMENTS:

                environment_name = environment.name

                INFO('environment = %s' % (environment_name))

                if environment_name not in environment_ids:
                    WARN('Environment %s does not exist in the database!' % (environment_name))
                    continue

                environment_id = environment_ids[environment_name]

                environment.load_feeds(master=master_feeds, media=media_feeds)
                environment.parse_clients()

                client_ids = lookup_table(curs, "SELECT name, client_id FROM clients WHERE environment_id = %d" % (environment_id))
                max_client_id = one_value(curs, "SELECT MAX(client_id) FROM clients")

                client_count = 0

                for client in environment.clients:

                    client_count += 1

                    client_name = client.name
                    hardware = client.hw
                    os = client.os # NB: shadows the os module inside run(); harmless, since os is not otherwise used in this function
                    priority = client.pri

                    if client_name not in client_ids:

                        max_client_id += 1
                        client_id = max_client_id

                        source = 'bpplclients'
                        active = 'Y'

                        sql = """
                        INSERT INTO clients (client_id, environment_id, name, description, comment, source, hardware, os, priority, commissioned, active, added, updated, seen)
                        VALUES (%d, %d, '%s', '%s', '%s', '%s', '%s', '%s', %d, '%s', '%s', '%s', '%s', '%s')
                        """ % (client_id, environment_id, client_name, '', '', source, hardware, os, priority, TIMESTAMP_ISO, active, TIMESTAMP_ISO, TIMESTAMP_ISO, TIMESTAMP_ISO)

                        print sql

                        curs.execute(sql)
                        conn.commit()

                        client_ids[client_name] = client_id

                INFO('client_count = %d' % (client_count))

        #
        # stunits
        #
        if metric == 'stunits':

            master_feeds = ['bpstulist']
            media_feeds = []

            environment_ids = lookup_table(curs, "SELECT name, environment_id FROM environments")
            max_environment_id = one_value(curs, "SELECT MAX(environment_id) FROM environments")

            for environment in ENVIRONMENTS:

                environment_name = environment.name

                INFO('environment = %s' % (environment_name))

                if environment_name not in environment_ids:
                    WARN('Environment %s does not exist in the database!' % (environment_name))
                    continue

                environment_id = environment_ids[environment_name]

                environment.load_feeds(master=master_feeds, media=media_feeds)
                environment.parse_stunits()

                stunit_ids = lookup_table(curs, "SELECT name, stunit_id FROM stunits WHERE environment_id = %d" % (environment_id))
                max_stunit_id = one_value(curs, "SELECT MAX(stunit_id) FROM stunits")

                stunit_count = 0

                for stunit in environment.stunits:

                    stunit_count += 1

                    stunit_name = stunit.label

                    if stunit_name not in stunit_ids:

                        max_stunit_id += 1
                        stunit_id = max_stunit_id

                        sql = """
                        INSERT INTO stunits (stunit_id, environment_id, name, description, commissioned, active, added, updated, seen)
                        VALUES (%d, %d, '%s', '%s', '%s', '%s', '%s', '%s', '%s')
                        """ % (stunit_id, environment_id, stunit_name, '', TIMESTAMP_ISO, 'Y', TIMESTAMP_ISO, TIMESTAMP_ISO, TIMESTAMP_ISO)

                        print sql

                        curs.execute(sql)
                        conn.commit()

                        stunit_ids[stunit_name] = stunit_id

                INFO('stunit_count = %d' % (stunit_count))

        #
        # retention levels
        #
        if metric == 'retlevels':

            master_feeds = ['bpretlevel']
            media_feeds = []

            environment_ids = lookup_table(curs, "SELECT name, environment_id FROM environments")
            max_environment_id = one_value(curs, "SELECT MAX(environment_id) FROM environments")

            for environment in ENVIRONMENTS:

                environment_name = environment.name

                INFO('environment = %s' % (environment_name))

                if environment_name not in environment_ids:
                    WARN('Environment %s does not exist in the database!' % (environment_name))
                    continue

                environment_id = environment_ids[environment_name]

                environment.load_feeds(master=master_feeds, media=media_feeds)
                environment.parse_retlevels()

                retlevel_ids = lookup_table(curs, "SELECT retention_level, retlevel_id FROM retlevels WHERE environment_id = %d" % (environment_id))
                max_retlevel_id = one_value(curs, "SELECT MAX(retlevel_id) FROM retlevels")

                retlevel_count = 0

                for retlevel in environment.retlevels:

                    retlevel_count += 1

                    retention_level = retlevel.retention_level
                    equivalent_days = retlevel.equivalent_days
                    retention_seconds = retlevel.retention_seconds
                    retention_period = retlevel.retention_period

                    if retention_level not in retlevel_ids:

                        max_retlevel_id += 1
                        retlevel_id = max_retlevel_id

                        sql = """
                        INSERT INTO retlevels (retlevel_id, environment_id, retention_level, equivalent_days, retention_seconds, retention_period, added, updated)
                        VALUES (%d, %d, %d, %d, %d, '%s', '%s', '%s')
                        """ % (retlevel_id, environment_id, retention_level, equivalent_days, retention_seconds, retention_period, TIMESTAMP_ISO, TIMESTAMP_ISO)

                        print sql

                        curs.execute(sql)
                        conn.commit()

                        retlevel_ids[retention_level] = retlevel_id

                INFO('retlevel_count = %d' % (retlevel_count))

        #
        # NetApp Filers
        #
        if metric == 'filers':

            INFO('Updating filers..')

            filers = CONFIG['NETAPP']['filers']

            host_ids = lookup_table(curs, "SELECT name, host_id FROM hosts")
            max_host_id = one_value(curs, "SELECT MAX(host_id) FROM hosts")

            INFO('max_host_id = %d' % (max_host_id))

            filer_ids = lookup_table(curs, "SELECT h.name, f.filer_id FROM filers AS f INNER JOIN hosts AS h ON h.host_id = f.host_id")
            max_filer_id = one_value(curs, "SELECT MAX(filer_id) FROM filers")

            INFO('max_filer_id = %d' % (max_filer_id))

            for filer in filers:

                if filer not in host_ids:

                    max_host_id += 1
                    host_id = max_host_id

                    sql = """
                    INSERT INTO hosts (host_id, name, alias, address, description, active, added, updated)
                    VALUES (%d, '%s', '%s', '%s', '%s', '%s', '%s', '%s')
                    """ % (host_id, filer, filer, filer, '', 'Y', UTCTIMESTAMP_ISO, UTCTIMESTAMP_ISO)

                    print sql

                    curs.execute(sql)

                    host_ids[filer] = host_id

                    max_filer_id += 1
                    filer_id = max_filer_id

                    sql = """
                    INSERT INTO filers (filer_id, host_id)
                    VALUES (%d, %d)
                    """ % (filer_id, host_id)

                    print sql

                    curs.execute(sql)

                    filer_ids[filer] = filer_id

                    conn.commit()

                volumes = ExtendedDict()

                if filer in SERVER_PATHS:

                    INFO('+ %s' % (filer))

                    filer_id = filer_ids[filer]

                    volume_ids = lookup_table(curs, "SELECT name, volume_id FROM filer_volumes WHERE filer_id = %d" % (filer_id))
                    max_volume_id = one_value(curs, "SELECT MAX(volume_id) FROM filer_volumes")

                    updates = Updates(LOGGER, SERVER_PATHS[filer])

                    for record in ontap_df.stream(updates.feed('df_s'), format='df -s'):

                        obj = ontap_df.parse(record)

                        if obj is not None:

                            filesystem = obj.filesystem
                            used = obj.used
                            saved = obj.saved
                            pct_saved = obj.pct_saved

                            volume_name = filesystem.split('/')[-1]
                            volume_path = filesystem

                            if volume_name not in volume_ids:

                                max_volume_id += 1
                                volume_id = max_volume_id

                                sql = """
                                INSERT INTO filer_volumes (volume_id, filer_id, name, path, description, active, added, updated)
                                VALUES (%d, %d, '%s', '%s', '%s', '%s', '%s', '%s')
                                """ % (volume_id, filer_id, volume_name, volume_path, '', 'Y', UTCTIMESTAMP_ISO, UTCTIMESTAMP_ISO)

                                print sql

                                curs.execute(sql)

                                volume_ids[volume_name] = volume_id

                            INFO(' df %s' % (filesystem))

                            volume_id = volume_ids[volume_name]

                            if volume_name not in volumes:
                                volumes[volume_name] = ExtendedDict()

                            volumes[volume_name]['volume_id'] = volume_id
                            volumes[volume_name]['name'] = volume_name
                            volumes[volume_name]['path'] = volume_path
                            volumes[volume_name]['kb_used'] = used
                            volumes[volume_name]['kb_saved'] = saved
                            volumes[volume_name]['pct_saved'] = pct_saved

                            # defaults; overwritten below if sis / reallocate report on this volume
                            volumes[volume_name]['sis_schedule'] = ''
                            volumes[volume_name]['sis_status'] = ''
                            volumes[volume_name]['reallocation_schedule'] = ''
                            volumes[volume_name]['reallocation_status'] = ''

                    for record in sis_status_l.stream(updates.feed('sis_status_l'), format='sis status -l'):

                        obj = sis_status_l.parse(record)

                        if obj is not None:

                            path = obj.path
                            status = obj.status
                            state = obj.state
                            schedule = obj.schedule

                            volume_name = path.split('/')[-1]

                            if volume_name in volumes: # skip volumes not present in the df output above

                                volumes[volume_name]['sis_schedule'] = schedule
                                volumes[volume_name]['sis_status'] = status

                            INFO(' sis %s' % (path))

                    for record in reallocate_status_v.stream(updates.feed('reallocate_status_v'), format='reallocate status -v'):

                        obj = reallocate_status_v.parse(record)

                        if obj is not None:

                            volume = obj.volume
                            state = obj.state
                            schedule = obj.schedule

                            volume_name = volume.split('/')[-1]

                            if volume_name in volumes:

                                volumes[volume_name]['reallocation_schedule'] = schedule
                                volumes[volume_name]['reallocation_status'] = state

                            INFO(' reallocate %s' % (volume))

                    # NB: iterating an ExtendedDict is assumed to yield the per-volume
                    # records themselves (attribute access below), not their keys
                    for volume in volumes:

                        volume_id = volume.volume_id
                        sis_status = volume.sis_status
                        reallocation_status = volume.reallocation_status
                        kb_used = volume.kb_used
                        kb_saved = volume.kb_saved
                        pct_saved = volume.pct_saved

                        sql = """
                        INSERT INTO filer_volume_status (timestamp, volume_id, sis_status, reallocation_status, kb_used, kb_saved, pct_saved)
                        VALUES ('%s', %d, '%s', '%s', %d, %d, %d)
                        """ % (UTCTIMESTAMP_ISO, volume_id, sis_status, reallocation_status, kb_used, kb_saved, pct_saved)

                        print sql

                        curs.execute(sql)

                    conn.commit()

                else:

                    INFO('- %s' % (filer))

            INFO('Done updating filers.')

        #
        # NetApp Filer Volume Tasks
        #
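        # A week of filer_volume_status samples is replayed as a small state
        # machine: a volume becomes eligible once it has been seen idle, an
        # 'Active' sample then opens a SIS or reallocation task, and the next
        # non-Active sample closes it, producing the (start, end) intervals
        # inserted into filer_volume_tasks below.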
        if metric == 'filer_volume_tasks':

            INFO('Updating filer_volume_tasks..')

            volume_set = ExtendedDict()

            tasks = []

            sis_tasks = ExtendedDict()
            reallocation_tasks = ExtendedDict()

            i = 0

            curs.execute("SELECT * FROM filer_volume_status WHERE timestamp >= now() - INTERVAL 7 DAY")

            rows = curs.fetchmany()

            while rows:

                for timestamp, volume_id, sis_status, reallocation_status, kb_used, kb_saved, pct_saved in rows:

                    i += 1

                    if i % 1000 == 0:
                        print i

                    if (volume_id not in volume_set) and (sis_status != 'Active') and (reallocation_status != 'Active'):

                        volume_set[volume_id] = volume_id

                    #
                    # Start SIS Task
                    #
                    if (sis_status == 'Active') and (volume_id in volume_set) and (volume_id not in sis_tasks):

                        sis_tasks[volume_id] = { 'start' : timestamp, 'end' : None }

                    #
                    # Start Reallocation Task
                    #
                    if (reallocation_status == 'Active') and (volume_id in volume_set) and (volume_id not in reallocation_tasks):

                        reallocation_tasks[volume_id] = { 'start' : timestamp, 'end' : None }

                    #
                    # End SIS Task
                    #
                    if (sis_status != 'Active') and (volume_id in sis_tasks):

                        sis_tasks[volume_id]['end'] = timestamp
                        tasks.append( {'volume_id' : volume_id, 'start' : sis_tasks[volume_id]['start'], 'end' : sis_tasks[volume_id]['end'], 'type' : 'SIS' })
                        del sis_tasks[volume_id]

                    #
                    # End Reallocation Task
                    #
                    if (reallocation_status != 'Active') and (volume_id in reallocation_tasks):

                        reallocation_tasks[volume_id]['end'] = timestamp
                        tasks.append( {'volume_id' : volume_id, 'start' : reallocation_tasks[volume_id]['start'], 'end' : reallocation_tasks[volume_id]['end'], 'type' : 'REALLOC' })
                        del reallocation_tasks[volume_id]

                rows = curs.fetchmany()

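            # NB: tasks still open at the end of the scan (no closing sample yet)
            # remain in sis_tasks / reallocation_tasks and are not inserted below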
            for task in tasks:

                volume_id = task['volume_id']
                start = task['start']
                end = task['end']
                Type = task['type'] # capitalized to avoid shadowing the type() builtin

                sql = "INSERT INTO filer_volume_tasks (volume_id, start, end, type) VALUES (%d, '%s', '%s', '%s')" % (volume_id, start.strftime('%Y-%m-%d %H:%M:%S'), end.strftime('%Y-%m-%d %H:%M:%S'), Type)
                print sql

                try:
                    curs.execute(sql)
                    conn.commit()
                except conn.IntegrityError:
                    # interval already recorded; skip the duplicate
                    pass

            INFO('Done updating filer_volume_tasks.')

        #
        # daily statistics
        #
        if metric == 'daily_statistics':

            INFO('BACKUP_DAY = %s' % (PREV_BACKUP_DAY_ISO))

            master_feeds = ['bpplclients', 'bppllist', 'bpstulist', 'bpdbjobs_most_columns', ]
            media_feeds = []

            environment_ids = lookup_table(curs, "SELECT name, environment_id FROM environments")

            for environment in ENVIRONMENTS:

                environment_name = environment.name

                INFO('')
                INFO('environment........: %s' % (environment_name))

                if environment_name not in environment_ids:
                    WARN('Environment %s does not exist in the database!' % (environment_name))
                    continue

                environment_id = environment_ids[environment_name]

                environment.load_feeds(master=master_feeds, media=media_feeds)
                environment.parse_clients()
                environment.parse_policies()
                environment.parse_stunits()
                environment.parse_jobs()

                windows = re.compile('^.*Windows.*$')
                linux = re.compile('^.*Linux.*$|^.*RedHat.*$')
                solaris = re.compile('^.*Solaris.*$')
                aix = re.compile('^.*AIX.*$')
                osf = re.compile('^.*OSF1.*$')
                bsd = re.compile('^.*BSD.*$')
                openvms = re.compile('^.*OpenVMS.*$')

                added_policies = one_column(curs, "SELECT name FROM policies WHERE environment_id = %d" % (environment_id))
                added_clients = one_column(curs, "SELECT name FROM clients WHERE environment_id = %d" % (environment_id))

                max_policy_id = one_value(curs, "SELECT MAX(policy_id) FROM policies")
                max_client_id = one_value(curs, "SELECT MAX(client_id) FROM clients")

                active_policy_ids = lookup_table(curs, "SELECT name, policy_id FROM policies WHERE environment_id = %d AND active = 'Y'" % (environment_id))
                global_policy_ids = lookup_table(curs, "SELECT CONCAT(name, LOWER(type)), policy_id FROM policies WHERE environment_id = %d" % (environment_id))
                client_ids = lookup_table(curs, "SELECT name, client_id FROM clients WHERE environment_id = %d" % (environment_id))
                retlevel_ids = lookup_table(curs, "SELECT retention_level, retlevel_id FROM retlevels WHERE environment_id = %d" % (environment_id))

                image_statistics = ExtendedDict()

                client_count = 0
                windows_count = 0
                linux_count = 0
                solaris_count = 0
                aix_count = 0
                osf_count = 0
                bsd_count = 0
                ndmp_count = 0
                openvms_count = 0
                other_count = 0

                seen_hw = ExtendedDict()
                seen_os = ExtendedDict()
                other_hw = ExtendedDict()
                other_os = ExtendedDict()

                bpplclients_stream = bpplclients.stream(environment.updates[environment.master].fopen('bpplclients_daily', version=1))

                for record in bpplclients_stream:

                    client = bpplclients.parse(record)

                    client_count += 1

                    seen_hw[client.hw] = True
                    seen_os[client.os] = True

                    if windows.match(client.os):
                        windows_count += 1
                    elif linux.match(client.os) or linux.match(client.hw):
                        linux_count += 1
                    elif solaris.match(client.os) or solaris.match(client.hw):
                        solaris_count += 1
                    elif client.hw == 'NDMP' or client.os == 'NDMP':
                        ndmp_count += 1
                    elif openvms.match(client.os) or openvms.match(client.hw):
                        openvms_count += 1
                    elif aix.match(client.os):
                        aix_count += 1
                    elif osf.match(client.os):
                        osf_count += 1
                    elif bsd.match(client.os):
                        bsd_count += 1
                    else:
                        other_count += 1
                        other_hw[client.hw] = True
                        other_os[client.os] = True

                INFO('client_count.......: %d' % (client_count))
                INFO('windows_count......: %d' % (windows_count))
                INFO('linux_count........: %d' % (linux_count))
                INFO('solaris_count......: %d' % (solaris_count))
                INFO('aix_count..........: %d' % (aix_count))
                INFO('osf_count..........: %d' % (osf_count))
                INFO('bsd_count..........: %d' % (bsd_count))
                INFO('ndmp_count.........: %d' % (ndmp_count))
                INFO('openvms_count......: %d' % (openvms_count))
                INFO('other_count........: %d' % (other_count))
                #INFO('seen_hw............: %s' % (', '.join(seen_hw.keys())))
                #INFO('seen_os............: %s' % (', '.join(seen_os.keys())))
                INFO('other_hw...........: %s' % (', '.join(other_hw.keys())))
                INFO('other_os...........: %s' % (', '.join(other_os.keys())))

                policy_count = 0

                bppllist_stream = bppllist.stream(environment.updates[environment.master].fopen('bppllist_daily', version=1))

                for record in bppllist_stream:

                    policy = bppllist.parse(record)

                    policy_count += 1

                    policy_name = policy.policy_name

                INFO('policy_count.......: %d' % (policy_count))

                stunit_ids = lookup_table(curs, "SELECT name, stunit_id FROM stunits WHERE environment_id = %d" % (environment_id))
                stunit_count = 0

                bpstulist_stream = bpstulist.stream(environment.updates[environment.master].fopen('bpstulist', version=1))

                for record in bpstulist_stream:

                    stunit = bpstulist.parse(record)

                    stunit_count += 1

                INFO('stunit_count.......: %d' % (stunit_count))

                #
                # process db size
                #
                netbackup_db_du_k_stream = du.stream(environment.updates[environment.master].fopen('netbackup_db_du_k', version=1))

                db_size = -1

                for record in netbackup_db_du_k_stream:

                    dir = du.parse(record, tz=environment.tz) # NB: shadows the dir() builtin; harmless in this loop

                    if dir.path == '/opt/openv/netbackup/db':
                        db_size = dir.kbytes

                INFO('db_size............: %s' % (pp_kbytes(db_size, format='%.2f %s')))

                #
                # process log size
                #
                netbackup_logs_du_k_stream = du.stream(environment.updates[environment.master].fopen('netbackup_logs_du_k', version=1))

                log_size = -1

                for record in netbackup_logs_du_k_stream:

                    dir = du.parse(record, tz=environment.tz)

                    if dir.path == '/opt/openv/netbackup/logs':
                        log_size = dir.kbytes

                INFO('log_size...........: %s' % (pp_kbytes(log_size, format='%.2f %s')))

                #
                # process jobs
                #
                job_count = 0

                bpdbjobs_stream = bpdbjobs.stream(environment.updates[environment.master].fopen('bpdbjobs_most_columns_daily', version=1))

                for job in environment.jobs: # counts the jobs parsed by parse_jobs() above; bpdbjobs_stream is opened but not consumed here

                    job_count += 1

                INFO('job_count..........: %d' % (job_count))

                #
                # process image list
                #
                bpimagelist_stream = bpimagelist.stream(environment.updates[environment.master].fopen('bpimagelist', version=1))

                image_count = 0
                copy_count = 0
                fragment_count = 0
                file_count = 0

                image_data = 0
                copy_data = 0
                fragment_data = 0
                full_data = 0
                incr_data = 0
                cinc_data = 0
                ubak_data = 0
                uarc_data = 0
                compressed_data = 0
                tir_data = 0

                for record in bpimagelist_stream:

                    image_count += 1

                    image = bpimagelist.parse(record, tz=environment.tz)

                    if image_count % 1000 == 0:
                        DEBUG('%d images processed..' % (image_count))

                    retlevel = image.retention_level_id

                    policy = image.policy
                    policy_name = image.policy
                    policy_type = image.policy_type
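                    # the lower()/replace('_','-') normalization below appears intended
                    # to make bpimagelist policy types match the CONCAT(name, LOWER(type))
                    # keys loaded into global_policy_ids (inference; not documented)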
                    policy_key = '%s%s' % (policy_name, policy_type.lower().replace('_','-'))

                    if policy_key not in global_policy_ids:

                        max_policy_id += 1
                        policy_id = max_policy_id

                        source = 'bpimagelist'
                        active = 'N'

                        sql = """
                        INSERT INTO policies (policy_id, environment_id, name, type, description, comment, source, commissioned, active, added, updated, seen)
                        VALUES (%d, %d, '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s')
                        """ % (policy_id, environment_id, policy_name, policy_type, '', '', source, BOT_ISO, active, TIMESTAMP_ISO, TIMESTAMP_ISO, TIMESTAMP_ISO)

                        print sql

                        curs.execute(sql)
                        conn.commit()

                        global_policy_ids[policy_key] = policy_id

                    client = image.client
                    client_name = image.client

                    if client not in client_ids:

                        max_client_id += 1
                        client_id = max_client_id

                        source = 'bpimagelist'
                        active = 'N'

                        sql = """
                        INSERT INTO clients (client_id, environment_id, name, description, comment, source, commissioned, active, added, updated, seen)
                        VALUES (%d, %d, '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s')
                        """ % (client_id, environment_id, client_name, '', '', source, BOT_ISO, active, TIMESTAMP_ISO, TIMESTAMP_ISO, TIMESTAMP_ISO)

                        print sql

                        curs.execute(sql)
                        conn.commit()

                        client_ids[client] = client_id

                    if policy_key not in image_statistics:

                        image_statistics[policy_key] = ExtendedDict()

                    if client not in image_statistics[policy_key]:

                        image_statistics[policy_key][client] = ExtendedDict()

                    retlevel = image.retention_level_id

                    if retlevel not in image_statistics[policy_key][client]:

                        image_statistics[policy_key][client][retlevel] = ExtendedDict()

                        image_statistics[policy_key][client][retlevel]['image_count'] = 0
                        image_statistics[policy_key][client][retlevel]['copy_count'] = 0
                        image_statistics[policy_key][client][retlevel]['fragment_count'] = 0
                        image_statistics[policy_key][client][retlevel]['file_count'] = 0
                        image_statistics[policy_key][client][retlevel]['image_data'] = 0
                        image_statistics[policy_key][client][retlevel]['copy_data'] = 0
                        image_statistics[policy_key][client][retlevel]['fragment_data'] = 0
                        image_statistics[policy_key][client][retlevel]['full_data'] = 0
                        image_statistics[policy_key][client][retlevel]['incr_data'] = 0
                        image_statistics[policy_key][client][retlevel]['cinc_data'] = 0
                        image_statistics[policy_key][client][retlevel]['ubak_data'] = 0
                        image_statistics[policy_key][client][retlevel]['uarc_data'] = 0
                        image_statistics[policy_key][client][retlevel]['compressed_data'] = 0

                    schedule_type = image.schedule_type
                    number_of_copies = image.number_of_copies
                    number_of_fragments = image.number_of_fragments
                    number_of_files = image.number_of_files

                    kilobytes = image.kilobytes

                    copy_count += number_of_copies
                    fragment_count += number_of_fragments
                    file_count += number_of_files
                    image_data += kilobytes
                    copy_data += kilobytes * number_of_copies

                    image_statistics[policy_key][client][retlevel]['image_count'] += 1
                    image_statistics[policy_key][client][retlevel]['copy_count'] += number_of_copies
                    image_statistics[policy_key][client][retlevel]['fragment_count'] += number_of_fragments
                    image_statistics[policy_key][client][retlevel]['file_count'] += number_of_files
                    image_statistics[policy_key][client][retlevel]['image_data'] += kilobytes
                    image_statistics[policy_key][client][retlevel]['copy_data'] += kilobytes * number_of_copies

                    copy_retlevels = ExtendedDict()

                    for copy in image.copies:

                        copy_number = copy.copy_number

                        for fragment in copy.fragments:

                            fragment_data += fragment.kilobytes

                            if fragment.tir:
                                tir_data += fragment.kilobytes
                                continue

                            if hasattr(fragment, 'retention_lvl_id'):

                                copy_retlevels[copy_number] = fragment.retention_lvl_id

                            if hasattr(fragment, 'expiration_time'):

                                expiration_time = fragment.expiration_time

                                if type(expiration_time) is datetime.datetime:

                                    # placeholder: expiration-time handling was never implemented
                                    pass

                            # assumes a non-TIR fragment carrying retention_lvl_id has been
                            # seen for this copy by now (KeyError otherwise)
                            copy_retlevel = copy_retlevels[copy_number]

                            if copy_retlevel not in image_statistics[policy_key][client]:

                                image_statistics[policy_key][client][copy_retlevel] = ExtendedDict()

                                image_statistics[policy_key][client][copy_retlevel]['image_count'] = 0
                                image_statistics[policy_key][client][copy_retlevel]['copy_count'] = 0
                                image_statistics[policy_key][client][copy_retlevel]['fragment_count'] = 0
                                image_statistics[policy_key][client][copy_retlevel]['file_count'] = 0
                                image_statistics[policy_key][client][copy_retlevel]['image_data'] = 0
                                image_statistics[policy_key][client][copy_retlevel]['copy_data'] = 0
                                image_statistics[policy_key][client][copy_retlevel]['fragment_data'] = 0
                                image_statistics[policy_key][client][copy_retlevel]['full_data'] = 0
                                image_statistics[policy_key][client][copy_retlevel]['incr_data'] = 0
                                image_statistics[policy_key][client][copy_retlevel]['cinc_data'] = 0
                                image_statistics[policy_key][client][copy_retlevel]['ubak_data'] = 0
                                image_statistics[policy_key][client][copy_retlevel]['uarc_data'] = 0
                                image_statistics[policy_key][client][copy_retlevel]['compressed_data'] = 0

                            image_statistics[policy_key][client][copy_retlevel]['fragment_data'] += fragment.kilobytes

                            if schedule_type == 'FULL':
                                image_statistics[policy_key][client][copy_retlevel]['full_data'] += fragment.kilobytes
                            elif schedule_type == 'INCR':
                                image_statistics[policy_key][client][copy_retlevel]['incr_data'] += fragment.kilobytes
                            elif schedule_type == 'CINC':
                                image_statistics[policy_key][client][copy_retlevel]['cinc_data'] += fragment.kilobytes
                            elif schedule_type == 'UBAK':
                                image_statistics[policy_key][client][copy_retlevel]['ubak_data'] += fragment.kilobytes
                            elif schedule_type == 'UARC':
                                image_statistics[policy_key][client][copy_retlevel]['uarc_data'] += fragment.kilobytes

                            if image.compressed == 'yes':
                                image_statistics[policy_key][client][copy_retlevel]['compressed_data'] += fragment.kilobytes

                    if schedule_type == 'FULL':
                        full_data += kilobytes * number_of_copies
                    elif schedule_type == 'INCR':
                        incr_data += kilobytes * number_of_copies
                    elif schedule_type == 'CINC':
                        cinc_data += kilobytes * number_of_copies
                    elif schedule_type == 'UBAK':
                        ubak_data += kilobytes * number_of_copies
                    elif schedule_type == 'UARC':
                        uarc_data += kilobytes * number_of_copies

                    if image.compressed == 'yes':
                        compressed_data += kilobytes * number_of_copies

                environment.updates[environment.master].fclose('bpimagelist')

                INFO('image_count........: %d' % (image_count))
                INFO('copy_count.........: %d' % (copy_count))
                INFO('fragment_count.....: %d' % (fragment_count))
                INFO('file_count.........: %d' % (file_count))
                INFO('image_data.........: %s' % (pp_kbytes(image_data, format='%.2f %s')))
                INFO('copy_data..........: %s' % (pp_kbytes(copy_data, format='%.2f %s')))
                INFO('fragment_data......: %s' % (pp_kbytes(fragment_data, format='%.2f %s')))
                INFO('full_data..........: %s' % (pp_kbytes(full_data, format='%.2f %s')))
                INFO('incr_data..........: %s' % (pp_kbytes(incr_data, format='%.2f %s')))
                INFO('cinc_data..........: %s' % (pp_kbytes(cinc_data, format='%.2f %s')))
                INFO('ubak_data..........: %s' % (pp_kbytes(ubak_data, format='%.2f %s')))
                INFO('uarc_data..........: %s' % (pp_kbytes(uarc_data, format='%.2f %s')))
                INFO('compressed_data....: %s' % (pp_kbytes(compressed_data, format='%.2f %s')))
                INFO('tir_data...........: %s' % (pp_kbytes(tir_data, format='%.2f %s')))

                sql = "DELETE FROM environment_statistics_daily WHERE environment_id = %d AND day = '%s'" % (environment_id, PREV_BACKUP_DAY_ISO)

                print sql

                curs.execute(sql)

                sql = """
                INSERT INTO environment_statistics_daily (
                    day,
                    environment_id,
                    client_count,
                    windows_count,
                    linux_count,
                    solaris_count,
                    aix_count,
                    osf_count,
                    bsd_count,
                    ndmp_count,
                    openvms_count,
                    other_count,
                    policy_count,
                    stunit_count,
                    db_size,
                    log_size,
                    image_count,
                    copy_count,
                    fragment_count,
                    file_count,
                    image_data,
                    copy_data,
                    fragment_data,
                    full_data,
                    incr_data,
                    cinc_data,
                    ubak_data,
                    uarc_data,
                    compressed_data,
                    tir_data,
                    added
                ) VALUES (
                    '%s',
                    %d,
                    %d,
                    %d,
                    %d,
                    %d,
                    %d,
                    %d,
                    %d,
                    %d,
                    %d,
                    %d,
                    %d,
                    %d,
                    %d,
                    %d,
                    %d,
                    %d,
                    %d,
                    %d,
                    %d,
                    %d,
                    %d,
                    %d,
                    %d,
                    %d,
                    %d,
                    %d,
                    %d,
                    %d,
                    '%s'
                )
                """ % (
                    PREV_BACKUP_DAY_ISO,
                    environment_id,
                    client_count,
                    windows_count,
                    linux_count,
                    solaris_count,
                    aix_count,
                    osf_count,
                    bsd_count,
                    ndmp_count,
                    openvms_count,
                    other_count,
                    policy_count,
                    stunit_count,
                    db_size,
                    log_size,
                    image_count,
                    copy_count,
                    fragment_count,
                    file_count,
                    image_data,
                    copy_data,
                    fragment_data,
                    full_data,
                    incr_data,
                    cinc_data,
                    ubak_data,
                    uarc_data,
                    compressed_data,
                    tir_data,
                    datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
                )

                #print sql

                curs.execute(sql)
                conn.commit()

                sql = "DELETE FROM image_statistics_daily WHERE environment_id = %d AND day = '%s'" % (environment_id, PREV_BACKUP_DAY_ISO)

                print sql

                curs.execute(sql)

                for policy_key in image_statistics.keys():

                    policy_id = global_policy_ids[policy_key]

                    for client in image_statistics[policy_key].keys():

                        client_id = client_ids[client]

                        for retlevel in image_statistics[policy_key][client].keys():

                            retlevel_id = retlevel_ids[retlevel]

                            sql = """
                            INSERT INTO image_statistics_daily (
                                day,
                                environment_id,
                                policy_id,
                                client_id,
                                retlevel_id,
                                image_count,
                                copy_count,
                                fragment_count,
                                file_count,
                                image_data,
                                copy_data,
                                fragment_data,
                                full_data,
                                incr_data,
                                cinc_data,
                                ubak_data,
                                uarc_data,
                                compressed_data,
                                added
                            ) VALUES (
                                '%s',
                                %d,
                                %d,
                                %d,
                                %d,
                                %d,
                                %d,
                                %d,
                                %d,
                                %d,
                                %d,
                                %d,
                                %d,
                                %d,
                                %d,
                                %d,
                                %d,
                                %d,
                                '%s'
                            )
                            """ % (
                                PREV_BACKUP_DAY_ISO,
                                environment_id,
                                policy_id,
                                client_id,
                                retlevel_id,
                                image_statistics[policy_key][client][retlevel]['image_count'],
                                image_statistics[policy_key][client][retlevel]['copy_count'],
                                image_statistics[policy_key][client][retlevel]['fragment_count'],
                                image_statistics[policy_key][client][retlevel]['file_count'],
                                image_statistics[policy_key][client][retlevel]['image_data'],
                                image_statistics[policy_key][client][retlevel]['copy_data'],
                                image_statistics[policy_key][client][retlevel]['fragment_data'],
                                image_statistics[policy_key][client][retlevel]['full_data'],
                                image_statistics[policy_key][client][retlevel]['incr_data'],
                                image_statistics[policy_key][client][retlevel]['cinc_data'],
                                image_statistics[policy_key][client][retlevel]['ubak_data'],
                                image_statistics[policy_key][client][retlevel]['uarc_data'],
                                image_statistics[policy_key][client][retlevel]['compressed_data'],
                                datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
                            )

                            #print sql

                            curs.execute(sql)

                conn.commit()

        #
        # unknown metric
        #
        # The metric blocks above are independent if statements, so a bare
        # else here would only pair with the daily_statistics check; an
        # explicit membership test is needed instead.
        #
        if metric not in ('policies', 'clients', 'stunits', 'retlevels', 'filers', 'filer_volume_tasks', 'daily_statistics'):

            pass

            #WARN('Unknown metric %s!' % (metric))