Various adjustments
[pstop.git] / performance_schema / file_summary_by_instance / file_summary_by_instance.go
1 // performance_schema - library routines for pstop.
2 //
3 // This file contains the library routines for managing the
4 // file_summary_by_instance table.
5 package file_summary_by_instance
6
7 import (
8         "database/sql"
9         "time"
10
11         "github.com/sjmudd/pstop/lib"
12         ps "github.com/sjmudd/pstop/performance_schema"
13 )
14
15 /*
16 CREATE TABLE `file_summary_by_instance` (
17   `FILE_NAME` varchar(512) NOT NULL,
18   `EVENT_NAME` varchar(128) NOT NULL,                           // not collected
19   `OBJECT_INSTANCE_BEGIN` bigint(20) unsigned NOT NULL,         // not collected
20   `COUNT_STAR` bigint(20) unsigned NOT NULL,
21   `SUM_TIMER_WAIT` bigint(20) unsigned NOT NULL,
22   `MIN_TIMER_WAIT` bigint(20) unsigned NOT NULL,
23   `AVG_TIMER_WAIT` bigint(20) unsigned NOT NULL,
24   `MAX_TIMER_WAIT` bigint(20) unsigned NOT NULL,
25   `COUNT_READ` bigint(20) unsigned NOT NULL,
26   `SUM_TIMER_READ` bigint(20) unsigned NOT NULL,
27   `MIN_TIMER_READ` bigint(20) unsigned NOT NULL,
28   `AVG_TIMER_READ` bigint(20) unsigned NOT NULL,
29   `MAX_TIMER_READ` bigint(20) unsigned NOT NULL,
30   `SUM_NUMBER_OF_BYTES_READ` bigint(20) NOT NULL,
31   `COUNT_WRITE` bigint(20) unsigned NOT NULL,
32   `SUM_TIMER_WRITE` bigint(20) unsigned NOT NULL,
33   `MIN_TIMER_WRITE` bigint(20) unsigned NOT NULL,
34   `AVG_TIMER_WRITE` bigint(20) unsigned NOT NULL,
35   `MAX_TIMER_WRITE` bigint(20) unsigned NOT NULL,
36   `SUM_NUMBER_OF_BYTES_WRITE` bigint(20) NOT NULL,
37   `COUNT_MISC` bigint(20) unsigned NOT NULL,
38   `SUM_TIMER_MISC` bigint(20) unsigned NOT NULL,
39   `MIN_TIMER_MISC` bigint(20) unsigned NOT NULL,
40   `AVG_TIMER_MISC` bigint(20) unsigned NOT NULL,
41   `MAX_TIMER_MISC` bigint(20) unsigned NOT NULL
42 ) ENGINE=PERFORMANCE_SCHEMA DEFAULT CHARSET=utf8
43 1 row in set (0.00 sec)
44
45 */
46
47 // a table of rows
// File_summary_by_instance is a table of rows: it tracks the initial,
// current and displayed (results) statistics for the
// performance_schema.file_summary_by_instance table.
type File_summary_by_instance struct {
	ps.RelativeStats                                // whether to show values relative to the initial snapshot
	ps.InitialTime                                  // when the reference values were (re)set
	initial          file_summary_by_instance_rows  // snapshot taken at start or on SyncReferenceValues()
	current          file_summary_by_instance_rows  // most recently collected values
	results          file_summary_by_instance_rows  // current values, made relative to initial if requested
	totals           file_summary_by_instance_row   // totals over results, for the totals row
	global_variables map[string]string              // server variables (e.g. datadir, relay_log) used when merging file names
}
57
58 // reset the statistics to current values
59 func (t *File_summary_by_instance) SyncReferenceValues() {
60         t.SetNow()
61         t.initial = make(file_summary_by_instance_rows, len(t.current))
62         copy(t.initial, t.current)
63
64         t.results = make(file_summary_by_instance_rows, len(t.current))
65         copy(t.results, t.current)
66
67         if t.WantRelativeStats() {
68                 t.results.subtract(t.initial) // should be 0 if relative
69         }
70
71         t.results.sort()
72         t.totals = t.results.totals()
73 }
74
75 // Collect data from the db, then merge it in.
76 func (t *File_summary_by_instance) Collect(dbh *sql.DB) {
77         start := time.Now()
78         // UPDATE current from db handle
79         t.current = merge_by_table_name(select_fsbi_rows(dbh), t.global_variables)
80
81         // copy in initial data if it was not there
82         if len(t.initial) == 0 && len(t.current) > 0 {
83                 t.initial = make(file_summary_by_instance_rows, len(t.current))
84                 copy(t.initial, t.current)
85         }
86
87         // check for reload initial characteristics
88         if t.initial.needs_refresh(t.current) {
89                 t.initial = make(file_summary_by_instance_rows, len(t.current))
90                 copy(t.initial, t.current)
91         }
92
93         // update results to current value
94         t.results = make(file_summary_by_instance_rows, len(t.current))
95         copy(t.results, t.current)
96
97         // make relative if need be
98         if t.WantRelativeStats() {
99                 t.results.subtract(t.initial)
100         }
101
102         // sort the results
103         t.results.sort()
104
105         // setup the totals
106         t.totals = t.results.totals()
107         lib.Logger.Println("File_summary_by_instance.Collect() took:", time.Duration(time.Since(start)).String())
108 }
109
110 // return the headings for a table
111 func (t File_summary_by_instance) Headings() string {
112         var r file_summary_by_instance_row
113
114         return r.headings()
115 }
116
117 // return the rows we need for displaying
118 func (t File_summary_by_instance) RowContent(max_rows int) []string {
119         rows := make([]string, 0, max_rows)
120
121         for i := range t.results {
122                 if i < max_rows {
123                         rows = append(rows, t.results[i].row_content(t.totals))
124                 }
125         }
126
127         return rows
128 }
129
130 // return all the totals
131 func (t File_summary_by_instance) TotalRowContent() string {
132         return t.totals.row_content(t.totals)
133 }
134
135 // return an empty string of data (for filling in)
136 func (t File_summary_by_instance) EmptyRowContent() string {
137         var emtpy file_summary_by_instance_row
138         return emtpy.row_content(emtpy)
139 }
140
141 func (t File_summary_by_instance) Description() string {
142         return "File I/O by filename (file_summary_by_instance)"
143 }
144
145 // create a new structure and include various variable values:
146 // - datadir, relay_log
147 // There's no checking that these are actually provided!
148 func NewFileSummaryByInstance(global_variables map[string]string) *File_summary_by_instance {
149         n := new(File_summary_by_instance)
150
151         n.global_variables = global_variables
152
153         return n
154 }