// This file contains the library routines for managing the
// file_summary_by_instance table.
package file_summary_by_instance

import (
        "database/sql"
        "fmt"
        "log"
        "regexp"
        "sort"
        "time"

        "github.com/sjmudd/pstop/lib"
)

/*
CREATE TABLE `file_summary_by_instance` (
  `FILE_NAME` varchar(512) NOT NULL,
  `EVENT_NAME` varchar(128) NOT NULL,                           // not collected
  `OBJECT_INSTANCE_BEGIN` bigint(20) unsigned NOT NULL,         // not collected
  `COUNT_STAR` bigint(20) unsigned NOT NULL,
  `SUM_TIMER_WAIT` bigint(20) unsigned NOT NULL,
  `MIN_TIMER_WAIT` bigint(20) unsigned NOT NULL,
  `AVG_TIMER_WAIT` bigint(20) unsigned NOT NULL,
  `MAX_TIMER_WAIT` bigint(20) unsigned NOT NULL,
  `COUNT_READ` bigint(20) unsigned NOT NULL,
  `SUM_TIMER_READ` bigint(20) unsigned NOT NULL,
  `MIN_TIMER_READ` bigint(20) unsigned NOT NULL,
  `AVG_TIMER_READ` bigint(20) unsigned NOT NULL,
  `MAX_TIMER_READ` bigint(20) unsigned NOT NULL,
  `SUM_NUMBER_OF_BYTES_READ` bigint(20) NOT NULL,
  `COUNT_WRITE` bigint(20) unsigned NOT NULL,
  `SUM_TIMER_WRITE` bigint(20) unsigned NOT NULL,
  `MIN_TIMER_WRITE` bigint(20) unsigned NOT NULL,
  `AVG_TIMER_WRITE` bigint(20) unsigned NOT NULL,
  `MAX_TIMER_WRITE` bigint(20) unsigned NOT NULL,
  `SUM_NUMBER_OF_BYTES_WRITE` bigint(20) NOT NULL,
  `COUNT_MISC` bigint(20) unsigned NOT NULL,
  `SUM_TIMER_MISC` bigint(20) unsigned NOT NULL,
  `MIN_TIMER_MISC` bigint(20) unsigned NOT NULL,
  `AVG_TIMER_MISC` bigint(20) unsigned NOT NULL,
  `MAX_TIMER_MISC` bigint(20) unsigned NOT NULL
) ENGINE=PERFORMANCE_SCHEMA DEFAULT CHARSET=utf8
1 row in set (0.00 sec)
*/

type file_summary_by_instance_row struct {
        FILE_NAME string

        COUNT_STAR  uint64
        COUNT_READ  uint64
        COUNT_WRITE uint64
        COUNT_MISC  uint64

        SUM_TIMER_WAIT  uint64
        SUM_TIMER_READ  uint64
        SUM_TIMER_WRITE uint64
        SUM_TIMER_MISC  uint64

        SUM_NUMBER_OF_BYTES_READ  uint64
        SUM_NUMBER_OF_BYTES_WRITE uint64
}

// represents a table or set of rows
type file_summary_by_instance_rows []file_summary_by_instance_row

// Return the name using the FILE_NAME attribute.
func (r *file_summary_by_instance_row) name() string {
        return r.FILE_NAME
}

// Return a formatted pretty name for the row.
func (r *file_summary_by_instance_row) pretty_name() string {
        s := r.name()
        if len(s) > 30 {
                s = s[:29]
        }
        return fmt.Sprintf("%-30s", s)
}
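
// Hedged example (not part of the original pstop source): pretty_name()
// truncates names longer than 30 characters to 29 and left-justifies the
// result in a 30 character field so the column lines up under the headings.
func example_pretty_name() {
        r := file_summary_by_instance_row{FILE_NAME: "mydb.a_table_with_a_very_long_name"}
        fmt.Printf("[%s]\n", r.pretty_name()) // prints "[mydb.a_table_with_a_very_long ]"
}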

func (r *file_summary_by_instance_row) headings() string {
        return fmt.Sprintf("%-30s %10s %6s|%6s %6s %6s|%8s %8s|%8s %6s %6s %6s",
                "Table Name",
                "Latency",
                "%",
                "Read",
                "Write",
                "Misc",
                "Rd bytes",
                "Wr bytes",
                "Ops",
                "R Ops",
                "W Ops",
                "M Ops")
}

// Generate a printable line for this row. The % column is relative to the
// supplied totals; the Read/Write/Misc and Ops breakdowns are relative to
// the row's own totals.
func (row *file_summary_by_instance_row) row_content(totals file_summary_by_instance_row) string {
        var name string

        // If COUNT_STAR == 0 we assume there is no data at all, in which
        // case we don't want to show the name either.
        if row.COUNT_STAR == 0 {
                name = ""
        } else {
                name = row.pretty_name()
        }

        return fmt.Sprintf("%-30s %10s %6s|%6s %6s %6s|%8s %8s|%8s %6s %6s %6s",
                name,
                lib.FormatTime(row.SUM_TIMER_WAIT),
                lib.FormatPct(lib.MyDivide(row.SUM_TIMER_WAIT, totals.SUM_TIMER_WAIT)),
                lib.FormatPct(lib.MyDivide(row.SUM_TIMER_READ, row.SUM_TIMER_WAIT)),
                lib.FormatPct(lib.MyDivide(row.SUM_TIMER_WRITE, row.SUM_TIMER_WAIT)),
                lib.FormatPct(lib.MyDivide(row.SUM_TIMER_MISC, row.SUM_TIMER_WAIT)),
                lib.FormatAmount(row.SUM_NUMBER_OF_BYTES_READ),
                lib.FormatAmount(row.SUM_NUMBER_OF_BYTES_WRITE),
                lib.FormatAmount(row.COUNT_STAR),
                lib.FormatPct(lib.MyDivide(row.COUNT_READ, row.COUNT_STAR)),
                lib.FormatPct(lib.MyDivide(row.COUNT_WRITE, row.COUNT_STAR)),
                lib.FormatPct(lib.MyDivide(row.COUNT_MISC, row.COUNT_STAR)))
}

// add the values of other to this row (used when merging rows and building totals)
func (this *file_summary_by_instance_row) add(other file_summary_by_instance_row) {
        this.COUNT_STAR += other.COUNT_STAR
        this.COUNT_READ += other.COUNT_READ
        this.COUNT_WRITE += other.COUNT_WRITE
        this.COUNT_MISC += other.COUNT_MISC

        this.SUM_TIMER_WAIT += other.SUM_TIMER_WAIT
        this.SUM_TIMER_READ += other.SUM_TIMER_READ
        this.SUM_TIMER_WRITE += other.SUM_TIMER_WRITE
        this.SUM_TIMER_MISC += other.SUM_TIMER_MISC

        this.SUM_NUMBER_OF_BYTES_READ += other.SUM_NUMBER_OF_BYTES_READ
        this.SUM_NUMBER_OF_BYTES_WRITE += other.SUM_NUMBER_OF_BYTES_WRITE
}

// subtract the values of other from this row (used to make values relative to an earlier snapshot)
func (this *file_summary_by_instance_row) subtract(other file_summary_by_instance_row) {
        this.COUNT_STAR -= other.COUNT_STAR
        this.COUNT_READ -= other.COUNT_READ
        this.COUNT_WRITE -= other.COUNT_WRITE
        this.COUNT_MISC -= other.COUNT_MISC

        this.SUM_TIMER_WAIT -= other.SUM_TIMER_WAIT
        this.SUM_TIMER_READ -= other.SUM_TIMER_READ
        this.SUM_TIMER_WRITE -= other.SUM_TIMER_WRITE
        this.SUM_TIMER_MISC -= other.SUM_TIMER_MISC

        this.SUM_NUMBER_OF_BYTES_READ -= other.SUM_NUMBER_OF_BYTES_READ
        this.SUM_NUMBER_OF_BYTES_WRITE -= other.SUM_NUMBER_OF_BYTES_WRITE
}

// return the totals of a slice of rows
func (t file_summary_by_instance_rows) totals() file_summary_by_instance_row {
        var totals file_summary_by_instance_row
        totals.FILE_NAME = "TOTALS"

        for i := range t {
                totals.add(t[i])
        }

        return totals
}
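
// Hedged sketch (not in the original source): add() and subtract() work on
// individual rows, while totals() uses add() to sum a whole slice of rows.
func example_row_arithmetic() {
        var initial, current file_summary_by_instance_row
        initial.COUNT_STAR, initial.SUM_TIMER_WAIT = 10, 1000
        current.COUNT_STAR, current.SUM_TIMER_WAIT = 25, 4000

        current.subtract(initial)                               // values accumulated since the snapshot
        fmt.Println(current.COUNT_STAR, current.SUM_TIMER_WAIT) // prints: 15 3000

        t := file_summary_by_instance_rows{current, initial}
        fmt.Println(t.totals().SUM_TIMER_WAIT) // prints: 4000 (3000 + 1000)
}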

// clean up the given path, removing redundant components, and return the clean path
func cleanup_path(path string) string {
        //     foo/../bar --> bar       perl: $new =~ s{[^/]+/\.\./}{};
        //     foo/./bar  --> foo/bar   perl: $new =~ s{/\./}{/};
        //     //         --> /         perl: $new =~ s{//}{/};
        const (
                double_slash_re        = `//`
                slash_dot_slash_re     = `/\./`
                slash_dot_dot_slash_re = `[^/]+/\.\./`
        )
        for {
                orig_path := path

                r := regexp.MustCompile(double_slash_re)
                path = r.ReplaceAllString(path, "/")
                r = regexp.MustCompile(slash_dot_slash_re)
                path = r.ReplaceAllString(path, "/")
                r = regexp.MustCompile(slash_dot_dot_slash_re)
                path = r.ReplaceAllString(path, "")
                if orig_path == path { // no change so give up
                        break
                }
        }

        return path
}
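
// Hedged example (not part of the original source), assuming the replacement
// strings above: cleanup_path() collapses all three redundant forms.
func example_cleanup_path() {
        fmt.Println(cleanup_path("/data//mysql/./db1/../db2/t.ibd")) // prints: /data/mysql/db2/t.ibd
}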

// From the original FILE_NAME we want to generate a simpler name to use.
// This simpler name may also merge several different filenames into one.
func (t file_summary_by_instance_row) simple_name(global_variables map[string]string) string {
        const (
                auto_cnf_re  = `/auto\.cnf$`
                binlog_re    = `/binlog\.(\d{6}|index)$`
                charset_re   = `/share/charsets/Index\.xml$`
                db_opt_re    = `/db\.opt$`
                error_msg_re = `/share/[^/]+/errmsg\.sys$`
                ibdata_re    = `/ibdata\d+$`
                redo_log_re  = `/ib_logfile\d+$`
                pid_file_re  = `/[^/]+\.pid$`
                //              relay_log_re  = `/mysql-relay-bin.(\d{6}|index)$`
                relative_path_re = `^\.\./`
                current_dir_re   = `^\./`
                slowlog_re       = `/slowlog$`
                table_file_re    = `/([^/]+)/([^/]+)\.(frm|ibd|MYD|MYI|CSM|CSV|par)$`
                temp_table_re    = `#sql-[0-9_]+`
                part_table_re    = `(.+)#P#p\d+`
        )

        path := t.FILE_NAME

        // this should probably be ordered from most expected regexp to least
        re := regexp.MustCompile(table_file_re)
        if m1 := re.FindStringSubmatch(path); m1 != nil {
                // we may match temporary tables so check for them
                re2 := regexp.MustCompile(temp_table_re)
                if m2 := re2.FindStringSubmatch(m1[2]); m2 != nil {
                        return "<temp_table>"
                }

                // we may match partitioned tables so check for them
                re3 := regexp.MustCompile(part_table_re)
                if m3 := re3.FindStringSubmatch(m1[2]); m3 != nil {
                        return m1[1] + "." + m3[1] // <schema>.<table> (less partition info)
                }

                return m1[1] + "." + m1[2] // <schema>.<table>
        }
        if regexp.MustCompile(ibdata_re).MatchString(path) {
                return "<ibdata>"
        }
        if regexp.MustCompile(redo_log_re).MatchString(path) {
                return "<redo_log>"
        }
        if regexp.MustCompile(binlog_re).MatchString(path) {
                return "<binlog>"
        }
        if regexp.MustCompile(db_opt_re).MatchString(path) {
                return "<db_opt>"
        }
        if regexp.MustCompile(slowlog_re).MatchString(path) {
                return "<slow_log>"
        }
        if regexp.MustCompile(auto_cnf_re).MatchString(path) {
                return "<auto_cnf>"
        }
        // Relay logs are a bit complicated. A full path is easy to identify,
        // but a relative path needs $datadir prepending, and a path such as
        // ../blah/somewhere/path needs cleaning up so that it matches too.
        if len(global_variables["relay_log"]) > 0 {
                relay_log := global_variables["relay_log"]
                if relay_log[0] != '/' { // relative path
                        relay_log = cleanup_path(global_variables["datadir"] + relay_log) // datadir always ends in /
                }
                relay_log_re := relay_log + `\.(\d{6}|index)$`
                if regexp.MustCompile(relay_log_re).MatchString(path) {
                        return "<relay_log>"
                }
        }
        if regexp.MustCompile(pid_file_re).MatchString(path) {
                return "<pid_file>"
        }
        if regexp.MustCompile(error_msg_re).MatchString(path) {
                return "<errmsg>"
        }
        if regexp.MustCompile(charset_re).MatchString(path) {
                return "<charset>"
        }
        return path
}
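
// Hedged examples (not part of the original source) of the simplified names
// the regexps above are expected to produce; the paths themselves are made
// up for illustration.
func example_simple_names(global_variables map[string]string) {
        examples := []string{
                "/data/mysql/mydb/mytable.ibd",      // -> "mydb.mytable"
                "/data/mysql/mydb/mytable#P#p0.ibd", // -> "mydb.mytable" (partition info stripped)
                "/data/mysql/mydb/#sql-1234_5.frm",  // -> "<temp_table>"
                "/data/mysql/ibdata1",               // -> "<ibdata>"
                "/data/mysql/ib_logfile0",           // -> "<redo_log>"
                "/data/mysql/binlog.000123",         // -> "<binlog>"
        }
        for _, file_name := range examples {
                r := file_summary_by_instance_row{FILE_NAME: file_name}
                fmt.Println(file_name, "->", r.simple_name(global_variables))
        }
}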

// Convert the imported rows into a merged set of rows: all entries whose
// simplified name (see simple_name()) is the same are combined by adding
// their values. Rows with COUNT_STAR == 0 are dropped.
func merge_by_table_name(orig file_summary_by_instance_rows, global_variables map[string]string) file_summary_by_instance_rows {
        start := time.Now()
        t := make(file_summary_by_instance_rows, 0, len(orig))

        m := make(map[string]file_summary_by_instance_row)

        // iterate over source table
        for i := range orig {
                var file_name string
                var new_row file_summary_by_instance_row
                orig_row := orig[i]

                if orig_row.COUNT_STAR > 0 {
                        file_name = orig_row.simple_name(global_variables)

                        // check if we have an entry in the map
                        if _, found := m[file_name]; found {
                                new_row = m[file_name]
                        } else {
                                new_row.FILE_NAME = file_name
                        }
                        new_row.add(orig_row)
                        m[file_name] = new_row // update the map with the new value
                }
        }

        // add the map contents back into the table
        for _, row := range m {
                t = append(t, row)
        }

        lib.Logger.Println("merge_by_table_name() took:", time.Duration(time.Since(start)).String())
        return t
}

// Select the raw data from the database into file_summary_by_instance_rows.
// Filtering out empty rows, merging rows with the same simplified name and
// replacing FILE_NAME with a more descriptive value are done afterwards by
// merge_by_table_name().
func select_fsbi_rows(dbh *sql.DB) file_summary_by_instance_rows {
        var t file_summary_by_instance_rows
        start := time.Now()

        query := "SELECT FILE_NAME, COUNT_STAR, SUM_TIMER_WAIT, COUNT_READ, SUM_TIMER_READ, SUM_NUMBER_OF_BYTES_READ, COUNT_WRITE, SUM_TIMER_WRITE, SUM_NUMBER_OF_BYTES_WRITE, COUNT_MISC, SUM_TIMER_MISC FROM file_summary_by_instance"

        rows, err := dbh.Query(query)
        if err != nil {
                log.Fatal(err)
        }
        defer rows.Close()

        for rows.Next() {
                var r file_summary_by_instance_row

                if err := rows.Scan(&r.FILE_NAME, &r.COUNT_STAR, &r.SUM_TIMER_WAIT, &r.COUNT_READ, &r.SUM_TIMER_READ, &r.SUM_NUMBER_OF_BYTES_READ, &r.COUNT_WRITE, &r.SUM_TIMER_WRITE, &r.SUM_NUMBER_OF_BYTES_WRITE, &r.COUNT_MISC, &r.SUM_TIMER_MISC); err != nil {
                        log.Fatal(err)
                }
                t = append(t, r)
        }
        if err := rows.Err(); err != nil {
                log.Fatal(err)
        }
        lib.Logger.Println("select_fsbi_rows() took:", time.Duration(time.Since(start)).String())

        return t
}

// remove the initial values from those rows where there's a match
// - if we find a row we can't match, ignore it
func (this *file_summary_by_instance_rows) subtract(initial file_summary_by_instance_rows) {
        i_by_name := make(map[string]int)

        // build an index of the initial rows by name
        for i := range initial {
                i_by_name[initial[i].name()] = i
        }

        for i := range *this {
                if _, ok := i_by_name[(*this)[i].name()]; ok {
                        initial_i := i_by_name[(*this)[i].name()]
                        (*this)[i].subtract(initial[initial_i])
                }
        }
}

// Implement sort.Interface: order by SUM_TIMER_WAIT descending, breaking
// ties by FILE_NAME ascending.
func (t file_summary_by_instance_rows) Len() int      { return len(t) }
func (t file_summary_by_instance_rows) Swap(i, j int) { t[i], t[j] = t[j], t[i] }
func (t file_summary_by_instance_rows) Less(i, j int) bool {
        return (t[i].SUM_TIMER_WAIT > t[j].SUM_TIMER_WAIT) ||
                ((t[i].SUM_TIMER_WAIT == t[j].SUM_TIMER_WAIT) && (t[i].FILE_NAME < t[j].FILE_NAME))
}

func (t *file_summary_by_instance_rows) sort() {
        sort.Sort(t)
}

// Return true if the data in t needs refreshing: if t's totals are larger
// than those of t2 the underlying counters must have been reset (e.g. the
// server restarted or the table was truncated), so the saved data in t is
// stale. Check this by comparing the SUM_TIMER_WAIT totals.
func (t file_summary_by_instance_rows) needs_refresh(t2 file_summary_by_instance_rows) bool {
        my_totals := t.totals()
        t2_totals := t2.totals()

        return my_totals.SUM_TIMER_WAIT > t2_totals.SUM_TIMER_WAIT
}
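
// Hedged usage sketch (not part of the original pstop code) showing how the
// pieces above are intended to fit together: take a baseline snapshot, later
// take a current one, refresh the baseline if the counters went backwards,
// then show values relative to the baseline. dbh and global_variables are
// assumed to be provided by the caller.
func example_fsbi_flow(dbh *sql.DB, global_variables map[string]string) {
        initial := select_fsbi_rows(dbh) // baseline snapshot
        // ... some time later ...
        current := select_fsbi_rows(dbh)

        // if the baseline totals exceed the current ones the server's
        // counters were reset, so start again from the current data
        if initial.needs_refresh(current) {
                initial = current
        }

        current.subtract(initial)                                // values since the baseline
        merged := merge_by_table_name(current, global_variables) // simplify and combine names
        merged.sort()

        totals := merged.totals()
        fmt.Println(totals.headings())
        for i := range merged {
                fmt.Println(merged[i].row_content(totals))
        }
        fmt.Println(totals.row_content(totals))
}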