NOTE: the "@0024 --> $" workaround comment has been removed; the underlying bug is fixed.
[pstop.git] / performance_schema / file_summary_by_instance / file_summary_by_instance.go
1 // performance_schema - library routines for pstop.
2 //
3 // This file contains the library routines for managing the
4 // file_summary_by_instance table.
5 package file_summary_by_instance
6
7 import (
8         "database/sql"
9         "fmt"
10         "time"
11
12         "github.com/sjmudd/pstop/lib"
13         ps "github.com/sjmudd/pstop/performance_schema"
14 )
15
16 /*
17 CREATE TABLE `file_summary_by_instance` (
18   `FILE_NAME` varchar(512) NOT NULL,
19   `EVENT_NAME` varchar(128) NOT NULL,                           // not collected
20   `OBJECT_INSTANCE_BEGIN` bigint(20) unsigned NOT NULL,         // not collected
21   `COUNT_STAR` bigint(20) unsigned NOT NULL,
22   `SUM_TIMER_WAIT` bigint(20) unsigned NOT NULL,
23   `MIN_TIMER_WAIT` bigint(20) unsigned NOT NULL,
24   `AVG_TIMER_WAIT` bigint(20) unsigned NOT NULL,
25   `MAX_TIMER_WAIT` bigint(20) unsigned NOT NULL,
26   `COUNT_READ` bigint(20) unsigned NOT NULL,
27   `SUM_TIMER_READ` bigint(20) unsigned NOT NULL,
28   `MIN_TIMER_READ` bigint(20) unsigned NOT NULL,
29   `AVG_TIMER_READ` bigint(20) unsigned NOT NULL,
30   `MAX_TIMER_READ` bigint(20) unsigned NOT NULL,
31   `SUM_NUMBER_OF_BYTES_READ` bigint(20) NOT NULL,
32   `COUNT_WRITE` bigint(20) unsigned NOT NULL,
33   `SUM_TIMER_WRITE` bigint(20) unsigned NOT NULL,
34   `MIN_TIMER_WRITE` bigint(20) unsigned NOT NULL,
35   `AVG_TIMER_WRITE` bigint(20) unsigned NOT NULL,
36   `MAX_TIMER_WRITE` bigint(20) unsigned NOT NULL,
37   `SUM_NUMBER_OF_BYTES_WRITE` bigint(20) NOT NULL,
38   `COUNT_MISC` bigint(20) unsigned NOT NULL,
39   `SUM_TIMER_MISC` bigint(20) unsigned NOT NULL,
40   `MIN_TIMER_MISC` bigint(20) unsigned NOT NULL,
41   `AVG_TIMER_MISC` bigint(20) unsigned NOT NULL,
42   `MAX_TIMER_MISC` bigint(20) unsigned NOT NULL
43 ) ENGINE=PERFORMANCE_SCHEMA DEFAULT CHARSET=utf8
44 1 row in set (0.00 sec)
45
46 */
47
// File_summary_by_instance holds a table of rows from the
// performance_schema.file_summary_by_instance table together with the
// derived data pstop needs to display it.
type File_summary_by_instance struct {
        ps.RelativeStats // embedded: whether relative (since baseline) stats are wanted
        ps.InitialTime   // embedded: time the baseline snapshot was taken
        initial          file_summary_by_instance_rows // baseline snapshot used for relative stats
        current          file_summary_by_instance_rows // latest rows collected from the database
        results          file_summary_by_instance_rows // current rows, optionally made relative to initial, sorted for display
        totals           file_summary_by_instance_row  // column totals computed over results
        global_variables map[string]string             // global variables (e.g. datadir, relay_log) used when merging file names
}
58
// SyncReferenceValues resets the statistics baseline to the current
// values, so relative statistics are measured from now on. The results
// and totals are recomputed immediately (they become zero when
// relative stats are wanted, since current == initial at this point).
func (t *File_summary_by_instance) SyncReferenceValues() {
        t.SetNow() // record the time of this new baseline
        t.initial = make(file_summary_by_instance_rows, len(t.current))
        copy(t.initial, t.current)

        t.results = make(file_summary_by_instance_rows, len(t.current))
        copy(t.results, t.current)

        if t.WantRelativeStats() {
                t.results.subtract(t.initial) // should be 0 if relative
        }

        t.results.sort()
        t.totals = t.results.totals()
}
75
76 // Collect data from the db, then merge it in.
77 func (t *File_summary_by_instance) Collect(dbh *sql.DB) {
78         start := time.Now()
79         // UPDATE current from db handle
80         t.current = merge_by_table_name(select_fsbi_rows(dbh), t.global_variables)
81
82         // copy in initial data if it was not there
83         if len(t.initial) == 0 && len(t.current) > 0 {
84                 t.initial = make(file_summary_by_instance_rows, len(t.current))
85                 copy(t.initial, t.current)
86         }
87
88         // check for reload initial characteristics
89         if t.initial.needs_refresh(t.current) {
90                 t.initial = make(file_summary_by_instance_rows, len(t.current))
91                 copy(t.initial, t.current)
92         }
93
94         // update results to current value
95         t.results = make(file_summary_by_instance_rows, len(t.current))
96         copy(t.results, t.current)
97
98         // make relative if need be
99         if t.WantRelativeStats() {
100                 t.results.subtract(t.initial)
101         }
102
103         // sort the results
104         t.results.sort()
105
106         // setup the totals
107         t.totals = t.results.totals()
108         lib.Logger.Println("File_summary_by_instance.Collect() took:", time.Duration(time.Since(start)).String())
109 }
110
111 // return the headings for a table
112 func (t File_summary_by_instance) Headings() string {
113         var r file_summary_by_instance_row
114
115         return r.headings()
116 }
117
118 // return the rows we need for displaying
119 func (t File_summary_by_instance) RowContent(max_rows int) []string {
120         rows := make([]string, 0, max_rows)
121
122         for i := range t.results {
123                 if i < max_rows {
124                         rows = append(rows, t.results[i].row_content(t.totals))
125                 }
126         }
127
128         return rows
129 }
130
131 // return all the totals
132 func (t File_summary_by_instance) TotalRowContent() string {
133         return t.totals.row_content(t.totals)
134 }
135
136 // return an empty string of data (for filling in)
137 func (t File_summary_by_instance) EmptyRowContent() string {
138         var emtpy file_summary_by_instance_row
139         return emtpy.row_content(emtpy)
140 }
141
142 func (t File_summary_by_instance) Description() string {
143         count := t.count_rows()
144         return fmt.Sprintf("File I/O by filename (file_summary_by_instance) %4d row(s)    ", count)
145 }
146
147 // create a new structure and include various variable values:
148 // - datadir, relay_log
149 // There's no checking that these are actually provided!
150 func NewFileSummaryByInstance(global_variables map[string]string) *File_summary_by_instance {
151         n := new(File_summary_by_instance)
152
153         n.global_variables = global_variables
154
155         return n
156 }
157
158 func (t File_summary_by_instance) count_rows() int {
159         var count int
160         for row := range t.results {
161                 if t.results[row].SUM_TIMER_WAIT > 0 {
162                         count++
163                 }
164         }
165         return count
166 }