@@ -31,30 +31,37 @@ async function main() {
   }
   const startDate = new Date();
   const fileSize = Deno.statSync(logPath).size;
+  console.log(`logPath=${logPath},fileSize=${fileSize}`);
   // core.setOutput("time", new Date().toLocaleTimeString());
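+  // Build the pgbadger argument list up front so the exact command can be logged before spawning.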
+  const args = [
+    "--dump-raw-csv",
+    "--no-progressbar",
+    "-f",
+    "stderr",
+    // "--begin",
+    // "2025-06-24 10:00:00",
+    logPath,
+  ];
   const command = new Deno.Command("pgbadger", {
     stdout: "piped",
     stderr: "piped",
-    args: [
-      "--dump-raw-csv",
-      "--no-progressbar",
-      // "--begin",
-      // "2025-06-24 10:00:00",
-      logPath,
-    ],
+    args,
   });
+  console.log(`pgbadger ${args.join(" ")}`);
   const output = command.spawn();
   output.stderr.pipeTo(Deno.stderr.writable);
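+  // Capture the first CSV parse error so it can be attached to the report instead of only logged.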
+  let error: Error | undefined;
   const stream = csv
     .parseStream(Readable.from(output.stdout), {
       headers: false,
     })
     .on("error", (err) => {
-      console.error(err);
+      error = err;
     });

   const seenQueries = new Set<number>();
   const recommendations: ReportIndexRecommendation[] = [];
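+  // Count every parsed plan so the summary can report coverage, not just matches.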
+  let allQueries = 0;
   let matching = 0;
   const pg = postgres(postgresUrl);
   const stats = new Statistics(pg);
@@ -75,16 +82,16 @@ async function main() {
       loglevel,
       _sqlstate,
       _duration,
-      query,
+      queryString,
       _parameters,
       _appname,
       _backendtype,
       _queryid,
     ] = chunk as string[];
-    if (loglevel !== "LOG" || !query.startsWith("plan:")) {
+    if (loglevel !== "LOG" || !queryString.startsWith("plan:")) {
       continue;
     }
-    const plan: string = query.split("plan:")[1].trim();
+    const plan: string = queryString.split("plan:")[1].trim();
     let isJSONOutput = false;
     let i = 0;
     for (; i < plan.length; i++) {
@@ -99,120 +106,120 @@ async function main() {
         break;
       }
     }
-    if (isJSONOutput) {
-      const json = plan
-        .slice(i)
-        .replace(/\\n/g, "\n")
-        // there are random control characters in the json lol
-        // deno-lint-ignore no-control-regex
-        .replace(/[\u0000-\u001F]+/g, (c) =>
-          c === "\n" ? "\\n" : c === "\r" ? "\\r" : c === "\t" ? "\\t" : ""
-        );
-      let parsed: any;
-      try {
-        parsed = JSON.parse(json);
-      } catch (e) {
-        console.log(e);
-        break;
-      }
-      const queryFingerprint = await fingerprint(parsed["Query Text"]);
-      if (
-        parsed.Plan["Node Type"] === "ModifyTable" ||
-        // we get some infinite loops in development here
-        parsed["Query Text"].includes("pg_catalog") ||
-        parsed["Query Text"].includes("@qd_introspection")
-      ) {
-        continue;
-      }
-      const fingerprintNum = parseInt(queryFingerprint, 16);
-      if (seenQueries.has(fingerprintNum)) {
-        console.log("Skipping duplicate query", fingerprintNum);
-        continue;
-      }
-      seenQueries.add(fingerprintNum);
-      const query = parsed["Query Text"];
-      const rawParams = parsed["Query Parameters"];
-      const params = rawParams ? extractParams(rawParams) : [];
-      const analyzer = new Analyzer();
+    if (!isJSONOutput) {
+      return;
+    }
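+    // The logged plan JSON arrives with escaped newlines and stray control characters; normalize it so JSON.parse succeeds.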
+    const json = plan
+      .slice(i)
+      .replace(/\\n/g, "\n")
+      // there are random control characters in the json lol
+      // deno-lint-ignore no-control-regex
+      .replace(/[\u0000-\u001F]+/g, (c) =>
+        c === "\n" ? "\\n" : c === "\r" ? "\\r" : c === "\t" ? "\\t" : ""
+      );
+    let parsed: any;
+    try {
+      parsed = JSON.parse(json);
+    } catch (e) {
+      console.log(e);
+      break;
+    }
+    allQueries++;
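+    // Fingerprint the query text so duplicate statements are only analyzed once.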
+    const queryFingerprint = await fingerprint(parsed["Query Text"]);
+    if (
+      // TODO: we can support inserts/updates too. Just need the right optimization for it.
+      parsed.Plan["Node Type"] === "ModifyTable" ||
+      parsed["Query Text"].includes("@qd_introspection")
+    ) {
+      continue;
+    }
+    const fingerprintNum = parseInt(queryFingerprint, 16);
+    if (seenQueries.has(fingerprintNum)) {
+      console.log("Skipping duplicate query", fingerprintNum);
+      continue;
+    }
+    seenQueries.add(fingerprintNum);
+    const query = parsed["Query Text"];
+    const rawParams = parsed["Query Parameters"];
+    const params = rawParams ? extractParams(rawParams) : [];
+    const analyzer = new Analyzer();

-      const { indexesToCheck, ansiHighlightedQuery, referencedTables } =
-        await analyzer.analyze(formatQuery(query), params);
+    const { indexesToCheck, ansiHighlightedQuery, referencedTables } =
+      await analyzer.analyze(formatQuery(query), params);

-      const selectsCatalog = referencedTables.find((table) =>
-        table.startsWith("pg_")
+    const selectsCatalog = referencedTables.find((table) =>
+      table.startsWith("pg_")
+    );
+    if (selectsCatalog) {
+      console.log(
+        "Skipping query that selects from catalog tables",
+        selectsCatalog,
+        fingerprintNum
       );
-      if (selectsCatalog) {
-        console.log(
-          "Skipping query that selects from catalog tables",
-          selectsCatalog,
-          fingerprintNum
-        );
-        continue;
-      }
-      const indexCandidates = analyzer.deriveIndexes(tables, indexesToCheck);
-      if (indexCandidates.length > 0) {
-        await core.group(`query:${fingerprintNum}`, async () => {
-          console.time(`timing`);
-          matching++;
-          printLegend();
-          console.log(ansiHighlightedQuery);
-          const out = await optimizer.run(
-            query,
-            params,
-            indexCandidates,
-            tables
-          );
-          if (out.newIndexes.size > 0) {
-            const newIndexes = Array.from(out.newIndexes)
-              .map((n) => out.triedIndexes.get(n)?.definition)
-              .filter((n) => n !== undefined);
-            const existingIndexesForQuery = Array.from(out.existingIndexes)
-              .map((index) => {
-                const existing = existingIndexes.find(
-                  (e) => e.index_name === index
-                );
-                if (existing) {
-                  return `${existing.schema_name}.${
-                    existing.table_name
-                  } (${existing.index_columns
-                    .map((c) => `"${c.name}" ${c.order}`)
-                    .join(", ")})`;
-                }
-              })
-              .filter((i) => i !== undefined);
-            console.log(dedent`
-              Optimized cost from ${out.baseCost} to ${out.finalCost}
-              Existing indexes: ${Array.from(out.existingIndexes).join(", ")}
-              New indexes: ${newIndexes.join(", ")}
-            `);
-            recommendations.push({
-              formattedQuery: formatQuery(query),
-              baseCost: out.baseCost,
-              optimizedCost: out.finalCost,
-              existingIndexes: existingIndexesForQuery,
-              proposedIndexes: newIndexes,
-              explainPlan: out.explainPlan,
-            });
-          } else {
-            console.log("No new indexes found");
-          }
-          console.timeEnd(`timing`);
-        });
-      }
+      continue;
+    }
+    console.log(query);
+    const indexCandidates = analyzer.deriveIndexes(tables, indexesToCheck);
+    if (indexCandidates.length > 0) {
+      await core.group(`query:${fingerprintNum}`, async () => {
+        console.time(`timing`);
+        matching++;
+        printLegend();
+        console.log(ansiHighlightedQuery);
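+        // Try the candidate indexes and compare the planner's cost before and after.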
+        const out = await optimizer.run(query, params, indexCandidates, tables);
+        if (out.newIndexes.size > 0) {
+          const newIndexes = Array.from(out.newIndexes)
+            .map((n) => out.triedIndexes.get(n)?.definition)
+            .filter((n) => n !== undefined);
+          const existingIndexesForQuery = Array.from(out.existingIndexes)
+            .map((index) => {
+              const existing = existingIndexes.find(
+                (e) => e.index_name === index
+              );
+              if (existing) {
+                return `${existing.schema_name}.${
+                  existing.table_name
+                } (${existing.index_columns
+                  .map((c) => `"${c.name}" ${c.order}`)
+                  .join(", ")})`;
+              }
+            })
+            .filter((i) => i !== undefined);
+          console.log(dedent`
+            Optimized cost from ${out.baseCost} to ${out.finalCost}
+            Existing indexes: ${Array.from(out.existingIndexes).join(", ")}
+            New indexes: ${newIndexes.join(", ")}
+          `);
+          recommendations.push({
+            formattedQuery: formatQuery(query),
+            baseCost: out.baseCost,
+            optimizedCost: out.finalCost,
+            existingIndexes: existingIndexesForQuery,
+            proposedIndexes: newIndexes,
+            explainPlan: out.explainPlan,
+          });
+        } else {
+          console.log("No new indexes found");
+        }
+        console.timeEnd(`timing`);
+      });
     }
   }
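+  // Wait for pgbadger to exit before summarizing and reporting.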
+  await output.status;
+  console.log(`Matched ${matching} queries out of ${allQueries}`);
+  // output.unref();
   const reporter = new GithubReporter(process.env.GITHUB_TOKEN);
   await reporter.report({
     recommendations,
-    queriesLookedAt: seenQueries.size,
+    queriesLookedAt: matching,
+    totalQueries: allQueries,
+    error,
     metadata: {
       logSize: fileSize,
       timeElapsed: Date.now() - startDate.getTime(),
     },
   });
   console.timeEnd("total");
-  await output.status;
-  console.log(`Ran ${matching} queries`);
   Deno.exit(0);
 }

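For reference, the plan-sanitizing step in this commit can be exercised on its own. The sketch below is illustrative only: the sample log line and the `sanitize.ts` file name are invented, but it shows why the two replace passes are needed before JSON.parse will accept a logged plan.

// sanitize.ts — standalone sketch of the JSON cleanup above; run with `deno run sanitize.ts`.
// The sample line (with a stray \u0001 control character) is invented for illustration.
const line =
  'plan: {"Query Text": "select 1", "Plan": {"Node Type": "Result"}}\u0001';
const plan = line.split("plan:")[1].trim();
const json = plan
  // restore escaped newlines so multi-line plan text survives
  .replace(/\\n/g, "\n")
  // then drop any remaining control characters that would break JSON.parse
  // deno-lint-ignore no-control-regex
  .replace(/[\u0000-\u001F]+/g, (c) =>
    c === "\n" ? "\\n" : c === "\r" ? "\\r" : c === "\t" ? "\\t" : ""
  );
console.log(JSON.parse(json)["Plan"]["Node Type"]); // "Result"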