@@ -240,7 +240,7 @@ version(unittest) void main(){}
 else void main(string[] args){
     bool graphMode = false;

-    auto re = regex(`(.*[\\/]\w+)\.d\((\d+)\):\s*vgc:\s*(.*)`);
+    auto re = regex(`(.*[\\/]\w+)\.d\((\d+),\d+\):\s*vgc:\s*(.*)`);
     auto opts = getopt(args, "graph", &graphMode);
     if (opts.helpWanted){
         defaultGetoptPrinter("gchunt - pinpoint GC usage in D apps", opts.options);
@@ -252,18 +252,33 @@ else void main(string[] args){
             results ~= Result(m[1].replace("\\", "/"), m[2], m[3]);
         }
     }
-    sort!((a,b) => a.file < b.file || (a.file == b.file && a.line < b.line))
-        (results);
-
-    results = uniq(results).array; // deduplicate
+    auto deduplicate(Result[] arr){
+        // deduplicate
+        sort!((a,b) => a.file < b.file || (a.file == b.file && a.line < b.line))
+            (arr);
+        return uniq(arr).array;
+    }
+    results = deduplicate(results).dup;
     // Tokenize modules in question
     auto interned = StringCache(4096);
     foreach(mod; results.map!(x => x.file).uniq){
         auto config = LexerConfig(mod ~ ".d", StringBehavior.compiler);
         auto data = cast(ubyte[])std.file.read(mod ~ ".d");
         tokenStreams[mod] = getTokensForParser(data, config, &interned).dup;
-        // TODO: generate new "vgc" records for each .(i)dup
     }
+    // generate new "vgc" records for each .(i)dup
+    foreach(mod, toks; tokenStreams){
+        for(auto r = toks; !r.empty;){
+            r = r.findAdjacent!((a,b) => a.type == tok!"."
+                && b.type == tok!"identifier" && (b.text == "dup" || b.text == "idup"));
+            if(!r.empty){
+                results ~= Result(mod, to!string(r[0].line), "(i)dup allocates on GC heap");
+                r.popFront();
+            }
+        }
+
+    }
+    results = deduplicate(results);
     try {
         auto f = File("blacklist.gchunt");
         stderr.writeln("Found blacklist.gchunt ...");
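Aside (not part of the commit above): a minimal, self-contained sketch of the scanning pattern the new loop relies on. std.algorithm's findAdjacent returns the range starting at the first pair of adjacent elements that satisfy the predicate, so popping one element and searching again walks every `.` token immediately followed by a dup/idup identifier. The Tok struct and the hand-written token list below are hypothetical stand-ins for the libdparse tokens the real tool lexes; only the scanning pattern mirrors the diff.

import std.algorithm : findAdjacent;
import std.range;
import std.stdio : writefln;

// Hypothetical stand-in for a lexer token: token kind, its text, and source line.
struct Tok { string type; string text; size_t line; }

void main(){
    // Fake token stream for:  auto a = arr.dup;  ...  auto s = name.idup;
    auto toks = [
        Tok("identifier", "arr", 10), Tok(".", ".", 10), Tok("identifier", "dup", 10),
        Tok(";", ";", 10),
        Tok("identifier", "name", 12), Tok(".", ".", 12), Tok("identifier", "idup", 12),
    ];
    for (auto r = toks[]; !r.empty;){
        // advance to the next `.` immediately followed by a dup/idup identifier
        r = r.findAdjacent!((a, b) => a.type == "."
            && b.type == "identifier" && (b.text == "dup" || b.text == "idup"));
        if (!r.empty){
            writefln("line %s: (i)dup allocates on GC heap", r[0].line);
            r.popFront(); // step past the `.` so the search resumes after this match
        }
    }
}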