1 package Analysis.Disjoint;
3 import Analysis.CallGraph.*;
4 import Analysis.Liveness;
5 import Analysis.ArrayReferencees;
6 import Analysis.OoOJava.RBlockRelationAnalysis;
9 import IR.Tree.Modifiers;
14 public class DisjointAnalysis implements HeapAnalysis {
16 ///////////////////////////////////////////
18 // Public interface to discover possible
19 // sharing in the program under analysis
21 ///////////////////////////////////////////
23 // if an object allocated at the target site may be
24 // reachable from both an object from root1 and an
25 // object allocated at root2, return TRUE
/**
 * Returns TRUE if an object allocated at the target site may be reachable
 * from BOTH an object allocated at root1 AND an object allocated at root2,
 * judged against the completed reach graph for fm's method.
 * NOTE(review): the parameter list is truncated in this listing; fnRoot1,
 * fnRoot2 and fnTarget are presumably FlatNew parameters -- confirm.
 */
26 public boolean mayBothReachTarget( FlatMethod fm,
// the two root sites must have been flagged for the analysis to track them
31 AllocSite asr1 = getAllocationSiteFromFlatNew( fnRoot1 );
32 AllocSite asr2 = getAllocationSiteFromFlatNew( fnRoot2 );
33 assert asr1.isFlagged();
34 assert asr2.isFlagged();
// query the (partial or final) reach graph of the enclosing method
36 AllocSite ast = getAllocationSiteFromFlatNew( fnTarget );
37 ReachGraph rg = getPartial( fm.getMethod() );
39 return rg.mayBothReachTarget( asr1, asr2, ast );
42 // similar to the method above, return TRUE if ever
43 // more than one object from the root allocation site
44 // may reach an object from the target site
/**
 * Returns TRUE if MORE THAN ONE object from the single root allocation
 * site may reach an object from the target site (see mayBothReachTarget
 * for the two-root variant).
 * NOTE(review): parameter list truncated in this listing -- fnRoot and
 * fnTarget are presumably FlatNew parameters; confirm.
 */
45 public boolean mayManyReachTarget( FlatMethod fm,
// root site must be flagged so the analysis tracked its objects
49 AllocSite asr = getAllocationSiteFromFlatNew( fnRoot );
50 assert asr.isFlagged();
52 AllocSite ast = getAllocationSiteFromFlatNew( fnTarget );
53 ReachGraph rg = getPartial( fm.getMethod() );
55 return rg.mayManyReachTarget( asr, ast );
/**
 * Public accessor: the set of flagged allocation sites reachable from the
 * given task.  Throws (via checkAnalysisComplete) if called while the
 * interprocedural analysis is still running.
 */
61 public HashSet<AllocSite>
62 getFlaggedAllocationSitesReachableFromTask(TaskDescriptor td) {
63 checkAnalysisComplete();
64 return getFlaggedAllocationSitesReachableFromTaskPRIVATE(td);
/**
 * Public accessor: the analysis abstraction (AllocSite) for a FlatNew
 * statement.  Only valid after the analysis has completed.
 */
67 public AllocSite getAllocationSiteFromFlatNew(FlatNew fn) {
68 checkAnalysisComplete();
69 return getAllocSiteFromFlatNewPRIVATE(fn);
/**
 * Public accessor: map a heap-region-node ID back to the allocation site
 * that created it.  Returns null if the ID is unknown (Hashtable.get).
 */
72 public AllocSite getAllocationSiteFromHeapRegionNodeID(Integer id) {
73 checkAnalysisComplete();
74 return mapHrnIdToAllocSite.get(id);
/**
 * Sharing query between two parameters of a task/method: returns the heap
 * region nodes that may be reachable from both parameters.
 * NOTE(review): the int paramIndex1/paramIndex2 parameter lines are
 * truncated in this listing -- confirm against the full source.
 */
77 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
80 checkAnalysisComplete();
// look up the settled reach graph for this task/method
81 ReachGraph rg=mapDescriptorToCompleteReachGraph.get(taskOrMethod);
82 FlatMethod fm=state.getMethodFlat(taskOrMethod);
84 return rg.mayReachSharedObjects(fm, paramIndex1, paramIndex2);
/**
 * Sharing query between a parameter (by index) and an allocation site:
 * heap region nodes possibly reachable from both.
 */
87 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
88 int paramIndex, AllocSite alloc) {
89 checkAnalysisComplete();
90 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
91 FlatMethod fm=state.getMethodFlat(taskOrMethod);
93 return rg.mayReachSharedObjects(fm, paramIndex, alloc);
/**
 * Symmetric convenience overload of the (paramIndex, alloc) query above:
 * accepts the arguments in the opposite order and forwards to the same
 * underlying ReachGraph.mayReachSharedObjects(fm, paramIndex, alloc) call,
 * since "may share" is a symmetric relation.
 */
96 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
97 AllocSite alloc, int paramIndex) {
98 checkAnalysisComplete();
99 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
100 FlatMethod fm=state.getMethodFlat(taskOrMethod);
102 return rg.mayReachSharedObjects(fm, paramIndex, alloc);
/**
 * Sharing query between two allocation sites: heap region nodes possibly
 * reachable from objects of both sites in this task/method's final graph.
 */
105 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
106 AllocSite alloc1, AllocSite alloc2) {
107 checkAnalysisComplete();
108 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
110 return rg.mayReachSharedObjects(alloc1, alloc2);
/**
 * Renders a set of heap region nodes as a human-readable, comma-separated
 * multi-line string for the sharing reports.
 * NOTE(review): this listing is missing the declaration/initialization of
 * the accumulator 'out', the branch condition choosing between the two
 * formats, and the return statement -- confirm against the full source.
 */
113 public String prettyPrintNodeSet(Set<HeapRegionNode> s) {
114 checkAnalysisComplete();
118 Iterator<HeapRegionNode> i = s.iterator();
119 while (i.hasNext()) {
120 HeapRegionNode n = i.next();
122 AllocSite as = n.getAllocSite();
// short form: just the node name
124 out += " " + n.toString() + ",\n";
// verbose form: node name plus its allocation site detail
126 out += " " + n.toString() + ": " + as.toStringVerbose()
135 // use the methods given above to check every possible sharing class
136 // between task parameters and flagged allocation sites reachable
/**
 * Writes a report of every potential sharing class in the program to
 * outputFile: for each task it checks parameter-vs-parameter,
 * parameter-vs-allocation-site, and site-vs-site pairs via the
 * hasPotentialSharing overloads.  When tabularOutput is set, emits a
 * LaTeX-style table row instead of prose.
 * NOTE(review): the timeReport/justTime/numLines parameter lines and
 * several closing braces are truncated in this listing -- confirm.
 */
138 public void writeAllSharing(String outputFile,
141 boolean tabularOutput,
144 throws java.io.IOException {
145 checkAnalysisComplete();
147 BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile));
149 if (!tabularOutput) {
150 bw.write("Conducting ownership analysis with allocation depth = "
151 + allocationDepth + "\n");
152 bw.write(timeReport + "\n");
157 // look through every task for potential sharing
158 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
159 while (taskItr.hasNext()) {
160 TaskDescriptor td = (TaskDescriptor) taskItr.next();
162 if (!tabularOutput) {
163 bw.write("\n---------" + td + "--------\n");
166 HashSet<AllocSite> allocSites = getFlaggedAllocationSitesReachableFromTask(td);
168 Set<HeapRegionNode> common;
170 // for each task parameter, check for sharing classes with
171 // other task parameters and every allocation site
172 // reachable from this task
173 boolean foundSomeSharing = false;
175 FlatMethod fm = state.getMethodFlat(td);
176 for (int i = 0; i < fm.numParameters(); ++i) {
178 // skip parameters with types that cannot reference
180 if( !shouldAnalysisTrack( fm.getParameter( i ).getType() ) ) {
184 // for the ith parameter check for sharing classes to all
185 // higher numbered parameters
186 for (int j = i + 1; j < fm.numParameters(); ++j) {
188 // skip parameters with types that cannot reference
190 if( !shouldAnalysisTrack( fm.getParameter( j ).getType() ) ) {
195 common = hasPotentialSharing(td, i, j);
196 if (!common.isEmpty()) {
197 foundSomeSharing = true;
199 if (!tabularOutput) {
200 bw.write("Potential sharing between parameters " + i
201 + " and " + j + ".\n");
202 bw.write(prettyPrintNodeSet(common) + "\n");
207 // for the ith parameter, check for sharing classes against
208 // the set of allocation sites reachable from this
210 Iterator allocItr = allocSites.iterator();
211 while (allocItr.hasNext()) {
212 AllocSite as = (AllocSite) allocItr.next();
213 common = hasPotentialSharing(td, i, as);
214 if (!common.isEmpty()) {
215 foundSomeSharing = true;
217 if (!tabularOutput) {
218 bw.write("Potential sharing between parameter " + i
219 + " and " + as.getFlatNew() + ".\n");
220 bw.write(prettyPrintNodeSet(common) + "\n");
226 // for each allocation site check for sharing classes with
227 // other allocation sites in the context of execution
229 HashSet<AllocSite> outerChecked = new HashSet<AllocSite>();
230 Iterator allocItr1 = allocSites.iterator();
231 while (allocItr1.hasNext()) {
232 AllocSite as1 = (AllocSite) allocItr1.next();
234 Iterator allocItr2 = allocSites.iterator();
235 while (allocItr2.hasNext()) {
236 AllocSite as2 = (AllocSite) allocItr2.next();
// outerChecked makes the site-pair loop triangular: each unordered
// pair {as1, as2} is examined only once
238 if (!outerChecked.contains(as2)) {
239 common = hasPotentialSharing(td, as1, as2);
241 if (!common.isEmpty()) {
242 foundSomeSharing = true;
244 if (!tabularOutput) {
245 bw.write("Potential sharing between "
246 + as1.getFlatNew() + " and "
247 + as2.getFlatNew() + ".\n");
248 bw.write(prettyPrintNodeSet(common) + "\n");
254 outerChecked.add(as1);
257 if (!foundSomeSharing) {
258 if (!tabularOutput) {
259 bw.write("No sharing between flagged objects in Task " + td
// tabular mode: one LaTeX table row (note the escaped "\\\\" row break)
267 bw.write(" & " + numSharing + " & " + justTime + " & " + numLines
268 + " & " + numMethodsAnalyzed() + " \\\\\n");
270 bw.write("\nNumber sharing classes: "+numSharing);
278 // this version of writeAllSharing is for Java programs that have no tasks
279 // ***********************************
280 // WARNING: THIS DOES NOT DO THE RIGHT THING, REPORTS 0 ALWAYS!
281 // It should use mayBothReachTarget and mayManyReachTarget like
282 // OoOJava does to query analysis results
283 // ***********************************
/**
 * Variant of writeAllSharing for plain Java programs (no tasks): checks
 * only allocation-site pairs reachable from main.  See the author's
 * warning above -- this path is known to report 0 and should be migrated
 * to mayBothReachTarget/mayManyReachTarget.
 * NOTE(review): several parameter lines and closing braces are truncated
 * in this listing -- confirm against the full source.
 */
284 public void writeAllSharingJava(String outputFile,
287 boolean tabularOutput,
290 throws java.io.IOException {
291 checkAnalysisComplete();
297 BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile));
299 bw.write("Conducting disjoint reachability analysis with allocation depth = "
300 + allocationDepth + "\n");
301 bw.write(timeReport + "\n\n");
303 boolean foundSomeSharing = false;
// root the query at the program's main method
305 Descriptor d = typeUtil.getMain();
306 HashSet<AllocSite> allocSites = getFlaggedAllocationSites(d);
308 // for each allocation site check for sharing classes with
309 // other allocation sites in the context of execution
311 HashSet<AllocSite> outerChecked = new HashSet<AllocSite>();
312 Iterator allocItr1 = allocSites.iterator();
313 while (allocItr1.hasNext()) {
314 AllocSite as1 = (AllocSite) allocItr1.next();
316 Iterator allocItr2 = allocSites.iterator();
317 while (allocItr2.hasNext()) {
318 AllocSite as2 = (AllocSite) allocItr2.next();
// triangular pair enumeration, same scheme as writeAllSharing
320 if (!outerChecked.contains(as2)) {
321 Set<HeapRegionNode> common = hasPotentialSharing(d,
324 if (!common.isEmpty()) {
325 foundSomeSharing = true;
326 bw.write("Potential sharing between "
327 + as1.getDisjointAnalysisId() + " and "
328 + as2.getDisjointAnalysisId() + ".\n");
329 bw.write(prettyPrintNodeSet(common) + "\n");
335 outerChecked.add(as1);
338 if (!foundSomeSharing) {
339 bw.write("No sharing classes between flagged objects found.\n");
341 bw.write("\nNumber sharing classes: "+numSharing);
344 bw.write("Number of methods analyzed: "+numMethodsAnalyzed()+"\n");
349 ///////////////////////////////////////////
351 // end public interface
353 ///////////////////////////////////////////
/**
 * Guard used by every public accessor: throws if a client queries results
 * while the fixed-point analysis is still running (analysisComplete is set
 * true only at the end of init()).
 */
357 protected void checkAnalysisComplete() {
358 if( !analysisComplete ) {
359 throw new Error("Warning: public interface method called while analysis is running.");
//////////////////////////////////////////////////
// analysis configuration and state (fields)
//////////////////////////////////////////////////
368 // run in faster mode, only when bugs wrung out!
369 public static boolean releaseMode;
371 // use command line option to set this, analysis
372 // should attempt to be deterministic
373 public static boolean determinismDesired;
375 // when we want to enforce determinism in the
376 // analysis we need to sort descriptors rather
377 // than toss them in efficient sets, use this
378 public static DescriptorComparator dComp =
379 new DescriptorComparator();
382 // data from the compiler
384 public CallGraph callGraph;
385 public Liveness liveness;
386 public ArrayReferencees arrayReferencees;
387 public RBlockRelationAnalysis rblockRel;
388 public TypeUtil typeUtil;
389 public int allocationDepth;
// effects analysis is enabled only when an RBlockRelationAnalysis is
// supplied to init() (OoOJava mode)
391 protected boolean doEffectsAnalysis = false;
392 protected EffectsAnalysis effectsAnalysis;
393 protected BuildStateMachines buildStateMachines;
396 // data structure for public interface
397 private Hashtable< Descriptor, HashSet<AllocSite> >
398 mapDescriptorToAllocSiteSet;
401 // for public interface methods to warn that they
402 // are grabbing results during analysis
403 private boolean analysisComplete;
406 // used to identify HeapRegionNode objects
407 // A unique ID equates an object in one
408 // ownership graph with an object in another
409 // graph that logically represents the same
411 // start at 10 and increment to reserve some
412 // IDs for special purposes
413 static protected int uniqueIDcount = 10;
416 // An out-of-scope method created by the
417 // analysis that has no parameters, and
418 // appears to allocate the command line
419 // arguments, then invoke the source code's
420 // main method. The purpose of this is to
421 // provide the analysis with an explicit
422 // top-level context with no parameters
423 protected MethodDescriptor mdAnalysisEntry;
424 protected FlatMethod fmAnalysisEntry;
426 // main method defined by source program
427 protected MethodDescriptor mdSourceEntry;
429 // the set of task and/or method descriptors
430 // reachable in call graph
431 protected Set<Descriptor>
432 descriptorsToAnalyze;
434 // current descriptors to visit in fixed-point
435 // interprocedural analysis, prioritized by
436 // dependency in the call graph
437 protected Stack<Descriptor>
438 descriptorsToVisitStack;
439 protected PriorityQueue<DescriptorQWrapper>
442 // a duplication of the above structure, but
443 // for efficient testing of inclusion
444 protected HashSet<Descriptor>
445 descriptorsToVisitSet;
447 // storage for priorities (doesn't make sense)
448 // to add it to the Descriptor class, just in
450 protected Hashtable<Descriptor, Integer>
451 mapDescriptorToPriority;
453 // when analyzing a method and scheduling more:
454 // remember set of callee's enqueued for analysis
455 // so they can be put on top of the callers in
456 // the stack-visit mode
457 protected Set<Descriptor>
460 // maps a descriptor to its current partial result
461 // from the intraprocedural fixed-point analysis--
462 // then the interprocedural analysis settles, this
463 // mapping will have the final results for each
465 protected Hashtable<Descriptor, ReachGraph>
466 mapDescriptorToCompleteReachGraph;
468 // maps a descriptor to its known dependents: namely
469 // methods or tasks that call the descriptor's method
470 // AND are part of this analysis (reachable from main)
471 protected Hashtable< Descriptor, Set<Descriptor> >
472 mapDescriptorToSetDependents;
474 // if the analysis client wants to flag allocation sites
475 // programmatically, it should provide a set of FlatNew
476 // statements--this may be null if unneeded
477 protected Set<FlatNew> sitesToFlag;
479 // maps each flat new to one analysis abstraction
480 // allocate site object, these exist outside reach graphs
481 protected Hashtable<FlatNew, AllocSite>
482 mapFlatNewToAllocSite;
484 // maps intergraph heap region IDs to intergraph
485 // allocation sites that created them, a redundant
486 // structure for efficiency in some operations
487 protected Hashtable<Integer, AllocSite>
490 // maps a method to its initial heap model (IHM) that
491 // is the set of reachability graphs from every caller
492 // site, all merged together. The reason that we keep
493 // them separate is that any one call site's contribution
494 // to the IHM may changed along the path to the fixed point
495 protected Hashtable< Descriptor, Hashtable< FlatCall, ReachGraph > >
496 mapDescriptorToIHMcontributions;
498 // additionally, keep a mapping from descriptors to the
499 // merged in-coming initial context, because we want this
500 // initial context to be STRICTLY MONOTONIC
501 protected Hashtable<Descriptor, ReachGraph>
502 mapDescriptorToInitialContext;
504 // make the result for back edges analysis-wide STRICTLY
505 // MONOTONIC as well, but notice we use FlatNode as the
506 // key for this map: in case we want to consider other
507 // nodes as back edge's in future implementations
508 protected Hashtable<FlatNode, ReachGraph>
509 mapBackEdgeToMonotone;
// synthetic field name used to model array element references
512 public static final String arrayElementFieldName = "___element_";
513 static protected Hashtable<TypeDescriptor, FieldDescriptor>
517 protected boolean suppressOutput;
519 // for controlling DOT file output
520 protected boolean writeFinalDOTs;
521 protected boolean writeAllIncrementalDOTs;
523 // supporting DOT output--when we want to write every
524 // partial method result, keep a tally for generating
526 protected Hashtable<Descriptor, Integer>
527 mapDescriptorToNumUpdates;
529 //map task descriptor to initial task parameter
530 protected Hashtable<Descriptor, ReachGraph>
531 mapDescriptorToReachGraph;
533 protected PointerMethod pm;
535 //Keeps track of all the reach graphs at every program point
536 //DO NOT USE UNLESS YOU REALLY NEED IT
537 static protected Hashtable<FlatNode, ReachGraph> fn2rgAtEnter =
538 new Hashtable<FlatNode, ReachGraph>();
// maps a call site back to the method/task that contains it
540 private Hashtable<FlatCall, Descriptor> fc2enclosing;
543 // allocate various structures that are not local
544 // to a single class method--should be done once
/**
 * Allocates all analysis-wide data structures; called once from init()
 * before the fixed-point computation starts.  Chooses ordered (TreeSet)
 * vs. hashed containers based on the determinism flag, and only builds
 * the visit stack/queue matching the configured scheduling mode.
 */
545 protected void allocateStructures() {
547 if( determinismDesired ) {
548 // use an ordered set
549 descriptorsToAnalyze = new TreeSet<Descriptor>( dComp );
551 // otherwise use a speedy hashset
552 descriptorsToAnalyze = new HashSet<Descriptor>();
555 mapDescriptorToCompleteReachGraph =
556 new Hashtable<Descriptor, ReachGraph>();
558 mapDescriptorToNumUpdates =
559 new Hashtable<Descriptor, Integer>();
561 mapDescriptorToSetDependents =
562 new Hashtable< Descriptor, Set<Descriptor> >();
564 mapFlatNewToAllocSite =
565 new Hashtable<FlatNew, AllocSite>();
567 mapDescriptorToIHMcontributions =
568 new Hashtable< Descriptor, Hashtable< FlatCall, ReachGraph > >();
570 mapDescriptorToInitialContext =
571 new Hashtable<Descriptor, ReachGraph>();
573 mapBackEdgeToMonotone =
574 new Hashtable<FlatNode, ReachGraph>();
576 mapHrnIdToAllocSite =
577 new Hashtable<Integer, AllocSite>();
579 mapTypeToArrayField =
580 new Hashtable <TypeDescriptor, FieldDescriptor>();
// visit structures depend on the interprocedural scheduling mode
582 if( state.DISJOINTDVISITSTACK ||
583 state.DISJOINTDVISITSTACKEESONTOP
585 descriptorsToVisitStack =
586 new Stack<Descriptor>();
589 if( state.DISJOINTDVISITPQUE ) {
590 descriptorsToVisitQ =
591 new PriorityQueue<DescriptorQWrapper>();
594 descriptorsToVisitSet =
595 new HashSet<Descriptor>();
597 mapDescriptorToPriority =
598 new Hashtable<Descriptor, Integer>();
// NOTE(review): the assignment target for this HashSet (presumably
// calleesToEnqueue) is missing from this listing -- confirm
601 new HashSet<Descriptor>();
603 mapDescriptorToAllocSiteSet =
604 new Hashtable<Descriptor, HashSet<AllocSite> >();
606 mapDescriptorToReachGraph =
607 new Hashtable<Descriptor, ReachGraph>();
609 pm = new PointerMethod();
611 fc2enclosing = new Hashtable<FlatCall, Descriptor>();
616 // this analysis generates a disjoint reachability
617 // graph for every reachable method in the program
/**
 * Convenience constructor: no BuildStateMachines, output not suppressed.
 * Delegates to init() which runs the whole analysis to completion.
 * NOTE(review): intermediate parameter lines (tu, cg, l, ar) are
 * truncated in this listing.
 */
618 public DisjointAnalysis( State s,
623 Set<FlatNew> sitesToFlag,
624 RBlockRelationAnalysis rra
626 init( s, tu, cg, l, ar, sitesToFlag, rra, null, false );
/**
 * Convenience constructor: no BuildStateMachines, caller chooses whether
 * console output is suppressed.
 */
629 public DisjointAnalysis( State s,
634 Set<FlatNew> sitesToFlag,
635 RBlockRelationAnalysis rra,
636 boolean suppressOutput
638 init( s, tu, cg, l, ar, sitesToFlag, rra, null, suppressOutput );
/**
 * Full constructor: supplies a BuildStateMachines for the OoOJava
 * effects/state-machine output in addition to the other inputs.
 */
641 public DisjointAnalysis( State s,
646 Set<FlatNew> sitesToFlag,
647 RBlockRelationAnalysis rra,
648 BuildStateMachines bsm,
649 boolean suppressOutput
651 init( s, tu, cg, l, ar, sitesToFlag, rra, bsm, suppressOutput );
/**
 * Shared constructor body: records the compiler inputs, configures
 * static ReachGraph/EffectsAnalysis state, runs the interprocedural
 * fixed-point analysis, then emits timing and all requested report/DOT
 * output.  Throws Error on any IOException while writing results.
 * NOTE(review): several lines (typeUtil/callGraph/liveness parameters,
 * the analyzeMethods() call inside the try, some closing braces) are
 * truncated in this listing -- confirm against the full source.
 */
654 protected void init( State state,
658 ArrayReferencees arrayReferencees,
659 Set<FlatNew> sitesToFlag,
660 RBlockRelationAnalysis rra,
661 BuildStateMachines bsm,
662 boolean suppressOutput
665 analysisComplete = false;
668 this.typeUtil = typeUtil;
669 this.callGraph = callGraph;
670 this.liveness = liveness;
671 this.arrayReferencees = arrayReferencees;
672 this.sitesToFlag = sitesToFlag;
673 this.rblockRel = rra;
674 this.suppressOutput = suppressOutput;
675 this.buildStateMachines = bsm;
// presence of an rblock relation turns on the effects analysis
677 if( rblockRel != null ) {
678 doEffectsAnalysis = true;
679 effectsAnalysis = new EffectsAnalysis();
682 this.allocationDepth = state.DISJOINTALLOCDEPTH;
683 this.releaseMode = state.DISJOINTRELEASEMODE;
684 this.determinismDesired = state.DISJOINTDETERMINISM;
686 this.writeFinalDOTs = state.DISJOINTWRITEDOTS && !state.DISJOINTWRITEALL && !suppressOutput;
687 this.writeAllIncrementalDOTs = state.DISJOINTWRITEDOTS && state.DISJOINTWRITEALL && !suppressOutput;
// debug-snapshot configuration (see takeDebugSnapshots usage below)
689 this.takeDebugSnapshots = state.DISJOINTSNAPSYMBOL != null;
690 this.descSymbolDebug = state.DISJOINTSNAPSYMBOL;
691 this.visitStartCapture = state.DISJOINTSNAPVISITTOSTART;
692 this.numVisitsToCapture = state.DISJOINTSNAPNUMVISITS;
693 this.stopAfterCapture = state.DISJOINTSNAPSTOPAFTER;
694 this.snapVisitCounter = 1; // count visits from 1 (user will write 1, means 1st visit)
695 this.snapNodeCounter = 0; // count nodes from 0
// exactly one descriptor-visiting mode must be selected
698 state.DISJOINTDVISITSTACK ||
699 state.DISJOINTDVISITPQUE ||
700 state.DISJOINTDVISITSTACKEESONTOP;
701 assert !(state.DISJOINTDVISITSTACK && state.DISJOINTDVISITPQUE);
702 assert !(state.DISJOINTDVISITSTACK && state.DISJOINTDVISITSTACKEESONTOP);
703 assert !(state.DISJOINTDVISITPQUE && state.DISJOINTDVISITSTACKEESONTOP);
705 // set some static configuration for ReachGraphs
706 ReachGraph.allocationDepth = allocationDepth;
707 ReachGraph.typeUtil = typeUtil;
708 ReachGraph.state = state;
710 ReachGraph.debugCallSiteVisitStartCapture
711 = state.DISJOINTDEBUGCALLVISITTOSTART;
713 ReachGraph.debugCallSiteNumVisitsToCapture
714 = state.DISJOINTDEBUGCALLNUMVISITS;
716 ReachGraph.debugCallSiteStopAfter
717 = state.DISJOINTDEBUGCALLSTOPAFTER;
719 ReachGraph.debugCallSiteVisitCounter
720 = 0; // count visits from 1, is incremented before first visit
723 EffectsAnalysis.state = state;
724 EffectsAnalysis.buildStateMachines = buildStateMachines;
727 if( suppressOutput ) {
728 System.out.println( "* Running disjoint reachability analysis with output suppressed! *" );
731 allocateStructures();
733 double timeStartAnalysis = (double) System.nanoTime();
735 // start interprocedural fixed-point computation
738 } catch( IOException e ) {
739 throw new Error( "IO Exception while writing disjointness analysis output." );
// from here on public accessors may be used safely
742 analysisComplete=true;
744 double timeEndAnalysis = (double) System.nanoTime();
745 double dt = (timeEndAnalysis - timeStartAnalysis)/(Math.pow( 10.0, 9.0 ) );
748 if( sitesToFlag != null ) {
749 treport = String.format( "Disjoint reachability analysis flagged %d sites and took %.3f sec.", sitesToFlag.size(), dt );
750 if(sitesToFlag.size()>0){
751 treport+="\nFlagged sites:"+"\n"+sitesToFlag.toString();
754 treport = String.format( "Disjoint reachability analysis took %.3f sec.", dt );
756 String justtime = String.format( "%.2f", dt );
757 System.out.println( treport );
761 if( writeFinalDOTs && !writeAllIncrementalDOTs ) {
765 if( state.DISJOINTWRITEIHMS && !suppressOutput ) {
769 if( state.DISJOINTWRITEINITCONTEXTS && !suppressOutput ) {
770 writeInitialContexts();
773 if( state.DISJOINTALIASFILE != null && !suppressOutput ) {
// task mode gets the full report; java mode gets the (known-broken)
// writeAllSharingJava variant -- see its warning comment
775 writeAllSharing(state.DISJOINTALIASFILE, treport, justtime, state.DISJOINTALIASTAB, state.lines);
777 writeAllSharingJava(state.DISJOINTALIASFILE,
780 state.DISJOINTALIASTAB,
787 buildStateMachines.writeStateMachines();
790 } catch( IOException e ) {
791 throw new Error( "IO Exception while writing disjointness analysis output." );
/**
 * True while the work list (stack or priority queue, depending on the
 * configured visiting mode) still has descriptors to analyze; errors if
 * no visiting mode was configured.
 */
796 protected boolean moreDescriptorsToVisit() {
797 if( state.DISJOINTDVISITSTACK ||
798 state.DISJOINTDVISITSTACKEESONTOP
800 return !descriptorsToVisitStack.isEmpty();
802 } else if( state.DISJOINTDVISITPQUE ) {
803 return !descriptorsToVisitQ.isEmpty();
806 throw new Error( "Neither descriptor visiting mode set" );
810 // fixed-point computation over the call graph--when a
811 // method's callees are updated, it must be reanalyzed
/**
 * Interprocedural fixed-point driver.  Seeds the set of descriptors to
 * analyze from the task roots (Bamboo mode) or main (Java mode), sets up
 * the scheduling structure for the configured visiting mode, then
 * repeatedly recomputes method/task reach graphs, re-enqueuing dependents
 * whenever a result changes, until the work list is empty.
 * NOTE(review): the mode-selection condition, some loop/brace lines and
 * the priority counter 'p' initialization are truncated in this listing.
 */
812 protected void analyzeMethods() throws java.io.IOException {
814 // task or non-task (java) mode determines what the roots
815 // of the call chain are, and establishes the set of methods
816 // reachable from the roots that will be analyzed
819 if( !suppressOutput ) {
820 System.out.println( "Bamboo mode..." );
823 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
824 while( taskItr.hasNext() ) {
825 TaskDescriptor td = (TaskDescriptor) taskItr.next();
826 if( !descriptorsToAnalyze.contains( td ) ) {
827 // add all methods transitively reachable from the
829 descriptorsToAnalyze.add( td );
830 descriptorsToAnalyze.addAll( callGraph.getAllMethods( td ) );
835 if( !suppressOutput ) {
836 System.out.println( "Java mode..." );
839 // add all methods transitively reachable from the
840 // source's main to set for analysis
841 mdSourceEntry = typeUtil.getMain();
842 descriptorsToAnalyze.add( mdSourceEntry );
843 descriptorsToAnalyze.addAll( callGraph.getAllMethods( mdSourceEntry ) );
845 // fabricate an empty calling context that will call
846 // the source's main, but call graph doesn't know
847 // about it, so explicitly add it
848 makeAnalysisEntryMethod( mdSourceEntry );
849 descriptorsToAnalyze.add( mdAnalysisEntry );
853 // now, depending on the interprocedural mode for visiting
854 // methods, set up the needed data structures
856 if( state.DISJOINTDVISITPQUE ) {
858 // topologically sort according to the call graph so
859 // leaf calls are last, helps build contexts up first
860 LinkedList<Descriptor> sortedDescriptors =
861 topologicalSort( descriptorsToAnalyze );
863 // add sorted descriptors to priority queue, and duplicate
864 // the queue as a set for efficiently testing whether some
865 // method is marked for analysis
867 Iterator<Descriptor> dItr;
869 // for the priority queue, give items at the head
870 // of the sorted list a low number (highest priority)
871 while( !sortedDescriptors.isEmpty() ) {
872 Descriptor d = sortedDescriptors.removeFirst();
873 mapDescriptorToPriority.put( d, new Integer( p ) );
874 descriptorsToVisitQ.add( new DescriptorQWrapper( p, d ) );
875 descriptorsToVisitSet.add( d );
879 } else if( state.DISJOINTDVISITSTACK ||
880 state.DISJOINTDVISITSTACKEESONTOP
882 // if we're doing the stack scheme, just throw the root
883 // method or tasks on the stack
885 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
886 while( taskItr.hasNext() ) {
887 TaskDescriptor td = (TaskDescriptor) taskItr.next();
888 descriptorsToVisitStack.add( td );
889 descriptorsToVisitSet.add( td );
// java mode: only the fabricated entry method goes on the stack
893 descriptorsToVisitStack.add( mdAnalysisEntry );
894 descriptorsToVisitSet.add( mdAnalysisEntry );
898 throw new Error( "Unknown method scheduling mode" );
902 // analyze scheduled methods until there are no more to visit
903 while( moreDescriptorsToVisit() ) {
906 if( state.DISJOINTDVISITSTACK ||
907 state.DISJOINTDVISITSTACKEESONTOP
909 d = descriptorsToVisitStack.pop();
911 } else if( state.DISJOINTDVISITPQUE ) {
912 d = descriptorsToVisitQ.poll().getDescriptor();
// the membership set must mirror the stack/queue exactly
915 assert descriptorsToVisitSet.contains( d );
916 descriptorsToVisitSet.remove( d );
918 // because the task or method descriptor just extracted
919 // was in the "to visit" set it either hasn't been analyzed
920 // yet, or some method that it depends on has been
921 // updated. Recompute a complete reachability graph for
922 // this task/method and compare it to any previous result.
923 // If there is a change detected, add any methods/tasks
924 // that depend on this one to the "to visit" set.
926 if( !suppressOutput ) {
927 System.out.println( "Analyzing " + d );
930 if( state.DISJOINTDVISITSTACKEESONTOP ) {
931 assert calleesToEnqueue.isEmpty();
934 ReachGraph rg = analyzeMethod( d );
935 ReachGraph rgPrev = getPartial( d );
937 if( !rg.equals( rgPrev ) ) {
940 if( state.DISJOINTDEBUGSCHEDULING ) {
941 System.out.println( " complete graph changed, scheduling callers for analysis:" );
944 // results for d changed, so enqueue dependents
945 // of d for further analysis
946 Iterator<Descriptor> depsItr = getDependents( d ).iterator();
947 while( depsItr.hasNext() ) {
948 Descriptor dNext = depsItr.next();
951 if( state.DISJOINTDEBUGSCHEDULING ) {
952 System.out.println( " "+dNext );
957 // whether or not the method under analysis changed,
958 // we may have some callees that are scheduled for
959 // more analysis, and they should go on the top of
960 // the stack now (in other method-visiting modes they
961 // are already enqueued at this point
962 if( state.DISJOINTDVISITSTACKEESONTOP ) {
963 Iterator<Descriptor> depsItr = calleesToEnqueue.iterator();
964 while( depsItr.hasNext() ) {
965 Descriptor dNext = depsItr.next();
968 calleesToEnqueue.clear();
/**
 * Intraprocedural fixed-point over one method/task's flat nodes: merges
 * each node's predecessor graphs, applies analyzeFlatNode's transfer
 * function, and re-enqueues successors when a node's graph changes.
 * Returns the merge of all return-node graphs -- the conservative summary
 * of the heap after the method completes.
 * NOTE(review): declarations of 'fm' and 'fn', and several closing
 * braces/else lines, are truncated in this listing -- confirm.
 */
974 protected ReachGraph analyzeMethod( Descriptor d )
975 throws java.io.IOException {
977 // get the flat code for this descriptor
979 if( d == mdAnalysisEntry ) {
980 fm = fmAnalysisEntry;
982 fm = state.getMethodFlat( d );
984 pm.analyzeMethod( fm );
986 // intraprocedural work set
987 Set<FlatNode> flatNodesToVisit = new HashSet<FlatNode>();
988 flatNodesToVisit.add( fm );
990 // if determinism is desired by client, shadow the
991 // set with a queue to make visit order deterministic
992 Queue<FlatNode> flatNodesToVisitQ = null;
993 if( determinismDesired ) {
994 flatNodesToVisitQ = new LinkedList<FlatNode>();
995 flatNodesToVisitQ.add( fm );
998 // mapping of current partial results
999 Hashtable<FlatNode, ReachGraph> mapFlatNodeToReachGraph =
1000 new Hashtable<FlatNode, ReachGraph>();
1002 // the set of return nodes partial results that will be combined as
1003 // the final, conservative approximation of the entire method
1004 HashSet<FlatReturnNode> setReturns = new HashSet<FlatReturnNode>();
1006 while( !flatNodesToVisit.isEmpty() ) {
// queue dictates order in deterministic mode; set alone otherwise
1009 if( determinismDesired ) {
1010 assert !flatNodesToVisitQ.isEmpty();
1011 fn = flatNodesToVisitQ.remove();
1013 fn = flatNodesToVisit.iterator().next();
1015 flatNodesToVisit.remove( fn );
1017 // effect transfer function defined by this node,
1018 // then compare it to the old graph at this node
1019 // to see if anything was updated.
1021 ReachGraph rg = new ReachGraph();
1022 TaskDescriptor taskDesc;
1023 if(fn instanceof FlatMethod && (taskDesc=((FlatMethod)fn).getTask())!=null){
1024 if(mapDescriptorToReachGraph.containsKey(taskDesc)){
1025 // retrieve existing reach graph if it is not first time
1026 rg=mapDescriptorToReachGraph.get(taskDesc);
1028 // create initial reach graph for a task
1029 rg=createInitialTaskReachGraph((FlatMethod)fn);
1031 mapDescriptorToReachGraph.put(taskDesc, rg);
1035 // start by merging all node's parents' graphs
1036 for( int i = 0; i < pm.numPrev(fn); ++i ) {
1037 FlatNode pn = pm.getPrev(fn,i);
1038 if( mapFlatNodeToReachGraph.containsKey( pn ) ) {
1039 ReachGraph rgParent = mapFlatNodeToReachGraph.get( pn );
1040 rg.merge( rgParent );
// optional debug snapshot just before the transfer function
1045 if( takeDebugSnapshots &&
1046 d.getSymbol().equals( descSymbolDebug )
1048 debugSnapshot( rg, fn, true );
1052 // modify rg with appropriate transfer function
1053 rg = analyzeFlatNode( d, fm, fn, setReturns, rg );
// ...and just after it
1056 if( takeDebugSnapshots &&
1057 d.getSymbol().equals( descSymbolDebug )
1059 debugSnapshot( rg, fn, false );
1064 // if the results of the new graph are different from
1065 // the current graph at this node, replace the graph
1066 // with the update and enqueue the children
1067 ReachGraph rgPrev = mapFlatNodeToReachGraph.get( fn );
1068 if( !rg.equals( rgPrev ) ) {
1069 mapFlatNodeToReachGraph.put( fn, rg );
1071 for( int i = 0; i < pm.numNext( fn ); i++ ) {
1072 FlatNode nn = pm.getNext( fn, i );
1074 flatNodesToVisit.add( nn );
1075 if( determinismDesired ) {
1076 flatNodesToVisitQ.add( nn );
1083 // end by merging all return nodes into a complete
1084 // reach graph that represents all possible heap
1085 // states after the flat method returns
1086 ReachGraph completeGraph = new ReachGraph();
1088 assert !setReturns.isEmpty();
1089 Iterator retItr = setReturns.iterator();
1090 while( retItr.hasNext() ) {
1091 FlatReturnNode frn = (FlatReturnNode) retItr.next();
1093 assert mapFlatNodeToReachGraph.containsKey( frn );
1094 ReachGraph rgRet = mapFlatNodeToReachGraph.get( frn );
1096 completeGraph.merge( rgRet );
1100 if( takeDebugSnapshots &&
1101 d.getSymbol().equals( descSymbolDebug )
1103 // increment that we've visited the debug snap
1104 // method, and reset the node counter
1105 System.out.println( " @@@ debug snap at visit "+snapVisitCounter );
1107 snapNodeCounter = 0;
1109 if( snapVisitCounter == visitStartCapture + numVisitsToCapture &&
1112 System.out.println( "!!! Stopping analysis after debug snap captures. !!!" );
1118 return completeGraph;
// NOTE(review): this listing is truncated — several original lines (braces,
// break statements, else keywords, some case labels) are missing, so the
// comments below describe only what the visible code shows.
/**
 * Applies the intraprocedural transfer function for a single flat IR node
 * to the current reachability graph.  Dispatches on fn.kind(): assignments,
 * casts, field/element reads and writes, allocation, SESE enter/exit,
 * method calls, and returns each transform 'rg'.  When effects analysis is
 * enabled (and we are not in the fabricated analysis entry), stall-site
 * taints and variable accessibility status are maintained alongside each
 * graph transform.  The graph as it stood on entry to this node is snapshot
 * into fn2rgAtEnter before any transform is applied.
 */
1122 protected ReachGraph
1123 analyzeFlatNode( Descriptor d,
1124 FlatMethod fmContaining,
1126 HashSet<FlatReturnNode> setRetNodes,
1128 ) throws java.io.IOException {
1131 // any variables that are no longer live should be
1132 // nullified in the graph to reduce edges
1133 //rg.nullifyDeadVars( liveness.getLiveInTemps( fmContaining, fn ) );
1137 FieldDescriptor fld;
1138 TypeDescriptor tdElement;
1139 FieldDescriptor fdElement;
1140 FlatSESEEnterNode sese;
1141 FlatSESEExitNode fsexn;
1143 //Stores the flatnode's reach graph at enter
1144 ReachGraph rgOnEnter = new ReachGraph();
1145 rgOnEnter.merge( rg );
1146 fn2rgAtEnter.put(fn, rgOnEnter);
1148 // use node type to decide what transfer function
1149 // to apply to the reachability graph
1150 switch( fn.kind() ) {
1152 case FKind.FlatGenReachNode: {
1153 FlatGenReachNode fgrn = (FlatGenReachNode) fn;
1155 System.out.println( " Generating reach graph for program point: "+fgrn.getGraphName() );
1157 rg.writeGraph( "genReach"+fgrn.getGraphName(),
1158 true, // write labels (variables)
1159 true, // selectively hide intermediate temp vars
1160 true, // prune unreachable heap regions
1161 false, // hide reachability altogether
1162 false, // hide subset reachability states
1163 true, // hide predicates
1164 true ); // hide edge taints
1168 case FKind.FlatMethod: {
1169 // construct this method's initial heap model (IHM)
1170 // since we're working on the FlatMethod, we know
1171 // the incoming ReachGraph 'rg' is empty
1173 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
1174 getIHMcontributions( d );
1176 Set entrySet = heapsFromCallers.entrySet();
1177 Iterator itr = entrySet.iterator();
1178 while( itr.hasNext() ) {
1179 Map.Entry me = (Map.Entry) itr.next();
1180 FlatCall fc = (FlatCall) me.getKey();
1181 ReachGraph rgContrib = (ReachGraph) me.getValue();
1183 assert fc.getMethod().equals( d );
1185 rg.merge( rgContrib );
1188 // additionally, we are enforcing STRICT MONOTONICITY for the
1189 // method's initial context, so grow the context by whatever
1190 // the previously computed context was, and put the most
1191 // up-to-date context back in the map
1192 ReachGraph rgPrevContext = mapDescriptorToInitialContext.get( d );
1193 rg.merge( rgPrevContext );
1194 mapDescriptorToInitialContext.put( d, rg );
1198 case FKind.FlatOpNode:
1199 FlatOpNode fon = (FlatOpNode) fn;
1200 if( fon.getOp().getOp() == Operation.ASSIGN ) {
1201 lhs = fon.getDest();
1202 rhs = fon.getLeft();
1204 // before transfer, do effects analysis support
1205 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1206 if(rblockRel.isPotentialStallSite(fn)){
1207 // x gets status of y
1208 if(!rg.isAccessible(rhs)){
1209 rg.makeInaccessible(lhs);
1215 rg.assignTempXEqualToTempY( lhs, rhs );
1219 case FKind.FlatCastNode:
1220 FlatCastNode fcn = (FlatCastNode) fn;
1224 TypeDescriptor td = fcn.getType();
1227 // before transfer, do effects analysis support
1228 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1229 if(rblockRel.isPotentialStallSite(fn)){
1230 // x gets status of y
1231 if(!rg.isAccessible(rhs)){
1232 rg.makeInaccessible(lhs);
1238 rg.assignTempXEqualToCastedTempY( lhs, rhs, td );
1241 case FKind.FlatFieldNode:
1242 FlatFieldNode ffn = (FlatFieldNode) fn;
1246 fld = ffn.getField();
1248 // before graph transform, possible inject
1249 // a stall-site taint
1250 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1252 if(rblockRel.isPotentialStallSite(fn)){
1253 // x=y.f, stall y if not accessible
1254 // contributes read effects on stall site of y
1255 if(!rg.isAccessible(rhs)) {
1256 rg.taintStallSite(fn, rhs);
1259 // after this, x and y are accessbile.
1260 rg.makeAccessible(lhs);
1261 rg.makeAccessible(rhs);
1265 if( shouldAnalysisTrack( fld.getType() ) ) {
1267 rg.assignTempXEqualToTempYFieldF( lhs, rhs, fld, fn );
1270 // after transfer, use updated graph to
1271 // do effects analysis
1272 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1273 effectsAnalysis.analyzeFlatFieldNode( rg, rhs, fld, fn );
1277 case FKind.FlatSetFieldNode:
1278 FlatSetFieldNode fsfn = (FlatSetFieldNode) fn;
1280 lhs = fsfn.getDst();
1281 fld = fsfn.getField();
1282 rhs = fsfn.getSrc();
1284 boolean strongUpdate = false;
1286 // before transfer func, possibly inject
1287 // stall-site taints
1288 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1290 if(rblockRel.isPotentialStallSite(fn)){
1291 // x.y=f , stall x and y if they are not accessible
1292 // also contribute write effects on stall site of x
1293 if(!rg.isAccessible(lhs)) {
1294 rg.taintStallSite(fn, lhs);
1297 if(!rg.isAccessible(rhs)) {
1298 rg.taintStallSite(fn, rhs);
1301 // accessible status update
1302 rg.makeAccessible(lhs);
1303 rg.makeAccessible(rhs);
1307 if( shouldAnalysisTrack( fld.getType() ) ) {
1309 strongUpdate = rg.assignTempXFieldFEqualToTempY( lhs, fld, rhs, fn );
1312 // use transformed graph to do effects analysis
1313 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1314 effectsAnalysis.analyzeFlatSetFieldNode( rg, lhs, fld, fn, strongUpdate );
1318 case FKind.FlatElementNode:
1319 FlatElementNode fen = (FlatElementNode) fn;
1324 assert rhs.getType() != null;
1325 assert rhs.getType().isArray();
1327 tdElement = rhs.getType().dereference();
1328 fdElement = getArrayField( tdElement );
1330 // before transfer func, possibly inject
1332 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1333 if(rblockRel.isPotentialStallSite(fn)){
1334 // x=y.f, stall y if not accessible
1335 // contributes read effects on stall site of y
1336 // after this, x and y are accessbile.
1337 if(!rg.isAccessible(rhs)) {
1338 rg.taintStallSite(fn, rhs);
1341 rg.makeAccessible(lhs);
1342 rg.makeAccessible(rhs);
1346 if( shouldAnalysisTrack( lhs.getType() ) ) {
1348 rg.assignTempXEqualToTempYFieldF( lhs, rhs, fdElement, fn );
1351 // use transformed graph to do effects analysis
1352 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1353 effectsAnalysis.analyzeFlatFieldNode( rg, rhs, fdElement, fn );
1357 case FKind.FlatSetElementNode:
1358 FlatSetElementNode fsen = (FlatSetElementNode) fn;
1360 lhs = fsen.getDst();
1361 rhs = fsen.getSrc();
1363 assert lhs.getType() != null;
1364 assert lhs.getType().isArray();
1366 tdElement = lhs.getType().dereference();
1367 fdElement = getArrayField( tdElement );
1369 // before transfer func, possibly inject
1370 // stall-site taints
1371 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1373 if(rblockRel.isPotentialStallSite(fn)){
1374 // x.y=f , stall x and y if they are not accessible
1375 // also contribute write effects on stall site of x
1376 if(!rg.isAccessible(lhs)) {
1377 rg.taintStallSite(fn, lhs);
1380 if(!rg.isAccessible(rhs)) {
1381 rg.taintStallSite(fn, rhs);
1384 // accessible status update
1385 rg.makeAccessible(lhs);
1386 rg.makeAccessible(rhs);
1390 if( shouldAnalysisTrack( rhs.getType() ) ) {
1391 // transfer func, BUT
1392 // skip this node if it cannot create new reachability paths
1393 if( !arrayReferencees.doesNotCreateNewReaching( fsen ) ) {
1394 rg.assignTempXFieldFEqualToTempY( lhs, fdElement, rhs, fn );
1398 // use transformed graph to do effects analysis
1399 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1400 effectsAnalysis.analyzeFlatSetFieldNode( rg, lhs, fdElement, fn,
// NOTE(review): the "case FKind.FlatNew:" label for the block below appears
// to be among the lines missing from this listing.
1406 FlatNew fnn = (FlatNew) fn;
1408 if( shouldAnalysisTrack( lhs.getType() ) ) {
1409 AllocSite as = getAllocSiteFromFlatNewPRIVATE( fnn );
1411 // before transform, support effects analysis
1412 if (doEffectsAnalysis && fmContaining != fmAnalysisEntry) {
1413 if (rblockRel.isPotentialStallSite(fn)) {
1414 // after creating new object, lhs is accessible
1415 rg.makeAccessible(lhs);
1420 rg.assignTempEqualToNewAlloc( lhs, as );
1424 case FKind.FlatSESEEnterNode:
1425 sese = (FlatSESEEnterNode) fn;
1427 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1429 // always remove ALL stall site taints at enter
1430 rg.removeAllStallSiteTaints();
1432 // inject taints for in-set vars
1433 rg.taintInSetVars( sese );
1438 case FKind.FlatSESEExitNode:
1439 fsexn = (FlatSESEExitNode) fn;
1440 sese = fsexn.getFlatEnter();
1442 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1444 // @ sese exit make all live variables
1445 // inaccessible to later parent statements
1446 rg.makeInaccessible( liveness.getLiveInTemps( fmContaining, fn ) );
1448 // always remove ALL stall site taints at exit
1449 rg.removeAllStallSiteTaints();
1451 // remove in-set var taints for the exiting rblock
1452 rg.removeInContextTaints( sese );
1457 case FKind.FlatCall: {
1458 Descriptor mdCaller;
1459 if( fmContaining.getMethod() != null ){
1460 mdCaller = fmContaining.getMethod();
1462 mdCaller = fmContaining.getTask();
1464 FlatCall fc = (FlatCall) fn;
1465 MethodDescriptor mdCallee = fc.getMethod();
1466 FlatMethod fmCallee = state.getMethodFlat( mdCallee );
// a call to the magic "genReach" method dumps the current graph
// rather than being analyzed as a real call
1469 if( mdCallee.getSymbol().equals( "genReach" ) ) {
1470 rg.writeGraph( "genReach"+d,
1471 true, // write labels (variables)
1472 true, // selectively hide intermediate temp vars
1473 true, // prune unreachable heap regions
1474 false, // hide reachability altogether
1475 true, // hide subset reachability states
1476 true, // hide predicates
1477 true ); // hide edge taints
1483 boolean debugCallSite =
1484 mdCaller.getSymbol().equals( state.DISJOINTDEBUGCALLER ) &&
1485 mdCallee.getSymbol().equals( state.DISJOINTDEBUGCALLEE );
1487 boolean writeDebugDOTs = false;
1488 boolean stopAfter = false;
1489 if( debugCallSite ) {
1490 ++ReachGraph.debugCallSiteVisitCounter;
1491 System.out.println( " $$$ Debug call site visit "+
1492 ReachGraph.debugCallSiteVisitCounter+
1496 (ReachGraph.debugCallSiteVisitCounter >=
1497 ReachGraph.debugCallSiteVisitStartCapture) &&
1499 (ReachGraph.debugCallSiteVisitCounter <
1500 ReachGraph.debugCallSiteVisitStartCapture +
1501 ReachGraph.debugCallSiteNumVisitsToCapture)
1503 writeDebugDOTs = true;
1504 System.out.println( " $$$ Capturing this call site visit $$$" );
1505 if( ReachGraph.debugCallSiteStopAfter &&
1506 (ReachGraph.debugCallSiteVisitCounter ==
1507 ReachGraph.debugCallSiteVisitStartCapture +
1508 ReachGraph.debugCallSiteNumVisitsToCapture - 1)
1516 // calculate the heap this call site can reach--note this is
1517 // not used for the current call site transform, we are
1518 // grabbing this heap model for future analysis of the callees,
1519 // so if different results emerge we will return to this site
1520 ReachGraph heapForThisCall_old =
1521 getIHMcontribution( mdCallee, fc );
1523 // the computation of the callee-reachable heap
1524 // is useful for making the callee starting point
1525 // and for applying the call site transfer function
1526 Set<Integer> callerNodeIDsCopiedToCallee =
1527 new HashSet<Integer>();
1529 ReachGraph heapForThisCall_cur =
1530 rg.makeCalleeView( fc,
1532 callerNodeIDsCopiedToCallee,
1536 // enforce that a call site contribution can only
1537 // monotonically increase
1538 heapForThisCall_cur.merge( heapForThisCall_old );
1540 if( !heapForThisCall_cur.equals( heapForThisCall_old ) ) {
1541 // if heap at call site changed, update the contribution,
1542 // and reschedule the callee for analysis
1543 addIHMcontribution( mdCallee, fc, heapForThisCall_cur );
1545 // map a FlatCall to its enclosing method/task descriptor
1546 // so we can write that info out later
1547 fc2enclosing.put( fc, mdCaller );
1549 if( state.DISJOINTDEBUGSCHEDULING ) {
1550 System.out.println( " context changed, scheduling callee: "+mdCallee );
1553 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1554 calleesToEnqueue.add( mdCallee );
1556 enqueue( mdCallee );
1561 // the transformation for a call site should update the
1562 // current heap abstraction with any effects from the callee,
1563 // or if the method is virtual, the effects from any possible
1564 // callees, so find the set of callees...
1565 Set<MethodDescriptor> setPossibleCallees;
1566 if( determinismDesired ) {
1567 // use an ordered set
1568 setPossibleCallees = new TreeSet<MethodDescriptor>( dComp );
1570 // otherwise use a speedy hashset
1571 setPossibleCallees = new HashSet<MethodDescriptor>();
1574 if( mdCallee.isStatic() ) {
1575 setPossibleCallees.add( mdCallee );
1577 TypeDescriptor typeDesc = fc.getThis().getType();
1578 setPossibleCallees.addAll( callGraph.getMethods( mdCallee,
1583 ReachGraph rgMergeOfPossibleCallers = new ReachGraph();
1585 Iterator<MethodDescriptor> mdItr = setPossibleCallees.iterator();
1586 while( mdItr.hasNext() ) {
1587 MethodDescriptor mdPossible = mdItr.next();
1588 FlatMethod fmPossible = state.getMethodFlat( mdPossible );
1590 addDependent( mdPossible, // callee
1593 // don't alter the working graph (rg) until we compute a
1594 // result for every possible callee, merge them all together,
1595 // then set rg to that
1596 ReachGraph rgPossibleCaller = new ReachGraph();
1597 rgPossibleCaller.merge( rg );
1599 ReachGraph rgPossibleCallee = getPartial( mdPossible );
1601 if( rgPossibleCallee == null ) {
1602 // if this method has never been analyzed just schedule it
1603 // for analysis and skip over this call site for now
1604 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1605 calleesToEnqueue.add( mdPossible );
1607 enqueue( mdPossible );
1610 if( state.DISJOINTDEBUGSCHEDULING ) {
1611 System.out.println( " callee hasn't been analyzed, scheduling: "+mdPossible );
1615 // calculate the method call transform
1616 rgPossibleCaller.resolveMethodCall( fc,
1619 callerNodeIDsCopiedToCallee,
1623 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1624 if( !rgPossibleCallee.isAccessible( ReachGraph.tdReturn ) ) {
1625 rgPossibleCaller.makeInaccessible( fc.getReturnTemp() );
1631 rgMergeOfPossibleCallers.merge( rgPossibleCaller );
1636 System.out.println( "$$$ Exiting after requested captures of call site. $$$" );
1641 // now that we've taken care of building heap models for
1642 // callee analysis, finish this transformation
1643 rg = rgMergeOfPossibleCallers;
1646 // jjenista: what is this? It breaks compilation
1647 // of programs with no tasks/SESEs/rblocks...
1648 //XXXXXXXXXXXXXXXXXXXXXXXXX
1649 //need to consider more
1650 FlatNode nextFN=fmCallee.getNext(0);
1651 if( nextFN instanceof FlatSESEEnterNode ) {
1652 FlatSESEEnterNode calleeSESE=(FlatSESEEnterNode)nextFN;
1653 if(!calleeSESE.getIsLeafSESE()){
1654 rg.makeInaccessible( liveness.getLiveInTemps( fmContaining, fn ) );
1661 case FKind.FlatReturnNode:
1662 FlatReturnNode frn = (FlatReturnNode) fn;
1663 rhs = frn.getReturnTemp();
1665 // before transfer, do effects analysis support
1666 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1667 if(!rg.isAccessible(rhs)){
1668 rg.makeInaccessible(ReachGraph.tdReturn);
1672 if( rhs != null && shouldAnalysisTrack( rhs.getType() ) ) {
1673 rg.assignReturnEqualToTemp( rhs );
1676 setRetNodes.add( frn );
1682 // dead variables were removed before the above transfer function
1683 // was applied, so eliminate heap regions and edges that are no
1684 // longer part of the abstractly-live heap graph, and sweep up
1685 // and reachability effects that are altered by the reduction
1686 //rg.abstractGarbageCollect();
1690 // back edges are strictly monotonic
1691 if( pm.isBackEdge( fn ) ) {
1692 ReachGraph rgPrevResult = mapBackEdgeToMonotone.get( fn );
1693 rg.merge( rgPrevResult );
1694 mapBackEdgeToMonotone.put( fn, rg );
1697 // at this point rg should be the correct update
1698 // by an above transfer function, or untouched if
1699 // the flat node type doesn't affect the heap
1705 // this method should generate integers strictly greater than zero!
1706 // special "shadow" regions are made from a heap region by negating
/**
 * Returns a fresh, globally unique heap region node ID (> 0).
 * NOTE(review): the line that advances uniqueIDcount is missing from
 * this listing (gap between original lines 1708 and 1710) — confirm the
 * counter is incremented before the value is returned.
 */
1708 static public Integer generateUniqueHeapRegionNodeID() {
1710 return new Integer( uniqueIDcount );
/**
 * Returns the synthetic FieldDescriptor used to model the element slot of
 * an array whose elements have type tdElement.  Descriptors are memoized
 * in mapTypeToArrayField so each element type gets exactly one field.
 */
1715 static public FieldDescriptor getArrayField( TypeDescriptor tdElement ) {
1716 FieldDescriptor fdElement = mapTypeToArrayField.get( tdElement );
1717 if( fdElement == null ) {
1718 fdElement = new FieldDescriptor( new Modifiers( Modifiers.PUBLIC ),
1720 arrayElementFieldName,
1723 mapTypeToArrayField.put( tdElement, fdElement );
/**
 * Dumps the completed reachability graph of every analyzed descriptor to a
 * DOT file, named "COMPLETEtask<d>" for tasks and "COMPLETE<d>" otherwise.
 */
1730 private void writeFinalGraphs() {
1731 Set entrySet = mapDescriptorToCompleteReachGraph.entrySet();
1732 Iterator itr = entrySet.iterator();
1733 while( itr.hasNext() ) {
1734 Map.Entry me = (Map.Entry) itr.next();
1735 Descriptor d = (Descriptor) me.getKey();
1736 ReachGraph rg = (ReachGraph) me.getValue();
1739 if( d instanceof TaskDescriptor ) {
1740 graphName = "COMPLETEtask"+d;
1742 graphName = "COMPLETE"+d;
1745 rg.writeGraph( graphName,
1746 true, // write labels (variables)
1747 true, // selectively hide intermediate temp vars
1748 true, // prune unreachable heap regions
1749 false, // hide reachability altogether
1750 true, // hide subset reachability states
1751 true, // hide predicates
1752 false ); // hide edge taints
/**
 * Dumps every per-call-site initial heap model (IHM) contribution to a DOT
 * file, one graph per (callee descriptor, calling FlatCall) pair.
 */
1756 private void writeFinalIHMs() {
1757 Iterator d2IHMsItr = mapDescriptorToIHMcontributions.entrySet().iterator();
1758 while( d2IHMsItr.hasNext() ) {
1759 Map.Entry me1 = (Map.Entry) d2IHMsItr.next();
1760 Descriptor d = (Descriptor) me1.getKey();
1761 Hashtable<FlatCall, ReachGraph> IHMs = (Hashtable<FlatCall, ReachGraph>) me1.getValue();
1763 Iterator fc2rgItr = IHMs.entrySet().iterator();
1764 while( fc2rgItr.hasNext() ) {
1765 Map.Entry me2 = (Map.Entry) fc2rgItr.next();
1766 FlatCall fc = (FlatCall) me2.getKey();
1767 ReachGraph rg = (ReachGraph) me2.getValue();
1769 rg.writeGraph( "IHMPARTFOR"+d+"FROM"+fc2enclosing.get( fc )+fc,
1770 true, // write labels (variables)
1771 true, // selectively hide intermediate temp vars
// NOTE(review): the two comments below were swapped relative to every
// other writeGraph call site in this file; reordered to match the
// parameter order used elsewhere (both arguments are 'true', so this
// changes no behavior) — confirm against writeGraph's signature.
1772 true, // prune unreachable heap regions
1773 true, // hide reachability altogether
1774 true, // hide subset reachability states
1775 false, // hide predicates
1776 true ); // hide edge taints
/**
 * Dumps the initial (entry) context graph of every analyzed descriptor to
 * a DOT file named "INITIAL<d>".
 */
1781 private void writeInitialContexts() {
1782 Set entrySet = mapDescriptorToInitialContext.entrySet();
1783 Iterator itr = entrySet.iterator();
1784 while( itr.hasNext() ) {
1785 Map.Entry me = (Map.Entry) itr.next();
1786 Descriptor d = (Descriptor) me.getKey();
1787 ReachGraph rg = (ReachGraph) me.getValue();
1789 rg.writeGraph( "INITIAL"+d,
1790 true, // write labels (variables)
1791 true, // selectively hide intermediate temp vars
1792 true, // prune unreachable heap regions
1793 false, // hide all reachability
1794 true, // hide subset reachability states
1795 true, // hide predicates
1796 false );// hide edge taints
/**
 * Returns the most recent complete analysis result for the given
 * method/task descriptor, or null if it has not been analyzed yet.
 */
1801 protected ReachGraph getPartial( Descriptor d ) {
1802 return mapDescriptorToCompleteReachGraph.get( d );
/**
 * Records the latest analysis result for descriptor d.  When the
 * writeAllIncrementalDOTs flag is set, also dumps this partial result to a
 * DOT file whose name carries a per-descriptor update counter so each
 * successive snapshot is unique.
 */
1805 protected void setPartial( Descriptor d, ReachGraph rg ) {
1806 mapDescriptorToCompleteReachGraph.put( d, rg );
1808 // when the flag for writing out every partial
1809 // result is set, we should spit out the graph,
1810 // but in order to give it a unique name we need
1811 // to track how many partial results for this
1812 // descriptor we've already written out
1813 if( writeAllIncrementalDOTs ) {
1814 if( !mapDescriptorToNumUpdates.containsKey( d ) ) {
1815 mapDescriptorToNumUpdates.put( d, new Integer( 0 ) );
1817 Integer n = mapDescriptorToNumUpdates.get( d );
1820 if( d instanceof TaskDescriptor ) {
1821 graphName = d+"COMPLETEtask"+String.format( "%05d", n );
1823 graphName = d+"COMPLETE"+String.format( "%05d", n );
1826 rg.writeGraph( graphName,
1827 true, // write labels (variables)
1828 true, // selectively hide intermediate temp vars
1829 true, // prune unreachable heap regions
1830 false, // hide all reachability
1831 true, // hide subset reachability states
1832 false, // hide predicates
1833 false); // hide edge taints
1835 mapDescriptorToNumUpdates.put( d, n + 1 );
1841 // return just the allocation site associated with one FlatNew node
/**
 * Returns the (memoized) AllocSite for a FlatNew node, creating it on
 * first request.  A new site gets allocationDepth single-object region IDs
 * plus one summary region ID, all registered in mapHrnIdToAllocSite.
 * Sites listed in sitesToFlag are created with the programmatic flag set.
 */
1842 protected AllocSite getAllocSiteFromFlatNewPRIVATE( FlatNew fnew ) {
1844 boolean flagProgrammatically = false;
1845 if( sitesToFlag != null && sitesToFlag.contains( fnew ) ) {
1846 flagProgrammatically = true;
1849 if( !mapFlatNewToAllocSite.containsKey( fnew ) ) {
1850 AllocSite as = AllocSite.factory( allocationDepth,
1852 fnew.getDisjointId(),
1853 flagProgrammatically
1856 // the newest nodes are single objects
1857 for( int i = 0; i < allocationDepth; ++i ) {
1858 Integer id = generateUniqueHeapRegionNodeID();
1859 as.setIthOldest( i, id );
1860 mapHrnIdToAllocSite.put( id, as );
1863 // the oldest node is a summary node
1864 as.setSummary( generateUniqueHeapRegionNodeID() );
1866 mapFlatNewToAllocSite.put( fnew, as );
1869 return mapFlatNewToAllocSite.get( fnew );
/**
 * Decides whether a type participates in the heap analysis.  Immutable
 * (primitive) types are tracked only when they are arrays, since an array
 * of primitives is itself heap memory; based on the trailing comment, all
 * other (object) types are tracked — the final return is in a gap of this
 * listing.
 */
1873 public static boolean shouldAnalysisTrack( TypeDescriptor type ) {
1874 // don't track primitive types, but an array
1875 // of primitives is heap memory
1876 if( type.isImmutable() ) {
1877 return type.isArray();
1880 // everything else is an object
/** Returns the number of descriptors scheduled for analysis. */
1884 protected int numMethodsAnalyzed() {
1885 return descriptorsToAnalyze.size();
1892 // Take in source entry which is the program's compiled entry and
1893 // create a new analysis entry, a method that takes no parameters
1894 // and appears to allocate the command line arguments and call the
1895 // source entry with them. The purpose of this analysis entry is
1896 // to provide a top-level method context with no parameters left.
/**
 * Fabricates the synthetic public static void analysisEntryMethod(): a
 * FlatMethod that news up a command-line-args object, invokes the real
 * program entry with it, and returns.  Results are stored in
 * this.mdAnalysisEntry / this.fmAnalysisEntry for use as the analysis
 * root.  (Several construction lines fall in gaps of this listing.)
 */
1897 protected void makeAnalysisEntryMethod( MethodDescriptor mdSourceEntry ) {
1899 Modifiers mods = new Modifiers();
1900 mods.addModifier( Modifiers.PUBLIC );
1901 mods.addModifier( Modifiers.STATIC );
1903 TypeDescriptor returnType =
1904 new TypeDescriptor( TypeDescriptor.VOID );
1906 this.mdAnalysisEntry =
1907 new MethodDescriptor( mods,
1909 "analysisEntryMethod"
1912 TempDescriptor cmdLineArgs =
1913 new TempDescriptor( "args",
1914 mdSourceEntry.getParamType( 0 )
1918 new FlatNew( mdSourceEntry.getParamType( 0 ),
1923 TempDescriptor[] sourceEntryArgs = new TempDescriptor[1];
1924 sourceEntryArgs[0] = cmdLineArgs;
1927 new FlatCall( mdSourceEntry,
1933 FlatReturnNode frn = new FlatReturnNode( null );
1935 FlatExit fe = new FlatExit();
1937 this.fmAnalysisEntry =
1938 new FlatMethod( mdAnalysisEntry,
1942 this.fmAnalysisEntry.addNext( fn );
/**
 * Topologically sorts the given descriptors by caller-before-callee order
 * via DFS on the call graph (dfsVisit appends leaves last).  Uses an
 * ordered TreeSet for the discovered set when determinism is requested so
 * iteration order — and therefore the resulting schedule — is repeatable.
 */
1949 protected LinkedList<Descriptor> topologicalSort( Set<Descriptor> toSort ) {
1951 Set<Descriptor> discovered;
1953 if( determinismDesired ) {
1954 // use an ordered set
1955 discovered = new TreeSet<Descriptor>( dComp );
1957 // otherwise use a speedy hashset
1958 discovered = new HashSet<Descriptor>();
1961 LinkedList<Descriptor> sorted = new LinkedList<Descriptor>();
1963 Iterator<Descriptor> itr = toSort.iterator();
1964 while( itr.hasNext() ) {
1965 Descriptor d = itr.next();
1967 if( !discovered.contains( d ) ) {
1968 dfsVisit( d, toSort, sorted, discovered );
1975 // While we're doing DFS on call graph, remember
1976 // dependencies for efficient queuing of methods
1977 // during interprocedural analysis:
1979 // a dependent of a method decriptor d for this analysis is:
1980 // 1) a method or task that invokes d
1981 // 2) in the descriptorsToAnalyze set
/**
 * DFS helper for topologicalSort: visits d's callers (restricted to the
 * toSort set), records caller->callee dependency edges along the way via
 * addDependent, and appends d to 'sorted' after all callers are visited.
 * The fabricated analysisEntryMethod is treated as the caller of the real
 * program entry, since the call graph does not know about it.
 */
1982 protected void dfsVisit( Descriptor d,
1983 Set <Descriptor> toSort,
1984 LinkedList<Descriptor> sorted,
1985 Set <Descriptor> discovered ) {
1986 discovered.add( d );
1988 // only methods have callers, tasks never do
1989 if( d instanceof MethodDescriptor ) {
1991 MethodDescriptor md = (MethodDescriptor) d;
1993 // the call graph is not aware that we have a fabricated
1994 // analysis entry that calls the program source's entry
1995 if( md == mdSourceEntry ) {
1996 if( !discovered.contains( mdAnalysisEntry ) ) {
1997 addDependent( mdSourceEntry, // callee
1998 mdAnalysisEntry // caller
2000 dfsVisit( mdAnalysisEntry, toSort, sorted, discovered );
2004 // otherwise call graph guides DFS
2005 Iterator itr = callGraph.getCallerSet( md ).iterator();
2006 while( itr.hasNext() ) {
2007 Descriptor dCaller = (Descriptor) itr.next();
2009 // only consider callers in the original set to analyze
2010 if( !toSort.contains( dCaller ) ) {
2014 if( !discovered.contains( dCaller ) ) {
2015 addDependent( md, // callee
2019 dfsVisit( dCaller, toSort, sorted, discovered );
2024 // for leaf-nodes last now!
2025 sorted.addLast( d );
/**
 * Schedules descriptor d for (re)analysis unless it is already pending.
 * Depending on the configured visit strategy, d goes onto the visit stack
 * (DISJOINTDVISITSTACK / ...EESONTOP) or into the priority queue keyed by
 * the descriptor's precomputed priority (DISJOINTDVISITPQUE).  The pending
 * set mirrors both containers to keep membership checks O(1).
 */
2029 protected void enqueue( Descriptor d ) {
2031 if( !descriptorsToVisitSet.contains( d ) ) {
2033 if( state.DISJOINTDVISITSTACK ||
2034 state.DISJOINTDVISITSTACKEESONTOP
2036 descriptorsToVisitStack.add( d );
2038 } else if( state.DISJOINTDVISITPQUE ) {
2039 Integer priority = mapDescriptorToPriority.get( d );
2040 descriptorsToVisitQ.add( new DescriptorQWrapper( priority,
2045 descriptorsToVisitSet.add( d );
2050 // a dependent of a method decriptor d for this analysis is:
2051 // 1) a method or task that invokes d
2052 // 2) in the descriptorsToAnalyze set
/**
 * Records that 'caller' depends on analysis results for 'callee', so the
 * caller can be rescheduled when the callee's result changes.
 * NOTE(review): the line adding 'caller' to the set appears to fall in a
 * gap of this listing (between original lines 2056 and 2059) — confirm.
 */
2053 protected void addDependent( Descriptor callee, Descriptor caller ) {
2054 Set<Descriptor> deps = mapDescriptorToSetDependents.get( callee );
2055 if( deps == null ) {
2056 deps = new HashSet<Descriptor>();
2059 mapDescriptorToSetDependents.put( callee, deps );
/**
 * Returns the set of descriptors that depend on 'callee', lazily creating
 * (and caching) an empty set on first request so callers never see null.
 */
2062 protected Set<Descriptor> getDependents( Descriptor callee ) {
2063 Set<Descriptor> deps = mapDescriptorToSetDependents.get( callee );
2064 if( deps == null ) {
2065 deps = new HashSet<Descriptor>();
2066 mapDescriptorToSetDependents.put( callee, deps );
/**
 * Returns the per-call-site initial-heap-model contributions recorded for
 * descriptor d, lazily creating (and caching) an empty table on first use.
 */
2072 public Hashtable<FlatCall, ReachGraph> getIHMcontributions( Descriptor d ) {
2074 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
2075 mapDescriptorToIHMcontributions.get( d );
2077 if( heapsFromCallers == null ) {
2078 heapsFromCallers = new Hashtable<FlatCall, ReachGraph>();
2079 mapDescriptorToIHMcontributions.put( d, heapsFromCallers );
2082 return heapsFromCallers;
/**
 * Returns the IHM contribution descriptor d has received from call site
 * fc.  The early-exit body for a missing key falls in a gap of this
 * listing (original lines 2092–2094); the final lookup returns whatever
 * the table holds for fc.
 */
2085 public ReachGraph getIHMcontribution( Descriptor d,
2088 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
2089 getIHMcontributions( d );
2091 if( !heapsFromCallers.containsKey( fc ) ) {
2095 return heapsFromCallers.get( fc );
/**
 * Records (or replaces) the IHM contribution rg that call site fc makes to
 * descriptor d.
 */
2099 public void addIHMcontribution( Descriptor d,
2103 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
2104 getIHMcontributions( d );
2106 heapsFromCallers.put( fc, rg );
/**
 * Fabricates an AllocSite modeling a method/task parameter: builds a
 * synthetic FlatNew typed like the parameter temp (with a "param<temp>"
 * disjoint ID in one branch, null in the other — the branch condition is
 * in a gap of this listing), then mints allocationDepth single-object
 * region IDs plus a summary ID, registering each in mapHrnIdToAllocSite.
 * The return of 'as' falls past the visible lines.
 */
2110 private AllocSite createParameterAllocSite( ReachGraph rg,
2111 TempDescriptor tempDesc,
2117 flatNew = new FlatNew( tempDesc.getType(), // type
2118 tempDesc, // param temp
2119 false, // global alloc?
2120 "param"+tempDesc // disjoint site ID string
2123 flatNew = new FlatNew( tempDesc.getType(), // type
2124 tempDesc, // param temp
2125 false, // global alloc?
2126 null // disjoint site ID string
2130 // create allocation site
2131 AllocSite as = AllocSite.factory( allocationDepth,
2133 flatNew.getDisjointId(),
2136 for (int i = 0; i < allocationDepth; ++i) {
2137 Integer id = generateUniqueHeapRegionNodeID();
2138 as.setIthOldest(i, id);
2139 mapHrnIdToAllocSite.put(id, as);
2141 // the oldest node is a summary node
2142 as.setSummary( generateUniqueHeapRegionNodeID() );
/**
 * Collects the fields of typeDesc's class whose types the analysis tracks
 * (see shouldAnalysisTrack).  Immutable types have no class fields to
 * walk, so they yield an empty set.  The return of fieldSet falls past the
 * visible lines of this listing.
 */
2150 private Set<FieldDescriptor> getFieldSetTobeAnalyzed(TypeDescriptor typeDesc){
2152 Set<FieldDescriptor> fieldSet=new HashSet<FieldDescriptor>();
2153 if(!typeDesc.isImmutable()){
2154 ClassDescriptor classDesc = typeDesc.getClassDesc();
2155 for (Iterator it = classDesc.getFields(); it.hasNext();) {
2156 FieldDescriptor field = (FieldDescriptor) it.next();
2157 TypeDescriptor fieldType = field.getType();
2158 if (shouldAnalysisTrack( fieldType )) {
2159 fieldSet.add(field);
// NOTE(review): this listing is truncated (several else branches, RefEdge
// constructor arguments and braces fall in gaps); comments document only
// the visible structure.
/**
 * Models a multi-dimensional array field in the initial task reach graph:
 * walks from the outermost dimension (dimCount) down to 1, creating (or
 * reusing, via mapToExistingNode) one summary heap region per dimension
 * and chaining them with array-element edges; srcHRN points at the
 * first-dimension node through the field fd.  If the element class has
 * analysis-tracked fields, one more summary node is appended for the
 * element objects.  Records entry->innermost in 'map' and returns the
 * first-dimension (entry) node.
 */
2167 private HeapRegionNode createMultiDeimensionalArrayHRN(ReachGraph rg, AllocSite alloc, HeapRegionNode srcHRN, FieldDescriptor fd, Hashtable<HeapRegionNode, HeapRegionNode> map, Hashtable<TypeDescriptor, HeapRegionNode> mapToExistingNode, ReachSet alpha ){
2169 int dimCount=fd.getType().getArrayCount();
2170 HeapRegionNode prevNode=null;
2171 HeapRegionNode arrayEntryNode=null;
2172 for(int i=dimCount;i>0;i--){
2173 TypeDescriptor typeDesc=fd.getType().dereference();//hack to get instance of type desc
2174 typeDesc.setArrayCount(i);
2175 TempDescriptor tempDesc=new TempDescriptor(typeDesc.getSymbol(),typeDesc);
2176 HeapRegionNode hrnSummary ;
2177 if(!mapToExistingNode.containsKey(typeDesc)){
2182 as = createParameterAllocSite(rg, tempDesc, false);
2184 // make a new reference to allocated node
2186 rg.createNewHeapRegionNode(as.getSummary(), // id or null to generate a new one
2187 false, // single object?
2189 false, // out-of-context?
2190 as.getType(), // type
2191 as, // allocation site
2192 alpha, // inherent reach
2193 alpha, // current reach
2194 ExistPredSet.factory(rg.predTrue), // predicates
2195 tempDesc.toString() // description
2197 rg.id2hrn.put(as.getSummary(),hrnSummary);
2199 mapToExistingNode.put(typeDesc, hrnSummary);
2201 hrnSummary=mapToExistingNode.get(typeDesc);
2205 // make a new reference between new summary node and source
2206 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
2209 fd.getSymbol(), // field name
2211 ExistPredSet.factory(rg.predTrue), // predicates
2215 rg.addRefEdge(srcHRN, hrnSummary, edgeToSummary);
2216 prevNode=hrnSummary;
2217 arrayEntryNode=hrnSummary;
2219 // make a new reference between summary nodes of array
2220 RefEdge edgeToSummary = new RefEdge(prevNode, // source
2223 arrayElementFieldName, // field name
2225 ExistPredSet.factory(rg.predTrue), // predicates
2229 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
2230 prevNode=hrnSummary;
2235 // create a new obj node if obj has at least one non-primitive field
2236 TypeDescriptor type=fd.getType();
2237 if(getFieldSetTobeAnalyzed(type).size()>0){
2238 TypeDescriptor typeDesc=type.dereference();
2239 typeDesc.setArrayCount(0);
2240 if(!mapToExistingNode.containsKey(typeDesc)){
2241 TempDescriptor tempDesc=new TempDescriptor(type.getSymbol(),typeDesc);
2242 AllocSite as = createParameterAllocSite(rg, tempDesc, false);
2243 // make a new reference to allocated node
2244 HeapRegionNode hrnSummary =
2245 rg.createNewHeapRegionNode(as.getSummary(), // id or null to generate a new one
2246 false, // single object?
2248 false, // out-of-context?
2250 as, // allocation site
2251 alpha, // inherent reach
2252 alpha, // current reach
2253 ExistPredSet.factory(rg.predTrue), // predicates
2254 tempDesc.toString() // description
2256 rg.id2hrn.put(as.getSummary(),hrnSummary);
2257 mapToExistingNode.put(typeDesc, hrnSummary);
2258 RefEdge edgeToSummary = new RefEdge(prevNode, // source
2261 arrayElementFieldName, // field name
2263 ExistPredSet.factory(rg.predTrue), // predicates
2266 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
2267 prevNode=hrnSummary;
2269 HeapRegionNode hrnSummary=mapToExistingNode.get(typeDesc);
2270 if(prevNode.getReferenceTo(hrnSummary, typeDesc, arrayElementFieldName)==null){
2271 RefEdge edgeToSummary = new RefEdge(prevNode, // source
2274 arrayElementFieldName, // field name
2276 ExistPredSet.factory(rg.predTrue), // predicates
2279 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
2281 prevNode=hrnSummary;
2285 map.put(arrayEntryNode, prevNode);
2286 return arrayEntryNode;
2289 private ReachGraph createInitialTaskReachGraph(FlatMethod fm) {
2290 ReachGraph rg = new ReachGraph();
2291 TaskDescriptor taskDesc = fm.getTask();
2293 for (int idx = 0; idx < taskDesc.numParameters(); idx++) {
2294 Descriptor paramDesc = taskDesc.getParameter(idx);
2295 TypeDescriptor paramTypeDesc = taskDesc.getParamType(idx);
2297 // setup data structure
2298 Set<HashMap<HeapRegionNode, FieldDescriptor>> workSet =
2299 new HashSet<HashMap<HeapRegionNode, FieldDescriptor>>();
2300 Hashtable<TypeDescriptor, HeapRegionNode> mapTypeToExistingSummaryNode =
2301 new Hashtable<TypeDescriptor, HeapRegionNode>();
// NOTE(review): this span is the interior of a method whose signature is
// above this excerpt, and the embedded line numbers jump (2304 -> 2306,
// 2319 -> 2322, ...), so the listing has dropped lines (closing braces,
// some constructor arguments, loop bodies). It appears to seed the reach
// graph 'rg' with a heap region for task parameter 'idx' and then
// flood-fill all analysis-tracked fields, creating one shared summary
// heap region per distinct type -- TODO confirm against the full file.
2302 Hashtable<HeapRegionNode, HeapRegionNode> mapToFirstDimensionArrayNode =
2303 new Hashtable<HeapRegionNode, HeapRegionNode>();
// identifiers of <node, field> expansions already performed; guarantees
// the worklist below terminates on cyclic type structures
2304 Set<String> doneSet = new HashSet<String>();
2306 TempDescriptor tempDesc = fm.getParameter(idx);
// allocation site standing in for whatever object the caller passes in
// for this parameter
2308 AllocSite as = createParameterAllocSite(rg, tempDesc, true);
2309 VariableNode lnX = rg.getVariableNodeFromTemp(tempDesc);
2310 Integer idNewest = as.getIthOldest(0);
2311 HeapRegionNode hrnNewest = rg.id2hrn.get(idNewest);
2313 // make a new reference to allocated node
// NOTE(review): several RefEdge constructor arguments were dropped by the
// listing between the numbered lines here
2314 RefEdge edgeNew = new RefEdge(lnX, // source
2316 taskDesc.getParamType(idx), // type
2318 hrnNewest.getAlpha(), // beta
2319 ExistPredSet.factory(rg.predTrue), // predicates
2322 rg.addRefEdge(lnX, hrnNewest, edgeNew);
2324 // set-up a work set for class field
2325 ClassDescriptor classDesc = paramTypeDesc.getClassDesc();
2326 for (Iterator it = classDesc.getFields(); it.hasNext();) {
2327 FieldDescriptor fd = (FieldDescriptor) it.next();
2328 TypeDescriptor fieldType = fd.getType();
// only reference-like fields the analysis tracks become work items
2329 if (shouldAnalysisTrack( fieldType )) {
2330 HashMap<HeapRegionNode, FieldDescriptor> newMap = new HashMap<HeapRegionNode, FieldDescriptor>();
// each work item is a singleton map: source region -> field to expand
2331 newMap.put(hrnNewest, fd);
2332 workSet.add(newMap);
// suffix that keeps the generated temp names ("temp0", "temp1", ...) unique
2336 int uniqueIdentifier = 0;
// flood-fill all regions reachable from the parameter region
2337 while (!workSet.isEmpty()) {
2338 HashMap<HeapRegionNode, FieldDescriptor> map = workSet
2340 workSet.remove(map);
// unpack the singleton work item created above
2342 Set<HeapRegionNode> key = map.keySet();
2343 HeapRegionNode srcHRN = key.iterator().next();
2344 FieldDescriptor fd = map.get(srcHRN);
2345 TypeDescriptor type = fd.getType();
// region-id + field uniquely identifies one expansion step
2346 String doneSetIdentifier = srcHRN.getIDString() + "_" + fd;
2348 if (!doneSet.contains(doneSetIdentifier)) {
2349 doneSet.add(doneSetIdentifier);
// first time this field type is seen: build a summary region for it
2350 if (!mapTypeToExistingSummaryNode.containsKey(type)) {
2351 // create new summary Node
2352 TempDescriptor td = new TempDescriptor("temp"
2353 + uniqueIdentifier, type);
2355 AllocSite allocSite;
2356 if(type.equals(paramTypeDesc)){
2357 //corresponding allocsite has already been created for a parameter variable.
2360 allocSite = createParameterAllocSite(rg, td, false);
2362 String strDesc = allocSite.toStringForDOT()
2364 TypeDescriptor allocType=allocSite.getType();
2366 HeapRegionNode hrnSummary;
// multi-dimensional arrays need a chain of element regions, one per dimension
2367 if(allocType.isArray() && allocType.getArrayCount()>0){
2368 hrnSummary=createMultiDeimensionalArrayHRN(rg,allocSite,srcHRN,fd,mapToFirstDimensionArrayNode,mapTypeToExistingSummaryNode,hrnNewest.getAlpha());
2371 rg.createNewHeapRegionNode(allocSite.getSummary(), // id or null to generate a new one
2372 false, // single object?
2374 false, // out-of-context?
2375 allocSite.getType(), // type
2376 allocSite, // allocation site
2377 hrnNewest.getAlpha(), // inherent reach
2378 hrnNewest.getAlpha(), // current reach
2379 ExistPredSet.factory(rg.predTrue), // predicates
2380 strDesc // description
2382 rg.id2hrn.put(allocSite.getSummary(),hrnSummary);
2384 // make a new reference to summary node
2385 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
2388 fd.getSymbol(), // field name
2389 hrnNewest.getAlpha(), // beta
2390 ExistPredSet.factory(rg.predTrue), // predicates
2394 rg.addRefEdge(srcHRN, hrnSummary, edgeToSummary);
// remember the summary region so later fields of the same type share it
2398 mapTypeToExistingSummaryNode.put(type, hrnSummary);
2400 // set-up a work set for fields of the class
2401 Set<FieldDescriptor> fieldTobeAnalyzed=getFieldSetTobeAnalyzed(type);
2402 for (Iterator iterator = fieldTobeAnalyzed.iterator(); iterator
2404 FieldDescriptor fieldDescriptor = (FieldDescriptor) iterator
2406 HeapRegionNode newDstHRN;
// for arrays, expand from the first-dimension element region instead
2407 if(mapToFirstDimensionArrayNode.containsKey(hrnSummary)){
2408 // the related heap region node already exists.
2409 newDstHRN=mapToFirstDimensionArrayNode.get(hrnSummary);
2411 newDstHRN=hrnSummary;
2413 doneSetIdentifier = newDstHRN.getIDString() + "_" + fieldDescriptor;
2414 if(!doneSet.contains(doneSetIdentifier)){
2415 // add new work item
2416 HashMap<HeapRegionNode, FieldDescriptor> newMap =
2417 new HashMap<HeapRegionNode, FieldDescriptor>();
2418 newMap.put(newDstHRN, fieldDescriptor);
2419 workSet.add(newMap);
2424 // if there exists corresponding summary node
// type already summarized: just wire an edge to the existing summary region
2425 HeapRegionNode hrnDst=mapTypeToExistingSummaryNode.get(type);
2427 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
2429 fd.getType(), // type
2430 fd.getSymbol(), // field name
2431 srcHRN.getAlpha(), // beta
2432 ExistPredSet.factory(rg.predTrue), // predicates
2435 rg.addRefEdge(srcHRN, hrnDst, edgeToSummary);
2445 // return all allocation sites in the method (there is one allocation
2446 // site per FlatNew node in a method)
// Memoized: builds the set on first request via buildAllocationSiteSet,
// then serves the cached copy from mapDescriptorToAllocSiteSet.
2447 private HashSet<AllocSite> getAllocationSiteSet(Descriptor d) {
2448 if( !mapDescriptorToAllocSiteSet.containsKey(d) ) {
// first query for this method/task: populate the cache
2449 buildAllocationSiteSet(d);
2452 return mapDescriptorToAllocSiteSet.get(d);
// Walk every IR node of the flat method for descriptor d (a method or a
// task) and record an AllocSite for each FlatNew encountered; the result
// is cached in mapDescriptorToAllocSiteSet for getAllocationSiteSet.
// NOTE(review): the listing drops lines here (2458-2459, 2462, 2473-2474,
// ...): the 'else' keyword, the initial toVisit seed (presumably the
// method entry node), and closing braces -- confirm in the full file.
2456 private void buildAllocationSiteSet(Descriptor d) {
2457 HashSet<AllocSite> s = new HashSet<AllocSite>();
// resolve the flat IR body; only methods and tasks have one
2460 if( d instanceof MethodDescriptor ) {
2461 fm = state.getMethodFlat( (MethodDescriptor) d);
2463 assert d instanceof TaskDescriptor;
2464 fm = state.getMethodFlat( (TaskDescriptor) d);
2466 pm.analyzeMethod(fm);
2468 // visit every node in this FlatMethod's IR graph
2469 // and make a set of the allocation sites from the
2470 // FlatNew node's visited
2471 HashSet<FlatNode> visited = new HashSet<FlatNode>();
2472 HashSet<FlatNode> toVisit = new HashSet<FlatNode>();
2475 while( !toVisit.isEmpty() ) {
2476 FlatNode n = toVisit.iterator().next();
2478 if( n instanceof FlatNew ) {
2479 s.add(getAllocSiteFromFlatNewPRIVATE( (FlatNew) n) );
// enqueue unvisited control-flow successors
2485 for( int i = 0; i < pm.numNext(n); ++i ) {
2486 FlatNode child = pm.getNext(n, i);
2487 if( !visited.contains(child) ) {
2493 mapDescriptorToAllocSiteSet.put(d, s);
// Collect allocation sites carrying a disjoint-analysis id, starting from
// dIn and transitively including every callee reachable in the call graph.
// NOTE(review): the listing drops lines here (the statement adding a
// flagged site to 'out', the initial toVisit seed with dIn, the
// visited/toVisit bookkeeping, and closing braces) -- confirm in the
// full file before relying on details.
2496 private HashSet<AllocSite> getFlaggedAllocationSites(Descriptor dIn) {
2498 HashSet<AllocSite> out = new HashSet<AllocSite>();
2499 HashSet<Descriptor> toVisit = new HashSet<Descriptor>();
2500 HashSet<Descriptor> visited = new HashSet<Descriptor>();
2504 while (!toVisit.isEmpty()) {
2505 Descriptor d = toVisit.iterator().next();
// scan all allocation sites of the current method/task
2509 HashSet<AllocSite> asSet = getAllocationSiteSet(d);
2510 Iterator asItr = asSet.iterator();
2511 while (asItr.hasNext()) {
2512 AllocSite as = (AllocSite) asItr.next();
// a non-null id marks the site as flagged for disjointness reporting
2513 if (as.getDisjointAnalysisId() != null) {
2518 // enqueue callees of this method to be searched for
2519 // allocation sites also
2520 Set callees = callGraph.getCalleeSet(d);
2521 if (callees != null) {
2522 Iterator methItr = callees.iterator();
2523 while (methItr.hasNext()) {
2524 MethodDescriptor md = (MethodDescriptor) methItr.next();
2526 if (!visited.contains(md)) {
// Worklist traversal from task td through the call graph, gathering
// allocation sites whose class carries flags (the task language's notion
// of "flagged" objects). Backs the public
// getFlaggedAllocationSitesReachableFromTask entry point in this class.
// NOTE(review): the listing drops lines here (the toVisit seed with td,
// the statement adding a site to asSetTotal, bookkeeping, braces).
2537 private HashSet<AllocSite>
2538 getFlaggedAllocationSitesReachableFromTaskPRIVATE(TaskDescriptor td) {
2540 HashSet<AllocSite> asSetTotal = new HashSet<AllocSite>();
2541 HashSet<Descriptor> toVisit = new HashSet<Descriptor>();
2542 HashSet<Descriptor> visited = new HashSet<Descriptor>();
2546 // traverse this task and all methods reachable from this task
2547 while( !toVisit.isEmpty() ) {
2548 Descriptor d = toVisit.iterator().next();
// scan all allocation sites of the current method/task
2552 HashSet<AllocSite> asSet = getAllocationSiteSet(d);
2553 Iterator asItr = asSet.iterator();
2554 while( asItr.hasNext() ) {
2555 AllocSite as = (AllocSite) asItr.next();
2556 TypeDescriptor typed = as.getType();
2557 if( typed != null ) {
2558 ClassDescriptor cd = typed.getClassDesc();
// flagged class => this site is of interest to the task scheduler
2559 if( cd != null && cd.hasFlags() ) {
2565 // enqueue callees of this method to be searched for
2566 // allocation sites also
2567 Set callees = callGraph.getCalleeSet(d);
2568 if( callees != null ) {
2569 Iterator methItr = callees.iterator();
2570 while( methItr.hasNext() ) {
2571 MethodDescriptor md = (MethodDescriptor) methItr.next();
2573 if( !visited.contains(md) ) {
// Returns the set of method/task descriptors this analysis was scheduled
// to process. NOTE(review): closing brace dropped by the listing.
2583 public Set<Descriptor> getDescriptorsToAnalyze() {
2584 return descriptorsToAnalyze;
// Returns the companion effects analysis built alongside this disjoint
// analysis. NOTE(review): closing brace dropped by the listing.
2587 public EffectsAnalysis getEffectsAnalysis(){
2588 return effectsAnalysis;
// Returns the complete (final fixed-point) reach graph computed for
// descriptor d, or null if d was never analyzed.
// NOTE(review): closing brace dropped by the listing.
2591 public ReachGraph getReachGraph(Descriptor d){
2592 return mapDescriptorToCompleteReachGraph.get(d);
// Returns the reach graph captured at the program point just BEFORE flat
// node fn executes, or null if none was recorded.
// NOTE(review): closing brace dropped by the listing.
2595 public ReachGraph getEnterReachGraph(FlatNode fn){
2596 return fn2rgAtEnter.get(fn);
2599 // get successive captures of the analysis state, use compiler
// Debug-snapshot controls used by debugSnapshot() below: when
// takeDebugSnapshots is set, visits numbered in
// [visitStartCapture, visitStartCapture + numVisitsToCapture] are dumped.
2601 boolean takeDebugSnapshots = false;
// symbol of the single method to snapshot (null = no method selected)
2602 String descSymbolDebug = null;
// if set, stop the analysis once the capture window has passed
2603 boolean stopAfterCapture = false;
// counters advanced as the fixed-point computation revisits methods/nodes
2604 int snapVisitCounter = 0;
2605 int snapNodeCounter = 0;
// capture window: first visit to record and how many visits to record
2606 int visitStartCapture = 0;
2607 int numVisitsToCapture = 0;
// Writes a DOT snapshot of reach graph rg at flat node fn when inside the
// configured capture window; 'in' selects the "...in" vs "...out" suffix
// of the generated file name. NOTE(review): the listing drops lines here
// (counter updates, early returns, graphName declaration, closing braces),
// and the method continues past the end of this excerpt.
2610 void debugSnapshot( ReachGraph rg, FlatNode fn, boolean in ) {
// past the capture window: nothing further to snapshot
2611 if( snapVisitCounter > visitStartCapture + numVisitsToCapture ) {
2619 if( snapVisitCounter >= visitStartCapture ) {
2620 System.out.println( " @@@ snapping visit="+snapVisitCounter+
2621 ", node="+snapNodeCounter+
// file names look like "snap003_0042in" / "snap003_0042out"
2625 graphName = String.format( "snap%03d_%04din",
2629 graphName = String.format( "snap%03d_%04dout",
2634 graphName = graphName + fn;
2636 rg.writeGraph( graphName,
2637 true, // write labels (variables)
2638 true, // selectively hide intermediate temp vars
2639 true, // prune unreachable heap regions
2640 false, // hide reachability
2641 false, // hide subset reachability states
2642 true, // hide predicates
2643 true ); // hide edge taints