1 package Analysis.Disjoint;
3 import Analysis.CallGraph.*;
4 import Analysis.Liveness;
5 import Analysis.ArrayReferencees;
6 import Analysis.OoOJava.Accessible;
7 import Analysis.OoOJava.RBlockRelationAnalysis;
8 import Analysis.FlatIRGraph.*;
11 import IR.Tree.Modifiers;
16 public class DisjointAnalysis implements HeapAnalysis {
19 ///////////////////////////////////////////
21 // Public interface to discover possible
22 // sharing in the program under analysis
24 ///////////////////////////////////////////
26 // if an object allocated at the target site may be
27 // reachable from both an object from root1 and an
28 // object allocated at root2, return TRUE
29 public boolean mayBothReachTarget(FlatMethod fm,
// NOTE(review): remaining parameters (lines 30-33) are not visible in this view;
// from the body they are presumably FlatNew fnRoot1, fnRoot2, fnTarget -- confirm.
// Resolve both root FlatNew statements to their analysis allocation sites.
34 AllocSite asr1 = getAllocationSiteFromFlatNew(fnRoot1);
35 AllocSite asr2 = getAllocationSiteFromFlatNew(fnRoot2);
// Both root sites must have been flagged for tracking by the analysis client.
36 assert asr1.isFlagged();
37 assert asr2.isFlagged();
39 AllocSite ast = getAllocationSiteFromFlatNew(fnTarget);
// Query the (possibly partial) reach graph computed for the enclosing method.
40 ReachGraph rg = getPartial(fm.getMethod() );
// Delegate the actual reachability query to the reach graph.
42 return rg.mayBothReachTarget(asr1, asr2, ast);
45 // similar to the method above, return TRUE if ever
46 // more than one object from the root allocation site
47 // may reach an object from the target site
48 public boolean mayManyReachTarget(FlatMethod fm,
// NOTE(review): remaining parameters (lines 49-51) are out of view; from the body
// they are presumably FlatNew fnRoot and FlatNew fnTarget -- confirm.
// Resolve the root site; it must be flagged for tracking.
52 AllocSite asr = getAllocationSiteFromFlatNew(fnRoot);
53 assert asr.isFlagged();
54 AllocSite ast = getAllocationSiteFromFlatNew(fnTarget);
// Returns the set of flagged allocation sites reachable from the given task.
// Public wrapper: guards against use before the analysis has finished, then
// delegates to the private implementation.
64 public HashSet<AllocSite>
65 getFlaggedAllocationSitesReachableFromTask(TaskDescriptor td) {
66 checkAnalysisComplete();
67 return getFlaggedAllocationSitesReachableFromTaskPRIVATE(td);
// Public accessor mapping a FlatNew IR node to its analysis AllocSite
// abstraction; only valid once the analysis has completed.
70 public AllocSite getAllocationSiteFromFlatNew(FlatNew fn) {
71 checkAnalysisComplete();
72 return getAllocSiteFromFlatNewPRIVATE(fn);
// Looks up the AllocSite that created the heap region node with the given
// intergraph ID (see mapHrnIdToAllocSite). Returns null if the ID is unknown.
75 public AllocSite getAllocationSiteFromHeapRegionNodeID(Integer id) {
76 checkAnalysisComplete();
77 return mapHrnIdToAllocSite.get(id);
// Reports heap regions potentially shared between two parameters of the
// given task/method, identified by parameter index.
80 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
// NOTE(review): parameter lines 81-82 (presumably int paramIndex1, int paramIndex2)
// are out of view -- confirm against the full source.
83 checkAnalysisComplete();
// NOTE(review): rg may be null if taskOrMethod was never analyzed -- no guard here.
84 ReachGraph rg=mapDescriptorToCompleteReachGraph.get(taskOrMethod);
85 FlatMethod fm=state.getMethodFlat(taskOrMethod);
87 return rg.mayReachSharedObjects(fm, paramIndex1, paramIndex2);
// Reports heap regions potentially shared between a parameter (by index) and
// an allocation site of the given task/method.
90 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
91 int paramIndex, AllocSite alloc) {
92 checkAnalysisComplete();
93 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
94 FlatMethod fm=state.getMethodFlat(taskOrMethod);
96 return rg.mayReachSharedObjects(fm, paramIndex, alloc);
// Convenience overload with the argument order swapped; delegates to the reach
// graph with the exact same call as the (paramIndex, alloc) variant above,
// since the sharing relation is symmetric.
99 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
100 AllocSite alloc, int paramIndex) {
101 checkAnalysisComplete();
102 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
103 FlatMethod fm=state.getMethodFlat(taskOrMethod);
105 return rg.mayReachSharedObjects(fm, paramIndex, alloc);
// Reports heap regions potentially shared between two allocation sites in the
// context of the given task/method. Note this variant does not need the
// FlatMethod -- it queries the reach graph by allocation sites directly.
108 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
109 AllocSite alloc1, AllocSite alloc2) {
110 checkAnalysisComplete();
111 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
113 return rg.mayReachSharedObjects(alloc1, alloc2);
// Renders a set of heap region nodes as a human-readable, multi-line string
// for the sharing reports written by writeAllSharing / writeAllSharingJava.
116 public String prettyPrintNodeSet(Set<HeapRegionNode> s) {
117 checkAnalysisComplete();
// NOTE(review): the declaration of the accumulator `out` (lines 118-120) is not
// visible in this view; presumably String out = "{\n" or similar -- confirm.
121 Iterator<HeapRegionNode> i = s.iterator();
122 while (i.hasNext()) {
123 HeapRegionNode n = i.next();
125 AllocSite as = n.getAllocSite();
// NOTE(review): the two appends below are almost certainly the branches of an
// if/else on `as` (null vs. non-null) whose conditional lines are out of view.
127 out += "  " + n.toString() + ",\n";
129 out += "  " + n.toString() + ": " + as.toStringVerbose()
138 // use the methods given above to check every possible sharing class
139 // between task parameters and flagged allocation sites reachable
// Writes a report of every potential sharing class found for every task:
// parameter-vs-parameter, parameter-vs-allocation-site, and site-vs-site.
// When tabularOutput is set, emits a LaTeX-style table row instead of prose.
141 public void writeAllSharing(String outputFile,
// NOTE(review): parameters on lines 142-146 (presumably String timeReport,
// String justTime, and int numLines) are out of view -- confirm.
144 boolean tabularOutput,
147 throws java.io.IOException {
148 checkAnalysisComplete();
150 BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile));
152 if (!tabularOutput) {
153 bw.write("Conducting ownership analysis with allocation depth = "
154 + allocationDepth + "\n");
155 bw.write(timeReport + "\n");
160 // look through every task for potential sharing
161 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
162 while (taskItr.hasNext()) {
163 TaskDescriptor td = (TaskDescriptor) taskItr.next();
165 if (!tabularOutput) {
166 bw.write("\n---------" + td + "--------\n");
169 HashSet<AllocSite> allocSites = getFlaggedAllocationSitesReachableFromTask(td);
171 Set<HeapRegionNode> common;
173 // for each task parameter, check for sharing classes with
174 // other task parameters and every allocation site
175 // reachable from this task
176 boolean foundSomeSharing = false;
178 FlatMethod fm = state.getMethodFlat(td);
179 for (int i = 0; i < fm.numParameters(); ++i) {
181 // skip parameters with types that cannot reference
183 if( !shouldAnalysisTrack(fm.getParameter(i).getType() ) ) {
187 // for the ith parameter check for sharing classes to all
188 // higher numbered parameters
189 for (int j = i + 1; j < fm.numParameters(); ++j) {
191 // skip parameters with types that cannot reference
193 if( !shouldAnalysisTrack(fm.getParameter(j).getType() ) ) {
198 common = hasPotentialSharing(td, i, j);
199 if (!common.isEmpty()) {
200 foundSomeSharing = true;
// NOTE(review): a counter increment (presumably ++numSharing) on the dropped
// line 201 likely accompanies this flag -- confirm against the full source.
202 if (!tabularOutput) {
203 bw.write("Potential sharing between parameters " + i
204 + " and " + j + ".\n");
205 bw.write(prettyPrintNodeSet(common) + "\n");
210 // for the ith parameter, check for sharing classes against
211 // the set of allocation sites reachable from this
213 Iterator allocItr = allocSites.iterator();
214 while (allocItr.hasNext()) {
215 AllocSite as = (AllocSite) allocItr.next();
216 common = hasPotentialSharing(td, i, as);
217 if (!common.isEmpty()) {
218 foundSomeSharing = true;
220 if (!tabularOutput) {
221 bw.write("Potential sharing between parameter " + i
222 + " and " + as.getFlatNew() + ".\n");
223 bw.write(prettyPrintNodeSet(common) + "\n");
229 // for each allocation site check for sharing classes with
230 // other allocation sites in the context of execution
// outerChecked makes the pairwise site comparison run each unordered pair once.
232 HashSet<AllocSite> outerChecked = new HashSet<AllocSite>();
233 Iterator allocItr1 = allocSites.iterator();
234 while (allocItr1.hasNext()) {
235 AllocSite as1 = (AllocSite) allocItr1.next();
237 Iterator allocItr2 = allocSites.iterator();
238 while (allocItr2.hasNext()) {
239 AllocSite as2 = (AllocSite) allocItr2.next();
241 if (!outerChecked.contains(as2)) {
242 common = hasPotentialSharing(td, as1, as2);
244 if (!common.isEmpty()) {
245 foundSomeSharing = true;
247 if (!tabularOutput) {
248 bw.write("Potential sharing between "
249 + as1.getFlatNew() + " and "
250 + as2.getFlatNew() + ".\n");
251 bw.write(prettyPrintNodeSet(common) + "\n");
257 outerChecked.add(as1);
260 if (!foundSomeSharing) {
261 if (!tabularOutput) {
262 bw.write("No sharing between flagged objects in Task " + td
// Tabular mode: emit one LaTeX table row of summary statistics.
270 bw.write(" & " + numSharing + " & " + justTime + " & " + numLines
271 + " & " + numMethodsAnalyzed() + " \\\\\n");
273 bw.write("\nNumber sharing classes: "+numSharing);
281 // this version of writeAllSharing is for Java programs that have no tasks
282 // ***********************************
283 // WARNING: THIS DOES NOT DO THE RIGHT THING, REPORTS 0 ALWAYS!
284 // It should use mayBothReachTarget and mayManyReachTarget like
285 // OoOJava does to query analysis results
286 // ***********************************
// Java-mode (taskless) variant of writeAllSharing: compares every pair of
// flagged allocation sites reachable from main. Per the WARNING above, this
// implementation is known to report 0 sharing classes and should be migrated
// to mayBothReachTarget/mayManyReachTarget style queries.
287 public void writeAllSharingJava(String outputFile,
// NOTE(review): parameters on lines 288-292 (presumably String timeReport and
// the tabular counters) are out of view -- confirm.
290 boolean tabularOutput,
293 throws java.io.IOException {
294 checkAnalysisComplete();
300 BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile));
302 bw.write("Conducting disjoint reachability analysis with allocation depth = "
303 + allocationDepth + "\n");
304 bw.write(timeReport + "\n\n");
306 boolean foundSomeSharing = false;
// Root everything at the program's main method.
308 Descriptor d = typeUtil.getMain();
309 HashSet<AllocSite> allocSites = getFlaggedAllocationSites(d);
311 // for each allocation site check for sharing classes with
312 // other allocation sites in the context of execution
// outerChecked ensures each unordered pair of sites is examined only once.
314 HashSet<AllocSite> outerChecked = new HashSet<AllocSite>();
315 Iterator allocItr1 = allocSites.iterator();
316 while (allocItr1.hasNext()) {
317 AllocSite as1 = (AllocSite) allocItr1.next();
319 Iterator allocItr2 = allocSites.iterator();
320 while (allocItr2.hasNext()) {
321 AllocSite as2 = (AllocSite) allocItr2.next();
323 if (!outerChecked.contains(as2)) {
324 Set<HeapRegionNode> common = hasPotentialSharing(d,
327 if (!common.isEmpty()) {
328 foundSomeSharing = true;
329 bw.write("Potential sharing between "
330 + as1.getDisjointAnalysisId() + " and "
331 + as2.getDisjointAnalysisId() + ".\n");
332 bw.write(prettyPrintNodeSet(common) + "\n");
338 outerChecked.add(as1);
341 if (!foundSomeSharing) {
342 bw.write("No sharing classes between flagged objects found.\n");
344 bw.write("\nNumber sharing classes: "+numSharing);
347 bw.write("Number of methods analyzed: "+numMethodsAnalyzed()+"\n");
// Accessors for the synthetic allocation sites the analysis fabricates for
// command-line arguments and string literals (see the constructedCmdLine*New
// and newStringLiteral* fields below). Used by code generation to recognize
// these special allocations.
354 public Alloc getCmdLineArgsAlloc() {
355 return getAllocationSiteFromFlatNew( constructedCmdLineArgsNew );
357 public Alloc getCmdLineArgAlloc() {
358 return getAllocationSiteFromFlatNew( constructedCmdLineArgNew );
360 public Alloc getCmdLineArgBytesAlloc() {
361 return getAllocationSiteFromFlatNew( constructedCmdLineArgBytesNew );
363 public Alloc getNewStringLiteralAlloc() {
364 return newStringLiteralAlloc;
366 public Alloc getNewStringLiteralBytesAlloc() {
367 return newStringLiteralBytesAlloc;
370 ///////////////////////////////////////////
372 // end public interface
374 ///////////////////////////////////////////
// Guard used by every public query method: fail fast if a client asks for
// results while the interprocedural fixed point is still being computed.
378 protected void checkAnalysisComplete() {
379 if( !analysisComplete ) {
380 throw new Error("Warning: public interface method called while analysis is running.");
// ---------------------------------------------------------------------------
// Analysis configuration and state. Several declarations below are split
// across lines whose continuation is not visible in this view; hedged notes
// mark the field names inferred from their uses elsewhere in this file.
// ---------------------------------------------------------------------------
389 // run in faster mode, only when bugs wrung out!
390 public static boolean releaseMode;
392 // use command line option to set this, analysis
393 // should attempt to be deterministic
394 public static boolean determinismDesired;
396 // when we want to enforce determinism in the
397 // analysis we need to sort descriptors rather
398 // than toss them in efficient sets, use this
399 public static DescriptorComparator dComp =
400 new DescriptorComparator();
403 // data from the compiler
405 public CallGraph callGraph;
406 public Liveness liveness;
407 public ArrayReferencees arrayReferencees;
408 public RBlockRelationAnalysis rblockRel;
409 public TypeUtil typeUtil;
410 public int allocationDepth;
412 protected boolean doEffectsAnalysis = false;
413 protected EffectsAnalysis effectsAnalysis;
414 protected BuildStateMachines buildStateMachines;
416 protected boolean doDefiniteReachAnalysis = false;
417 protected DefiniteReachAnalysis definiteReachAnalysis;
419 protected boolean summarizePerClass = false;
422 // data structure for public interface
423 private Hashtable< Descriptor, HashSet<AllocSite> >
424 mapDescriptorToAllocSiteSet;
427 // for public interface methods to warn that they
428 // are grabbing results during analysis
429 private boolean analysisComplete;
432 // used to identify HeapRegionNode objects
433 // A unique ID equates an object in one
434 // ownership graph with an object in another
435 // graph that logically represents the same
437 // start at 10 and increment to reserve some
438 // IDs for special purposes
439 static protected int uniqueIDcount = 10;
442 // An out-of-scope method created by the
443 // analysis that has no parameters, and
444 // appears to allocate the command line
445 // arguments, then invoke the source code's
446 // main method.  The purpose of this is to
447 // provide the analysis with an explicit
448 // top-level context with no parameters
449 protected MethodDescriptor mdAnalysisEntry;
450 protected FlatMethod fmAnalysisEntry;
452 // main method defined by source program
453 protected MethodDescriptor mdSourceEntry;
455 // the set of task and/or method descriptors
456 // reachable in call graph
457 protected Set<Descriptor>
458 descriptorsToAnalyze;
460 // current descriptors to visit in fixed-point
461 // interprocedural analysis, prioritized by
462 // dependency in the call graph
463 protected Stack<Descriptor>
464 descriptorsToVisitStack;
465 protected PriorityQueue<DescriptorQWrapper>
// NOTE(review): the name of the PriorityQueue field above (line 466) is out of
// view; uses elsewhere in this file call it descriptorsToVisitQ -- confirm.
468 // a duplication of the above structure, but
469 // for efficient testing of inclusion
470 protected HashSet<Descriptor>
471 descriptorsToVisitSet;
473 // storage for priorities (doesn't make sense)
474 // to add it to the Descriptor class, just in
476 protected Hashtable<Descriptor, Integer>
477 mapDescriptorToPriority;
479 // when analyzing a method and scheduling more:
480 // remember set of callee's enqueued for analysis
481 // so they can be put on top of the callers in
482 // the stack-visit mode
483 protected Set<Descriptor>
// NOTE(review): the name of the Set field above (line 484) is out of view; uses
// elsewhere in this file call it calleesToEnqueue -- confirm.
486 // maps a descriptor to its current partial result
487 // from the intraprocedural fixed-point analysis--
488 // then the interprocedural analysis settles, this
489 // mapping will have the final results for each
491 protected Hashtable<Descriptor, ReachGraph>
492 mapDescriptorToCompleteReachGraph;
494 // maps a descriptor to its known dependents: namely
495 // methods or tasks that call the descriptor's method
496 // AND are part of this analysis (reachable from main)
497 protected Hashtable< Descriptor, Set<Descriptor> >
498 mapDescriptorToSetDependents;
500 // if the analysis client wants to flag allocation sites
501 // programmatically, it should provide a set of FlatNew
502 // statements--this may be null if unneeded
503 protected Set<FlatNew> sitesToFlag;
505 // maps each flat new to one analysis abstraction
506 // allocate site object, these exist outside reach graphs
507 protected Hashtable<FlatNew, AllocSite>
508 mapFlatNewToAllocSite;
510 // if using summarize-per-class then use this to keep
511 // one alloc site per Type (picks up primitives too)
512 protected Hashtable<TypeDescriptor, AllocSite> mapTypeToAllocSite;
513 protected HashSet<TypeDescriptor> typesToFlag;
515 // maps intergraph heap region IDs to intergraph
516 // allocation sites that created them, a redundant
517 // structure for efficiency in some operations
518 protected Hashtable<Integer, AllocSite>
// NOTE(review): field name on the dropped line 519 is presumably
// mapHrnIdToAllocSite (used by getAllocationSiteFromHeapRegionNodeID) -- confirm.
521 // maps a method to its initial heap model (IHM) that
522 // is the set of reachability graphs from every caller
523 // site, all merged together.  The reason that we keep
524 // them separate is that any one call site's contribution
525 // to the IHM may changed along the path to the fixed point
526 protected Hashtable< Descriptor, Hashtable< FlatCall, ReachGraph > >
527 mapDescriptorToIHMcontributions;
529 // additionally, keep a mapping from descriptors to the
530 // merged in-coming initial context, because we want this
531 // initial context to be STRICTLY MONOTONIC
532 protected Hashtable<Descriptor, ReachGraph>
533 mapDescriptorToInitialContext;
535 // mapping of current partial results for a given node.  Note that
536 // to reanalyze a method we discard all partial results because a
537 // null reach graph indicates the node needs to be visited on the
538 // way to the fixed point.
539 // The reason for a persistent mapping is so after the analysis we
540 // can ask for the graph of any node at the fixed point, but this
541 // option is only enabled with a compiler flag.
542 protected Hashtable<FlatNode, ReachGraph> mapFlatNodeToReachGraphPersist;
543 protected Hashtable<FlatNode, ReachGraph> mapFlatNodeToReachGraph;
546 // make the result for back edges analysis-wide STRICTLY
547 // MONOTONIC as well, but notice we use FlatNode as the
548 // key for this map: in case we want to consider other
549 // nodes as back edge's in future implementations
550 protected Hashtable<FlatNode, ReachGraph>
551 mapBackEdgeToMonotone;
// Synthetic field name used to model array element references uniformly.
554 public static final String arrayElementFieldName = "___element_";
555 static protected Hashtable<TypeDescriptor, FieldDescriptor>
// NOTE(review): field name on the dropped line 556 is presumably
// mapTypeToArrayField (initialized in allocateStructures) -- confirm.
559 protected boolean suppressOutput;
561 // for controlling DOT file output
562 protected boolean writeFinalDOTs;
563 protected boolean writeAllIncrementalDOTs;
565 // supporting DOT output--when we want to write every
566 // partial method result, keep a tally for generating
568 protected Hashtable<Descriptor, Integer>
569 mapDescriptorToNumUpdates;
571 //map task descriptor to initial task parameter
572 protected Hashtable<Descriptor, ReachGraph>
573 mapDescriptorToReachGraph;
575 protected PointerMethod pm;
577 //Keeps track of all the reach graphs at every program point
578 //DO NOT USE UNLESS YOU REALLY NEED IT
579 static protected Hashtable<FlatNode, ReachGraph> fn2rgAtEnter =
580 new Hashtable<FlatNode, ReachGraph>();
582 static protected Hashtable<FlatNode, ReachGraph> fn2rgAtExit =
583 new Hashtable<FlatNode, ReachGraph>();
586 private Hashtable<FlatCall, Descriptor> fc2enclosing;
588 Accessible accessible;
591 // we construct an entry method of flat nodes complete
592 // with a new allocation site to model the command line
593 // args creation just for the analysis, so remember that
594 // allocation site.  Later in code gen we might want to
595 // know if something is pointing-to to the cmd line args
596 // and we can verify by checking the allocation site field.
597 protected FlatNew constructedCmdLineArgsNew;
598 protected FlatNew constructedCmdLineArgNew;
599 protected FlatNew constructedCmdLineArgBytesNew;
601 // similar to above, the runtime allocates new strings
602 // for literal nodes, so make up an alloc to model that
603 protected AllocSite newStringLiteralAlloc;
604 protected AllocSite newStringLiteralBytesAlloc;
606 // both of the above need the descriptor of the field
607 // for the String's value field to reference by the
608 // byte array from the string object
609 protected TypeDescriptor stringType;
610 protected TypeDescriptor stringBytesType;
611 protected FieldDescriptor stringBytesField;
// Builds the synthetic model for string literals: resolves the String class
// and its value field, then fabricates FlatNew nodes (and AllocSites) that
// stand in for the runtime's implicit String and char[] allocations.
614 protected void initImplicitStringsModel() {
616 ClassDescriptor cdString = typeUtil.getClass( typeUtil.StringClass );
617 assert cdString != null;
// NOTE(review): the assignments receiving the two expressions below (lines
// 618-626, presumably stringType = ... and stringBytesType = ...) are out of
// view -- confirm against the full source.
621 new TypeDescriptor( cdString );
624 new TypeDescriptor(TypeDescriptor.CHAR).makeArray( state );
// Locate String's value field (the backing char array) by symbol name.
627 stringBytesField = null;
628 Iterator sFieldsItr = cdString.getFields();
629 while( sFieldsItr.hasNext() ) {
630 FieldDescriptor fd = (FieldDescriptor) sFieldsItr.next();
631 if( fd.getSymbol().equals( typeUtil.StringClassValueField ) ) {
632 stringBytesField = fd;
636 assert stringBytesField != null;
// Fabricate a FlatNew (with a throwaway temp) to model allocation of the
// String object for every literal, and register its AllocSite.
639 TempDescriptor throwAway1 =
640 new TempDescriptor("stringLiteralTemp_dummy1",
643 FlatNew fnStringLiteral =
644 new FlatNew(stringType,
648 newStringLiteralAlloc
649 = getAllocSiteFromFlatNewPRIVATE( fnStringLiteral );
// Same again for the literal's backing byte/char array allocation.
652 TempDescriptor throwAway2 =
653 new TempDescriptor("stringLiteralTemp_dummy2",
656 FlatNew fnStringLiteralBytes =
657 new FlatNew(stringBytesType,
661 newStringLiteralBytesAlloc
662 = getAllocSiteFromFlatNewPRIVATE( fnStringLiteralBytes );
668 // allocate various structures that are not local
669 // to a single class method--should be done once
// One-time allocation of all analysis-wide containers; called once from
// init() before the fixed-point computation starts.
670 protected void allocateStructures() {
672 if( determinismDesired ) {
673 // use an ordered set
674 descriptorsToAnalyze = new TreeSet<Descriptor>(dComp);
676 // otherwise use a speedy hashset
677 descriptorsToAnalyze = new HashSet<Descriptor>();
680 mapDescriptorToCompleteReachGraph =
681 new Hashtable<Descriptor, ReachGraph>();
683 mapDescriptorToNumUpdates =
684 new Hashtable<Descriptor, Integer>();
686 mapDescriptorToSetDependents =
687 new Hashtable< Descriptor, Set<Descriptor> >();
689 mapFlatNewToAllocSite =
690 new Hashtable<FlatNew, AllocSite>();
692 mapDescriptorToIHMcontributions =
693 new Hashtable< Descriptor, Hashtable< FlatCall, ReachGraph > >();
695 mapDescriptorToInitialContext =
696 new Hashtable<Descriptor, ReachGraph>();
698 mapFlatNodeToReachGraphPersist =
699 new Hashtable<FlatNode, ReachGraph>();
701 mapBackEdgeToMonotone =
702 new Hashtable<FlatNode, ReachGraph>();
704 mapHrnIdToAllocSite =
705 new Hashtable<Integer, AllocSite>();
707 mapTypeToArrayField =
708 new Hashtable <TypeDescriptor, FieldDescriptor>();
// Only allocate the scheduling structure matching the configured visit mode.
710 if( state.DISJOINTDVISITSTACK ||
711 state.DISJOINTDVISITSTACKEESONTOP
713 descriptorsToVisitStack =
714 new Stack<Descriptor>();
717 if( state.DISJOINTDVISITPQUE ) {
718 descriptorsToVisitQ =
719 new PriorityQueue<DescriptorQWrapper>();
722 descriptorsToVisitSet =
723 new HashSet<Descriptor>();
725 mapDescriptorToPriority =
726 new Hashtable<Descriptor, Integer>();
// NOTE(review): the left-hand side of the assignment below (line 728,
// presumably calleesToEnqueue =) is out of view -- confirm.
729 new HashSet<Descriptor>();
731 mapDescriptorToAllocSiteSet =
732 new Hashtable<Descriptor, HashSet<AllocSite> >();
734 mapDescriptorToReachGraph =
735 new Hashtable<Descriptor, ReachGraph>();
737 fc2enclosing = new Hashtable<FlatCall, Descriptor>();
739 if( summarizePerClass ) {
740 mapTypeToAllocSite = new Hashtable<TypeDescriptor, AllocSite>();
741 typesToFlag = new HashSet<TypeDescriptor>();
747 // this analysis generates a disjoint reachability
748 // graph for every reachable method in the program
// Convenience constructor: no BuildStateMachines, output not suppressed.
// NOTE(review): parameters on the dropped lines 750-753 (presumably TypeUtil tu,
// CallGraph cg, Liveness l, ArrayReferencees ar, per the init call) are out of view.
749 public DisjointAnalysis(State s,
754 Set<FlatNew> sitesToFlag,
755 RBlockRelationAnalysis rra
757 init(s, tu, cg, l, ar, sitesToFlag, rra, null, false);
// Convenience constructor: no BuildStateMachines, caller controls output
// suppression. See note on the first constructor about the hidden parameters.
760 public DisjointAnalysis(State s,
765 Set<FlatNew> sitesToFlag,
766 RBlockRelationAnalysis rra,
767 boolean suppressOutput
769 init(s, tu, cg, l, ar, sitesToFlag, rra, null, suppressOutput);
// Full constructor: also accepts a BuildStateMachines instance (used by the
// OoOJava effects analysis). See note on the first constructor about the
// hidden parameters.
772 public DisjointAnalysis(State s,
777 Set<FlatNew> sitesToFlag,
778 RBlockRelationAnalysis rra,
779 BuildStateMachines bsm,
780 boolean suppressOutput
782 init(s, tu, cg, l, ar, sitesToFlag, rra, bsm, suppressOutput);
// Shared initializer for all constructors: stores compiler inputs, configures
// the analysis from command-line State flags, allocates structures, runs the
// interprocedural fixed point, and writes all requested reports.
785 protected void init(State state,
// NOTE(review): parameters on lines 786-788 (presumably TypeUtil typeUtil,
// CallGraph callGraph, Liveness liveness) are out of view -- confirm.
789 ArrayReferencees arrayReferencees,
790 Set<FlatNew> sitesToFlag,
791 RBlockRelationAnalysis rra,
792 BuildStateMachines bsm,
793 boolean suppressOutput
796 analysisComplete = false;
799 this.typeUtil = typeUtil;
800 this.callGraph = callGraph;
801 this.liveness = liveness;
802 this.arrayReferencees = arrayReferencees;
803 this.sitesToFlag = sitesToFlag;
804 this.rblockRel = rra;
805 this.suppressOutput = suppressOutput;
806 this.buildStateMachines = bsm;
// A non-null RBlockRelationAnalysis enables the OoOJava effects analysis.
808 if( rblockRel != null ) {
809 doEffectsAnalysis = true;
810 effectsAnalysis = new EffectsAnalysis();
812 EffectsAnalysis.state = state;
813 EffectsAnalysis.buildStateMachines = buildStateMachines;
815 //note: instead of reachgraph's isAccessible, using the result of accessible analysis
816 //since accessible gives us more accurate results
817 accessible=new Accessible(state, callGraph, rra, liveness);
818 accessible.doAnalysis();
// Pull configuration out of the command-line State flags.
821 this.allocationDepth = state.DISJOINTALLOCDEPTH;
822 this.releaseMode = state.DISJOINTRELEASEMODE;
823 this.determinismDesired = state.DISJOINTDETERMINISM;
825 this.writeFinalDOTs = state.DISJOINTWRITEDOTS && !state.DISJOINTWRITEALL;
826 this.writeAllIncrementalDOTs = state.DISJOINTWRITEDOTS && state.DISJOINTWRITEALL;
// Debug-snapshot configuration (capture graphs for one symbol under study).
828 this.takeDebugSnapshots = state.DISJOINTSNAPSYMBOL != null;
829 this.descSymbolDebug = state.DISJOINTSNAPSYMBOL;
830 this.visitStartCapture = state.DISJOINTSNAPVISITTOSTART;
831 this.numVisitsToCapture = state.DISJOINTSNAPNUMVISITS;
832 this.stopAfterCapture = state.DISJOINTSNAPSTOPAFTER;
833 this.snapVisitCounter = 1;  // count visits from 1 (user will write 1, means 1st visit)
834 this.snapNodeCounter = 0;  // count nodes from 0
// Exactly one descriptor-visiting mode must be selected.
837 state.DISJOINTDVISITSTACK ||
838 state.DISJOINTDVISITPQUE ||
839 state.DISJOINTDVISITSTACKEESONTOP;
840 assert !(state.DISJOINTDVISITSTACK && state.DISJOINTDVISITPQUE);
841 assert !(state.DISJOINTDVISITSTACK && state.DISJOINTDVISITSTACKEESONTOP);
842 assert !(state.DISJOINTDVISITPQUE && state.DISJOINTDVISITSTACKEESONTOP);
844 // set some static configuration for ReachGraphs
845 ReachGraph.allocationDepth = allocationDepth;
846 ReachGraph.typeUtil = typeUtil;
847 ReachGraph.state = state;
849 ReachGraph.initOutOfScopeTemps();
851 ReachGraph.debugCallSiteVisitStartCapture
852 = state.DISJOINTDEBUGCALLVISITTOSTART;
854 ReachGraph.debugCallSiteNumVisitsToCapture
855 = state.DISJOINTDEBUGCALLNUMVISITS;
857 ReachGraph.debugCallSiteStopAfter
858 = state.DISJOINTDEBUGCALLSTOPAFTER;
860 ReachGraph.debugCallSiteVisitCounter
861 = 0;   // count visits from 1, is incremented before first visit
863 pm = new PointerMethod();
865 if( state.DO_DEFINITE_REACH_ANALYSIS ) {
866 doDefiniteReachAnalysis = true;
867 definiteReachAnalysis = new DefiniteReachAnalysis( pm );
// Optional precision features can be disabled via flags.
870 if( !state.DISJOINT_USE_GLOBAL_SWEEP ) {
871 ReachGraph.DISABLE_GLOBAL_SWEEP = true;
874 if( !state.DISJOINT_USE_STRONG_UPDATE ) {
875 ReachGraph.DISABLE_STRONG_UPDATES = true;
878 if( !state.DISJOINT_USE_PREDICATES ) {
879 ReachGraph.DISABLE_PREDICATES = true;
880 ExistPredSet.DISABLE_PREDICATES = true;
883 if( state.DISJOINT_SUMMARIZE_PER_CLASS ) {
884 summarizePerClass = true;
887 if( suppressOutput ) {
888 System.out.println("* Running disjoint reachability analysis with output suppressed! *");
892 allocateStructures();
// In summarize-per-class mode, flag by type rather than by individual site.
895 if( summarizePerClass && sitesToFlag != null ) {
896 for( FlatNew fnew : sitesToFlag ) {
897 typesToFlag.add( fnew.getType() );
902 initImplicitStringsModel();
906 double timeStartAnalysis = (double) System.nanoTime();
908 // start interprocedural fixed-point computation
// NOTE(review): the try { analyzeMethods(); } lines (909-910) are out of view.
911 } catch( IOException e ) {
912 throw new Error("IO Exception while writing disjointness analysis output.");
915 analysisComplete=true;
// Compute elapsed wall-clock time in seconds and build the report string.
917 double timeEndAnalysis = (double) System.nanoTime();
918 double dt = (timeEndAnalysis - timeStartAnalysis)/(Math.pow(10.0, 9.0) );
921 if( sitesToFlag != null ) {
922 treport = String.format("Disjoint reachability analysis flagged %d sites and took %.3f sec.", sitesToFlag.size(), dt);
923 if(sitesToFlag.size()>0) {
924 treport+="\nFlagged sites:"+"\n"+sitesToFlag.toString();
927 treport = String.format("Disjoint reachability analysis took %.3f sec.", dt);
929 if( state.DISJOINT_COUNT_VISITS ) {
930 treport += "\nFixed point algorithm visited "+totalMethodVisits+
931 " methods and "+totalNodeVisits+" nodes.";
933 if( state.DISJOINT_COUNT_GRAPH_ELEMENTS ) {
934 treport += "\n"+getPartial( mdSourceEntry ).countGraphElements()+"\n";
935 getPartial( mdSourceEntry ).writeGraph( "countElementsGraph",
943 getPartial( mdSourceEntry ).writeNodes( "countElementsNodeListing.txt" );
945 String justtime = String.format("%.2f", dt);
946 System.out.println(treport);
// Emit whichever artifacts (DOT files, IHMs, contexts, reports) were requested.
950 if( writeFinalDOTs && !writeAllIncrementalDOTs ) {
954 if( state.DISJOINTWRITEIHMS ) {
958 if( state.DISJOINTWRITEINITCONTEXTS ) {
959 writeInitialContexts();
963 if( state.DISJOINT_WRITE_ALL_NODE_FINAL_GRAPHS ) {
964 writeFinalGraphsForEveryNode();
967 if( state.DISJOINTALIASFILE != null && !suppressOutput ) {
// Task programs use the full sharing report; pure Java programs use the
// (known-broken, see WARNING above writeAllSharingJava) Java variant.
969 writeAllSharing(state.DISJOINTALIASFILE, treport, justtime, state.DISJOINTALIASTAB, state.lines);
971 writeAllSharingJava(state.DISJOINTALIASFILE,
974 state.DISJOINTALIASTAB,
981 buildStateMachines.writeStateMachines();
984 } catch( IOException e ) {
985 throw new Error("IO Exception while writing disjointness analysis output.");
// True while the active scheduling structure (stack or priority queue,
// depending on the configured visit mode) still holds descriptors to analyze.
990 protected boolean moreDescriptorsToVisit() {
991 if( state.DISJOINTDVISITSTACK ||
992 state.DISJOINTDVISITSTACKEESONTOP
994 return !descriptorsToVisitStack.isEmpty();
996 } else if( state.DISJOINTDVISITPQUE ) {
997 return !descriptorsToVisitQ.isEmpty();
1000 throw new Error("Neither descriptor visiting mode set");
1004 // fixed-point computation over the call graph--when a
1005 // method's callees are updated, it must be reanalyzed
// Interprocedural driver: seeds the work list with the call-graph roots
// (tasks in Bamboo mode, main in Java mode), then iterates until the
// reachability graphs reach a fixed point, re-enqueuing dependents whenever
// a method's complete graph changes.
1006 protected void analyzeMethods() throws java.io.IOException {
1008 // task or non-task (java) mode determines what the roots
1009 // of the call chain are, and establishes the set of methods
1010 // reachable from the roots that will be analyzed
1013 if( !suppressOutput ) {
1014 System.out.println("Bamboo mode...");
1017 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
1018 while( taskItr.hasNext() ) {
1019 TaskDescriptor td = (TaskDescriptor) taskItr.next();
1020 if( !descriptorsToAnalyze.contains(td) ) {
1021 // add all methods transitively reachable from the
1023 descriptorsToAnalyze.add(td);
1024 descriptorsToAnalyze.addAll(callGraph.getAllMethods(td) );
1029 if( !suppressOutput ) {
1030 System.out.println("Java mode...");
1033 // add all methods transitively reachable from the
1034 // source's main to set for analysis
1035 mdSourceEntry = typeUtil.getMain();
1036 descriptorsToAnalyze.add(mdSourceEntry);
1037 descriptorsToAnalyze.addAll(callGraph.getAllMethods(mdSourceEntry) );
1039 // fabricate an empty calling context that will call
1040 // the source's main, but call graph doesn't know
1041 // about it, so explicitly add it
1042 makeAnalysisEntryMethod(mdSourceEntry);
1043 descriptorsToAnalyze.add(mdAnalysisEntry);
1048 // now, depending on the interprocedural mode for visiting
1049 // methods, set up the needed data structures
1051 if( state.DISJOINTDVISITPQUE ) {
1053 // topologically sort according to the call graph so
1054 // leaf calls are last, helps build contexts up first
1055 LinkedList<Descriptor> sortedDescriptors =
1056 topologicalSort(descriptorsToAnalyze);
1058 // add sorted descriptors to priority queue, and duplicate
1059 // the queue as a set for efficiently testing whether some
1060 // method is marked for analysis
1062 Iterator<Descriptor> dItr;
1064 // for the priority queue, give items at the head
1065 // of the sorted list a low number (highest priority)
1066 while( !sortedDescriptors.isEmpty() ) {
1067 Descriptor d = sortedDescriptors.removeFirst();
1068 mapDescriptorToPriority.put(d, new Integer(p) );
1069 descriptorsToVisitQ.add(new DescriptorQWrapper(p, d) );
1070 descriptorsToVisitSet.add(d);
// NOTE(review): the declaration/increment of priority counter p (lines
// 1061/1071) are out of view -- confirm against the full source.
1074 } else if( state.DISJOINTDVISITSTACK ||
1075 state.DISJOINTDVISITSTACKEESONTOP
1077 // if we're doing the stack scheme, just throw the root
1078 // method or tasks on the stack
1080 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
1081 while( taskItr.hasNext() ) {
1082 TaskDescriptor td = (TaskDescriptor) taskItr.next();
1083 descriptorsToVisitStack.add(td);
1084 descriptorsToVisitSet.add(td);
1088 descriptorsToVisitStack.add(mdAnalysisEntry);
1089 descriptorsToVisitSet.add(mdAnalysisEntry);
1093 throw new Error("Unknown method scheduling mode");
1097 // analyze scheduled methods until there are no more to visit
1098 while( moreDescriptorsToVisit() ) {
1099 Descriptor d = null;
1101 if( state.DISJOINTDVISITSTACK ||
1102 state.DISJOINTDVISITSTACKEESONTOP
1104 d = descriptorsToVisitStack.pop();
1106 } else if( state.DISJOINTDVISITPQUE ) {
1107 d = descriptorsToVisitQ.poll().getDescriptor();
// Keep the membership set in sync with the scheduling structure.
1110 assert descriptorsToVisitSet.contains(d);
1111 descriptorsToVisitSet.remove(d);
1113 // because the task or method descriptor just extracted
1114 // was in the "to visit" set it either hasn't been analyzed
1115 // yet, or some method that it depends on has been
1116 // updated.  Recompute a complete reachability graph for
1117 // this task/method and compare it to any previous result.
1118 // If there is a change detected, add any methods/tasks
1119 // that depend on this one to the "to visit" set.
1121 if( !suppressOutput ) {
1122 System.out.println("Analyzing " + d);
1125 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1126 assert calleesToEnqueue.isEmpty();
1129 ReachGraph rg = analyzeMethod(d);
1130 ReachGraph rgPrev = getPartial(d);
1132 if( !rg.equals(rgPrev) ) {
1135 if( state.DISJOINTDEBUGSCHEDULING ) {
1136 System.out.println("  complete graph changed, scheduling callers for analysis:");
1139 // results for d changed, so enqueue dependents
1140 // of d for further analysis
1141 Iterator<Descriptor> depsItr = getDependents(d).iterator();
1142 while( depsItr.hasNext() ) {
1143 Descriptor dNext = depsItr.next();
1146 if( state.DISJOINTDEBUGSCHEDULING ) {
1147 System.out.println("  "+dNext);
1152 // whether or not the method under analysis changed,
1153 // we may have some callees that are scheduled for
1154 // more analysis, and they should go on the top of
1155 // the stack now (in other method-visiting modes they
1156 // are already enqueued at this point
1157 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1158 Iterator<Descriptor> depsItr = calleesToEnqueue.iterator();
1159 while( depsItr.hasNext() ) {
1160 Descriptor dNext = depsItr.next();
1163 calleesToEnqueue.clear();
// Intraprocedural fixed-point pass over one method or task: each flat
// node's ReachGraph is recomputed by merging its predecessors' graphs and
// applying the node's transfer function (analyzeFlatNode); a node's
// successors are re-enqueued whenever its graph changes.  The result is
// the merge of the graphs at all FlatReturnNodes — a conservative summary
// of the heap after the method completes.
1169 protected ReachGraph analyzeMethod(Descriptor d)
1170 throws java.io.IOException {
// optional statistics gathering
1172 if( state.DISJOINT_COUNT_VISITS ) {
1173 ++totalMethodVisits;
1176 // get the flat code for this descriptor
// the fabricated analysis entry has no State-registered flat method,
// so use the one built by makeAnalysisEntryMethod()
1178 if( d == mdAnalysisEntry ) {
1179 fm = fmAnalysisEntry;
1181 fm = state.getMethodFlat(d);
1183 pm.analyzeMethod(fm);
1185 // intraprocedural work set
1186 Set<FlatNode> flatNodesToVisit = new HashSet<FlatNode>();
1187 flatNodesToVisit.add(fm);
1189 // if determinism is desired by client, shadow the
1190 // set with a queue to make visit order deterministic
1191 Queue<FlatNode> flatNodesToVisitQ = null;
1192 if( determinismDesired ) {
1193 flatNodesToVisitQ = new LinkedList<FlatNode>();
1194 flatNodesToVisitQ.add(fm);
1197 // start a new mapping of partial results
1198 mapFlatNodeToReachGraph =
1199 new Hashtable<FlatNode, ReachGraph>();
1201 // the set of return nodes partial results that will be combined as
1202 // the final, conservative approximation of the entire method
1203 HashSet<FlatReturnNode> setReturns = new HashSet<FlatReturnNode>();
// decide whether debug snapshots were requested for exactly this method
// (matched by class symbol + method symbol against descSymbolDebug)
1207 boolean snapThisMethod = false;
1208 if( takeDebugSnapshots && d instanceof MethodDescriptor ) {
1209 MethodDescriptor mdThisMethod = (MethodDescriptor)d;
1210 ClassDescriptor cdThisMethod = mdThisMethod.getClassDesc();
1211 if( cdThisMethod != null ) {
1213 descSymbolDebug.equals( cdThisMethod.getSymbol()+
1215 mdThisMethod.getSymbol()
// worklist loop: drain the node set (deterministically via the shadow
// queue when requested) until no node's graph changes
1222 while( !flatNodesToVisit.isEmpty() ) {
1225 if( determinismDesired ) {
1226 assert !flatNodesToVisitQ.isEmpty();
1227 fn = flatNodesToVisitQ.remove();
1229 fn = flatNodesToVisit.iterator().next();
1231 flatNodesToVisit.remove(fn);
1233 // effect transfer function defined by this node,
1234 // then compare it to the old graph at this node
1235 // to see if anything was updated.
1237 ReachGraph rg = new ReachGraph();
1238 TaskDescriptor taskDesc;
// task entry nodes keep a persistent starting graph per task: reuse the
// cached one, or build the initial task graph on first visit
1239 if(fn instanceof FlatMethod && (taskDesc=((FlatMethod)fn).getTask())!=null) {
1240 if(mapDescriptorToReachGraph.containsKey(taskDesc)) {
1241 // retrieve existing reach graph if it is not first time
1242 rg=mapDescriptorToReachGraph.get(taskDesc);
1244 // create initial reach graph for a task
1245 rg=createInitialTaskReachGraph((FlatMethod)fn);
1247 mapDescriptorToReachGraph.put(taskDesc, rg);
1251 // start by merging all node's parents' graphs
1252 for( int i = 0; i < pm.numPrev(fn); ++i ) {
1253 FlatNode pn = pm.getPrev(fn,i);
1254 if( mapFlatNodeToReachGraph.containsKey(pn) ) {
1255 ReachGraph rgParent = mapFlatNodeToReachGraph.get(pn);
1261 if( snapThisMethod ) {
1262 debugSnapshot(rg, fn, true);
1266 // modify rg with appropriate transfer function
1267 rg = analyzeFlatNode(d, fm, fn, setReturns, rg);
1270 if( snapThisMethod ) {
1271 debugSnapshot(rg, fn, false);
1276 // if the results of the new graph are different from
1277 // the current graph at this node, replace the graph
1278 // with the update and enqueue the children
1279 ReachGraph rgPrev = mapFlatNodeToReachGraph.get(fn);
1280 if( !rg.equals(rgPrev) ) {
1281 mapFlatNodeToReachGraph.put(fn, rg);
1283 // we don't necessarily want to keep the reach graph for every
1284 // node in the program unless a client or the user wants it
1285 if( state.DISJOINT_WRITE_ALL_NODE_FINAL_GRAPHS ) {
1286 mapFlatNodeToReachGraphPersist.put(fn, rg);
1289 for( int i = 0; i < pm.numNext(fn); i++ ) {
1290 FlatNode nn = pm.getNext(fn, i);
1292 flatNodesToVisit.add(nn);
1293 if( determinismDesired ) {
1294 flatNodesToVisitQ.add(nn);
1301 // end by merging all return nodes into a complete
1302 // reach graph that represents all possible heap
1303 // states after the flat method returns
1304 ReachGraph completeGraph = new ReachGraph();
// a method with no visited FlatReturnNode is unexpected; print the
// descriptor for diagnosis before the assertion fires
1306 if( setReturns.isEmpty() ) {
1307 System.out.println( "d = "+d );
1310 assert !setReturns.isEmpty();
1311 Iterator retItr = setReturns.iterator();
1312 while( retItr.hasNext() ) {
1313 FlatReturnNode frn = (FlatReturnNode) retItr.next();
1315 assert mapFlatNodeToReachGraph.containsKey(frn);
1316 ReachGraph rgRet = mapFlatNodeToReachGraph.get(frn);
1318 completeGraph.merge(rgRet);
1322 if( snapThisMethod ) {
1323 // increment that we've visited the debug snap
1324 // method, and reset the node counter
1325 System.out.println(" @@@ debug snap at visit "+snapVisitCounter);
1327 snapNodeCounter = 0;
1329 if( snapVisitCounter == visitStartCapture + numVisitsToCapture &&
1332 System.out.println("!!! Stopping analysis after debug snap captures. !!!");
1338 return completeGraph;
// Transfer-function dispatcher for the reachability analysis: takes the
// reach graph 'rg' flowing into flat node 'fn' of method 'fmContaining'
// (descriptor 'd') and applies the abstract semantics of the node's kind
// (copy, cast, field/element load and store, allocation, string literal,
// SESE enter/exit, method call, return, ...).  Along the way it feeds the
// optional effects analysis and definite-reachability analysis, records
// per-node graphs at entry and exit (fn2rgAtEnter / fn2rgAtExit), and
// collects return nodes into setRetNodes so analyzeMethod can merge them
// into the method summary.
1342 protected ReachGraph
1343 analyzeFlatNode(Descriptor d,
1344 FlatMethod fmContaining,
1346 HashSet<FlatReturnNode> setRetNodes,
1348 ) throws java.io.IOException {
1351 if( state.DISJOINT_COUNT_VISITS ) {
1356 // any variables that are no longer live should be
1357 // nullified in the graph to reduce edges
1358 //rg.nullifyDeadVars( liveness.getLiveInTemps( fmContaining, fn ) );
// scratch declarations shared by the case arms below
1362 FieldDescriptor fld;
1363 TypeDescriptor tdElement;
1364 FieldDescriptor fdElement;
1365 FlatSESEEnterNode sese;
1366 FlatSESEExitNode fsexn;
1368 boolean alreadyReachable;
1369 Set<EdgeKey> edgeKeysForLoad;
1370 Set<EdgeKey> edgeKeysRemoved;
1371 Set<EdgeKey> edgeKeysAdded;
1372 Set<DefiniteReachState.FdEntry> edgesToElideFromProp;
1374 //Stores the flatnode's reach graph at enter
1375 ReachGraph rgOnEnter = new ReachGraph();
1376 rgOnEnter.merge(rg);
1377 fn2rgAtEnter.put(fn, rgOnEnter);
// set by any case that advances the definite-reach analysis itself; if
// still false at the end, otherStatement() is applied as the default
1381 boolean didDefReachTransfer = false;
1385 // use node type to decide what transfer function
1386 // to apply to the reachability graph
1387 switch( fn.kind() ) {
// debugging aid: a marker node that dumps the current reach graph
1389 case FKind.FlatGenReachNode: {
1390 FlatGenReachNode fgrn = (FlatGenReachNode) fn;
1392 System.out.println(" Generating reach graph for program point: "+fgrn.getGraphName() );
1395 rg.writeGraph("genReach"+fgrn.getGraphName(),
1396 true, // write labels (variables)
1397 true, // selectively hide intermediate temp vars
1398 true, // prune unreachable heap regions
1399 false, // hide reachability altogether
1400 true, // hide subset reachability states
1401 true, // hide predicates
1402 true); //false); // hide edge taints
// debugging aid: dumps the definite-reach analysis state instead
1406 case FKind.FlatGenDefReachNode: {
1407 FlatGenDefReachNode fgdrn = (FlatGenDefReachNode) fn;
1408 if( doDefiniteReachAnalysis ) {
1409 definiteReachAnalysis.writeState( fn, fgdrn.getOutputName() );
1414 case FKind.FlatMethod: {
1415 // construct this method's initial heap model (IHM)
1416 // since we're working on the FlatMethod, we know
1417 // the incoming ReachGraph 'rg' is empty
1419 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
1420 getIHMcontributions(d);
1422 Set entrySet = heapsFromCallers.entrySet();
1423 Iterator itr = entrySet.iterator();
1424 while( itr.hasNext() ) {
1425 Map.Entry me = (Map.Entry)itr.next();
1426 FlatCall fc = (FlatCall) me.getKey();
1427 ReachGraph rgContrib = (ReachGraph) me.getValue();
1429 // note that "fc.getMethod()" like (Object.toString)
1430 // might not be equal to "d" like (String.toString)
1431 // because the mapping gets set up when we resolve
1433 rg.merge(rgContrib);
1436 // additionally, we are enforcing STRICT MONOTONICITY for the
1437 // method's initial context, so grow the context by whatever
1438 // the previously computed context was, and put the most
1439 // up-to-date context back in the map
1440 ReachGraph rgPrevContext = mapDescriptorToInitialContext.get(d);
1441 rg.merge(rgPrevContext);
1442 mapDescriptorToInitialContext.put(d, rg);
1444 if( doDefiniteReachAnalysis ) {
1445 FlatMethod fm = (FlatMethod) fn;
1446 Set<TempDescriptor> params = new HashSet<TempDescriptor>();
1447 for( int i = 0; i < fm.numParameters(); ++i ) {
1448 params.add( fm.getParameter( i ) );
1450 definiteReachAnalysis.methodEntry( fn, params );
1451 didDefReachTransfer = true;
// x = y  (only plain ASSIGN ops touch the heap model)
1455 case FKind.FlatOpNode:
1456 FlatOpNode fon = (FlatOpNode) fn;
1457 if( fon.getOp().getOp() == Operation.ASSIGN ) {
1458 lhs = fon.getDest();
1459 rhs = fon.getLeft();
1461 // before transfer, do effects analysis support
1462 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1463 if(rblockRel.isPotentialStallSite(fn)) {
1464 // x gets status of y
1465 if(!accessible.isAccessible(fn, rhs)) {
1466 rg.makeInaccessible(lhs);
1472 rg.assignTempXEqualToTempY(lhs, rhs);
1474 if( doDefiniteReachAnalysis ) {
1475 definiteReachAnalysis.copy( fn, lhs, rhs );
1476 didDefReachTransfer = true;
// x = (T) y  — same as copy, plus the cast type
1481 case FKind.FlatCastNode:
1482 FlatCastNode fcn = (FlatCastNode) fn;
1486 TypeDescriptor td = fcn.getType();
1489 // before transfer, do effects analysis support
1490 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1491 if(rblockRel.isPotentialStallSite(fn)) {
1492 // x gets status of y
1493 if(!accessible.isAccessible(fn,rhs)) {
1494 rg.makeInaccessible(lhs);
1500 rg.assignTempXEqualToCastedTempY(lhs, rhs, td);
1502 if( doDefiniteReachAnalysis ) {
1503 definiteReachAnalysis.copy( fn, lhs, rhs );
1504 didDefReachTransfer = true;
// x = y.f  (field load)
1508 case FKind.FlatFieldNode:
1509 FlatFieldNode ffn = (FlatFieldNode) fn;
1513 fld = ffn.getField();
1515 // before graph transform, possible inject
1516 // a stall-site taint
1517 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1519 if(rblockRel.isPotentialStallSite(fn)) {
1520 // x=y.f, stall y if not accessible
1521 // contributes read effects on stall site of y
1522 if(!accessible.isAccessible(fn,rhs)) {
1523 rg.taintStallSite(fn, rhs);
1526 // after this, x and y are accessible.
1527 rg.makeAccessible(lhs);
1528 rg.makeAccessible(rhs);
1532 edgeKeysForLoad = null;
1533 if( doDefiniteReachAnalysis ) {
1534 edgeKeysForLoad = new HashSet<EdgeKey>();
1537 if( shouldAnalysisTrack(fld.getType() ) ) {
1539 rg.assignTempXEqualToTempYFieldF( lhs, rhs, fld, fn, edgeKeysForLoad );
1541 if( doDefiniteReachAnalysis ) {
1542 definiteReachAnalysis.load( fn, lhs, rhs, fld, edgeKeysForLoad );
1543 didDefReachTransfer = true;
1547 // after transfer, use updated graph to
1548 // do effects analysis
1549 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1550 effectsAnalysis.analyzeFlatFieldNode(rg, rhs, fld, fn);
// x.f = y  (field store)
1554 case FKind.FlatSetFieldNode:
1555 FlatSetFieldNode fsfn = (FlatSetFieldNode) fn;
1557 lhs = fsfn.getDst();
1558 fld = fsfn.getField();
1559 rhs = fsfn.getSrc();
1561 boolean strongUpdate = false;
1563 alreadyReachable = false;
1564 edgeKeysRemoved = null;
1565 edgeKeysAdded = null;
1566 edgesToElideFromProp = null;
1567 if( doDefiniteReachAnalysis ) {
1568 alreadyReachable = definiteReachAnalysis.isAlreadyReachable( rhs, lhs, fn );
1569 edgeKeysRemoved = new HashSet<EdgeKey>();
1570 edgeKeysAdded = new HashSet<EdgeKey>();
1571 edgesToElideFromProp = definiteReachAnalysis.edgesToElidePropagation( lhs, rhs, fn );
1574 // before transfer func, possibly inject
1575 // stall-site taints
1576 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1578 if(rblockRel.isPotentialStallSite(fn)) {
1579 // x.y=f , stall x and y if they are not accessible
1580 // also contribute write effects on stall site of x
1581 if(!accessible.isAccessible(fn,lhs)) {
1582 rg.taintStallSite(fn, lhs);
1585 if(!accessible.isAccessible(fn,rhs)) {
1586 rg.taintStallSite(fn, rhs);
1589 // accessible status update
1590 rg.makeAccessible(lhs);
1591 rg.makeAccessible(rhs);
1595 if( shouldAnalysisTrack(fld.getType() ) ) {
1597 strongUpdate = rg.assignTempXFieldFEqualToTempY( lhs,
1604 edgesToElideFromProp );
1605 if( doDefiniteReachAnalysis ) {
1606 definiteReachAnalysis.store( fn,
1612 didDefReachTransfer = true;
1616 // use transformed graph to do effects analysis
1617 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1618 effectsAnalysis.analyzeFlatSetFieldNode(rg, lhs, fld, fn, strongUpdate);
// x = y[i]  (array element load, modeled as a load of the pseudo
// element field produced by getArrayField)
1622 case FKind.FlatElementNode:
1623 FlatElementNode fen = (FlatElementNode) fn;
1628 assert rhs.getType() != null;
1629 assert rhs.getType().isArray();
1631 tdElement = rhs.getType().dereference();
1632 fdElement = getArrayField(tdElement);
1634 // before transfer func, possibly inject
1636 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1637 if(rblockRel.isPotentialStallSite(fn)) {
1638 // x=y.f, stall y if not accessible
1639 // contributes read effects on stall site of y
1640 // after this, x and y are accessible.
1641 if(!accessible.isAccessible(fn,rhs)) {
1642 rg.taintStallSite(fn, rhs);
1645 rg.makeAccessible(lhs);
1646 rg.makeAccessible(rhs);
1650 edgeKeysForLoad = null;
1651 if( doDefiniteReachAnalysis ) {
1652 edgeKeysForLoad = new HashSet<EdgeKey>();
1655 if( shouldAnalysisTrack(lhs.getType() ) ) {
1657 rg.assignTempXEqualToTempYFieldF( lhs, rhs, fdElement, fn, edgeKeysForLoad );
1659 if( doDefiniteReachAnalysis ) {
1660 definiteReachAnalysis.load( fn, lhs, rhs, fdElement, edgeKeysForLoad );
1661 didDefReachTransfer = true;
1665 // use transformed graph to do effects analysis
1666 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1667 effectsAnalysis.analyzeFlatFieldNode(rg, rhs, fdElement, fn);
// x[i] = y  (array element store, via the same pseudo element field)
1671 case FKind.FlatSetElementNode:
1672 FlatSetElementNode fsen = (FlatSetElementNode) fn;
1674 lhs = fsen.getDst();
1675 rhs = fsen.getSrc();
1677 assert lhs.getType() != null;
1678 assert lhs.getType().isArray();
1680 tdElement = lhs.getType().dereference();
1681 fdElement = getArrayField(tdElement);
1683 alreadyReachable = false;
1684 edgeKeysRemoved = null;
1685 edgeKeysAdded = null;
1686 edgesToElideFromProp = null;
1687 if( doDefiniteReachAnalysis ) {
1688 alreadyReachable = definiteReachAnalysis.isAlreadyReachable( rhs, lhs, fn );
1689 edgeKeysRemoved = new HashSet<EdgeKey>();
1690 edgeKeysAdded = new HashSet<EdgeKey>();
1691 edgesToElideFromProp = definiteReachAnalysis.edgesToElidePropagation( lhs, rhs, fn );
1694 // before transfer func, possibly inject
1695 // stall-site taints
1696 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1698 if(rblockRel.isPotentialStallSite(fn)) {
1699 // x.y=f , stall x and y if they are not accessible
1700 // also contribute write effects on stall site of x
1701 if(!accessible.isAccessible(fn,lhs)) {
1702 rg.taintStallSite(fn, lhs);
1705 if(!accessible.isAccessible(fn,rhs)) {
1706 rg.taintStallSite(fn, rhs);
1709 // accessible status update
1710 rg.makeAccessible(lhs);
1711 rg.makeAccessible(rhs);
1715 if( shouldAnalysisTrack(rhs.getType() ) ) {
1716 // transfer func, BUT
1717 // skip this node if it cannot create new reachability paths
1718 if( !arrayReferencees.doesNotCreateNewReaching(fsen) ) {
1719 rg.assignTempXFieldFEqualToTempY( lhs,
1726 edgesToElideFromProp );
1729 if( doDefiniteReachAnalysis ) {
1730 definiteReachAnalysis.store( fn,
1736 didDefReachTransfer = true;
1740 // use transformed graph to do effects analysis
1741 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1742 effectsAnalysis.analyzeFlatSetFieldNode(rg, lhs, fdElement, fn,
// x = new T()  (allocation; binds lhs to this site's alloc-site model)
1748 FlatNew fnn = (FlatNew) fn;
1750 if( shouldAnalysisTrack(lhs.getType() ) ) {
1751 AllocSite as = getAllocSiteFromFlatNewPRIVATE(fnn);
1753 // before transform, support effects analysis
1754 if (doEffectsAnalysis && fmContaining != fmAnalysisEntry) {
1755 if (rblockRel.isPotentialStallSite(fn)) {
1756 // after creating new object, lhs is accessible
1757 rg.makeAccessible(lhs);
1762 rg.assignTempEqualToNewAlloc(lhs, as);
1764 if( doDefiniteReachAnalysis ) {
1765 definiteReachAnalysis.newObject( fn, lhs );
1766 didDefReachTransfer = true;
1772 case FKind.FlatLiteralNode:
1773 // BIG NOTE: this transfer function is only here for
1774 // points-to information for String literals. That's it.
1775 // Effects and disjoint reachability and all of that don't
1776 // care about references to literals.
1777 FlatLiteralNode fln = (FlatLiteralNode) fn;
1779 if( fln.getType().equals( stringType ) ) {
1780 rg.assignTempEqualToStringLiteral( fln.getDst(),
1781 newStringLiteralAlloc,
1782 newStringLiteralBytesAlloc,
1788 case FKind.FlatSESEEnterNode:
1789 sese = (FlatSESEEnterNode) fn;
1791 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1793 // always remove ALL stall site taints at enter
1794 rg.removeAllStallSiteTaints();
1796 // inject taints for in-set vars
1797 rg.taintInSetVars(sese);
1802 case FKind.FlatSESEExitNode:
1803 fsexn = (FlatSESEExitNode) fn;
1804 sese = fsexn.getFlatEnter();
1806 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1808 // @ sese exit make all live variables
1809 // inaccessible to later parent statements
1810 rg.makeInaccessible(liveness.getLiveInTemps(fmContaining, fn) );
1812 // always remove ALL stall site taints at exit
1813 rg.removeAllStallSiteTaints();
1815 // remove in-set var taints for the exiting rblock
1816 rg.removeInContextTaints(sese);
1821 case FKind.FlatCall: {
1822 Descriptor mdCaller;
1823 if( fmContaining.getMethod() != null ) {
1824 mdCaller = fmContaining.getMethod();
1826 mdCaller = fmContaining.getTask();
1828 FlatCall fc = (FlatCall) fn;
1829 MethodDescriptor mdCallee = fc.getMethod();
1830 FlatMethod fmCallee = state.getMethodFlat(mdCallee);
1833 if( doDefiniteReachAnalysis ) {
1834 definiteReachAnalysis.methodCall( fn, fc.getReturnTemp() );
1835 didDefReachTransfer = true;
1839 // the transformation for a call site should update the
1840 // current heap abstraction with any effects from the callee,
1841 // or if the method is virtual, the effects from any possible
1842 // callees, so find the set of callees...
1843 Set<MethodDescriptor> setPossibleCallees;
1844 if( determinismDesired ) {
1845 // use an ordered set
1846 setPossibleCallees = new TreeSet<MethodDescriptor>(dComp);
1848 // otherwise use a speedy hashset
1849 setPossibleCallees = new HashSet<MethodDescriptor>();
1852 if( mdCallee.isStatic() ) {
1853 setPossibleCallees.add(mdCallee);
1855 TypeDescriptor typeDesc = fc.getThis().getType();
1856 setPossibleCallees.addAll(callGraph.getMethods(mdCallee,
1862 DebugCallSiteData dcsd = new DebugCallSiteData();
1864 ReachGraph rgMergeOfPossibleCallers = new ReachGraph();
1867 Iterator<MethodDescriptor> mdItr = setPossibleCallees.iterator();
1868 while( mdItr.hasNext() ) {
1869 MethodDescriptor mdPossible = mdItr.next();
1870 FlatMethod fmPossible = state.getMethodFlat(mdPossible);
1872 addDependent(mdPossible, // callee
1876 // decide for each possible resolution of the method whether we
1877 // want to debug this call site
1878 decideDebugCallSite( dcsd, mdCaller, mdPossible );
1882 // calculate the heap this call site can reach--note this is
1883 // not used for the current call site transform, we are
1884 // grabbing this heap model for future analysis of the callees,
1885 // so if different results emerge we will return to this site
1886 ReachGraph heapForThisCall_old =
1887 getIHMcontribution(mdPossible, fc);
1889 // the computation of the callee-reachable heap
1890 // is useful for making the callee starting point
1891 // and for applying the call site transfer function
1892 Set<Integer> callerNodeIDsCopiedToCallee =
1893 new HashSet<Integer>();
1896 ReachGraph heapForThisCall_cur =
1897 rg.makeCalleeView(fc,
1899 callerNodeIDsCopiedToCallee,
1904 // enforce that a call site contribution can only
1905 // monotonically increase
1906 heapForThisCall_cur.merge(heapForThisCall_old);
1908 if( !heapForThisCall_cur.equals(heapForThisCall_old) ) {
1909 // if heap at call site changed, update the contribution,
1910 // and reschedule the callee for analysis
1911 addIHMcontribution(mdPossible, fc, heapForThisCall_cur);
1913 // map a FlatCall to its enclosing method/task descriptor
1914 // so we can write that info out later
1915 fc2enclosing.put(fc, mdCaller);
1917 if( state.DISJOINTDEBUGSCHEDULING ) {
1918 System.out.println(" context changed at callsite: "+fc+", scheduling callee: "+mdPossible);
1921 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1922 calleesToEnqueue.add(mdPossible);
1924 enqueue(mdPossible);
1931 // don't alter the working graph (rg) until we compute a
1932 // result for every possible callee, merge them all together,
1933 // then set rg to that
1934 ReachGraph rgPossibleCaller = new ReachGraph();
1935 rgPossibleCaller.merge(rg);
1937 ReachGraph rgPossibleCallee = getPartial(mdPossible);
1939 if( rgPossibleCallee == null ) {
1940 // if this method has never been analyzed just schedule it
1941 // for analysis and skip over this call site for now
1942 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1943 calleesToEnqueue.add(mdPossible);
1945 enqueue(mdPossible);
1948 if( state.DISJOINTDEBUGSCHEDULING ) {
1949 System.out.println(" callee hasn't been analyzed, scheduling: "+mdPossible);
1955 // calculate the method call transform
1956 rgPossibleCaller.resolveMethodCall(fc,
1959 callerNodeIDsCopiedToCallee,
1964 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1965 if( !accessible.isAccessible(fn, ReachGraph.tdReturn) ) {
1966 rgPossibleCaller.makeInaccessible(fc.getReturnTemp() );
1972 rgMergeOfPossibleCallers.merge(rgPossibleCaller);
1977 statusDebugCallSite( dcsd );
1981 // now that we've taken care of building heap models for
1982 // callee analysis, finish this transformation
1983 rg = rgMergeOfPossibleCallers;
1986 // jjenista: what is this? It breaks compilation
1987 // of programs with no tasks/SESEs/rblocks...
1988 //XXXXXXXXXXXXXXXXXXXXXXXXX
1989 //need to consider more
1990 if( state.OOOJAVA ) {
1991 FlatNode nextFN=fmCallee.getNext(0);
1992 if( nextFN instanceof FlatSESEEnterNode ) {
1993 FlatSESEEnterNode calleeSESE=(FlatSESEEnterNode)nextFN;
1994 if(!calleeSESE.getIsLeafSESE()) {
1995 rg.makeInaccessible(liveness.getLiveInTemps(fmContaining, fn) );
2003 case FKind.FlatReturnNode:
2004 FlatReturnNode frn = (FlatReturnNode) fn;
2005 rhs = frn.getReturnTemp();
2007 // before transfer, do effects analysis support
2008 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
2009 if(!accessible.isAccessible(fn,rhs)) {
2010 rg.makeInaccessible(ReachGraph.tdReturn);
2014 if( rhs != null && shouldAnalysisTrack(rhs.getType() ) ) {
2015 rg.assignReturnEqualToTemp(rhs);
2018 setRetNodes.add(frn);
// default definite-reach transfer for any node kind not handled above
2025 if( doDefiniteReachAnalysis && !didDefReachTransfer ) {
2026 definiteReachAnalysis.otherStatement( fn );
2031 // dead variables were removed before the above transfer function
2032 // was applied, so eliminate heap regions and edges that are no
2033 // longer part of the abstractly-live heap graph, and sweep up
2034 // and reachability effects that are altered by the reduction
2035 //rg.abstractGarbageCollect();
2039 // back edges are strictly monotonic
2040 if( pm.isBackEdge(fn) ) {
2041 ReachGraph rgPrevResult = mapBackEdgeToMonotone.get(fn);
2042 rg.merge(rgPrevResult);
2043 mapBackEdgeToMonotone.put(fn, rg);
// record the graph flowing out of this node
2047 ReachGraph rgOnExit = new ReachGraph();
2049 fn2rgAtExit.put(fn, rgOnExit);
2053 // at this point rg should be the correct update
2054 // by an above transfer function, or untouched if
2055 // the flat node type doesn't affect the heap
2061 // this method should generate integers strictly greater than zero!
2062 // special "shadow" regions are made from a heap region by negating
// Returns the next unique, positive heap region node ID, backed by the
// static uniqueIDcount counter.
2064 static public Integer generateUniqueHeapRegionNodeID() {
2066 return new Integer(uniqueIDcount);
// Arrays are modeled as objects with a single pseudo-field of the element
// type; lazily create and cache that FieldDescriptor per element type so
// every array of the same element type shares one field model.
2071 static public FieldDescriptor getArrayField(TypeDescriptor tdElement) {
2072 FieldDescriptor fdElement = mapTypeToArrayField.get(tdElement);
2073 if( fdElement == null ) {
2074 fdElement = new FieldDescriptor(new Modifiers(Modifiers.PUBLIC),
2076 arrayElementFieldName,
2079 mapTypeToArrayField.put(tdElement, fdElement);
// Writes the final, complete (summary) reach graph of every analyzed
// method and task out as a graph file named COMPLETE<desc> (COMPLETEtask
// for tasks).
2086 private void writeFinalGraphs() {
2087 Set entrySet = mapDescriptorToCompleteReachGraph.entrySet();
2088 Iterator itr = entrySet.iterator();
2089 while( itr.hasNext() ) {
2090 Map.Entry me = (Map.Entry)itr.next();
2091 Descriptor d = (Descriptor) me.getKey();
2092 ReachGraph rg = (ReachGraph) me.getValue();
2095 if( d instanceof TaskDescriptor ) {
2096 graphName = "COMPLETEtask"+d;
2098 graphName = "COMPLETE"+d;
2101 rg.writeGraph(graphName,
2102 true, // write labels (variables)
2103 true, // selectively hide intermediate temp vars
2104 true, // prune unreachable heap regions
2105 false, // hide reachability altogether
2106 true, // hide subset reachability states
2107 true, // hide predicates
2108 true); // hide edge taints
// Writes out every initial heap model (IHM) contribution: for each
// callee descriptor, the per-call-site reach graph its callers feed in.
2112 private void writeFinalIHMs() {
2113 Iterator d2IHMsItr = mapDescriptorToIHMcontributions.entrySet().iterator();
2114 while( d2IHMsItr.hasNext() ) {
2115 Map.Entry me1 = (Map.Entry)d2IHMsItr.next();
2116 Descriptor d = (Descriptor) me1.getKey();
2117 Hashtable<FlatCall, ReachGraph> IHMs = (Hashtable<FlatCall, ReachGraph>)me1.getValue();
2119 Iterator fc2rgItr = IHMs.entrySet().iterator();
2120 while( fc2rgItr.hasNext() ) {
2121 Map.Entry me2 = (Map.Entry)fc2rgItr.next();
2122 FlatCall fc = (FlatCall) me2.getKey();
2123 ReachGraph rg = (ReachGraph) me2.getValue();
2125 rg.writeGraph("IHMPARTFOR"+d+"FROM"+fc2enclosing.get(fc)+fc,
2126 true, // write labels (variables)
2127 true, // selectively hide intermediate temp vars
// NOTE(review): the next two comments are swapped relative to every
// other writeGraph call site in this file (there "prune unreachable"
// precedes "hide reachability") — verify against writeGraph's actual
// parameter order.
2128 true, // hide reachability altogether
2129 true, // prune unreachable heap regions
2130 true, // hide subset reachability states
2131 false, // hide predicates
2132 true); // hide edge taints
// Writes out the strictly-monotonic initial context (merged IHM) that
// each analyzed method/task starts from, as INITIAL<desc> graph files.
2137 private void writeInitialContexts() {
2138 Set entrySet = mapDescriptorToInitialContext.entrySet();
2139 Iterator itr = entrySet.iterator();
2140 while( itr.hasNext() ) {
2141 Map.Entry me = (Map.Entry)itr.next();
2142 Descriptor d = (Descriptor) me.getKey();
2143 ReachGraph rg = (ReachGraph) me.getValue();
2145 rg.writeGraph("INITIAL"+d,
2146 true, // write labels (variables)
2147 true, // selectively hide intermediate temp vars
2148 true, // prune unreachable heap regions
2149 false, // hide all reachability
2150 true, // hide subset reachability states
2151 true, // hide predicates
2152 false); // hide edge taints
// Writes the persisted per-flat-node final reach graphs (collected when
// DISJOINT_WRITE_ALL_NODE_FINAL_GRAPHS is on) as NODEFINAL<node> files.
2156 private void writeFinalGraphsForEveryNode() {
2157 Set entrySet = mapFlatNodeToReachGraphPersist.entrySet();
2158 Iterator itr = entrySet.iterator();
2159 while( itr.hasNext() ) {
2160 Map.Entry me = (Map.Entry) itr.next();
2161 FlatNode fn = (FlatNode) me.getKey();
2162 ReachGraph rg = (ReachGraph) me.getValue();
2164 rg.writeGraph("NODEFINAL"+fn,
2165 true, // write labels (variables)
2166 false, // selectively hide intermediate temp vars
2167 true, // prune unreachable heap regions
2168 true, // hide all reachability
2169 true, // hide subset reachability states
2170 true, // hide predicates
2171 true); // hide edge taints
// Returns the most recent complete (summary) reach graph computed for
// this method/task, or null if it has not been analyzed yet.
2176 protected ReachGraph getPartial(Descriptor d) {
2177 return mapDescriptorToCompleteReachGraph.get(d);
// Records the latest complete (summary) reach graph for this method/task
// and, when writeAllIncrementalDOTs is set, dumps each successive partial
// result to a uniquely-numbered graph file.
2180 protected void setPartial(Descriptor d, ReachGraph rg) {
2181 mapDescriptorToCompleteReachGraph.put(d, rg);
2183 // when the flag for writing out every partial
2184 // result is set, we should spit out the graph,
2185 // but in order to give it a unique name we need
2186 // to track how many partial results for this
2187 // descriptor we've already written out
2188 if( writeAllIncrementalDOTs ) {
2189 if( !mapDescriptorToNumUpdates.containsKey(d) ) {
2190 mapDescriptorToNumUpdates.put(d, new Integer(0) );
2192 Integer n = mapDescriptorToNumUpdates.get(d);
2195 if( d instanceof TaskDescriptor ) {
2196 graphName = d+"COMPLETEtask"+String.format("%05d", n);
2198 graphName = d+"COMPLETE"+String.format("%05d", n);
2201 rg.writeGraph(graphName,
2202 true, // write labels (variables)
2203 true, // selectively hide intermediate temp vars
2204 true, // prune unreachable heap regions
2205 false, // hide all reachability
2206 true, // hide subset reachability states
2207 false, // hide predicates
2208 false); // hide edge taints
// bump the per-descriptor counter for the next incremental dump
2210 mapDescriptorToNumUpdates.put(d, n + 1);
2216 // return just the allocation site associated with one FlatNew node
// Dispatches on the summarizePerClass mode: either one AllocSite per
// allocated class, or one per syntactic allocation site.
2217 protected AllocSite getAllocSiteFromFlatNewPRIVATE(FlatNew fnew) {
2218 return summarizePerClass ?
2219 getAllocSiteFromFlatNewPRIVATEperClass( fnew ) :
2220 getAllocSiteFromFlatNewPRIVATEperSite( fnew );
// Per-site summarization: lazily builds (and caches) one AllocSite per
// FlatNew node, with allocationDepth single-object heap region IDs plus
// one summary-node ID, all registered in mapHrnIdToAllocSite.
2223 protected AllocSite getAllocSiteFromFlatNewPRIVATEperSite(FlatNew fnew) {
// sites listed in sitesToFlag get the "flagged" treatment even without
// a programmer-supplied disjoint ID
2224 boolean flagProgrammatically = false;
2225 if( sitesToFlag != null && sitesToFlag.contains(fnew) ) {
2226 flagProgrammatically = true;
2229 if( !mapFlatNewToAllocSite.containsKey(fnew) ) {
2230 AllocSite as = AllocSite.factory(allocationDepth,
2232 fnew.getDisjointId(),
2233 flagProgrammatically
2236 // the newest nodes are single objects
2237 for( int i = 0; i < allocationDepth; ++i ) {
2238 Integer id = generateUniqueHeapRegionNodeID();
2239 as.setIthOldest(i, id);
2240 mapHrnIdToAllocSite.put(id, as);
2243 // the oldest node is a summary node
2244 as.setSummary(generateUniqueHeapRegionNodeID() );
2246 mapFlatNewToAllocSite.put(fnew, as);
2249 return mapFlatNewToAllocSite.get(fnew);
// Per-class summarization: all FlatNew nodes of the same type share one
// AllocSite (cached in mapTypeToAllocSite); each FlatNew is then mapped
// to its type's shared site.
2252 protected AllocSite getAllocSiteFromFlatNewPRIVATEperClass(FlatNew fnew) {
2253 TypeDescriptor type = fnew.getType();
2255 boolean flagProgrammatically = typesToFlag.contains( type );
2257 if( !mapTypeToAllocSite.containsKey( type ) ) {
2258 AllocSite as = AllocSite.factory(allocationDepth,
2260 fnew.getDisjointId(),
2261 flagProgrammatically
2264 // the newest nodes are single objects
2265 for( int i = 0; i < allocationDepth; ++i ) {
2266 Integer id = generateUniqueHeapRegionNodeID();
2267 as.setIthOldest(i, id);
2268 mapHrnIdToAllocSite.put(id, as);
2271 // the oldest node is a summary node
2272 as.setSummary(generateUniqueHeapRegionNodeID() );
2274 mapTypeToAllocSite.put( type, as );
2277 if( !mapFlatNewToAllocSite.containsKey( fnew ) ) {
2278 AllocSite as = mapTypeToAllocSite.get( type );
2279 mapFlatNewToAllocSite.put( fnew, as );
2282 return mapFlatNewToAllocSite.get(fnew);
// True iff values of this type live in the heap and therefore must be
// modeled by the reachability analysis (objects, and arrays — including
// arrays of primitives).
2286 public static boolean shouldAnalysisTrack(TypeDescriptor type) {
2287 // don't track primitive types, but an array
2288 // of primitives is heap memory
2289 if( type.isImmutable() ) {
2290 return type.isArray();
2293 // everything else is an object
// Number of descriptors (methods/tasks) scheduled for analysis.
2297 protected int numMethodsAnalyzed() {
2298 return descriptorsToAnalyze.size();
2304 // Take in source entry which is the program's compiled entry and
2305 // create a new analysis entry, a method that takes no parameters
2306 // and appears to allocate the command line arguments and call the
2307 // source entry with them. The purpose of this analysis entry is
2308 // to provide a top-level method context with no parameters left.
2309 protected void makeAnalysisEntryMethod(MethodDescriptor mdSourceEntry) {
// fabricate a "public static void analysisEntryMethod()" descriptor
2311 Modifiers mods = new Modifiers();
2312 mods.addModifier(Modifiers.PUBLIC);
2313 mods.addModifier(Modifiers.STATIC);
2315 TypeDescriptor returnType = new TypeDescriptor(TypeDescriptor.VOID);
2317 this.mdAnalysisEntry =
2318 new MethodDescriptor(mods,
2320 "analysisEntryMethod"
// model allocation of the String[] args array itself...
2323 TypeDescriptor argsType = mdSourceEntry.getParamType(0);
2324 TempDescriptor cmdLineArgs =
2325 new TempDescriptor("analysisEntryTemp_args",
2329 new FlatNew(argsType,
2333 this.constructedCmdLineArgsNew = fnArgs;
// ...then one representative element (a String) stored into the array...
2335 TypeDescriptor argType = argsType.dereference();
2336 TempDescriptor anArg =
2337 new TempDescriptor("analysisEntryTemp_arg",
2341 new FlatNew(argType,
2345 this.constructedCmdLineArgNew = fnArg;
2347 TypeDescriptor typeIndex = new TypeDescriptor(TypeDescriptor.INT);
2348 TempDescriptor index =
2349 new TempDescriptor("analysisEntryTemp_index",
2352 FlatLiteralNode fli =
2353 new FlatLiteralNode(typeIndex,
2358 FlatSetElementNode fse =
2359 new FlatSetElementNode(cmdLineArgs,
// ...and the backing byte array referenced by that String
2364 TypeDescriptor typeSize = new TypeDescriptor(TypeDescriptor.INT);
2365 TempDescriptor sizeBytes =
2366 new TempDescriptor("analysisEntryTemp_size",
2369 FlatLiteralNode fls =
2370 new FlatLiteralNode(typeSize,
2375 TempDescriptor strBytes =
2376 new TempDescriptor("analysisEntryTemp_strBytes",
2380 new FlatNew(stringBytesType,
2385 this.constructedCmdLineArgBytesNew = fnBytes;
2387 FlatSetFieldNode fsf =
2388 new FlatSetFieldNode(anArg,
2393 // throw this in so you can always see what the initial heap context
2394 // looks like if you want to, it's cheap
2395 FlatGenReachNode fgen = new FlatGenReachNode( "argContext" );
// finally, call the real program entry with the fabricated args
2397 TempDescriptor[] sourceEntryArgs = new TempDescriptor[1];
2398 sourceEntryArgs[0] = cmdLineArgs;
2400 new FlatCall(mdSourceEntry,
2406 FlatReturnNode frn = new FlatReturnNode(null);
2408 FlatExit fe = new FlatExit();
2410 this.fmAnalysisEntry =
2411 new FlatMethod(mdAnalysisEntry,
// chain the fabricated nodes into straight-line control flow
2415 List<FlatNode> nodes = new LinkedList<FlatNode>();
2416 nodes.add( fnArgs );
2421 nodes.add( fnBytes );
2428 FlatNode current = this.fmAnalysisEntry;
2429 for( FlatNode next: nodes ) {
2430 current.addNext( next );
2435 // jjenista - this is useful for looking at the FlatIRGraph of the
2436 // analysis entry method constructed above if you have to modify it.
2437 // The usual method of writing FlatIRGraphs out doesn't work because
2438 // this flat method is private to the model of this analysis only.
2440 // FlatIRGraph flatMethodWriter =
2441 // new FlatIRGraph( state, false, false, false );
2442 // flatMethodWriter.writeFlatIRGraph( fmAnalysisEntry, "analysisEntry" );
2443 //} catch( IOException e ) {}
// Topologically sorts the given method/task descriptors by running a DFS
// (dfsVisit) over the call graph from each undiscovered descriptor.  The
// returned list orders descriptors so leaf callees can be analyzed before
// their callers.  When determinism is requested, discovery uses a TreeSet
// with a descriptor comparator so iteration order (and thus the resulting
// sort) is reproducible across runs.
2447 protected LinkedList<Descriptor> topologicalSort(Set<Descriptor> toSort) {
2449 Set<Descriptor> discovered;
2451 if( determinismDesired ) {
2452 // use an ordered set
2453 discovered = new TreeSet<Descriptor>(dComp);
2455 // otherwise use a speedy hashset
2456 discovered = new HashSet<Descriptor>();
2459 LinkedList<Descriptor> sorted = new LinkedList<Descriptor>();
// visit every descriptor not yet reached by an earlier DFS
2461 Iterator<Descriptor> itr = toSort.iterator();
2462 while( itr.hasNext() ) {
2463 Descriptor d = itr.next();
2465 if( !discovered.contains(d) ) {
2466 dfsVisit(d, toSort, sorted, discovered);
2473 // While we're doing DFS on call graph, remember
2474 // dependencies for efficient queuing of methods
2475 // during interprocedural analysis:
2477 // a dependent of a method decriptor d for this analysis is:
2478 // 1) a method or task that invokes d
2479 // 2) in the descriptorsToAnalyze set
// DFS helper for topologicalSort: visits d's callers (restricted to toSort),
// records caller->callee dependency edges via addDependent as it goes, and
// appends d to 'sorted' after all callers are processed (post-order).
2480 protected void dfsVisit(Descriptor d,
2481 Set <Descriptor> toSort,
2482 LinkedList<Descriptor> sorted,
2483 Set <Descriptor> discovered) {
2486 // only methods have callers, tasks never do
2487 if( d instanceof MethodDescriptor ) {
2489 MethodDescriptor md = (MethodDescriptor) d;
2491 // the call graph is not aware that we have a fabricated
2492 // analysis entry that calls the program source's entry
2493 if( md == mdSourceEntry ) {
2494 if( !discovered.contains(mdAnalysisEntry) ) {
2495 addDependent(mdSourceEntry, // callee
2496 mdAnalysisEntry // caller
2498 dfsVisit(mdAnalysisEntry, toSort, sorted, discovered);
2502 // otherwise call graph guides DFS
2503 Iterator itr = callGraph.getCallerSet(md).iterator();
2504 while( itr.hasNext() ) {
2505 Descriptor dCaller = (Descriptor) itr.next();
2507 // only consider callers in the original set to analyze
2508 if( !toSort.contains(dCaller) ) {
2512 if( !discovered.contains(dCaller) ) {
2513 addDependent(md, // callee
2517 dfsVisit(dCaller, toSort, sorted, discovered);
2522 // for leaf-nodes last now!
// Schedules descriptor d for (re)analysis, unless it is already pending.
// Depending on compiler flags, pending descriptors live either on a visit
// stack or on a priority queue keyed by the precomputed topological priority.
// descriptorsToVisitSet mirrors the pending collection to make the
// membership test cheap.
2527 protected void enqueue(Descriptor d) {
2529 if( !descriptorsToVisitSet.contains(d) ) {
2531 if( state.DISJOINTDVISITSTACK ||
2532 state.DISJOINTDVISITSTACKEESONTOP
2534 descriptorsToVisitStack.add(d);
2536 } else if( state.DISJOINTDVISITPQUE ) {
// priority was assigned from the topological sort of the call graph
2537 Integer priority = mapDescriptorToPriority.get(d);
2538 descriptorsToVisitQ.add(new DescriptorQWrapper(priority,
2543 descriptorsToVisitSet.add(d);
2548 // a dependent of a method decriptor d for this analysis is:
2549 // 1) a method or task that invokes d
2550 // 2) in the descriptorsToAnalyze set
// Records that 'caller' depends on 'callee': when callee's analysis result
// changes, caller must be re-enqueued.  Lazily creates the dependent set.
2551 protected void addDependent(Descriptor callee, Descriptor caller) {
2552 Set<Descriptor> deps = mapDescriptorToSetDependents.get(callee);
2553 if( deps == null ) {
2554 deps = new HashSet<Descriptor>();
2557 mapDescriptorToSetDependents.put(callee, deps);
// Returns the set of callers that depend on 'callee', creating and caching
// an empty set on first request so callers never see null.
2560 protected Set<Descriptor> getDependents(Descriptor callee) {
2561 Set<Descriptor> deps = mapDescriptorToSetDependents.get(callee);
2562 if( deps == null ) {
2563 deps = new HashSet<Descriptor>();
2564 mapDescriptorToSetDependents.put(callee, deps);
// Returns the Initial Heap Model (IHM) contribution table for descriptor d:
// a map from each calling FlatCall site to the reach graph that caller
// contributes to d's initial context.  Lazily creates an empty table so the
// result is never null.
2570 public Hashtable<FlatCall, ReachGraph> getIHMcontributions(Descriptor d) {
2572 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
2573 mapDescriptorToIHMcontributions.get(d);
2575 if( heapsFromCallers == null ) {
2576 heapsFromCallers = new Hashtable<FlatCall, ReachGraph>();
2577 mapDescriptorToIHMcontributions.put(d, heapsFromCallers);
2580 return heapsFromCallers;
// Returns the single IHM contribution that call site fc makes to descriptor
// d's initial context.  NOTE(review): the body of the !containsKey branch is
// not visible here -- presumably it returns early (e.g. null or an empty
// graph); confirm against the full file.
2583 public ReachGraph getIHMcontribution(Descriptor d,
2586 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
2587 getIHMcontributions(d);
2589 if( !heapsFromCallers.containsKey(fc) ) {
2593 return heapsFromCallers.get(fc);
// Adds a caller's reach-graph contribution for call site fc into d's IHM
// table.  The new graph is merged with whatever was already recorded for fc
// so the initial-context input only ever grows (monotonic increase is what
// guarantees the fixed-point iteration terminates).
2597 public void addIHMcontribution(Descriptor d,
2601 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
2602 getIHMcontributions(d);
2604 // ensure inputs to initial contexts increase monotonically
2605 ReachGraph merged = new ReachGraph();
2607 merged.merge( heapsFromCallers.get( fc ) );
2609 heapsFromCallers.put( fc, merged );
// Creates a fabricated allocation site that models the heap object(s) a
// parameter variable may point at on method/task entry.  A dummy FlatNew is
// built just to carry the type and (optionally) a "param<temp>" disjoint-site
// ID; the two constructor calls differ only in that ID string.  Registers
// allocationDepth fresh heap-region-node IDs plus a summary-node ID with the
// site and with mapHrnIdToAllocSite, then (presumably, past the visible
// lines) returns the site.
2614 private AllocSite createParameterAllocSite(ReachGraph rg,
2615 TempDescriptor tempDesc,
2621 flatNew = new FlatNew(tempDesc.getType(), // type
2622 tempDesc, // param temp
2623 false, // global alloc?
2624 "param"+tempDesc // disjoint site ID string
2627 flatNew = new FlatNew(tempDesc.getType(), // type
2628 tempDesc, // param temp
2629 false, // global alloc?
2630 null // disjoint site ID string
2634 // create allocation site
2635 AllocSite as = AllocSite.factory(allocationDepth,
2637 flatNew.getDisjointId(),
// allocate fresh node IDs for each age position at this site
2640 for (int i = 0; i < allocationDepth; ++i) {
2641 Integer id = generateUniqueHeapRegionNodeID();
2642 as.setIthOldest(i, id);
2643 mapHrnIdToAllocSite.put(id, as);
2645 // the oldest node is a summary node
2646 as.setSummary(generateUniqueHeapRegionNodeID() );
// Collects the fields of the given class type that this analysis must track
// (per shouldAnalysisTrack).  Immutable types contribute no fields; the set
// is returned (past the visible lines) possibly empty, never null.
2654 private Set<FieldDescriptor> getFieldSetTobeAnalyzed(TypeDescriptor typeDesc) {
2656 Set<FieldDescriptor> fieldSet=new HashSet<FieldDescriptor>();
2657 if(!typeDesc.isImmutable()) {
2658 ClassDescriptor classDesc = typeDesc.getClassDesc();
2659 for (Iterator it = classDesc.getFields(); it.hasNext(); ) {
2660 FieldDescriptor field = (FieldDescriptor) it.next();
2661 TypeDescriptor fieldType = field.getType();
2662 if (shouldAnalysisTrack(fieldType)) {
2663 fieldSet.add(field);
// Models a multi-dimensional array field in the initial reach graph: builds a
// chain of summary heap-region nodes, one per array dimension (outermost
// first), linked by edges labeled with the field name for the first hop and
// arrayElementFieldName for the inner hops.  If the element class itself has
// tracked fields, one more summary node is appended for the element objects.
// Summary nodes are shared via mapToExistingNode so equal types reuse nodes.
// On exit, 'map' records arrayEntryNode -> deepest node so callers can hang
// element fields off the right node; returns the entry (outermost) node.
// (Several RefEdge constructor argument lists are abbreviated in this view.)
2671 private HeapRegionNode createMultiDeimensionalArrayHRN(ReachGraph rg, AllocSite alloc, HeapRegionNode srcHRN, FieldDescriptor fd, Hashtable<HeapRegionNode, HeapRegionNode> map, Hashtable<TypeDescriptor, HeapRegionNode> mapToExistingNode, ReachSet alpha) {
2673 int dimCount=fd.getType().getArrayCount();
2674 HeapRegionNode prevNode=null;
2675 HeapRegionNode arrayEntryNode=null;
// walk dimensions from outermost (i==dimCount) down to 1
2676 for(int i=dimCount; i>0; i--) {
2677 TypeDescriptor typeDesc=fd.getType().dereference(); //hack to get instance of type desc
2678 typeDesc.setArrayCount(i);
2679 TempDescriptor tempDesc=new TempDescriptor(typeDesc.getSymbol(),typeDesc);
2680 HeapRegionNode hrnSummary;
2681 if(!mapToExistingNode.containsKey(typeDesc)) {
// no summary node for this array type yet: fabricate a site and node
2686 as = createParameterAllocSite(rg, tempDesc, false);
2688 // make a new reference to allocated node
2690 rg.createNewHeapRegionNode(as.getSummary(), // id or null to generate a new one
2691 false, // single object?
2693 false, // out-of-context?
2694 as.getType(), // type
2695 as, // allocation site
2696 alpha, // inherent reach
2697 alpha, // current reach
2698 ExistPredSet.factory(rg.predTrue), // predicates
2699 tempDesc.toString() // description
2701 rg.id2hrn.put(as.getSummary(),hrnSummary);
2703 mapToExistingNode.put(typeDesc, hrnSummary);
// reuse the summary node already created for this array type
2705 hrnSummary=mapToExistingNode.get(typeDesc);
2708 if(prevNode==null) {
2709 // make a new reference between new summary node and source
2710 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
2713 fd.getSymbol(), // field name
2715 ExistPredSet.factory(rg.predTrue), // predicates
2719 rg.addRefEdge(srcHRN, hrnSummary, edgeToSummary);
2720 prevNode=hrnSummary;
2721 arrayEntryNode=hrnSummary;
2723 // make a new reference between summary nodes of array
2724 RefEdge edgeToSummary = new RefEdge(prevNode, // source
2727 arrayElementFieldName, // field name
2729 ExistPredSet.factory(rg.predTrue), // predicates
2733 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
2734 prevNode=hrnSummary;
2739 // create a new obj node if obj has at least one non-primitive field
2740 TypeDescriptor type=fd.getType();
2741 if(getFieldSetTobeAnalyzed(type).size()>0) {
2742 TypeDescriptor typeDesc=type.dereference();
2743 typeDesc.setArrayCount(0);
2744 if(!mapToExistingNode.containsKey(typeDesc)) {
2745 TempDescriptor tempDesc=new TempDescriptor(type.getSymbol(),typeDesc);
2746 AllocSite as = createParameterAllocSite(rg, tempDesc, false);
2747 // make a new reference to allocated node
2748 HeapRegionNode hrnSummary =
2749 rg.createNewHeapRegionNode(as.getSummary(), // id or null to generate a new one
2750 false, // single object?
2752 false, // out-of-context?
2754 as, // allocation site
2755 alpha, // inherent reach
2756 alpha, // current reach
2757 ExistPredSet.factory(rg.predTrue), // predicates
2758 tempDesc.toString() // description
2760 rg.id2hrn.put(as.getSummary(),hrnSummary);
2761 mapToExistingNode.put(typeDesc, hrnSummary);
// link innermost array node to the element-object summary node
2762 RefEdge edgeToSummary = new RefEdge(prevNode, // source
2765 arrayElementFieldName, // field name
2767 ExistPredSet.factory(rg.predTrue), // predicates
2770 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
2771 prevNode=hrnSummary;
// element summary node exists; add the edge only if it is not already there
2773 HeapRegionNode hrnSummary=mapToExistingNode.get(typeDesc);
2774 if(prevNode.getReferenceTo(hrnSummary, typeDesc, arrayElementFieldName)==null) {
2775 RefEdge edgeToSummary = new RefEdge(prevNode, // source
2778 arrayElementFieldName, // field name
2780 ExistPredSet.factory(rg.predTrue), // predicates
2783 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
2785 prevNode=hrnSummary;
// remember which node holds the element objects for this array entry
2789 map.put(arrayEntryNode, prevNode);
2790 return arrayEntryNode;
// Builds the initial reach graph for a task: for each task parameter it
// fabricates an allocation site and heap region node, points the parameter
// variable at it, then runs a worklist over the reachable (tracked) fields,
// creating one shared summary node per field type and connecting them with
// reference edges.  Multi-dimensional array fields are expanded via
// createMultiDeimensionalArrayHRN.  doneSet (keyed by "nodeID_field") stops
// the field expansion from looping on recursive types.
// (Some constructor argument lists are abbreviated in this view.)
2793 private ReachGraph createInitialTaskReachGraph(FlatMethod fm) {
2794 ReachGraph rg = new ReachGraph();
2795 TaskDescriptor taskDesc = fm.getTask();
2797 for (int idx = 0; idx < taskDesc.numParameters(); idx++) {
2798 Descriptor paramDesc = taskDesc.getParameter(idx);
2799 TypeDescriptor paramTypeDesc = taskDesc.getParamType(idx);
2801 // setup data structure
2802 Set<HashMap<HeapRegionNode, FieldDescriptor>> workSet =
2803 new HashSet<HashMap<HeapRegionNode, FieldDescriptor>>();
2804 Hashtable<TypeDescriptor, HeapRegionNode> mapTypeToExistingSummaryNode =
2805 new Hashtable<TypeDescriptor, HeapRegionNode>();
2806 Hashtable<HeapRegionNode, HeapRegionNode> mapToFirstDimensionArrayNode =
2807 new Hashtable<HeapRegionNode, HeapRegionNode>();
2808 Set<String> doneSet = new HashSet<String>();
2810 TempDescriptor tempDesc = fm.getParameter(idx);
// model the parameter's referent with a fabricated, flagged alloc site
2812 AllocSite as = createParameterAllocSite(rg, tempDesc, true);
2813 VariableNode lnX = rg.getVariableNodeFromTemp(tempDesc);
2814 Integer idNewest = as.getIthOldest(0);
2815 HeapRegionNode hrnNewest = rg.id2hrn.get(idNewest);
2817 // make a new reference to allocated node
2818 RefEdge edgeNew = new RefEdge(lnX, // source
2820 taskDesc.getParamType(idx), // type
2822 hrnNewest.getAlpha(), // beta
2823 ExistPredSet.factory(rg.predTrue), // predicates
2826 rg.addRefEdge(lnX, hrnNewest, edgeNew);
2828 // set-up a work set for class field
2829 ClassDescriptor classDesc = paramTypeDesc.getClassDesc();
2830 for (Iterator it = classDesc.getFields(); it.hasNext(); ) {
2831 FieldDescriptor fd = (FieldDescriptor) it.next();
2832 TypeDescriptor fieldType = fd.getType();
2833 if (shouldAnalysisTrack(fieldType)) {
// each work item is a single (source node, field) pair wrapped in a map
2834 HashMap<HeapRegionNode, FieldDescriptor> newMap = new HashMap<HeapRegionNode, FieldDescriptor>();
2835 newMap.put(hrnNewest, fd);
2836 workSet.add(newMap);
2840 int uniqueIdentifier = 0;
2841 while (!workSet.isEmpty()) {
2842 HashMap<HeapRegionNode, FieldDescriptor> map = workSet
2844 workSet.remove(map);
// unpack the singleton (source node, field) work item
2846 Set<HeapRegionNode> key = map.keySet();
2847 HeapRegionNode srcHRN = key.iterator().next();
2848 FieldDescriptor fd = map.get(srcHRN);
2849 TypeDescriptor type = fd.getType();
2850 String doneSetIdentifier = srcHRN.getIDString() + "_" + fd;
2852 if (!doneSet.contains(doneSetIdentifier)) {
2853 doneSet.add(doneSetIdentifier);
2854 if (!mapTypeToExistingSummaryNode.containsKey(type)) {
2855 // create new summary Node
2856 TempDescriptor td = new TempDescriptor("temp"
2857 + uniqueIdentifier, type);
2859 AllocSite allocSite;
2860 if(type.equals(paramTypeDesc)) {
2861 //corresponding allocsite has already been created for a parameter variable.
2864 allocSite = createParameterAllocSite(rg, td, false);
2866 String strDesc = allocSite.toStringForDOT()
2868 TypeDescriptor allocType=allocSite.getType();
2870 HeapRegionNode hrnSummary;
2871 if(allocType.isArray() && allocType.getArrayCount()>0) {
// array-typed field: expand the dimension chain instead of one node
2872 hrnSummary=createMultiDeimensionalArrayHRN(rg,allocSite,srcHRN,fd,mapToFirstDimensionArrayNode,mapTypeToExistingSummaryNode,hrnNewest.getAlpha());
2875 rg.createNewHeapRegionNode(allocSite.getSummary(), // id or null to generate a new one
2876 false, // single object?
2878 false, // out-of-context?
2879 allocSite.getType(), // type
2880 allocSite, // allocation site
2881 hrnNewest.getAlpha(), // inherent reach
2882 hrnNewest.getAlpha(), // current reach
2883 ExistPredSet.factory(rg.predTrue), // predicates
2884 strDesc // description
2886 rg.id2hrn.put(allocSite.getSummary(),hrnSummary);
2888 // make a new reference to summary node
2889 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
2892 fd.getSymbol(), // field name
2893 hrnNewest.getAlpha(), // beta
2894 ExistPredSet.factory(rg.predTrue), // predicates
2898 rg.addRefEdge(srcHRN, hrnSummary, edgeToSummary);
// share this summary node for any other field of the same type
2902 mapTypeToExistingSummaryNode.put(type, hrnSummary);
2904 // set-up a work set for fields of the class
2905 Set<FieldDescriptor> fieldTobeAnalyzed=getFieldSetTobeAnalyzed(type);
2906 for (Iterator iterator = fieldTobeAnalyzed.iterator(); iterator
2908 FieldDescriptor fieldDescriptor = (FieldDescriptor) iterator
2910 HeapRegionNode newDstHRN;
2911 if(mapToFirstDimensionArrayNode.containsKey(hrnSummary)) {
2912 //related heap region node is already exsited.
2913 newDstHRN=mapToFirstDimensionArrayNode.get(hrnSummary);
2915 newDstHRN=hrnSummary;
2917 doneSetIdentifier = newDstHRN.getIDString() + "_" + fieldDescriptor;
2918 if(!doneSet.contains(doneSetIdentifier)) {
2919 // add new work item
2920 HashMap<HeapRegionNode, FieldDescriptor> newMap =
2921 new HashMap<HeapRegionNode, FieldDescriptor>();
2922 newMap.put(newDstHRN, fieldDescriptor);
2923 workSet.add(newMap);
2928 // if there exists corresponding summary node
2929 HeapRegionNode hrnDst=mapTypeToExistingSummaryNode.get(type);
2931 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
2933 fd.getType(), // type
2934 fd.getSymbol(), // field name
2935 srcHRN.getAlpha(), // beta
2936 ExistPredSet.factory(rg.predTrue), // predicates
2939 rg.addRefEdge(srcHRN, hrnDst, edgeToSummary);
2949 // return all allocation sites in the method (there is one allocation
2950 // site per FlatNew node in a method)
// Memoized accessor: builds the per-descriptor alloc-site set on first use.
2951 private HashSet<AllocSite> getAllocationSiteSet(Descriptor d) {
2952 if( !mapDescriptorToAllocSiteSet.containsKey(d) ) {
2953 buildAllocationSiteSet(d);
2956 return mapDescriptorToAllocSiteSet.get(d);
// Walks the flat IR graph of method/task d (breadth-ish traversal over
// pm's successor edges) and records the allocation site of every FlatNew
// node it finds into mapDescriptorToAllocSiteSet.
2960 private void buildAllocationSiteSet(Descriptor d) {
2961 HashSet<AllocSite> s = new HashSet<AllocSite>();
2964 if( d instanceof MethodDescriptor ) {
2965 fm = state.getMethodFlat( (MethodDescriptor) d);
2967 assert d instanceof TaskDescriptor;
2968 fm = state.getMethodFlat( (TaskDescriptor) d);
// let the pointer-method helper normalize/prepare the IR before traversal
2970 pm.analyzeMethod(fm);
2972 // visit every node in this FlatMethod's IR graph
2973 // and make a set of the allocation sites from the
2974 // FlatNew node's visited
2975 HashSet<FlatNode> visited = new HashSet<FlatNode>();
2976 HashSet<FlatNode> toVisit = new HashSet<FlatNode>();
2979 while( !toVisit.isEmpty() ) {
2980 FlatNode n = toVisit.iterator().next();
2982 if( n instanceof FlatNew ) {
2983 s.add(getAllocSiteFromFlatNewPRIVATE( (FlatNew) n) );
2989 for( int i = 0; i < pm.numNext(n); ++i ) {
2990 FlatNode child = pm.getNext(n, i);
2991 if( !visited.contains(child) ) {
2997 mapDescriptorToAllocSiteSet.put(d, s);
// Collects allocation sites that carry a disjoint-analysis ID, starting from
// dIn and transitively following the call graph into all callees.  Uses the
// usual toVisit/visited worklist to terminate on recursive call chains.
3000 private HashSet<AllocSite> getFlaggedAllocationSites(Descriptor dIn) {
3002 HashSet<AllocSite> out = new HashSet<AllocSite>();
3003 HashSet<Descriptor> toVisit = new HashSet<Descriptor>();
3004 HashSet<Descriptor> visited = new HashSet<Descriptor>();
3008 while (!toVisit.isEmpty()) {
3009 Descriptor d = toVisit.iterator().next();
// any site in this method marked with a disjoint ID is "flagged"
3013 HashSet<AllocSite> asSet = getAllocationSiteSet(d);
3014 Iterator asItr = asSet.iterator();
3015 while (asItr.hasNext()) {
3016 AllocSite as = (AllocSite) asItr.next();
3017 if (as.getDisjointAnalysisId() != null) {
3022 // enqueue callees of this method to be searched for
3023 // allocation sites also
3024 Set callees = callGraph.getCalleeSet(d);
3025 if (callees != null) {
3026 Iterator methItr = callees.iterator();
3027 while (methItr.hasNext()) {
3028 MethodDescriptor md = (MethodDescriptor) methItr.next();
3030 if (!visited.contains(md)) {
// Private worker behind getFlaggedAllocationSitesReachableFromTask: gathers
// the allocation sites, reachable from task td through the call graph, whose
// allocated class carries flags (cd.hasFlags()).  Same worklist shape as
// getFlaggedAllocationSites, but the filter is class flags rather than a
// disjoint-analysis ID.
3041 private HashSet<AllocSite>
3042 getFlaggedAllocationSitesReachableFromTaskPRIVATE(TaskDescriptor td) {
3044 HashSet<AllocSite> asSetTotal = new HashSet<AllocSite>();
3045 HashSet<Descriptor> toVisit = new HashSet<Descriptor>();
3046 HashSet<Descriptor> visited = new HashSet<Descriptor>();
3050 // traverse this task and all methods reachable from this task
3051 while( !toVisit.isEmpty() ) {
3052 Descriptor d = toVisit.iterator().next();
3056 HashSet<AllocSite> asSet = getAllocationSiteSet(d);
3057 Iterator asItr = asSet.iterator();
3058 while( asItr.hasNext() ) {
3059 AllocSite as = (AllocSite) asItr.next();
3060 TypeDescriptor typed = as.getType();
3061 if( typed != null ) {
3062 ClassDescriptor cd = typed.getClassDesc();
3063 if( cd != null && cd.hasFlags() ) {
3069 // enqueue callees of this method to be searched for
3070 // allocation sites also
3071 Set callees = callGraph.getCalleeSet(d);
3072 if( callees != null ) {
3073 Iterator methItr = callees.iterator();
3074 while( methItr.hasNext() ) {
3075 MethodDescriptor md = (MethodDescriptor) methItr.next();
3077 if( !visited.contains(md) ) {
// Simple accessors exposing analysis state to clients.
// The set of method/task descriptors this analysis covers.
3087 public Set<Descriptor> getDescriptorsToAnalyze() {
3088 return descriptorsToAnalyze;
// The side-effects analysis computed alongside reachability.
3091 public EffectsAnalysis getEffectsAnalysis() {
3092 return effectsAnalysis;
// Completed whole-method reach graph for d, or null if none was computed.
3095 public ReachGraph getReachGraph(Descriptor d) {
3096 return mapDescriptorToCompleteReachGraph.get(d);
// Reach graph captured at entry to flat node fn, or null if not recorded.
3099 public ReachGraph getEnterReachGraph(FlatNode fn) {
3100 return fn2rgAtEnter.get(fn);
// Small mutable record passed through the call-site debugging machinery
// (decideDebugCallSite / statusDebugCallSite) to carry the per-visit flags.
3105 protected class DebugCallSiteData {
// true when the current caller/callee pair matches the configured debug pair
3106 public boolean debugCallSite;
// set once any debug-matching visit has happened
3107 public boolean didOneDebug;
// true when this visit falls in the configured capture window for DOT output
3108 public boolean writeDebugDOTs;
// true when the analysis should terminate after the last captured visit
3109 public boolean stopAfter;
3111 public DebugCallSiteData() {
3112 debugCallSite = false;
3113 didOneDebug = false;
3114 writeDebugDOTs = false;
// Decides whether the current (caller, callee) pair is the one the user asked
// to debug via the DISJOINTDEBUGCALLER/DISJOINTDEBUGCALLEE compiler options,
// and whether this particular visit lies inside the configured DOT-capture
// window.  Results are written into dcsd.
3119 protected void decideDebugCallSite( DebugCallSiteData dcsd,
3120 Descriptor taskOrMethodCaller,
3121 MethodDescriptor mdCallee ) {
3123 // all this jimma jamma to debug call sites is WELL WORTH the
3124 // effort, so so so many bugs or buggy info appears through call
// no debug pair configured: nothing to decide (early exit, presumably)
3127 if( state.DISJOINTDEBUGCALLEE == null ||
3128 state.DISJOINTDEBUGCALLER == null ) {
3133 boolean debugCalleeMatches = false;
3134 boolean debugCallerMatches = false;
// callee matched by "<ClassName>.<methodName>"-style symbol comparison
3136 ClassDescriptor cdCallee = mdCallee.getClassDesc();
3137 if( cdCallee != null ) {
3138 debugCalleeMatches =
3139 state.DISJOINTDEBUGCALLEE.equals( cdCallee.getSymbol()+
3141 mdCallee.getSymbol()
3146 if( taskOrMethodCaller instanceof MethodDescriptor ) {
3147 ClassDescriptor cdCaller = ((MethodDescriptor)taskOrMethodCaller).getClassDesc();
3148 if( cdCaller != null ) {
3149 debugCallerMatches =
3150 state.DISJOINTDEBUGCALLER.equals( cdCaller.getSymbol()+
3152 taskOrMethodCaller.getSymbol()
3156 // for bristlecone style tasks
3157 debugCallerMatches =
3158 state.DISJOINTDEBUGCALLER.equals( taskOrMethodCaller.getSymbol() );
3162 dcsd.debugCallSite = debugCalleeMatches && debugCallerMatches;
// only emit DOT files while the visit counter is inside the capture window
3165 dcsd.writeDebugDOTs =
3167 dcsd.debugCallSite &&
3169 (ReachGraph.debugCallSiteVisitCounter >=
3170 ReachGraph.debugCallSiteVisitStartCapture) &&
3172 (ReachGraph.debugCallSiteVisitCounter <
3173 ReachGraph.debugCallSiteVisitStartCapture +
3174 ReachGraph.debugCallSiteNumVisitsToCapture);
3178 if( dcsd.debugCallSite ) {
3179 dcsd.didOneDebug = true;
// After a call-site visit, reports debug status, advances the global visit
// counter, and re-evaluates whether the NEXT visit should capture DOT output
// or stop the analysis (when debugCallSiteStopAfter is set and the capture
// window has just been exhausted).
3183 protected void statusDebugCallSite( DebugCallSiteData dcsd ) {
3185 dcsd.writeDebugDOTs = false;
3186 dcsd.stopAfter = false;
3188 if( dcsd.didOneDebug ) {
3189 System.out.println(" $$$ Debug call site visit "+
3190 ReachGraph.debugCallSiteVisitCounter+
// same capture-window test as decideDebugCallSite, on the updated counter
3194 (ReachGraph.debugCallSiteVisitCounter >=
3195 ReachGraph.debugCallSiteVisitStartCapture) &&
3197 (ReachGraph.debugCallSiteVisitCounter <
3198 ReachGraph.debugCallSiteVisitStartCapture +
3199 ReachGraph.debugCallSiteNumVisitsToCapture)
3201 dcsd.writeDebugDOTs = true;
3202 System.out.println(" $$$ Capturing this call site visit $$$");
3203 if( ReachGraph.debugCallSiteStopAfter &&
3204 (ReachGraph.debugCallSiteVisitCounter ==
3205 ReachGraph.debugCallSiteVisitStartCapture +
3206 ReachGraph.debugCallSiteNumVisitsToCapture - 1)
3208 dcsd.stopAfter = true;
3212 ++ReachGraph.debugCallSiteVisitCounter;
3215 if( dcsd.stopAfter ) {
3216 System.out.println("$$$ Exiting after requested captures of call site. $$$");
3225 // get successive captures of the analysis state, use compiler
// State for the debugSnapshot facility below: which flat-method visit we are
// on, which node within the visit, and the [visitStartCapture,
// visitStartCapture+numVisitsToCapture) window in which graphs are dumped.
3227 boolean takeDebugSnapshots = false;
3228 String descSymbolDebug = null;
3229 boolean stopAfterCapture = false;
3230 int snapVisitCounter = 0;
3231 int snapNodeCounter = 0;
3232 int visitStartCapture = 0;
3233 int numVisitsToCapture = 0;
// Dumps the reach graph rg at flat node fn as a DOT file while the snapshot
// visit counter is inside the capture window.  'in' selects the
// snapNNN_MMMMin vs ...out file-name variant (graph at node entry vs exit).
3236 void debugSnapshot(ReachGraph rg, FlatNode fn, boolean in) {
// past the end of the capture window: do nothing (early exit, presumably)
3237 if( snapVisitCounter > visitStartCapture + numVisitsToCapture ) {
3245 if( snapVisitCounter >= visitStartCapture ) {
3246 System.out.println(" @@@ snapping visit="+snapVisitCounter+
3247 ", node="+snapNodeCounter+
3251 graphName = String.format("snap%03d_%04din",
3255 graphName = String.format("snap%03d_%04dout",
3260 graphName = graphName + fn;
3262 rg.writeGraph(graphName,
3263 true, // write labels (variables)
3264 true, // selectively hide intermediate temp vars
3265 true, // prune unreachable heap regions
3266 false, // hide reachability
3267 true, // hide subset reachability states
3268 true, // hide predicates
3269 true); // hide edge taints
// Points-to query: the allocation sites variable x may reference at ENTRY to
// programPoint.  Returns (presumably) null or empty when no graph was
// recorded for that node -- the early-exit body is not visible here.
3276 public Set<Alloc> canPointToAt( TempDescriptor x,
3277 FlatNode programPoint ) {
3279 ReachGraph rgAtEnter = fn2rgAtEnter.get( programPoint );
3280 if( rgAtEnter == null ) {
3284 return rgAtEnter.canPointTo( x );
// Points-to query through a field: for each object x may reference at entry
// to programPoint, the allocation sites its field f may reference.
3288 public Hashtable< Alloc, Set<Alloc> > canPointToAt( TempDescriptor x,
3290 FlatNode programPoint ) {
3292 ReachGraph rgAtEnter = fn2rgAtEnter.get( programPoint );
3293 if( rgAtEnter == null ) {
3297 return rgAtEnter.canPointTo( x, f.getSymbol(), f.getType() );
// Points-to query through an array element: x must be array-typed; uses the
// synthetic arrayElementFieldName pseudo-field and the element type.
3301 public Hashtable< Alloc, Set<Alloc> > canPointToAtElement( TempDescriptor x,
3302 FlatNode programPoint ) {
3304 ReachGraph rgAtEnter = fn2rgAtEnter.get( programPoint );
3305 if( rgAtEnter == null ) {
3309 assert x.getType() != null;
3310 assert x.getType().isArray();
3312 return rgAtEnter.canPointTo( x, arrayElementFieldName, x.getType().dereference() );
// Same as canPointToAt(x, programPoint), but queried against the reach graph
// recorded at EXIT from programPoint.
3316 public Set<Alloc> canPointToAfter( TempDescriptor x,
3317 FlatNode programPoint ) {
3319 ReachGraph rgAtExit = fn2rgAtExit.get( programPoint );
3321 if( rgAtExit == null ) {
3325 return rgAtExit.canPointTo( x );
// Field-sensitive points-to query against the graph at EXIT from programPoint.
3329 public Hashtable< Alloc, Set<Alloc> > canPointToAfter( TempDescriptor x,
3331 FlatNode programPoint ) {
3333 ReachGraph rgAtExit = fn2rgAtExit.get( programPoint );
3334 if( rgAtExit == null ) {
3338 return rgAtExit.canPointTo( x, f.getSymbol(), f.getType() );
// Array-element points-to query against the graph at EXIT from programPoint;
// x must be array-typed.
3342 public Hashtable< Alloc, Set<Alloc> > canPointToAfterElement( TempDescriptor x,
3343 FlatNode programPoint ) {
3345 ReachGraph rgAtExit = fn2rgAtExit.get( programPoint );
3346 if( rgAtExit == null ) {
3350 assert x.getType() != null;
3351 assert x.getType().isArray();
3353 return rgAtExit.canPointTo( x, arrayElementFieldName, x.getType().dereference() );
3357 // to evaluate convergence behavior
// Instrumentation counters: how many method visits and flat-node visits the
// fixed-point iteration performed in total (across all analyses in this JVM).
3358 private static long totalMethodVisits = 0;
3359 private static long totalNodeVisits = 0;