1 package Analysis.Disjoint;
3 import Analysis.CallGraph.*;
4 import Analysis.Liveness;
5 import Analysis.ArrayReferencees;
6 import Analysis.OoOJava.Accessible;
7 import Analysis.OoOJava.RBlockRelationAnalysis;
8 import Analysis.FlatIRGraph.*;
11 import IR.Tree.Modifiers;
16 public class DisjointAnalysis implements HeapAnalysis {
18 ///////////////////////////////////////////
20 // Public interface to discover possible
21 // sharing in the program under analysis
23 ///////////////////////////////////////////
25 // if an object allocated at the target site may be
26 // reachable from both an object from root1 and an
27 // object allocated at root2, return TRUE
// NOTE(review): the parameter list is truncated in this view; from the body,
// fnRoot1/fnRoot2/fnTarget are presumably FlatNew parameters — confirm.
28 public boolean mayBothReachTarget(FlatMethod fm,
33 AllocSite asr1 = getAllocationSiteFromFlatNew(fnRoot1);
34 AllocSite asr2 = getAllocationSiteFromFlatNew(fnRoot2);
// roots must be flagged sites; the analysis only answers this
// query precisely for flagged allocation sites
35 assert asr1.isFlagged();
36 assert asr2.isFlagged();
38 AllocSite ast = getAllocationSiteFromFlatNew(fnTarget);
// delegate to the method's settled reachability graph
39 ReachGraph rg = getPartial(fm.getMethod() );
41 return rg.mayBothReachTarget(asr1, asr2, ast);
44 // similar to the method above, return TRUE if ever
45 // more than one object from the root allocation site
46 // may reach an object from the target site
// NOTE(review): parameter tail is truncated here; fnRoot/fnTarget are
// presumably FlatNew parameters — confirm against the full file.
47 public boolean mayManyReachTarget(FlatMethod fm,
51 AllocSite asr = getAllocationSiteFromFlatNew(fnRoot);
// only flagged root sites are supported for this query
52 assert asr.isFlagged();
54 AllocSite ast = getAllocationSiteFromFlatNew(fnTarget);
// query the method's settled reachability graph
55 ReachGraph rg = getPartial(fm.getMethod() );
57 return rg.mayManyReachTarget(asr, ast);
// Public accessor: the set of flagged allocation sites reachable from
// the given task.  Guards against use before the analysis fixed point.
63 public HashSet<AllocSite>
64 getFlaggedAllocationSitesReachableFromTask(TaskDescriptor td) {
65 checkAnalysisComplete();
66 return getFlaggedAllocationSitesReachableFromTaskPRIVATE(td);
// Public accessor: the analysis abstraction (AllocSite) for a FlatNew.
// Guarded so clients cannot read results mid-analysis.
69 public AllocSite getAllocationSiteFromFlatNew(FlatNew fn) {
70 checkAnalysisComplete();
71 return getAllocSiteFromFlatNewPRIVATE(fn);
// Public accessor: map a heap-region-node ID back to the allocation
// site that created it.  Returns null if the ID is unknown.
74 public AllocSite getAllocationSiteFromHeapRegionNodeID(Integer id) {
75 checkAnalysisComplete();
76 return mapHrnIdToAllocSite.get(id);
// May the two indexed parameters of the task/method reach shared objects?
// NOTE(review): rg is not null-checked; presumably taskOrMethod is always
// an analyzed descriptor — confirm at call sites.
79 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
82 checkAnalysisComplete();
83 ReachGraph rg=mapDescriptorToCompleteReachGraph.get(taskOrMethod);
84 FlatMethod fm=state.getMethodFlat(taskOrMethod);
86 return rg.mayReachSharedObjects(fm, paramIndex1, paramIndex2);
// May the indexed parameter and the given allocation site reach shared objects?
89 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
90 int paramIndex, AllocSite alloc) {
91 checkAnalysisComplete();
92 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
93 FlatMethod fm=state.getMethodFlat(taskOrMethod);
95 return rg.mayReachSharedObjects(fm, paramIndex, alloc);
// Symmetric overload of the (paramIndex, alloc) query above: argument
// order is swapped here but the underlying query is identical.
98 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
99 AllocSite alloc, int paramIndex) {
100 checkAnalysisComplete();
101 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
102 FlatMethod fm=state.getMethodFlat(taskOrMethod);
104 return rg.mayReachSharedObjects(fm, paramIndex, alloc);
// May objects from the two allocation sites reach shared objects in the
// context of the given task/method's complete reach graph?
107 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
108 AllocSite alloc1, AllocSite alloc2) {
109 checkAnalysisComplete();
110 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
112 return rg.mayReachSharedObjects(alloc1, alloc2);
// Render a set of heap region nodes as a human-readable, multi-line
// string for the sharing reports below.
// NOTE(review): the declaration of the accumulator 'out' is not visible
// in this view (lines are elided); presumably a local String — confirm.
115 public String prettyPrintNodeSet(Set<HeapRegionNode> s) {
116 checkAnalysisComplete();
120 Iterator<HeapRegionNode> i = s.iterator();
121 while (i.hasNext()) {
122 HeapRegionNode n = i.next();
124 AllocSite as = n.getAllocSite();
// nodes without a verbose site description get the short form
126 out += " " + n.toString() + ",\n";
128 out += " " + n.toString() + ": " + as.toStringVerbose()
137 // use the methods given above to check every possible sharing class
138 // between task parameters and flagged allocation sites reachable
// Writes a sharing report for every task: parameter-vs-parameter,
// parameter-vs-allocation-site, and site-vs-site queries.  When
// tabularOutput is set, per-pair prose is suppressed and only the
// summary row at the end is emitted.
140 public void writeAllSharing(String outputFile,
143 boolean tabularOutput,
146 throws java.io.IOException {
147 checkAnalysisComplete();
// NOTE(review): bw is not closed in the visible lines; presumably a
// close() follows in elided code — confirm, else this leaks the writer.
149 BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile));
151 if (!tabularOutput) {
152 bw.write("Conducting ownership analysis with allocation depth = "
153 + allocationDepth + "\n");
154 bw.write(timeReport + "\n");
159 // look through every task for potential sharing
160 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
161 while (taskItr.hasNext()) {
162 TaskDescriptor td = (TaskDescriptor) taskItr.next();
164 if (!tabularOutput) {
165 bw.write("\n---------" + td + "--------\n");
168 HashSet<AllocSite> allocSites = getFlaggedAllocationSitesReachableFromTask(td);
170 Set<HeapRegionNode> common;
172 // for each task parameter, check for sharing classes with
173 // other task parameters and every allocation site
174 // reachable from this task
175 boolean foundSomeSharing = false;
177 FlatMethod fm = state.getMethodFlat(td);
178 for (int i = 0; i < fm.numParameters(); ++i) {
180 // skip parameters with types that cannot reference
182 if( !shouldAnalysisTrack(fm.getParameter(i).getType() ) ) {
186 // for the ith parameter check for sharing classes to all
187 // higher numbered parameters
188 for (int j = i + 1; j < fm.numParameters(); ++j) {
190 // skip parameters with types that cannot reference
192 if( !shouldAnalysisTrack(fm.getParameter(j).getType() ) ) {
197 common = hasPotentialSharing(td, i, j);
198 if (!common.isEmpty()) {
199 foundSomeSharing = true;
201 if (!tabularOutput) {
202 bw.write("Potential sharing between parameters " + i
203 + " and " + j + ".\n");
204 bw.write(prettyPrintNodeSet(common) + "\n");
209 // for the ith parameter, check for sharing classes against
210 // the set of allocation sites reachable from this
212 Iterator allocItr = allocSites.iterator();
213 while (allocItr.hasNext()) {
214 AllocSite as = (AllocSite) allocItr.next();
215 common = hasPotentialSharing(td, i, as);
216 if (!common.isEmpty()) {
217 foundSomeSharing = true;
219 if (!tabularOutput) {
220 bw.write("Potential sharing between parameter " + i
221 + " and " + as.getFlatNew() + ".\n");
222 bw.write(prettyPrintNodeSet(common) + "\n");
228 // for each allocation site check for sharing classes with
229 // other allocation sites in the context of execution
// outerChecked avoids reporting each unordered site pair twice
231 HashSet<AllocSite> outerChecked = new HashSet<AllocSite>();
232 Iterator allocItr1 = allocSites.iterator();
233 while (allocItr1.hasNext()) {
234 AllocSite as1 = (AllocSite) allocItr1.next();
236 Iterator allocItr2 = allocSites.iterator();
237 while (allocItr2.hasNext()) {
238 AllocSite as2 = (AllocSite) allocItr2.next();
240 if (!outerChecked.contains(as2)) {
241 common = hasPotentialSharing(td, as1, as2);
243 if (!common.isEmpty()) {
244 foundSomeSharing = true;
246 if (!tabularOutput) {
247 bw.write("Potential sharing between "
248 + as1.getFlatNew() + " and "
249 + as2.getFlatNew() + ".\n");
250 bw.write(prettyPrintNodeSet(common) + "\n");
256 outerChecked.add(as1);
259 if (!foundSomeSharing) {
260 if (!tabularOutput) {
261 bw.write("No sharing between flagged objects in Task " + td
// tabular mode: emit one LaTeX-style table row per run
269 bw.write(" & " + numSharing + " & " + justTime + " & " + numLines
270 + " & " + numMethodsAnalyzed() + " \\\\\n");
272 bw.write("\nNumber sharing classes: "+numSharing);
280 // this version of writeAllSharing is for Java programs that have no tasks
281 // ***********************************
282 // WARNING: THIS DOES NOT DO THE RIGHT THING, REPORTS 0 ALWAYS!
283 // It should use mayBothReachTarget and mayManyReachTarget like
284 // OoOJava does to query analysis results
285 // ***********************************
// Compares every pair of flagged allocation sites reachable from main.
// See the WARNING above: the query path used here is known-broken.
286 public void writeAllSharingJava(String outputFile,
289 boolean tabularOutput,
292 throws java.io.IOException {
293 checkAnalysisComplete();
// NOTE(review): no close() for bw is visible here; presumably in elided
// lines — confirm.
299 BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile));
301 bw.write("Conducting disjoint reachability analysis with allocation depth = "
302 + allocationDepth + "\n");
303 bw.write(timeReport + "\n\n");
305 boolean foundSomeSharing = false;
// for pure-Java programs the only root context is the source main method
307 Descriptor d = typeUtil.getMain();
308 HashSet<AllocSite> allocSites = getFlaggedAllocationSites(d);
310 // for each allocation site check for sharing classes with
311 // other allocation sites in the context of execution
// outerChecked avoids reporting each unordered site pair twice
313 HashSet<AllocSite> outerChecked = new HashSet<AllocSite>();
314 Iterator allocItr1 = allocSites.iterator();
315 while (allocItr1.hasNext()) {
316 AllocSite as1 = (AllocSite) allocItr1.next();
318 Iterator allocItr2 = allocSites.iterator();
319 while (allocItr2.hasNext()) {
320 AllocSite as2 = (AllocSite) allocItr2.next();
322 if (!outerChecked.contains(as2)) {
323 Set<HeapRegionNode> common = hasPotentialSharing(d,
326 if (!common.isEmpty()) {
327 foundSomeSharing = true;
328 bw.write("Potential sharing between "
329 + as1.getDisjointAnalysisId() + " and "
330 + as2.getDisjointAnalysisId() + ".\n");
331 bw.write(prettyPrintNodeSet(common) + "\n");
337 outerChecked.add(as1);
340 if (!foundSomeSharing) {
341 bw.write("No sharing classes between flagged objects found.\n");
343 bw.write("\nNumber sharing classes: "+numSharing);
346 bw.write("Number of methods analyzed: "+numMethodsAnalyzed()+"\n");
// Alloc abstraction for the synthesized command-line args array.
353 public Alloc getCmdLineArgsAlloc() {
354 return getAllocationSiteFromFlatNew( constructedCmdLineArgsNew );
// Alloc abstraction for a single synthesized command-line arg string.
356 public Alloc getCmdLineArgAlloc() {
357 return getAllocationSiteFromFlatNew( constructedCmdLineArgNew );
// Alloc abstraction for the byte array backing a command-line arg string.
359 public Alloc getCmdLineArgBytesAlloc() {
360 return getAllocationSiteFromFlatNew( constructedCmdLineArgBytesNew );
// Alloc abstraction modeling runtime allocation of string literals.
362 public Alloc getNewStringLiteralAlloc() {
363 return newStringLiteralAlloc;
// Alloc abstraction modeling the byte array of a string literal.
365 public Alloc getNewStringLiteralBytesAlloc() {
366 return newStringLiteralBytesAlloc;
369 ///////////////////////////////////////////
371 // end public interface
373 ///////////////////////////////////////////
// Guard called by every public query method: fail fast if a client
// asks for results before the interprocedural fixed point is reached.
377 protected void checkAnalysisComplete() {
378 if( !analysisComplete ) {
379 throw new Error("Warning: public interface method called while analysis is running.");
388 // run in faster mode, only when bugs wrung out!
// NOTE(review): several of these flags are static and mutable, so two
// DisjointAnalysis instances in one VM would interfere — confirm intended.
389 public static boolean releaseMode;
391 // use command line option to set this, analysis
392 // should attempt to be deterministic
393 public static boolean determinismDesired;
395 // when we want to enforce determinism in the
396 // analysis we need to sort descriptors rather
397 // than toss them in efficient sets, use this
398 public static DescriptorComparator dComp =
399 new DescriptorComparator();
402 // data from the compiler
404 public CallGraph callGraph;
405 public Liveness liveness;
406 public ArrayReferencees arrayReferencees;
407 public RBlockRelationAnalysis rblockRel;
408 public TypeUtil typeUtil;
409 public int allocationDepth;
411 protected boolean doEffectsAnalysis = false;
412 protected EffectsAnalysis effectsAnalysis;
413 protected BuildStateMachines buildStateMachines;
415 protected boolean doDefiniteReachAnalysis = false;
416 protected DefiniteReachAnalysis definiteReachAnalysis;
419 // data structure for public interface
420 private Hashtable< Descriptor, HashSet<AllocSite> >
421 mapDescriptorToAllocSiteSet;
424 // for public interface methods to warn that they
425 // are grabbing results during analysis
426 private boolean analysisComplete;
429 // used to identify HeapRegionNode objects
430 // A unique ID equates an object in one
431 // ownership graph with an object in another
432 // graph that logically represents the same
434 // start at 10 and increment to reserve some
435 // IDs for special purposes
436 static protected int uniqueIDcount = 10;
439 // An out-of-scope method created by the
440 // analysis that has no parameters, and
441 // appears to allocate the command line
442 // arguments, then invoke the source code's
443 // main method. The purpose of this is to
444 // provide the analysis with an explicit
445 // top-level context with no parameters
446 protected MethodDescriptor mdAnalysisEntry;
447 protected FlatMethod fmAnalysisEntry;
449 // main method defined by source program
450 protected MethodDescriptor mdSourceEntry;
452 // the set of task and/or method descriptors
453 // reachable in call graph
454 protected Set<Descriptor>
455 descriptorsToAnalyze;
457 // current descriptors to visit in fixed-point
458 // interprocedural analysis, prioritized by
459 // dependency in the call graph
460 protected Stack<Descriptor>
461 descriptorsToVisitStack;
462 protected PriorityQueue<DescriptorQWrapper>
465 // a duplication of the above structure, but
466 // for efficient testing of inclusion
467 protected HashSet<Descriptor>
468 descriptorsToVisitSet;
470 // storage for priorities (doesn't make sense)
471 // to add it to the Descriptor class, just in
473 protected Hashtable<Descriptor, Integer>
474 mapDescriptorToPriority;
476 // when analyzing a method and scheduling more:
477 // remember set of callee's enqueued for analysis
478 // so they can be put on top of the callers in
479 // the stack-visit mode
480 protected Set<Descriptor>
483 // maps a descriptor to its current partial result
484 // from the intraprocedural fixed-point analysis--
485 // then the interprocedural analysis settles, this
486 // mapping will have the final results for each
488 protected Hashtable<Descriptor, ReachGraph>
489 mapDescriptorToCompleteReachGraph;
491 // maps a descriptor to its known dependents: namely
492 // methods or tasks that call the descriptor's method
493 // AND are part of this analysis (reachable from main)
494 protected Hashtable< Descriptor, Set<Descriptor> >
495 mapDescriptorToSetDependents;
497 // if the analysis client wants to flag allocation sites
498 // programmatically, it should provide a set of FlatNew
499 // statements--this may be null if unneeded
500 protected Set<FlatNew> sitesToFlag;
502 // maps each flat new to one analysis abstraction
503 // allocate site object, these exist outside reach graphs
504 protected Hashtable<FlatNew, AllocSite>
505 mapFlatNewToAllocSite;
507 // maps intergraph heap region IDs to intergraph
508 // allocation sites that created them, a redundant
509 // structure for efficiency in some operations
510 protected Hashtable<Integer, AllocSite>
513 // maps a method to its initial heap model (IHM) that
514 // is the set of reachability graphs from every caller
515 // site, all merged together. The reason that we keep
516 // them separate is that any one call site's contribution
517 // to the IHM may change along the path to the fixed point
518 protected Hashtable< Descriptor, Hashtable< FlatCall, ReachGraph > >
519 mapDescriptorToIHMcontributions;
521 // additionally, keep a mapping from descriptors to the
522 // merged in-coming initial context, because we want this
523 // initial context to be STRICTLY MONOTONIC
524 protected Hashtable<Descriptor, ReachGraph>
525 mapDescriptorToInitialContext;
527 // mapping of current partial results for a given node. Note that
528 // to reanalyze a method we discard all partial results because a
529 // null reach graph indicates the node needs to be visited on the
530 // way to the fixed point.
531 // The reason for a persistent mapping is so after the analysis we
532 // can ask for the graph of any node at the fixed point, but this
533 // option is only enabled with a compiler flag.
534 protected Hashtable<FlatNode, ReachGraph> mapFlatNodeToReachGraphPersist;
535 protected Hashtable<FlatNode, ReachGraph> mapFlatNodeToReachGraph;
538 // make the result for back edges analysis-wide STRICTLY
539 // MONOTONIC as well, but notice we use FlatNode as the
540 // key for this map: in case we want to consider other
541 // nodes as back edge's in future implementations
542 protected Hashtable<FlatNode, ReachGraph>
543 mapBackEdgeToMonotone;
// synthetic field name used to model array element references
546 public static final String arrayElementFieldName = "___element_";
547 static protected Hashtable<TypeDescriptor, FieldDescriptor>
551 protected boolean suppressOutput;
553 // for controlling DOT file output
554 protected boolean writeFinalDOTs;
555 protected boolean writeAllIncrementalDOTs;
557 // supporting DOT output--when we want to write every
558 // partial method result, keep a tally for generating
560 protected Hashtable<Descriptor, Integer>
561 mapDescriptorToNumUpdates;
563 //map task descriptor to initial task parameter
564 protected Hashtable<Descriptor, ReachGraph>
565 mapDescriptorToReachGraph;
567 protected PointerMethod pm;
569 //Keeps track of all the reach graphs at every program point
570 //DO NOT USE UNLESS YOU REALLY NEED IT
571 static protected Hashtable<FlatNode, ReachGraph> fn2rgAtEnter =
572 new Hashtable<FlatNode, ReachGraph>();
574 static protected Hashtable<FlatNode, ReachGraph> fn2rgAtExit =
575 new Hashtable<FlatNode, ReachGraph>();
578 private Hashtable<FlatCall, Descriptor> fc2enclosing;
580 Accessible accessible;
583 // we construct an entry method of flat nodes complete
584 // with a new allocation site to model the command line
585 // args creation just for the analysis, so remember that
586 // allocation site. Later in code gen we might want to
587 // know if something is pointing to the cmd line args
588 // and we can verify by checking the allocation site field.
589 protected FlatNew constructedCmdLineArgsNew;
590 protected FlatNew constructedCmdLineArgNew;
591 protected FlatNew constructedCmdLineArgBytesNew;
593 // similar to above, the runtime allocates new strings
594 // for literal nodes, so make up an alloc to model that
595 protected AllocSite newStringLiteralAlloc;
596 protected AllocSite newStringLiteralBytesAlloc;
598 // both of the above need the descriptor of the field
599 // for the String's value field to reference by the
600 // byte array from the string object
601 protected TypeDescriptor stringType;
602 protected TypeDescriptor stringBytesType;
603 protected FieldDescriptor stringBytesField;
// Build the analysis model of implicit string allocation: look up the
// String class and its value field, then fabricate FlatNew nodes (and
// AllocSites) standing in for runtime allocation of string literals
// and their backing byte arrays.
606 protected void initImplicitStringsModel() {
608 ClassDescriptor cdString = typeUtil.getClass( typeUtil.StringClass );
609 assert cdString != null;
613 new TypeDescriptor( cdString );
616 new TypeDescriptor(TypeDescriptor.CHAR).makeArray( state );
// locate the String value field by symbol name
619 stringBytesField = null;
620 Iterator sFieldsItr = cdString.getFields();
621 while( sFieldsItr.hasNext() ) {
622 FieldDescriptor fd = (FieldDescriptor) sFieldsItr.next();
623 if( fd.getSymbol().equals( typeUtil.StringClassValueField ) ) {
624 stringBytesField = fd;
628 assert stringBytesField != null;
// the temps below exist only so the fabricated FlatNew nodes are
// well-formed; they are never used by generated code
631 TempDescriptor throwAway1 =
632 new TempDescriptor("stringLiteralTemp_dummy1",
635 FlatNew fnStringLiteral =
636 new FlatNew(stringType,
640 newStringLiteralAlloc
641 = getAllocSiteFromFlatNewPRIVATE( fnStringLiteral );
644 TempDescriptor throwAway2 =
645 new TempDescriptor("stringLiteralTemp_dummy2",
648 FlatNew fnStringLiteralBytes =
649 new FlatNew(stringBytesType,
653 newStringLiteralBytesAlloc
654 = getAllocSiteFromFlatNewPRIVATE( fnStringLiteralBytes );
660 // allocate various structures that are not local
661 // to a single class method--should be done once
// Called once from init(); creates every analysis-wide container.
// Scheduling containers are allocated only for the configured
// descriptor-visiting mode (stack vs. priority queue).
662 protected void allocateStructures() {
664 if( determinismDesired ) {
665 // use an ordered set
666 descriptorsToAnalyze = new TreeSet<Descriptor>(dComp);
668 // otherwise use a speedy hashset
669 descriptorsToAnalyze = new HashSet<Descriptor>();
672 mapDescriptorToCompleteReachGraph =
673 new Hashtable<Descriptor, ReachGraph>();
675 mapDescriptorToNumUpdates =
676 new Hashtable<Descriptor, Integer>();
678 mapDescriptorToSetDependents =
679 new Hashtable< Descriptor, Set<Descriptor> >();
681 mapFlatNewToAllocSite =
682 new Hashtable<FlatNew, AllocSite>();
684 mapDescriptorToIHMcontributions =
685 new Hashtable< Descriptor, Hashtable< FlatCall, ReachGraph > >();
687 mapDescriptorToInitialContext =
688 new Hashtable<Descriptor, ReachGraph>();
690 mapFlatNodeToReachGraphPersist =
691 new Hashtable<FlatNode, ReachGraph>();
693 mapBackEdgeToMonotone =
694 new Hashtable<FlatNode, ReachGraph>();
696 mapHrnIdToAllocSite =
697 new Hashtable<Integer, AllocSite>();
699 mapTypeToArrayField =
700 new Hashtable <TypeDescriptor, FieldDescriptor>();
702 if( state.DISJOINTDVISITSTACK ||
703 state.DISJOINTDVISITSTACKEESONTOP
705 descriptorsToVisitStack =
706 new Stack<Descriptor>();
709 if( state.DISJOINTDVISITPQUE ) {
710 descriptorsToVisitQ =
711 new PriorityQueue<DescriptorQWrapper>();
714 descriptorsToVisitSet =
715 new HashSet<Descriptor>();
717 mapDescriptorToPriority =
718 new Hashtable<Descriptor, Integer>();
721 new HashSet<Descriptor>();
723 mapDescriptorToAllocSiteSet =
724 new Hashtable<Descriptor, HashSet<AllocSite> >();
726 mapDescriptorToReachGraph =
727 new Hashtable<Descriptor, ReachGraph>();
729 pm = new PointerMethod();
731 fc2enclosing = new Hashtable<FlatCall, Descriptor>();
736 // this analysis generates a disjoint reachability
737 // graph for every reachable method in the program
// Three overloaded constructors; each simply delegates to init() with
// defaults for the arguments it does not take (bsm=null, and/or
// suppressOutput=false).  Middle parameters are elided in this view.
738 public DisjointAnalysis(State s,
743 Set<FlatNew> sitesToFlag,
744 RBlockRelationAnalysis rra
746 init(s, tu, cg, l, ar, sitesToFlag, rra, null, false);
// overload: caller controls output suppression
749 public DisjointAnalysis(State s,
754 Set<FlatNew> sitesToFlag,
755 RBlockRelationAnalysis rra,
756 boolean suppressOutput
758 init(s, tu, cg, l, ar, sitesToFlag, rra, null, suppressOutput);
// overload: caller also supplies a BuildStateMachines client
761 public DisjointAnalysis(State s,
766 Set<FlatNew> sitesToFlag,
767 RBlockRelationAnalysis rra,
768 BuildStateMachines bsm,
769 boolean suppressOutput
771 init(s, tu, cg, l, ar, sitesToFlag, rra, bsm, suppressOutput);
// Shared constructor body: wires in compiler inputs, configures the
// analysis from State flags, seeds ReachGraph static configuration,
// runs the interprocedural fixed point, then emits requested reports.
774 protected void init(State state,
778 ArrayReferencees arrayReferencees,
779 Set<FlatNew> sitesToFlag,
780 RBlockRelationAnalysis rra,
781 BuildStateMachines bsm,
782 boolean suppressOutput
785 analysisComplete = false;
788 this.typeUtil = typeUtil;
789 this.callGraph = callGraph;
790 this.liveness = liveness;
791 this.arrayReferencees = arrayReferencees;
792 this.sitesToFlag = sitesToFlag;
793 this.rblockRel = rra;
794 this.suppressOutput = suppressOutput;
795 this.buildStateMachines = bsm;
// a non-null rblock relation means we are in OoOJava mode: also run
// effects and accessibility analyses
797 if( rblockRel != null ) {
798 doEffectsAnalysis = true;
799 effectsAnalysis = new EffectsAnalysis();
801 EffectsAnalysis.state = state;
802 EffectsAnalysis.buildStateMachines = buildStateMachines;
804 //note: instead of reachgraph's isAccessible, using the result of accessible analysis
805 //since accessible gives us more accurate results
806 accessible=new Accessible(state, callGraph, rra, liveness);
807 accessible.doAnalysis();
810 this.allocationDepth = state.DISJOINTALLOCDEPTH;
811 this.releaseMode = state.DISJOINTRELEASEMODE;
812 this.determinismDesired = state.DISJOINTDETERMINISM;
814 this.writeFinalDOTs = state.DISJOINTWRITEDOTS && !state.DISJOINTWRITEALL;
815 this.writeAllIncrementalDOTs = state.DISJOINTWRITEDOTS && state.DISJOINTWRITEALL;
// debug snapshot configuration (capture graphs for one symbol)
817 this.takeDebugSnapshots = state.DISJOINTSNAPSYMBOL != null;
818 this.descSymbolDebug = state.DISJOINTSNAPSYMBOL;
819 this.visitStartCapture = state.DISJOINTSNAPVISITTOSTART;
820 this.numVisitsToCapture = state.DISJOINTSNAPNUMVISITS;
821 this.stopAfterCapture = state.DISJOINTSNAPSTOPAFTER;
822 this.snapVisitCounter = 1; // count visits from 1 (user will write 1, means 1st visit)
823 this.snapNodeCounter = 0; // count nodes from 0
// exactly one descriptor-visiting mode must be selected
826 state.DISJOINTDVISITSTACK ||
827 state.DISJOINTDVISITPQUE ||
828 state.DISJOINTDVISITSTACKEESONTOP;
829 assert !(state.DISJOINTDVISITSTACK && state.DISJOINTDVISITPQUE);
830 assert !(state.DISJOINTDVISITSTACK && state.DISJOINTDVISITSTACKEESONTOP);
831 assert !(state.DISJOINTDVISITPQUE && state.DISJOINTDVISITSTACKEESONTOP);
833 // set some static configuration for ReachGraphs
834 ReachGraph.allocationDepth = allocationDepth;
835 ReachGraph.typeUtil = typeUtil;
836 ReachGraph.state = state;
838 ReachGraph.initOutOfScopeTemps();
840 ReachGraph.debugCallSiteVisitStartCapture
841 = state.DISJOINTDEBUGCALLVISITTOSTART;
843 ReachGraph.debugCallSiteNumVisitsToCapture
844 = state.DISJOINTDEBUGCALLNUMVISITS;
846 ReachGraph.debugCallSiteStopAfter
847 = state.DISJOINTDEBUGCALLSTOPAFTER;
849 ReachGraph.debugCallSiteVisitCounter
850 = 0; // count visits from 1, is incremented before first visit
852 if( state.DO_DEFINITE_REACH_ANALYSIS ) {
853 doDefiniteReachAnalysis = true;
854 definiteReachAnalysis = new DefiniteReachAnalysis();
858 if( suppressOutput ) {
859 System.out.println("* Running disjoint reachability analysis with output suppressed! *");
863 allocateStructures();
865 initImplicitStringsModel();
869 double timeStartAnalysis = (double) System.nanoTime();
871 // start interprocedural fixed-point computation
// NOTE(review): the caught IOException is discarded rather than chained
// as the Error's cause — consider new Error(msg, e) when editing the
// full file.
874 } catch( IOException e ) {
875 throw new Error("IO Exception while writing disjointness analysis output.");
878 analysisComplete=true;
// nanoseconds -> seconds
880 double timeEndAnalysis = (double) System.nanoTime();
881 double dt = (timeEndAnalysis - timeStartAnalysis)/(Math.pow(10.0, 9.0) );
884 if( sitesToFlag != null ) {
885 treport = String.format("Disjoint reachability analysis flagged %d sites and took %.3f sec.", sitesToFlag.size(), dt);
886 if(sitesToFlag.size()>0) {
887 treport+="\nFlagged sites:"+"\n"+sitesToFlag.toString();
890 treport = String.format("Disjoint reachability analysis took %.3f sec.", dt);
892 String justtime = String.format("%.2f", dt);
893 System.out.println(treport);
// optional outputs gated by compiler flags
897 if( writeFinalDOTs && !writeAllIncrementalDOTs ) {
901 if( state.DISJOINTWRITEIHMS ) {
905 if( state.DISJOINTWRITEINITCONTEXTS ) {
906 writeInitialContexts();
909 if( state.DISJOINT_WRITE_ALL_NODE_FINAL_GRAPHS ) {
910 writeFinalGraphsForEveryNode();
913 if( state.DISJOINTALIASFILE != null && !suppressOutput ) {
915 writeAllSharing(state.DISJOINTALIASFILE, treport, justtime, state.DISJOINTALIASTAB, state.lines);
917 writeAllSharingJava(state.DISJOINTALIASFILE,
920 state.DISJOINTALIASTAB,
927 buildStateMachines.writeStateMachines();
930 } catch( IOException e ) {
931 throw new Error("IO Exception while writing disjointness analysis output.");
// True while the configured worklist (stack or priority queue) still
// has descriptors scheduled for analysis.
936 protected boolean moreDescriptorsToVisit() {
937 if( state.DISJOINTDVISITSTACK ||
938 state.DISJOINTDVISITSTACKEESONTOP
940 return !descriptorsToVisitStack.isEmpty();
942 } else if( state.DISJOINTDVISITPQUE ) {
943 return !descriptorsToVisitQ.isEmpty();
// unreachable if init() validated the mode flags; defensive anyway
946 throw new Error("Neither descriptor visiting mode set");
950 // fixed-point computation over the call graph--when a
951 // method's callees are updated, it must be reanalyzed
// Drives the interprocedural fixed point: seeds the worklist from task
// roots (Bamboo mode) or main (Java mode), then repeatedly analyzes a
// descriptor and re-enqueues its dependents whenever its complete
// reach graph changes.
952 protected void analyzeMethods() throws java.io.IOException {
954 // task or non-task (java) mode determines what the roots
955 // of the call chain are, and establishes the set of methods
956 // reachable from the roots that will be analyzed
959 if( !suppressOutput ) {
960 System.out.println("Bamboo mode...");
963 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
964 while( taskItr.hasNext() ) {
965 TaskDescriptor td = (TaskDescriptor) taskItr.next();
966 if( !descriptorsToAnalyze.contains(td) ) {
967 // add all methods transitively reachable from the
969 descriptorsToAnalyze.add(td);
970 descriptorsToAnalyze.addAll(callGraph.getAllMethods(td) );
975 if( !suppressOutput ) {
976 System.out.println("Java mode...");
979 // add all methods transitively reachable from the
980 // source's main to set for analysis
981 mdSourceEntry = typeUtil.getMain();
982 descriptorsToAnalyze.add(mdSourceEntry);
983 descriptorsToAnalyze.addAll(callGraph.getAllMethods(mdSourceEntry) );
985 // fabricate an empty calling context that will call
986 // the source's main, but call graph doesn't know
987 // about it, so explicitly add it
988 makeAnalysisEntryMethod(mdSourceEntry);
989 descriptorsToAnalyze.add(mdAnalysisEntry);
994 // now, depending on the interprocedural mode for visiting
995 // methods, set up the needed data structures
997 if( state.DISJOINTDVISITPQUE ) {
999 // topologically sort according to the call graph so
1000 // leaf calls are last, helps build contexts up first
1001 LinkedList<Descriptor> sortedDescriptors =
1002 topologicalSort(descriptorsToAnalyze);
1004 // add sorted descriptors to priority queue, and duplicate
1005 // the queue as a set for efficiently testing whether some
1006 // method is marked for analysis
1008 Iterator<Descriptor> dItr;
1010 // for the priority queue, give items at the head
1011 // of the sorted list a low number (highest priority)
1012 while( !sortedDescriptors.isEmpty() ) {
1013 Descriptor d = sortedDescriptors.removeFirst();
1014 mapDescriptorToPriority.put(d, new Integer(p) );
1015 descriptorsToVisitQ.add(new DescriptorQWrapper(p, d) );
1016 descriptorsToVisitSet.add(d);
1020 } else if( state.DISJOINTDVISITSTACK ||
1021 state.DISJOINTDVISITSTACKEESONTOP
1023 // if we're doing the stack scheme, just throw the root
1024 // method or tasks on the stack
1026 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
1027 while( taskItr.hasNext() ) {
1028 TaskDescriptor td = (TaskDescriptor) taskItr.next();
1029 descriptorsToVisitStack.add(td);
1030 descriptorsToVisitSet.add(td);
1034 descriptorsToVisitStack.add(mdAnalysisEntry);
1035 descriptorsToVisitSet.add(mdAnalysisEntry);
1039 throw new Error("Unknown method scheduling mode");
1043 // analyze scheduled methods until there are no more to visit
1044 while( moreDescriptorsToVisit() ) {
1045 Descriptor d = null;
1047 if( state.DISJOINTDVISITSTACK ||
1048 state.DISJOINTDVISITSTACKEESONTOP
1050 d = descriptorsToVisitStack.pop();
1052 } else if( state.DISJOINTDVISITPQUE ) {
1053 d = descriptorsToVisitQ.poll().getDescriptor();
// keep the shadow membership set consistent with the worklist
1056 assert descriptorsToVisitSet.contains(d);
1057 descriptorsToVisitSet.remove(d);
1059 // because the task or method descriptor just extracted
1060 // was in the "to visit" set it either hasn't been analyzed
1061 // yet, or some method that it depends on has been
1062 // updated. Recompute a complete reachability graph for
1063 // this task/method and compare it to any previous result.
1064 // If there is a change detected, add any methods/tasks
1065 // that depend on this one to the "to visit" set.
1067 if( !suppressOutput ) {
1068 System.out.println("Analyzing " + d);
1071 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1072 assert calleesToEnqueue.isEmpty();
1075 ReachGraph rg = analyzeMethod(d);
1076 ReachGraph rgPrev = getPartial(d);
1078 if( !rg.equals(rgPrev) ) {
1081 if( state.DISJOINTDEBUGSCHEDULING ) {
1082 System.out.println(" complete graph changed, scheduling callers for analysis:");
1085 // results for d changed, so enqueue dependents
1086 // of d for further analysis
1087 Iterator<Descriptor> depsItr = getDependents(d).iterator();
1088 while( depsItr.hasNext() ) {
1089 Descriptor dNext = depsItr.next();
1092 if( state.DISJOINTDEBUGSCHEDULING ) {
1093 System.out.println(" "+dNext);
1098 // whether or not the method under analysis changed,
1099 // we may have some callees that are scheduled for
1100 // more analysis, and they should go on the top of
1101 // the stack now (in other method-visiting modes they
1102 // are already enqueued at this point
1103 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1104 Iterator<Descriptor> depsItr = calleesToEnqueue.iterator();
1105 while( depsItr.hasNext() ) {
1106 Descriptor dNext = depsItr.next();
1109 calleesToEnqueue.clear();
1115 protected ReachGraph analyzeMethod(Descriptor d)
1116 throws java.io.IOException {
1118 // get the flat code for this descriptor
1120 if( d == mdAnalysisEntry ) {
1121 fm = fmAnalysisEntry;
1123 fm = state.getMethodFlat(d);
1125 pm.analyzeMethod(fm);
1127 // intraprocedural work set
1128 Set<FlatNode> flatNodesToVisit = new HashSet<FlatNode>();
1129 flatNodesToVisit.add(fm);
1131 // if determinism is desired by client, shadow the
1132 // set with a queue to make visit order deterministic
1133 Queue<FlatNode> flatNodesToVisitQ = null;
1134 if( determinismDesired ) {
1135 flatNodesToVisitQ = new LinkedList<FlatNode>();
1136 flatNodesToVisitQ.add(fm);
1139 // start a new mapping of partial results
1140 mapFlatNodeToReachGraph =
1141 new Hashtable<FlatNode, ReachGraph>();
1143 // the set of return nodes partial results that will be combined as
1144 // the final, conservative approximation of the entire method
1145 HashSet<FlatReturnNode> setReturns = new HashSet<FlatReturnNode>();
1149 boolean snapThisMethod = false;
1150 if( takeDebugSnapshots && d instanceof MethodDescriptor ) {
1151 MethodDescriptor mdThisMethod = (MethodDescriptor)d;
1152 ClassDescriptor cdThisMethod = mdThisMethod.getClassDesc();
1153 if( cdThisMethod != null ) {
1155 descSymbolDebug.equals( cdThisMethod.getSymbol()+
1157 mdThisMethod.getSymbol()
1164 while( !flatNodesToVisit.isEmpty() ) {
1167 if( determinismDesired ) {
1168 assert !flatNodesToVisitQ.isEmpty();
1169 fn = flatNodesToVisitQ.remove();
1171 fn = flatNodesToVisit.iterator().next();
1173 flatNodesToVisit.remove(fn);
1175 // effect transfer function defined by this node,
1176 // then compare it to the old graph at this node
1177 // to see if anything was updated.
1179 ReachGraph rg = new ReachGraph();
1180 TaskDescriptor taskDesc;
1181 if(fn instanceof FlatMethod && (taskDesc=((FlatMethod)fn).getTask())!=null) {
1182 if(mapDescriptorToReachGraph.containsKey(taskDesc)) {
1183 // retrieve existing reach graph if it is not first time
1184 rg=mapDescriptorToReachGraph.get(taskDesc);
1186 // create initial reach graph for a task
1187 rg=createInitialTaskReachGraph((FlatMethod)fn);
1189 mapDescriptorToReachGraph.put(taskDesc, rg);
1193 // start by merging all node's parents' graphs
1194 for( int i = 0; i < pm.numPrev(fn); ++i ) {
1195 FlatNode pn = pm.getPrev(fn,i);
1196 if( mapFlatNodeToReachGraph.containsKey(pn) ) {
1197 ReachGraph rgParent = mapFlatNodeToReachGraph.get(pn);
1203 if( snapThisMethod ) {
1204 debugSnapshot(rg, fn, true);
1208 // modify rg with appropriate transfer function
1209 rg = analyzeFlatNode(d, fm, fn, setReturns, rg);
1212 if( snapThisMethod ) {
1213 debugSnapshot(rg, fn, false);
1218 // if the results of the new graph are different from
1219 // the current graph at this node, replace the graph
1220 // with the update and enqueue the children
1221 ReachGraph rgPrev = mapFlatNodeToReachGraph.get(fn);
1222 if( !rg.equals(rgPrev) ) {
1223 mapFlatNodeToReachGraph.put(fn, rg);
1225 // we don't necessarily want to keep the reach graph for every
1226 // node in the program unless a client or the user wants it
1227 if( state.DISJOINT_WRITE_ALL_NODE_FINAL_GRAPHS ) {
1228 mapFlatNodeToReachGraphPersist.put(fn, rg);
1231 for( int i = 0; i < pm.numNext(fn); i++ ) {
1232 FlatNode nn = pm.getNext(fn, i);
1234 flatNodesToVisit.add(nn);
1235 if( determinismDesired ) {
1236 flatNodesToVisitQ.add(nn);
1243 // end by merging all return nodes into a complete
1244 // reach graph that represents all possible heap
1245 // states after the flat method returns
1246 ReachGraph completeGraph = new ReachGraph();
1248 if( setReturns.isEmpty() ) {
1249 System.out.println( "d = "+d );
1252 assert !setReturns.isEmpty();
1253 Iterator retItr = setReturns.iterator();
1254 while( retItr.hasNext() ) {
1255 FlatReturnNode frn = (FlatReturnNode) retItr.next();
1257 assert mapFlatNodeToReachGraph.containsKey(frn);
1258 ReachGraph rgRet = mapFlatNodeToReachGraph.get(frn);
1260 completeGraph.merge(rgRet);
1264 if( snapThisMethod ) {
1265 // increment that we've visited the debug snap
1266 // method, and reset the node counter
1267 System.out.println(" @@@ debug snap at visit "+snapVisitCounter);
1269 snapNodeCounter = 0;
1271 if( snapVisitCounter == visitStartCapture + numVisitsToCapture &&
1274 System.out.println("!!! Stopping analysis after debug snap captures. !!!");
1280 return completeGraph;
1284 protected ReachGraph
1285 analyzeFlatNode(Descriptor d,
1286 FlatMethod fmContaining,
1288 HashSet<FlatReturnNode> setRetNodes,
1290 ) throws java.io.IOException {
1293 // any variables that are no longer live should be
1294 // nullified in the graph to reduce edges
1295 //rg.nullifyDeadVars( liveness.getLiveInTemps( fmContaining, fn ) );
1299 FieldDescriptor fld;
1300 TypeDescriptor tdElement;
1301 FieldDescriptor fdElement;
1302 FlatSESEEnterNode sese;
1303 FlatSESEExitNode fsexn;
1305 Set<EdgeKey> edgeKeysForLoad;
1306 Set<EdgeKey> edgeKeysRemoved;
1307 Set<EdgeKey> edgeKeysAdded;
1309 //Stores the flatnode's reach graph at enter
1310 ReachGraph rgOnEnter = new ReachGraph();
1311 rgOnEnter.merge(rg);
1312 fn2rgAtEnter.put(fn, rgOnEnter);
1316 // use node type to decide what transfer function
1317 // to apply to the reachability graph
1318 switch( fn.kind() ) {
1320 case FKind.FlatGenReachNode: {
1321 FlatGenReachNode fgrn = (FlatGenReachNode) fn;
1323 System.out.println(" Generating reach graph for program point: "+fgrn.getGraphName() );
1326 rg.writeGraph("genReach"+fgrn.getGraphName(),
1327 true, // write labels (variables)
1328 true, // selectively hide intermediate temp vars
1329 true, // prune unreachable heap regions
1330 false, // hide reachability altogether
1331 true, // hide subset reachability states
1332 true, // hide predicates
1333 true); //false); // hide edge taints
1337 case FKind.FlatGenDefReachNode: {
1338 FlatGenDefReachNode fgdrn = (FlatGenDefReachNode) fn;
1339 if( doDefiniteReachAnalysis ) {
1340 definiteReachAnalysis.writeState( fn, fgdrn.getOutputName() );
1345 case FKind.FlatMethod: {
1346 // construct this method's initial heap model (IHM)
1347 // since we're working on the FlatMethod, we know
1348 // the incoming ReachGraph 'rg' is empty
1350 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
1351 getIHMcontributions(d);
1353 Set entrySet = heapsFromCallers.entrySet();
1354 Iterator itr = entrySet.iterator();
1355 while( itr.hasNext() ) {
1356 Map.Entry me = (Map.Entry)itr.next();
1357 FlatCall fc = (FlatCall) me.getKey();
1358 ReachGraph rgContrib = (ReachGraph) me.getValue();
1360 // note that "fc.getMethod()" like (Object.toString)
1361 // might not be equal to "d" like (String.toString)
1362 // because the mapping gets set up when we resolve
1364 rg.merge(rgContrib);
1367 // additionally, we are enforcing STRICT MONOTONICITY for the
1368 // method's initial context, so grow the context by whatever
1369 // the previously computed context was, and put the most
1370 // up-to-date context back in the map
1371 ReachGraph rgPrevContext = mapDescriptorToInitialContext.get(d);
1372 rg.merge(rgPrevContext);
1373 mapDescriptorToInitialContext.put(d, rg);
1375 if( doDefiniteReachAnalysis ) {
1376 FlatMethod fm = (FlatMethod) fn;
1377 Set<TempDescriptor> params = new HashSet<TempDescriptor>();
1378 for( int i = 0; i < fm.numParameters(); ++i ) {
1379 params.add( fm.getParameter( i ) );
1381 definiteReachAnalysis.methodEntry( fn, params );
1385 case FKind.FlatOpNode:
1386 FlatOpNode fon = (FlatOpNode) fn;
1387 if( fon.getOp().getOp() == Operation.ASSIGN ) {
1388 lhs = fon.getDest();
1389 rhs = fon.getLeft();
1391 // before transfer, do effects analysis support
1392 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1393 if(rblockRel.isPotentialStallSite(fn)) {
1394 // x gets status of y
1395 if(!accessible.isAccessible(fn, rhs)) {
1396 rg.makeInaccessible(lhs);
1402 rg.assignTempXEqualToTempY(lhs, rhs);
1404 if( doDefiniteReachAnalysis ) {
1405 definiteReachAnalysis.copy( fn, lhs, rhs );
1410 case FKind.FlatCastNode:
1411 FlatCastNode fcn = (FlatCastNode) fn;
1415 TypeDescriptor td = fcn.getType();
1418 // before transfer, do effects analysis support
1419 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1420 if(rblockRel.isPotentialStallSite(fn)) {
1421 // x gets status of y
1422 if(!accessible.isAccessible(fn,rhs)) {
1423 rg.makeInaccessible(lhs);
1429 rg.assignTempXEqualToCastedTempY(lhs, rhs, td);
1431 if( doDefiniteReachAnalysis ) {
1432 definiteReachAnalysis.copy( fn, lhs, rhs );
1436 case FKind.FlatFieldNode:
1437 FlatFieldNode ffn = (FlatFieldNode) fn;
1441 fld = ffn.getField();
1443 // before graph transform, possible inject
1444 // a stall-site taint
1445 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1447 if(rblockRel.isPotentialStallSite(fn)) {
1448 // x=y.f, stall y if not accessible
1449 // contributes read effects on stall site of y
1450 if(!accessible.isAccessible(fn,rhs)) {
1451 rg.taintStallSite(fn, rhs);
1454 // after this, x and y are accessbile.
1455 rg.makeAccessible(lhs);
1456 rg.makeAccessible(rhs);
1460 edgeKeysForLoad = null;
1461 if( doDefiniteReachAnalysis ) {
1462 edgeKeysForLoad = new HashSet<EdgeKey>();
1465 if( shouldAnalysisTrack(fld.getType() ) ) {
1467 rg.assignTempXEqualToTempYFieldF( lhs, rhs, fld, fn, edgeKeysForLoad );
1469 if( doDefiniteReachAnalysis ) {
1470 definiteReachAnalysis.load( fn, lhs, rhs, fld, edgeKeysForLoad );
1474 // after transfer, use updated graph to
1475 // do effects analysis
1476 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1477 effectsAnalysis.analyzeFlatFieldNode(rg, rhs, fld, fn);
1481 case FKind.FlatSetFieldNode:
1482 FlatSetFieldNode fsfn = (FlatSetFieldNode) fn;
1484 lhs = fsfn.getDst();
1485 fld = fsfn.getField();
1486 rhs = fsfn.getSrc();
1488 boolean strongUpdate = false;
1490 edgeKeysRemoved = null;
1491 edgeKeysAdded = null;
1492 if( doDefiniteReachAnalysis ) {
1493 edgeKeysRemoved = new HashSet<EdgeKey>();
1494 edgeKeysAdded = new HashSet<EdgeKey>();
1497 // before transfer func, possibly inject
1498 // stall-site taints
1499 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1501 if(rblockRel.isPotentialStallSite(fn)) {
1502 // x.y=f , stall x and y if they are not accessible
1503 // also contribute write effects on stall site of x
1504 if(!accessible.isAccessible(fn,lhs)) {
1505 rg.taintStallSite(fn, lhs);
1508 if(!accessible.isAccessible(fn,rhs)) {
1509 rg.taintStallSite(fn, rhs);
1512 // accessible status update
1513 rg.makeAccessible(lhs);
1514 rg.makeAccessible(rhs);
1518 if( shouldAnalysisTrack(fld.getType() ) ) {
1520 strongUpdate = rg.assignTempXFieldFEqualToTempY( lhs,
1526 if( doDefiniteReachAnalysis ) {
1527 definiteReachAnalysis.store( fn,
1536 // use transformed graph to do effects analysis
1537 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1538 effectsAnalysis.analyzeFlatSetFieldNode(rg, lhs, fld, fn, strongUpdate);
1542 case FKind.FlatElementNode:
1543 FlatElementNode fen = (FlatElementNode) fn;
1548 assert rhs.getType() != null;
1549 assert rhs.getType().isArray();
1551 tdElement = rhs.getType().dereference();
1552 fdElement = getArrayField(tdElement);
1554 // before transfer func, possibly inject
1556 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1557 if(rblockRel.isPotentialStallSite(fn)) {
1558 // x=y.f, stall y if not accessible
1559 // contributes read effects on stall site of y
1560 // after this, x and y are accessbile.
1561 if(!accessible.isAccessible(fn,rhs)) {
1562 rg.taintStallSite(fn, rhs);
1565 rg.makeAccessible(lhs);
1566 rg.makeAccessible(rhs);
1570 edgeKeysForLoad = null;
1571 if( doDefiniteReachAnalysis ) {
1572 edgeKeysForLoad = new HashSet<EdgeKey>();
1575 if( shouldAnalysisTrack(lhs.getType() ) ) {
1577 rg.assignTempXEqualToTempYFieldF( lhs, rhs, fdElement, fn, edgeKeysForLoad );
1579 if( doDefiniteReachAnalysis ) {
1580 definiteReachAnalysis.load( fn, lhs, rhs, fdElement, edgeKeysForLoad );
1584 // use transformed graph to do effects analysis
1585 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1586 effectsAnalysis.analyzeFlatFieldNode(rg, rhs, fdElement, fn);
1590 case FKind.FlatSetElementNode:
1591 FlatSetElementNode fsen = (FlatSetElementNode) fn;
1593 lhs = fsen.getDst();
1594 rhs = fsen.getSrc();
1596 assert lhs.getType() != null;
1597 assert lhs.getType().isArray();
1599 tdElement = lhs.getType().dereference();
1600 fdElement = getArrayField(tdElement);
1602 edgeKeysRemoved = null;
1603 edgeKeysAdded = null;
1604 if( doDefiniteReachAnalysis ) {
1605 edgeKeysRemoved = new HashSet<EdgeKey>();
1606 edgeKeysAdded = new HashSet<EdgeKey>();
1609 // before transfer func, possibly inject
1610 // stall-site taints
1611 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1613 if(rblockRel.isPotentialStallSite(fn)) {
1614 // x.y=f , stall x and y if they are not accessible
1615 // also contribute write effects on stall site of x
1616 if(!accessible.isAccessible(fn,lhs)) {
1617 rg.taintStallSite(fn, lhs);
1620 if(!accessible.isAccessible(fn,rhs)) {
1621 rg.taintStallSite(fn, rhs);
1624 // accessible status update
1625 rg.makeAccessible(lhs);
1626 rg.makeAccessible(rhs);
1630 if( shouldAnalysisTrack(rhs.getType() ) ) {
1631 // transfer func, BUT
1632 // skip this node if it cannot create new reachability paths
1633 if( !arrayReferencees.doesNotCreateNewReaching(fsen) ) {
1634 rg.assignTempXFieldFEqualToTempY( lhs,
1642 if( doDefiniteReachAnalysis ) {
1643 definiteReachAnalysis.store( fn,
1652 // use transformed graph to do effects analysis
1653 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1654 effectsAnalysis.analyzeFlatSetFieldNode(rg, lhs, fdElement, fn,
1660 FlatNew fnn = (FlatNew) fn;
1662 if( shouldAnalysisTrack(lhs.getType() ) ) {
1663 AllocSite as = getAllocSiteFromFlatNewPRIVATE(fnn);
1665 // before transform, support effects analysis
1666 if (doEffectsAnalysis && fmContaining != fmAnalysisEntry) {
1667 if (rblockRel.isPotentialStallSite(fn)) {
1668 // after creating new object, lhs is accessible
1669 rg.makeAccessible(lhs);
1674 rg.assignTempEqualToNewAlloc(lhs, as);
1676 if( doDefiniteReachAnalysis ) {
1677 definiteReachAnalysis.newObject( fn, lhs );
1683 case FKind.FlatLiteralNode:
1684 // BIG NOTE: this transfer function is only here for
1685 // points-to information for String literals. That's it.
1686 // Effects and disjoint reachability and all of that don't
1687 // care about references to literals.
1688 FlatLiteralNode fln = (FlatLiteralNode) fn;
1690 if( fln.getType().equals( stringType ) ) {
1691 rg.assignTempEqualToStringLiteral( fln.getDst(),
1692 newStringLiteralAlloc,
1693 newStringLiteralBytesAlloc,
1699 case FKind.FlatSESEEnterNode:
1700 sese = (FlatSESEEnterNode) fn;
1702 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1704 // always remove ALL stall site taints at enter
1705 rg.removeAllStallSiteTaints();
1707 // inject taints for in-set vars
1708 rg.taintInSetVars(sese);
1713 case FKind.FlatSESEExitNode:
1714 fsexn = (FlatSESEExitNode) fn;
1715 sese = fsexn.getFlatEnter();
1717 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1719 // @ sese exit make all live variables
1720 // inaccessible to later parent statements
1721 rg.makeInaccessible(liveness.getLiveInTemps(fmContaining, fn) );
1723 // always remove ALL stall site taints at exit
1724 rg.removeAllStallSiteTaints();
1726 // remove in-set var taints for the exiting rblock
1727 rg.removeInContextTaints(sese);
1732 case FKind.FlatCall: {
1733 Descriptor mdCaller;
1734 if( fmContaining.getMethod() != null ) {
1735 mdCaller = fmContaining.getMethod();
1737 mdCaller = fmContaining.getTask();
1739 FlatCall fc = (FlatCall) fn;
1740 MethodDescriptor mdCallee = fc.getMethod();
1741 FlatMethod fmCallee = state.getMethodFlat(mdCallee);
1744 if( doDefiniteReachAnalysis ) {
1745 definiteReachAnalysis.methodCall( fn, fc.getReturnTemp() );
1749 // the transformation for a call site should update the
1750 // current heap abstraction with any effects from the callee,
1751 // or if the method is virtual, the effects from any possible
1752 // callees, so find the set of callees...
1753 Set<MethodDescriptor> setPossibleCallees;
1754 if( determinismDesired ) {
1755 // use an ordered set
1756 setPossibleCallees = new TreeSet<MethodDescriptor>(dComp);
1758 // otherwise use a speedy hashset
1759 setPossibleCallees = new HashSet<MethodDescriptor>();
1762 if( mdCallee.isStatic() ) {
1763 setPossibleCallees.add(mdCallee);
1765 TypeDescriptor typeDesc = fc.getThis().getType();
1766 setPossibleCallees.addAll(callGraph.getMethods(mdCallee,
1772 DebugCallSiteData dcsd = new DebugCallSiteData();
1774 ReachGraph rgMergeOfPossibleCallers = new ReachGraph();
1777 Iterator<MethodDescriptor> mdItr = setPossibleCallees.iterator();
1778 while( mdItr.hasNext() ) {
1779 MethodDescriptor mdPossible = mdItr.next();
1780 FlatMethod fmPossible = state.getMethodFlat(mdPossible);
1782 addDependent(mdPossible, // callee
1786 // decide for each possible resolution of the method whether we
1787 // want to debug this call site
1788 decideDebugCallSite( dcsd, mdCaller, mdPossible );
1792 // calculate the heap this call site can reach--note this is
1793 // not used for the current call site transform, we are
1794 // grabbing this heap model for future analysis of the callees,
1795 // so if different results emerge we will return to this site
1796 ReachGraph heapForThisCall_old =
1797 getIHMcontribution(mdPossible, fc);
1799 // the computation of the callee-reachable heap
1800 // is useful for making the callee starting point
1801 // and for applying the call site transfer function
1802 Set<Integer> callerNodeIDsCopiedToCallee =
1803 new HashSet<Integer>();
1806 ReachGraph heapForThisCall_cur =
1807 rg.makeCalleeView(fc,
1809 callerNodeIDsCopiedToCallee,
1814 // enforce that a call site contribution can only
1815 // monotonically increase
1816 heapForThisCall_cur.merge(heapForThisCall_old);
1818 if( !heapForThisCall_cur.equals(heapForThisCall_old) ) {
1819 // if heap at call site changed, update the contribution,
1820 // and reschedule the callee for analysis
1821 addIHMcontribution(mdPossible, fc, heapForThisCall_cur);
1823 // map a FlatCall to its enclosing method/task descriptor
1824 // so we can write that info out later
1825 fc2enclosing.put(fc, mdCaller);
1827 if( state.DISJOINTDEBUGSCHEDULING ) {
1828 System.out.println(" context changed at callsite: "+fc+", scheduling callee: "+mdPossible);
1831 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1832 calleesToEnqueue.add(mdPossible);
1834 enqueue(mdPossible);
1841 // don't alter the working graph (rg) until we compute a
1842 // result for every possible callee, merge them all together,
1843 // then set rg to that
1844 ReachGraph rgPossibleCaller = new ReachGraph();
1845 rgPossibleCaller.merge(rg);
1847 ReachGraph rgPossibleCallee = getPartial(mdPossible);
1849 if( rgPossibleCallee == null ) {
1850 // if this method has never been analyzed just schedule it
1851 // for analysis and skip over this call site for now
1852 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1853 calleesToEnqueue.add(mdPossible);
1855 enqueue(mdPossible);
1858 if( state.DISJOINTDEBUGSCHEDULING ) {
1859 System.out.println(" callee hasn't been analyzed, scheduling: "+mdPossible);
1865 // calculate the method call transform
1866 rgPossibleCaller.resolveMethodCall(fc,
1869 callerNodeIDsCopiedToCallee,
1874 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1875 if( !accessible.isAccessible(fn, ReachGraph.tdReturn) ) {
1876 rgPossibleCaller.makeInaccessible(fc.getReturnTemp() );
1882 rgMergeOfPossibleCallers.merge(rgPossibleCaller);
1887 statusDebugCallSite( dcsd );
1891 // now that we've taken care of building heap models for
1892 // callee analysis, finish this transformation
1893 rg = rgMergeOfPossibleCallers;
1896 // jjenista: what is this? It breaks compilation
1897 // of programs with no tasks/SESEs/rblocks...
1898 //XXXXXXXXXXXXXXXXXXXXXXXXX
1899 //need to consider more
1900 if( state.OOOJAVA ) {
1901 FlatNode nextFN=fmCallee.getNext(0);
1902 if( nextFN instanceof FlatSESEEnterNode ) {
1903 FlatSESEEnterNode calleeSESE=(FlatSESEEnterNode)nextFN;
1904 if(!calleeSESE.getIsLeafSESE()) {
1905 rg.makeInaccessible(liveness.getLiveInTemps(fmContaining, fn) );
1913 case FKind.FlatReturnNode:
1914 FlatReturnNode frn = (FlatReturnNode) fn;
1915 rhs = frn.getReturnTemp();
1917 // before transfer, do effects analysis support
1918 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1919 if(!accessible.isAccessible(fn,rhs)) {
1920 rg.makeInaccessible(ReachGraph.tdReturn);
1924 if( rhs != null && shouldAnalysisTrack(rhs.getType() ) ) {
1925 rg.assignReturnEqualToTemp(rhs);
1928 setRetNodes.add(frn);
1934 // dead variables were removed before the above transfer function
1935 // was applied, so eliminate heap regions and edges that are no
1936 // longer part of the abstractly-live heap graph, and sweep up
1937 // and reachability effects that are altered by the reduction
1938 //rg.abstractGarbageCollect();
1942 // back edges are strictly monotonic
1943 if( pm.isBackEdge(fn) ) {
1944 ReachGraph rgPrevResult = mapBackEdgeToMonotone.get(fn);
1945 rg.merge(rgPrevResult);
1946 mapBackEdgeToMonotone.put(fn, rg);
1950 ReachGraph rgOnExit = new ReachGraph();
1952 fn2rgAtExit.put(fn, rgOnExit);
1956 // at this point rg should be the correct update
1957 // by an above transfer function, or untouched if
1958 // the flat node type doesn't affect the heap
1964 // this method should generate integers strictly greater than zero!
1965 // special "shadow" regions are made from a heap region by negating
1967 static public Integer generateUniqueHeapRegionNodeID() {
1969 return new Integer(uniqueIDcount);
1974 static public FieldDescriptor getArrayField(TypeDescriptor tdElement) {
1975 FieldDescriptor fdElement = mapTypeToArrayField.get(tdElement);
1976 if( fdElement == null ) {
1977 fdElement = new FieldDescriptor(new Modifiers(Modifiers.PUBLIC),
1979 arrayElementFieldName,
1982 mapTypeToArrayField.put(tdElement, fdElement);
1989 private void writeFinalGraphs() {
1990 Set entrySet = mapDescriptorToCompleteReachGraph.entrySet();
1991 Iterator itr = entrySet.iterator();
1992 while( itr.hasNext() ) {
1993 Map.Entry me = (Map.Entry)itr.next();
1994 Descriptor d = (Descriptor) me.getKey();
1995 ReachGraph rg = (ReachGraph) me.getValue();
1998 if( d instanceof TaskDescriptor ) {
1999 graphName = "COMPLETEtask"+d;
2001 graphName = "COMPLETE"+d;
2004 rg.writeGraph(graphName,
2005 true, // write labels (variables)
2006 true, // selectively hide intermediate temp vars
2007 true, // prune unreachable heap regions
2008 true, // hide reachability altogether
2009 true, // hide subset reachability states
2010 true, // hide predicates
2011 false); // hide edge taints
2015 private void writeFinalIHMs() {
2016 Iterator d2IHMsItr = mapDescriptorToIHMcontributions.entrySet().iterator();
2017 while( d2IHMsItr.hasNext() ) {
2018 Map.Entry me1 = (Map.Entry)d2IHMsItr.next();
2019 Descriptor d = (Descriptor) me1.getKey();
2020 Hashtable<FlatCall, ReachGraph> IHMs = (Hashtable<FlatCall, ReachGraph>)me1.getValue();
2022 Iterator fc2rgItr = IHMs.entrySet().iterator();
2023 while( fc2rgItr.hasNext() ) {
2024 Map.Entry me2 = (Map.Entry)fc2rgItr.next();
2025 FlatCall fc = (FlatCall) me2.getKey();
2026 ReachGraph rg = (ReachGraph) me2.getValue();
2028 rg.writeGraph("IHMPARTFOR"+d+"FROM"+fc2enclosing.get(fc)+fc,
2029 true, // write labels (variables)
2030 true, // selectively hide intermediate temp vars
2031 true, // hide reachability altogether
2032 true, // prune unreachable heap regions
2033 true, // hide subset reachability states
2034 false, // hide predicates
2035 true); // hide edge taints
2040 private void writeInitialContexts() {
2041 Set entrySet = mapDescriptorToInitialContext.entrySet();
2042 Iterator itr = entrySet.iterator();
2043 while( itr.hasNext() ) {
2044 Map.Entry me = (Map.Entry)itr.next();
2045 Descriptor d = (Descriptor) me.getKey();
2046 ReachGraph rg = (ReachGraph) me.getValue();
2048 rg.writeGraph("INITIAL"+d,
2049 true, // write labels (variables)
2050 true, // selectively hide intermediate temp vars
2051 true, // prune unreachable heap regions
2052 false, // hide all reachability
2053 true, // hide subset reachability states
2054 true, // hide predicates
2055 false); // hide edge taints
2059 private void writeFinalGraphsForEveryNode() {
2060 Set entrySet = mapFlatNodeToReachGraphPersist.entrySet();
2061 Iterator itr = entrySet.iterator();
2062 while( itr.hasNext() ) {
2063 Map.Entry me = (Map.Entry) itr.next();
2064 FlatNode fn = (FlatNode) me.getKey();
2065 ReachGraph rg = (ReachGraph) me.getValue();
2067 rg.writeGraph("NODEFINAL"+fn,
2068 true, // write labels (variables)
2069 false, // selectively hide intermediate temp vars
2070 true, // prune unreachable heap regions
2071 true, // hide all reachability
2072 true, // hide subset reachability states
2073 true, // hide predicates
2074 true); // hide edge taints
2079 protected ReachGraph getPartial(Descriptor d) {
2080 return mapDescriptorToCompleteReachGraph.get(d);
2083 protected void setPartial(Descriptor d, ReachGraph rg) {
2084 mapDescriptorToCompleteReachGraph.put(d, rg);
2086 // when the flag for writing out every partial
2087 // result is set, we should spit out the graph,
2088 // but in order to give it a unique name we need
2089 // to track how many partial results for this
2090 // descriptor we've already written out
2091 if( writeAllIncrementalDOTs ) {
2092 if( !mapDescriptorToNumUpdates.containsKey(d) ) {
2093 mapDescriptorToNumUpdates.put(d, new Integer(0) );
2095 Integer n = mapDescriptorToNumUpdates.get(d);
2098 if( d instanceof TaskDescriptor ) {
2099 graphName = d+"COMPLETEtask"+String.format("%05d", n);
2101 graphName = d+"COMPLETE"+String.format("%05d", n);
2104 rg.writeGraph(graphName,
2105 true, // write labels (variables)
2106 true, // selectively hide intermediate temp vars
2107 true, // prune unreachable heap regions
2108 false, // hide all reachability
2109 true, // hide subset reachability states
2110 false, // hide predicates
2111 false); // hide edge taints
2113 mapDescriptorToNumUpdates.put(d, n + 1);
2119 // return just the allocation site associated with one FlatNew node
2120 protected AllocSite getAllocSiteFromFlatNewPRIVATE(FlatNew fnew) {
2122 boolean flagProgrammatically = false;
2123 if( sitesToFlag != null && sitesToFlag.contains(fnew) ) {
2124 flagProgrammatically = true;
2127 if( !mapFlatNewToAllocSite.containsKey(fnew) ) {
2128 AllocSite as = AllocSite.factory(allocationDepth,
2130 fnew.getDisjointId(),
2131 flagProgrammatically
2134 // the newest nodes are single objects
2135 for( int i = 0; i < allocationDepth; ++i ) {
2136 Integer id = generateUniqueHeapRegionNodeID();
2137 as.setIthOldest(i, id);
2138 mapHrnIdToAllocSite.put(id, as);
2141 // the oldest node is a summary node
2142 as.setSummary(generateUniqueHeapRegionNodeID() );
2144 mapFlatNewToAllocSite.put(fnew, as);
2147 return mapFlatNewToAllocSite.get(fnew);
2151 public static boolean shouldAnalysisTrack(TypeDescriptor type) {
2152 // don't track primitive types, but an array
2153 // of primitives is heap memory
2154 if( type.isImmutable() ) {
2155 return type.isArray();
2158 // everything else is an object
2162 protected int numMethodsAnalyzed() {
2163 return descriptorsToAnalyze.size();
2169 // Take in source entry which is the program's compiled entry and
2170 // create a new analysis entry, a method that takes no parameters
2171 // and appears to allocate the command line arguments and call the
2172 // source entry with them. The purpose of this analysis entry is
2173 // to provide a top-level method context with no parameters left.
2174 protected void makeAnalysisEntryMethod(MethodDescriptor mdSourceEntry) {
2176 Modifiers mods = new Modifiers();
2177 mods.addModifier(Modifiers.PUBLIC);
2178 mods.addModifier(Modifiers.STATIC);
2180 TypeDescriptor returnType = new TypeDescriptor(TypeDescriptor.VOID);
2182 this.mdAnalysisEntry =
2183 new MethodDescriptor(mods,
2185 "analysisEntryMethod"
2188 TypeDescriptor argsType = mdSourceEntry.getParamType(0);
2189 TempDescriptor cmdLineArgs =
2190 new TempDescriptor("analysisEntryTemp_args",
2194 new FlatNew(argsType,
2198 this.constructedCmdLineArgsNew = fnArgs;
2200 TypeDescriptor argType = argsType.dereference();
2201 TempDescriptor anArg =
2202 new TempDescriptor("analysisEntryTemp_arg",
2206 new FlatNew(argType,
2210 this.constructedCmdLineArgNew = fnArg;
2212 TypeDescriptor typeIndex = new TypeDescriptor(TypeDescriptor.INT);
2213 TempDescriptor index =
2214 new TempDescriptor("analysisEntryTemp_index",
2217 FlatLiteralNode fli =
2218 new FlatLiteralNode(typeIndex,
2223 FlatSetElementNode fse =
2224 new FlatSetElementNode(cmdLineArgs,
2229 TypeDescriptor typeSize = new TypeDescriptor(TypeDescriptor.INT);
2230 TempDescriptor sizeBytes =
2231 new TempDescriptor("analysisEntryTemp_size",
2234 FlatLiteralNode fls =
2235 new FlatLiteralNode(typeSize,
2240 TempDescriptor strBytes =
2241 new TempDescriptor("analysisEntryTemp_strBytes",
2245 new FlatNew(stringBytesType,
2250 this.constructedCmdLineArgBytesNew = fnBytes;
2252 FlatSetFieldNode fsf =
2253 new FlatSetFieldNode(anArg,
2258 // throw this in so you can always see what the initial heap context
2259 // looks like if you want to, its cheap
2260 FlatGenReachNode fgen = new FlatGenReachNode( "argContext" );
2262 TempDescriptor[] sourceEntryArgs = new TempDescriptor[1];
2263 sourceEntryArgs[0] = cmdLineArgs;
2265 new FlatCall(mdSourceEntry,
2271 FlatReturnNode frn = new FlatReturnNode(null);
2273 FlatExit fe = new FlatExit();
2275 this.fmAnalysisEntry =
2276 new FlatMethod(mdAnalysisEntry,
2280 List<FlatNode> nodes = new LinkedList<FlatNode>();
2281 nodes.add( fnArgs );
2286 nodes.add( fnBytes );
2293 FlatNode current = this.fmAnalysisEntry;
2294 for( FlatNode next: nodes ) {
2295 current.addNext( next );
2300 // jjenista - this is useful for looking at the FlatIRGraph of the
2301 // analysis entry method constructed above if you have to modify it.
2302 // The usual method of writing FlatIRGraphs out doesn't work because
2303 // this flat method is private to the model of this analysis only.
2305 // FlatIRGraph flatMethodWriter =
2306 // new FlatIRGraph( state, false, false, false );
2307 // flatMethodWriter.writeFlatIRGraph( fmAnalysisEntry, "analysisEntry" );
2308 //} catch( IOException e ) {}
2312 protected LinkedList<Descriptor> topologicalSort(Set<Descriptor> toSort) {
2314 Set<Descriptor> discovered;
2316 if( determinismDesired ) {
2317 // use an ordered set
2318 discovered = new TreeSet<Descriptor>(dComp);
2320 // otherwise use a speedy hashset
2321 discovered = new HashSet<Descriptor>();
2324 LinkedList<Descriptor> sorted = new LinkedList<Descriptor>();
2326 Iterator<Descriptor> itr = toSort.iterator();
2327 while( itr.hasNext() ) {
2328 Descriptor d = itr.next();
2330 if( !discovered.contains(d) ) {
2331 dfsVisit(d, toSort, sorted, discovered);
2338 // While we're doing DFS on call graph, remember
2339 // dependencies for efficient queuing of methods
2340 // during interprocedural analysis:
2342 // a dependent of a method decriptor d for this analysis is:
2343 // 1) a method or task that invokes d
2344 // 2) in the descriptorsToAnalyze set
2345 protected void dfsVisit(Descriptor d,
2346 Set <Descriptor> toSort,
2347 LinkedList<Descriptor> sorted,
2348 Set <Descriptor> discovered) {
2351 // only methods have callers, tasks never do
2352 if( d instanceof MethodDescriptor ) {
2354 MethodDescriptor md = (MethodDescriptor) d;
2356 // the call graph is not aware that we have a fabricated
2357 // analysis entry that calls the program source's entry
2358 if( md == mdSourceEntry ) {
2359 if( !discovered.contains(mdAnalysisEntry) ) {
2360 addDependent(mdSourceEntry, // callee
2361 mdAnalysisEntry // caller
2363 dfsVisit(mdAnalysisEntry, toSort, sorted, discovered);
2367 // otherwise call graph guides DFS
2368 Iterator itr = callGraph.getCallerSet(md).iterator();
2369 while( itr.hasNext() ) {
2370 Descriptor dCaller = (Descriptor) itr.next();
2372 // only consider callers in the original set to analyze
2373 if( !toSort.contains(dCaller) ) {
2377 if( !discovered.contains(dCaller) ) {
2378 addDependent(md, // callee
2382 dfsVisit(dCaller, toSort, sorted, discovered);
2387 // for leaf-nodes last now!
2392 protected void enqueue(Descriptor d) {
2394 if( !descriptorsToVisitSet.contains(d) ) {
2396 if( state.DISJOINTDVISITSTACK ||
2397 state.DISJOINTDVISITSTACKEESONTOP
2399 descriptorsToVisitStack.add(d);
2401 } else if( state.DISJOINTDVISITPQUE ) {
2402 Integer priority = mapDescriptorToPriority.get(d);
2403 descriptorsToVisitQ.add(new DescriptorQWrapper(priority,
2408 descriptorsToVisitSet.add(d);
2413 // a dependent of a method decriptor d for this analysis is:
2414 // 1) a method or task that invokes d
2415 // 2) in the descriptorsToAnalyze set
2416 protected void addDependent(Descriptor callee, Descriptor caller) {
2417 Set<Descriptor> deps = mapDescriptorToSetDependents.get(callee);
2418 if( deps == null ) {
2419 deps = new HashSet<Descriptor>();
2422 mapDescriptorToSetDependents.put(callee, deps);
2425 protected Set<Descriptor> getDependents(Descriptor callee) {
2426 Set<Descriptor> deps = mapDescriptorToSetDependents.get(callee);
2427 if( deps == null ) {
2428 deps = new HashSet<Descriptor>();
2429 mapDescriptorToSetDependents.put(callee, deps);
// Return the Initial Heap Model (IHM) contributions for method/task d:
// a table mapping each calling FlatCall site to the reach graph that
// caller contributes to d's initial context.  Never returns null —
// an empty table is lazily created and cached on first request.
2435 public Hashtable<FlatCall, ReachGraph> getIHMcontributions(Descriptor d) {
2437 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
2438 mapDescriptorToIHMcontributions.get(d);
2440 if( heapsFromCallers == null ) {
2441 heapsFromCallers = new Hashtable<FlatCall, ReachGraph>();
2442 mapDescriptorToIHMcontributions.put(d, heapsFromCallers);
2445 return heapsFromCallers;
// Return the single IHM contribution for callee d at one specific call
// site fc.  NOTE(review): the containsKey branch's body is elided in
// this fragment — presumably it returns null (or an empty graph) when
// fc has no contribution yet; confirm against the full source.
2448 public ReachGraph getIHMcontribution(Descriptor d,
2451 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
2452 getIHMcontributions(d);
2454 if( !heapsFromCallers.containsKey(fc) ) {
2458 return heapsFromCallers.get(fc);
// Fold a caller's reach graph into callee d's IHM contribution for call
// site fc.  A fresh graph is merged with the existing contribution so
// the per-site input only ever grows — this monotonicity is what lets
// the fixed-point iteration terminate.
// NOTE(review): the merge of the incoming caller graph (the elided
// parameter) is missing from this fragment; only the merge of the
// previous contribution is visible here.
2462 public void addIHMcontribution(Descriptor d,
2466 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
2467 getIHMcontributions(d);
2469 // ensure inputs to initial contexts increase monotonically
2470 ReachGraph merged = new ReachGraph();
2472 merged.merge( heapsFromCallers.get( fc ) );
2474 heapsFromCallers.put( fc, merged );
// Fabricate an allocation site that models a method/task parameter as
// if it were allocated at method entry.  A synthetic FlatNew carries
// the parameter's type; the boolean flag (elided name in this
// fragment) chooses between a named disjoint-site ID ("param"+temp)
// and an anonymous one.  Each of the allocationDepth age slots plus
// the summary slot gets a unique heap-region-node ID, registered in
// mapHrnIdToAllocSite so node IDs can be mapped back to this site.
// NOTE(review): the return statement and parts of the factory call are
// elided here — presumably returns 'as'; confirm.
2479 private AllocSite createParameterAllocSite(ReachGraph rg,
2480 TempDescriptor tempDesc,
2486 flatNew = new FlatNew(tempDesc.getType(), // type
2487 tempDesc, // param temp
2488 false, // global alloc?
2489 "param"+tempDesc // disjoint site ID string
2492 flatNew = new FlatNew(tempDesc.getType(), // type
2493 tempDesc, // param temp
2494 false, // global alloc?
2495 null // disjoint site ID string
2499 // create allocation site
2500 AllocSite as = AllocSite.factory(allocationDepth,
2502 flatNew.getDisjointId(),
2505 for (int i = 0; i < allocationDepth; ++i) {
2506 Integer id = generateUniqueHeapRegionNodeID();
2507 as.setIthOldest(i, id);
2508 mapHrnIdToAllocSite.put(id, as);
2510 // the oldest node is a summary node
2511 as.setSummary(generateUniqueHeapRegionNodeID() );
// Collect the fields of typeDesc's class that this analysis must track
// (per shouldAnalysisTrack, i.e. reference-like fields).  Immutable
// types contribute no fields.  Returns an empty set rather than null,
// so callers can iterate unconditionally.
2519 private Set<FieldDescriptor> getFieldSetTobeAnalyzed(TypeDescriptor typeDesc) {
2521 Set<FieldDescriptor> fieldSet=new HashSet<FieldDescriptor>();
2522 if(!typeDesc.isImmutable()) {
2523 ClassDescriptor classDesc = typeDesc.getClassDesc();
2524 for (Iterator it = classDesc.getFields(); it.hasNext(); ) {
2525 FieldDescriptor field = (FieldDescriptor) it.next();
2526 TypeDescriptor fieldType = field.getType();
2527 if (shouldAnalysisTrack(fieldType)) {
2528 fieldSet.add(field);
// Build a chain of summary heap-region nodes modeling a multi-dimensional
// array field fd hanging off srcHRN: one summary node per array dimension
// (outermost first), linked by arrayElementFieldName edges, plus — when the
// element class has tracked fields — one final summary node for the element
// objects themselves.  Summary nodes are shared via mapToExistingNode so a
// type's node is created only once per graph.  On return, 'map' records
// entry-node -> innermost-node, and the entry (first-dimension) node is
// returned so the caller can link further fields from it.
// (Method name misspells "Dimensional"; kept for caller compatibility.)
2536 private HeapRegionNode createMultiDeimensionalArrayHRN(ReachGraph rg, AllocSite alloc, HeapRegionNode srcHRN, FieldDescriptor fd, Hashtable<HeapRegionNode, HeapRegionNode> map, Hashtable<TypeDescriptor, HeapRegionNode> mapToExistingNode, ReachSet alpha) {
2538 int dimCount=fd.getType().getArrayCount();
2539 HeapRegionNode prevNode=null;
2540 HeapRegionNode arrayEntryNode=null;
// Walk dimensions from outermost (i == dimCount) to innermost (i == 1),
// creating or reusing a summary node typed as an i-dimensional array.
2541 for(int i=dimCount; i>0; i--) {
2542 TypeDescriptor typeDesc=fd.getType().dereference(); //hack to get instance of type desc
2543 typeDesc.setArrayCount(i);
2544 TempDescriptor tempDesc=new TempDescriptor(typeDesc.getSymbol(),typeDesc);
2545 HeapRegionNode hrnSummary;
2546 if(!mapToExistingNode.containsKey(typeDesc)) {
2551 as = createParameterAllocSite(rg, tempDesc, false);
2553 // make a new reference to allocated node
2555 rg.createNewHeapRegionNode(as.getSummary(), // id or null to generate a new one
2556 false, // single object?
2558 false, // out-of-context?
2559 as.getType(), // type
2560 as, // allocation site
2561 alpha, // inherent reach
2562 alpha, // current reach
2563 ExistPredSet.factory(rg.predTrue), // predicates
2564 tempDesc.toString() // description
2566 rg.id2hrn.put(as.getSummary(),hrnSummary);
2568 mapToExistingNode.put(typeDesc, hrnSummary);
2570 hrnSummary=mapToExistingNode.get(typeDesc);
// First dimension links from the source node via the field name; deeper
// dimensions link summary-to-summary via the array-element pseudo-field.
2573 if(prevNode==null) {
2574 // make a new reference between new summary node and source
2575 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
2578 fd.getSymbol(), // field name
2580 ExistPredSet.factory(rg.predTrue), // predicates
2584 rg.addRefEdge(srcHRN, hrnSummary, edgeToSummary);
2585 prevNode=hrnSummary;
2586 arrayEntryNode=hrnSummary;
2588 // make a new reference between summary nodes of array
2589 RefEdge edgeToSummary = new RefEdge(prevNode, // source
2592 arrayElementFieldName, // field name
2594 ExistPredSet.factory(rg.predTrue), // predicates
2598 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
2599 prevNode=hrnSummary;
2604 // create a new obj node if obj has at least one non-primitive field
2605 TypeDescriptor type=fd.getType();
2606 if(getFieldSetTobeAnalyzed(type).size()>0) {
2607 TypeDescriptor typeDesc=type.dereference();
2608 typeDesc.setArrayCount(0);
2609 if(!mapToExistingNode.containsKey(typeDesc)) {
2610 TempDescriptor tempDesc=new TempDescriptor(type.getSymbol(),typeDesc);
2611 AllocSite as = createParameterAllocSite(rg, tempDesc, false);
2612 // make a new reference to allocated node
2613 HeapRegionNode hrnSummary =
2614 rg.createNewHeapRegionNode(as.getSummary(), // id or null to generate a new one
2615 false, // single object?
2617 false, // out-of-context?
2619 as, // allocation site
2620 alpha, // inherent reach
2621 alpha, // current reach
2622 ExistPredSet.factory(rg.predTrue), // predicates
2623 tempDesc.toString() // description
2625 rg.id2hrn.put(as.getSummary(),hrnSummary);
2626 mapToExistingNode.put(typeDesc, hrnSummary);
2627 RefEdge edgeToSummary = new RefEdge(prevNode, // source
2630 arrayElementFieldName, // field name
2632 ExistPredSet.factory(rg.predTrue), // predicates
2635 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
2636 prevNode=hrnSummary;
// Reuse an existing element-object summary node, adding the edge only
// if one of this type/field doesn't already exist from prevNode.
2638 HeapRegionNode hrnSummary=mapToExistingNode.get(typeDesc);
2639 if(prevNode.getReferenceTo(hrnSummary, typeDesc, arrayElementFieldName)==null) {
2640 RefEdge edgeToSummary = new RefEdge(prevNode, // source
2643 arrayElementFieldName, // field name
2645 ExistPredSet.factory(rg.predTrue), // predicates
2648 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
2650 prevNode=hrnSummary;
// Remember entry -> innermost node so field work-items can start from
// the element-object node rather than the array shell.
2654 map.put(arrayEntryNode, prevNode);
2655 return arrayEntryNode;
// Build the initial reach graph for a task entry: each task parameter
// is modeled as a fabricated allocation site, and the object structure
// reachable through tracked fields is expanded breadth-first into
// per-type summary nodes (one shared summary node per type).  A done
// set keyed by "srcNodeID_field" prevents re-processing, terminating
// the expansion on recursive types.
2658 private ReachGraph createInitialTaskReachGraph(FlatMethod fm) {
2659 ReachGraph rg = new ReachGraph();
2660 TaskDescriptor taskDesc = fm.getTask();
2662 for (int idx = 0; idx < taskDesc.numParameters(); idx++) {
2663 Descriptor paramDesc = taskDesc.getParameter(idx);
2664 TypeDescriptor paramTypeDesc = taskDesc.getParamType(idx);
2666 // setup data structure
2667 Set<HashMap<HeapRegionNode, FieldDescriptor>> workSet =
2668 new HashSet<HashMap<HeapRegionNode, FieldDescriptor>>();
2669 Hashtable<TypeDescriptor, HeapRegionNode> mapTypeToExistingSummaryNode =
2670 new Hashtable<TypeDescriptor, HeapRegionNode>();
2671 Hashtable<HeapRegionNode, HeapRegionNode> mapToFirstDimensionArrayNode =
2672 new Hashtable<HeapRegionNode, HeapRegionNode>();
2673 Set<String> doneSet = new HashSet<String>();
2675 TempDescriptor tempDesc = fm.getParameter(idx);
// Model the parameter itself as a flagged allocation site and point
// the parameter variable at its newest heap region node.
2677 AllocSite as = createParameterAllocSite(rg, tempDesc, true);
2678 VariableNode lnX = rg.getVariableNodeFromTemp(tempDesc);
2679 Integer idNewest = as.getIthOldest(0);
2680 HeapRegionNode hrnNewest = rg.id2hrn.get(idNewest);
2682 // make a new reference to allocated node
2683 RefEdge edgeNew = new RefEdge(lnX, // source
2685 taskDesc.getParamType(idx), // type
2687 hrnNewest.getAlpha(), // beta
2688 ExistPredSet.factory(rg.predTrue), // predicates
2691 rg.addRefEdge(lnX, hrnNewest, edgeNew);
2693 // set-up a work set for class field
// Seed the work set with one (node, field) item per tracked field of
// the parameter's class.
2694 ClassDescriptor classDesc = paramTypeDesc.getClassDesc();
2695 for (Iterator it = classDesc.getFields(); it.hasNext(); ) {
2696 FieldDescriptor fd = (FieldDescriptor) it.next();
2697 TypeDescriptor fieldType = fd.getType();
2698 if (shouldAnalysisTrack(fieldType)) {
2699 HashMap<HeapRegionNode, FieldDescriptor> newMap = new HashMap<HeapRegionNode, FieldDescriptor>();
2700 newMap.put(hrnNewest, fd);
2701 workSet.add(newMap);
2705 int uniqueIdentifier = 0;
// Worklist loop: each item is a single-entry map (srcHRN -> field).
2706 while (!workSet.isEmpty()) {
2707 HashMap<HeapRegionNode, FieldDescriptor> map = workSet
2709 workSet.remove(map);
2711 Set<HeapRegionNode> key = map.keySet();
2712 HeapRegionNode srcHRN = key.iterator().next();
2713 FieldDescriptor fd = map.get(srcHRN);
2714 TypeDescriptor type = fd.getType();
2715 String doneSetIdentifier = srcHRN.getIDString() + "_" + fd;
2717 if (!doneSet.contains(doneSetIdentifier)) {
2718 doneSet.add(doneSetIdentifier);
2719 if (!mapTypeToExistingSummaryNode.containsKey(type)) {
2720 // create new summary Node
2721 TempDescriptor td = new TempDescriptor("temp"
2722 + uniqueIdentifier, type);
2724 AllocSite allocSite;
2725 if(type.equals(paramTypeDesc)) {
2726 //corresponding allocsite has already been created for a parameter variable.
2729 allocSite = createParameterAllocSite(rg, td, false);
2731 String strDesc = allocSite.toStringForDOT()
2733 TypeDescriptor allocType=allocSite.getType();
2735 HeapRegionNode hrnSummary;
// Multi-dimensional array fields get a node chain; everything else
// gets a single summary node linked from the source by fd.
2736 if(allocType.isArray() && allocType.getArrayCount()>0) {
2737 hrnSummary=createMultiDeimensionalArrayHRN(rg,allocSite,srcHRN,fd,mapToFirstDimensionArrayNode,mapTypeToExistingSummaryNode,hrnNewest.getAlpha());
2740 rg.createNewHeapRegionNode(allocSite.getSummary(), // id or null to generate a new one
2741 false, // single object?
2743 false, // out-of-context?
2744 allocSite.getType(), // type
2745 allocSite, // allocation site
2746 hrnNewest.getAlpha(), // inherent reach
2747 hrnNewest.getAlpha(), // current reach
2748 ExistPredSet.factory(rg.predTrue), // predicates
2749 strDesc // description
2751 rg.id2hrn.put(allocSite.getSummary(),hrnSummary);
2753 // make a new reference to summary node
2754 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
2757 fd.getSymbol(), // field name
2758 hrnNewest.getAlpha(), // beta
2759 ExistPredSet.factory(rg.predTrue), // predicates
2763 rg.addRefEdge(srcHRN, hrnSummary, edgeToSummary);
2767 mapTypeToExistingSummaryNode.put(type, hrnSummary);
2769 // set-up a work set for fields of the class
// For array chains, continue expansion from the innermost element
// node recorded by createMultiDeimensionalArrayHRN.
2770 Set<FieldDescriptor> fieldTobeAnalyzed=getFieldSetTobeAnalyzed(type);
2771 for (Iterator iterator = fieldTobeAnalyzed.iterator(); iterator
2773 FieldDescriptor fieldDescriptor = (FieldDescriptor) iterator
2775 HeapRegionNode newDstHRN;
2776 if(mapToFirstDimensionArrayNode.containsKey(hrnSummary)) {
2777 //related heap region node already exists.
2778 newDstHRN=mapToFirstDimensionArrayNode.get(hrnSummary);
2780 newDstHRN=hrnSummary;
2782 doneSetIdentifier = newDstHRN.getIDString() + "_" + fieldDescriptor;
2783 if(!doneSet.contains(doneSetIdentifier)) {
2784 // add new work item
2785 HashMap<HeapRegionNode, FieldDescriptor> newMap =
2786 new HashMap<HeapRegionNode, FieldDescriptor>();
2787 newMap.put(newDstHRN, fieldDescriptor);
2788 workSet.add(newMap);
2793 // if there exists corresponding summary node
// Type already has a summary node: just link srcHRN.fd to it.
2794 HeapRegionNode hrnDst=mapTypeToExistingSummaryNode.get(type);
2796 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
2798 fd.getType(), // type
2799 fd.getSymbol(), // field name
2800 srcHRN.getAlpha(), // beta
2801 ExistPredSet.factory(rg.predTrue), // predicates
2804 rg.addRefEdge(srcHRN, hrnDst, edgeToSummary);
2814 // return all allocation sites in the method (there is one allocation
2815 // site per FlatNew node in a method)
// Memoized accessor: builds the set on first request via
// buildAllocationSiteSet, then serves the cached set.
2816 private HashSet<AllocSite> getAllocationSiteSet(Descriptor d) {
2817 if( !mapDescriptorToAllocSiteSet.containsKey(d) ) {
2818 buildAllocationSiteSet(d);
2821 return mapDescriptorToAllocSiteSet.get(d);
// Walk the flat IR graph of method/task d (forward, via the pointer
// method pm's successor edges) and cache the AllocSite for every
// FlatNew node found, keyed by d in mapDescriptorToAllocSiteSet.
2825 private void buildAllocationSiteSet(Descriptor d) {
2826 HashSet<AllocSite> s = new HashSet<AllocSite>();
2829 if( d instanceof MethodDescriptor ) {
2830 fm = state.getMethodFlat( (MethodDescriptor) d);
2832 assert d instanceof TaskDescriptor;
2833 fm = state.getMethodFlat( (TaskDescriptor) d);
2835 pm.analyzeMethod(fm);
2837 // visit every node in this FlatMethod's IR graph
2838 // and make a set of the allocation sites from the
2839 // FlatNew node's visited
2840 HashSet<FlatNode> visited = new HashSet<FlatNode>();
2841 HashSet<FlatNode> toVisit = new HashSet<FlatNode>();
// NOTE(review): the seeding of toVisit (with fm) and the moves between
// toVisit/visited are elided in this fragment — standard worklist
// traversal; confirm against the full source.
2844 while( !toVisit.isEmpty() ) {
2845 FlatNode n = toVisit.iterator().next();
2847 if( n instanceof FlatNew ) {
2848 s.add(getAllocSiteFromFlatNewPRIVATE( (FlatNew) n) );
2854 for( int i = 0; i < pm.numNext(n); ++i ) {
2855 FlatNode child = pm.getNext(n, i);
2856 if( !visited.contains(child) ) {
2862 mapDescriptorToAllocSiteSet.put(d, s);
// Collect allocation sites carrying a disjoint-analysis ID that are
// reachable from dIn, by traversing dIn and (via the call graph) every
// callee transitively.  The visited set guards against call-graph
// cycles.  NOTE(review): the additions to 'out' and the worklist
// bookkeeping lines are elided in this fragment; confirm before edit.
2865 private HashSet<AllocSite> getFlaggedAllocationSites(Descriptor dIn) {
2867 HashSet<AllocSite> out = new HashSet<AllocSite>();
2868 HashSet<Descriptor> toVisit = new HashSet<Descriptor>();
2869 HashSet<Descriptor> visited = new HashSet<Descriptor>();
2873 while (!toVisit.isEmpty()) {
2874 Descriptor d = toVisit.iterator().next();
2878 HashSet<AllocSite> asSet = getAllocationSiteSet(d);
2879 Iterator asItr = asSet.iterator();
2880 while (asItr.hasNext()) {
2881 AllocSite as = (AllocSite) asItr.next();
2882 if (as.getDisjointAnalysisId() != null) {
2887 // enqueue callees of this method to be searched for
2888 // allocation sites also
2889 Set callees = callGraph.getCalleeSet(d);
2890 if (callees != null) {
2891 Iterator methItr = callees.iterator();
2892 while (methItr.hasNext()) {
2893 MethodDescriptor md = (MethodDescriptor) methItr.next();
2895 if (!visited.contains(md)) {
// Private worker behind getFlaggedAllocationSitesReachableFromTask:
// starting from task td, traverse the task and all methods reachable
// through the call graph, gathering allocation sites whose class
// carries flags (Bristlecone task-flag types).  Same worklist shape
// as getFlaggedAllocationSites, but filtered on cd.hasFlags() instead
// of a disjoint-site ID.
2906 private HashSet<AllocSite>
2907 getFlaggedAllocationSitesReachableFromTaskPRIVATE(TaskDescriptor td) {
2909 HashSet<AllocSite> asSetTotal = new HashSet<AllocSite>();
2910 HashSet<Descriptor> toVisit = new HashSet<Descriptor>();
2911 HashSet<Descriptor> visited = new HashSet<Descriptor>();
2915 // traverse this task and all methods reachable from this task
2916 while( !toVisit.isEmpty() ) {
2917 Descriptor d = toVisit.iterator().next();
2921 HashSet<AllocSite> asSet = getAllocationSiteSet(d);
2922 Iterator asItr = asSet.iterator();
2923 while( asItr.hasNext() ) {
2924 AllocSite as = (AllocSite) asItr.next();
2925 TypeDescriptor typed = as.getType();
2926 if( typed != null ) {
2927 ClassDescriptor cd = typed.getClassDesc();
2928 if( cd != null && cd.hasFlags() ) {
2934 // enqueue callees of this method to be searched for
2935 // allocation sites also
2936 Set callees = callGraph.getCalleeSet(d);
2937 if( callees != null ) {
2938 Iterator methItr = callees.iterator();
2939 while( methItr.hasNext() ) {
2940 MethodDescriptor md = (MethodDescriptor) methItr.next();
2942 if( !visited.contains(md) ) {
// Expose the set of method/task descriptors this analysis covers.
// Returns the internal set directly (callers must not mutate it).
2952 public Set<Descriptor> getDescriptorsToAnalyze() {
2953 return descriptorsToAnalyze;
// Accessor for the side-effects analysis computed alongside this one.
2956 public EffectsAnalysis getEffectsAnalysis() {
2957 return effectsAnalysis;
// Final (converged) reach graph for method/task d; null if d was not
// analyzed or has no completed result.
2960 public ReachGraph getReachGraph(Descriptor d) {
2961 return mapDescriptorToCompleteReachGraph.get(d);
// Reach graph at the program point just BEFORE flat node fn executes;
// null if none was recorded for fn.
2964 public ReachGraph getEnterReachGraph(FlatNode fn) {
2965 return fn2rgAtEnter.get(fn);
// Mutable flag bundle threaded through call-site debugging: whether
// the current site matches the requested caller/callee pair, whether
// any debug visit happened, whether to dump DOT graphs, and whether
// to halt the compiler after the capture window.
// NOTE(review): stopAfter's constructor initialization is elided in
// this fragment; it is reset in statusDebugCallSite before use.
2970 protected class DebugCallSiteData {
2971 public boolean debugCallSite;
2972 public boolean didOneDebug;
2973 public boolean writeDebugDOTs;
2974 public boolean stopAfter;
2976 public DebugCallSiteData() {
2977 debugCallSite = false;
2978 didOneDebug = false;
2979 writeDebugDOTs = false;
// Decide whether THIS caller/callee pair is the call site the user
// asked to debug (via -disjoint-debug-callee/-caller options).  Sets
// dcsd.debugCallSite when both names match, and dcsd.writeDebugDOTs
// when additionally the global visit counter is inside the requested
// capture window.  Returns early (elided line) when either option is
// unset.
2984 protected void decideDebugCallSite( DebugCallSiteData dcsd,
2985 Descriptor taskOrMethodCaller,
2986 MethodDescriptor mdCallee ) {
2988 // all this jimma jamma to debug call sites is WELL WORTH the
2989 // effort, so so so many bugs or buggy info appears through call
2992 if( state.DISJOINTDEBUGCALLEE == null ||
2993 state.DISJOINTDEBUGCALLER == null ) {
2998 boolean debugCalleeMatches = false;
2999 boolean debugCallerMatches = false;
// Callee matches when option equals "ClassName<sep>methodName"
// (separator elided in this fragment).
3001 ClassDescriptor cdCallee = mdCallee.getClassDesc();
3002 if( cdCallee != null ) {
3003 debugCalleeMatches =
3004 state.DISJOINTDEBUGCALLEE.equals( cdCallee.getSymbol()+
3006 mdCallee.getSymbol()
3011 if( taskOrMethodCaller instanceof MethodDescriptor ) {
3012 ClassDescriptor cdCaller = ((MethodDescriptor)taskOrMethodCaller).getClassDesc();
3013 if( cdCaller != null ) {
3014 debugCallerMatches =
3015 state.DISJOINTDEBUGCALLER.equals( cdCaller.getSymbol()+
3017 taskOrMethodCaller.getSymbol()
3021 // for bristlecone style tasks
3022 debugCallerMatches =
3023 state.DISJOINTDEBUGCALLER.equals( taskOrMethodCaller.getSymbol() );
3027 dcsd.debugCallSite = debugCalleeMatches && debugCallerMatches;
// DOT dumps only while the visit counter is in
// [startCapture, startCapture + numVisitsToCapture).
3030 dcsd.writeDebugDOTs =
3032 dcsd.debugCallSite &&
3034 (ReachGraph.debugCallSiteVisitCounter >=
3035 ReachGraph.debugCallSiteVisitStartCapture) &&
3037 (ReachGraph.debugCallSiteVisitCounter <
3038 ReachGraph.debugCallSiteVisitStartCapture +
3039 ReachGraph.debugCallSiteNumVisitsToCapture);
3043 if( dcsd.debugCallSite ) {
3044 dcsd.didOneDebug = true;
// After a call-site visit: report progress, re-evaluate whether the
// (incremented) global visit counter is inside the capture window, and
// — when debugCallSiteStopAfter is set and the window just closed —
// terminate the compiler (System.exit presumed on the elided line
// after the message; confirm against the full source).
3048 protected void statusDebugCallSite( DebugCallSiteData dcsd ) {
3050 dcsd.writeDebugDOTs = false;
3051 dcsd.stopAfter = false;
3053 if( dcsd.didOneDebug ) {
3054 System.out.println(" $$$ Debug call site visit "+
3055 ReachGraph.debugCallSiteVisitCounter+
3059 (ReachGraph.debugCallSiteVisitCounter >=
3060 ReachGraph.debugCallSiteVisitStartCapture) &&
3062 (ReachGraph.debugCallSiteVisitCounter <
3063 ReachGraph.debugCallSiteVisitStartCapture +
3064 ReachGraph.debugCallSiteNumVisitsToCapture)
3066 dcsd.writeDebugDOTs = true;
3067 System.out.println(" $$$ Capturing this call site visit $$$");
3068 if( ReachGraph.debugCallSiteStopAfter &&
3069 (ReachGraph.debugCallSiteVisitCounter ==
3070 ReachGraph.debugCallSiteVisitStartCapture +
3071 ReachGraph.debugCallSiteNumVisitsToCapture - 1)
3073 dcsd.stopAfter = true;
3077 ++ReachGraph.debugCallSiteVisitCounter;
3080 if( dcsd.stopAfter ) {
3081 System.out.println("$$$ Exiting after requested captures of call site. $$$");
3090 // get successive captures of the analysis state, use compiler
// State for the debugSnapshot facility below: when takeDebugSnapshots
// is on, visits to the method named descSymbolDebug are counted and
// graphs are dumped for visits in
// [visitStartCapture, visitStartCapture + numVisitsToCapture).
3092 boolean takeDebugSnapshots = false;
3093 String descSymbolDebug = null;
3094 boolean stopAfterCapture = false;
3095 int snapVisitCounter = 0;        // visits to the watched method so far
3096 int snapNodeCounter = 0;         // flat nodes seen within the current visit
3097 int visitStartCapture = 0;       // first visit index to capture
3098 int numVisitsToCapture = 0;      // size of the capture window
// Dump rg as a DOT graph for debugging while visiting flat node fn.
// 'in' selects the "...in" vs "...out" filename suffix (graph state on
// entering vs leaving the node).  Past the capture window the method
// returns immediately (return on the elided line after the guard).
3101 void debugSnapshot(ReachGraph rg, FlatNode fn, boolean in) {
3102 if( snapVisitCounter > visitStartCapture + numVisitsToCapture ) {
3110 if( snapVisitCounter >= visitStartCapture ) {
3111 System.out.println(" @@@ snapping visit="+snapVisitCounter+
3112 ", node="+snapNodeCounter+
// Filename pattern: snap<visit>_<node>in / ...out, plus the node text.
3116 graphName = String.format("snap%03d_%04din",
3120 graphName = String.format("snap%03d_%04dout",
3125 graphName = graphName + fn;
3127 rg.writeGraph(graphName,
3128 true, // write labels (variables)
3129 true, // selectively hide intermediate temp vars
3130 true, // prune unreachable heap regions
3131 false, // hide reachability
3132 true, // hide subset reachability states
3133 true, // hide predicates
3134 true); // hide edge taints
// Points-to query: allocation sites variable x may reference at the
// moment programPoint is ENTERED.  Returns null-ish fallback (elided
// line) when no graph was recorded for that point.
3141 public Set<Alloc> canPointToAt( TempDescriptor x,
3142 FlatNode programPoint ) {
3144 ReachGraph rgAtEnter = fn2rgAtEnter.get( programPoint );
3145 if( rgAtEnter == null ) {
3149 return rgAtEnter.canPointTo( x );
// Field variant: for each allocation site x may point to at entry of
// programPoint, the sites reachable through field f (elided parameter
// in this fragment) of that object.
3153 public Hashtable< Alloc, Set<Alloc> > canPointToAt( TempDescriptor x,
3155 FlatNode programPoint ) {
3157 ReachGraph rgAtEnter = fn2rgAtEnter.get( programPoint );
3158 if( rgAtEnter == null ) {
3162 return rgAtEnter.canPointTo( x, f.getSymbol(), f.getType() );
// Array-element variant of canPointToAt: x must be an array-typed
// temp; queries through the synthetic array-element field at the
// element type (x's type dereferenced once).
3166 public Hashtable< Alloc, Set<Alloc> > canPointToAtElement( TempDescriptor x,
3167 FlatNode programPoint ) {
3169 ReachGraph rgAtEnter = fn2rgAtEnter.get( programPoint );
3170 if( rgAtEnter == null ) {
3174 assert x.getType() != null;
3175 assert x.getType().isArray();
3177 return rgAtEnter.canPointTo( x, arrayElementFieldName, x.getType().dereference() );
// Same query as canPointToAt, but against the graph recorded AFTER
// programPoint executes (fn2rgAtExit).
3181 public Set<Alloc> canPointToAfter( TempDescriptor x,
3182 FlatNode programPoint ) {
3184 ReachGraph rgAtExit = fn2rgAtExit.get( programPoint );
3186 if( rgAtExit == null ) {
3190 return rgAtExit.canPointTo( x );
// Field variant of canPointToAfter: per-site field-f targets using the
// exit graph of programPoint (f is an elided parameter in this
// fragment).
3194 public Hashtable< Alloc, Set<Alloc> > canPointToAfter( TempDescriptor x,
3196 FlatNode programPoint ) {
3198 ReachGraph rgAtExit = fn2rgAtExit.get( programPoint );
3199 if( rgAtExit == null ) {
3203 return rgAtExit.canPointTo( x, f.getSymbol(), f.getType() );
3207 public Hashtable< Alloc, Set<Alloc> > canPointToAfterElement( TempDescriptor x,
3208 FlatNode programPoint ) {
3210 ReachGraph rgAtExit = fn2rgAtExit.get( programPoint );
3211 if( rgAtExit == null ) {
3215 assert x.getType() != null;
3216 assert x.getType().isArray();
3218 return rgAtExit.canPointTo( x, arrayElementFieldName, x.getType().dereference() );