From: yeom Date: Sun, 16 Sep 2012 20:20:59 +0000 (+0000) Subject: changes: building field/method hierarchy graph + inserting combination nodes at the... X-Git-Url: http://demsky.eecs.uci.edu/git/?a=commitdiff_plain;h=d421edc382984588192603ed519923beadeb4d3a;p=IRC.git changes: building field/method hierarchy graph + inserting combination nodes at the point where flows from skeleton nodes(params/fields) are actually combined. --- diff --git a/Robust/src/Analysis/SSJava/FlowDownCheck.java b/Robust/src/Analysis/SSJava/FlowDownCheck.java index d01e094e..32c6abda 100644 --- a/Robust/src/Analysis/SSJava/FlowDownCheck.java +++ b/Robust/src/Analysis/SSJava/FlowDownCheck.java @@ -546,7 +546,10 @@ public class FlowDownCheck { ReturnNode rn, CompositeLocation constraint) { ExpressionNode returnExp = rn.getReturnExpression(); + + CompositeLocation declaredReturnLoc = md2ReturnLoc.get(md); + CompositeLocation returnValueLoc; if (returnExp != null) { returnValueLoc = @@ -568,9 +571,40 @@ public class FlowDownCheck { // generateErrorMessage(md.getClassDesc(), rn)); // } + if (constraint != null) { + + // Set inputGLB = new HashSet(); + // inputGLB.add(returnValueLoc); + // inputGLB.add(constraint); + // returnValueLoc = + // CompositeLattice.calculateGLB(inputGLB, + // generateErrorMessage(md.getClassDesc(), rn)); + + // if (!returnValueLoc.get(returnValueLoc.getSize() - 1).isTop()) { + // if (!CompositeLattice.isGreaterThan(constraint, returnValueLoc, + // generateErrorMessage(md.getClassDesc(), rn))) { + // System.out.println("returnValueLoc.get(returnValueLoc.getSize() - 1).isTop()=" + // + returnValueLoc.get(returnValueLoc.getSize() - 1).isTop()); + // throw new Error("The value flow from " + constraint + " to " + + // returnValueLoc + // + " does not respect location hierarchy on the assignment " + + // rn.printNode(0) + // + " at " + md.getClassDesc().getSourceFileName() + "::" + + // rn.getNumLine()); + // } + // } + + if (!CompositeLattice.isGreaterThan(constraint, declaredReturnLoc, + generateErrorMessage(md.getClassDesc(), rn))) { + throw new Error("The value flow from " + constraint + " to " + declaredReturnLoc + + " does not respect location hierarchy on the assignment " + rn.printNode(0) + + " at " + md.getClassDesc().getSourceFileName() + "::" + rn.getNumLine()); + } + + } + // check if return value is equal or higher than RETRUNLOC of method // declaration annotation - CompositeLocation declaredReturnLoc = md2ReturnLoc.get(md); int compareResult = CompositeLattice.compare(returnValueLoc, declaredReturnLoc, false, @@ -1143,7 +1177,7 @@ public class FlowDownCheck { List argList = new ArrayList(); // by default, method has a THIS parameter - if (!md.isStatic()) { + if (!min.getMethod().isStatic()) { argList.add(baseLocation); } @@ -1155,10 +1189,9 @@ public class FlowDownCheck { argList.add(callerArg); } - // System.out.println("\n## computeReturnLocation=" + min.getMethod() + - // " argList=" + argList); + System.out.println("\n## computeReturnLocation=" + min.getMethod() + " argList=" + argList); CompositeLocation ceilLoc = md2ReturnLocGen.get(min.getMethod()).computeReturnLocation(argList); - // System.out.println("## ReturnLocation=" + ceilLoc); + System.out.println("## ReturnLocation=" + ceilLoc); return ceilLoc; @@ -1171,6 +1204,9 @@ public class FlowDownCheck { MethodLattice calleeLattice = ssjava.getMethodLattice(calleemd); + System.out.println("checkCalleeConstraints=" + calleemd + " calleeLattice.getThisLoc()=" + + calleeLattice.getThisLoc()); + CompositeLocation calleeThisLoc = new 
CompositeLocation(new Location(calleemd, calleeLattice.getThisLoc())); @@ -1280,7 +1316,7 @@ public class FlowDownCheck { private CompositeLocation checkLocationFromArrayAccessNode(MethodDescriptor md, SymbolTable nametable, ArrayAccessNode aan, CompositeLocation constraint, boolean isLHS) { - + System.out.println("aan=" + aan.printNode(0) + " line#=" + aan.getNumLine()); ClassDescriptor cd = md.getClassDesc(); CompositeLocation arrayLoc = @@ -1621,6 +1657,8 @@ public class FlowDownCheck { // generateErrorMessage(cd, an)); } + System.out.println("src=" + srcLocation + " dest=" + destLocation + " const=" + constraint); + if (!CompositeLattice.isGreaterThan(srcLocation, destLocation, generateErrorMessage(cd, an))) { String context = ""; @@ -2054,7 +2092,7 @@ public class FlowDownCheck { public static CompositeLocation calculateGLB(Set inputSet, String errMsg) { - // System.out.println("Calculating GLB=" + inputSet); + System.out.println("Calculating GLB=" + inputSet); CompositeLocation glbCompLoc = new CompositeLocation(); // calculate GLB of the first(priority) element diff --git a/Robust/src/Analysis/SSJava/FlowGraph.java b/Robust/src/Analysis/SSJava/FlowGraph.java index 8dbecd3c..44a859ba 100644 --- a/Robust/src/Analysis/SSJava/FlowGraph.java +++ b/Robust/src/Analysis/SSJava/FlowGraph.java @@ -56,12 +56,13 @@ public class FlowGraph { if (!md.isStatic()) { // create a node for 'this' varialbe - NTuple thisDescTuple = new NTuple(); - thisDescTuple.add(md.getThis()); - FlowNode thisNode = new FlowNode(thisDescTuple, true); + // NTuple thisDescTuple = new NTuple(); + // thisDescTuple.add(md.getThis()); + NTuple thisVarTuple = new NTuple(); thisVarTuple.add(md.getThis()); - createNewFlowNode(thisVarTuple); + FlowNode thisNode = createNewFlowNode(thisVarTuple); + thisNode.setSkeleton(true); thisVarNode = thisNode; } @@ -69,13 +70,25 @@ public class FlowGraph { } + public Map getMapParamDescToIdx() { + return mapParamDescToIdx; + } + public FlowNode createIntermediateNode() { NTuple tuple = new NTuple(); Descriptor interDesc = new InterDescriptor(LocationInference.INTERLOC + interseed); tuple.add(interDesc); interseed++; - FlowNode node = createNewFlowNode(tuple, true); - return node; + + FlowNode newNode = new FlowNode(tuple); + newNode.setIntermediate(true); + + mapDescTupleToInferNode.put(tuple, newNode); + nodeSet.add(newNode); + + System.out.println("create new intermediate node= " + newNode); + + return newNode; } private void setupMapIdxToDesc() { @@ -86,11 +99,17 @@ public class FlowGraph { int idx = mapParamDescToIdx.get(paramDesc); NTuple descTuple = new NTuple(); descTuple.add(paramDesc); - mapIdxToFlowNode.put(idx, getFlowNode(descTuple)); + FlowNode paramNode = getFlowNode(descTuple); + mapIdxToFlowNode.put(idx, paramNode); + paramNode.setSkeleton(true); } } + public int getNumParameters() { + return mapIdxToFlowNode.keySet().size(); + } + public FlowNode getParamFlowNode(int idx) { return mapIdxToFlowNode.get(idx); } @@ -103,17 +122,6 @@ public class FlowGraph { return md; } - public Set getParameterNodeSet() { - Set paramNodeSet = new HashSet(); - for (Iterator iterator = nodeSet.iterator(); iterator.hasNext();) { - FlowNode fn = (FlowNode) iterator.next(); - if (fn.isParameter()) { - paramNodeSet.add(fn); - } - } - return paramNodeSet; - } - public void addNeighbor(FlowNode node, FlowNode neighbor) { Set set = mapNodeToNeighborSet.get(node); if (set == null) { @@ -162,23 +170,37 @@ public class FlowGraph { FlowNode fromNode = getFlowNode(fromDescTuple); FlowNode toNode = 
getFlowNode(toDescTuple); - // System.out.println("create an edge from " + fromNode + " to " + toNode); + System.out.println("create an edge from " + fromNode + " to " + toNode); int fromTupleSize = fromDescTuple.size(); - NTuple curTuple = new NTuple(); + NTuple curFromTuple = new NTuple(); for (int i = 0; i < fromTupleSize; i++) { Descriptor desc = fromDescTuple.get(i); - curTuple.add(desc); - addFlowEdge(getFlowNode(curTuple), toNode, fromDescTuple, toDescTuple); + curFromTuple.add(desc); + int toTupleSize = toDescTuple.size(); + NTuple curToTuple = new NTuple(); + for (int k = 0; k < toTupleSize; k++) { + Descriptor toDesc = toDescTuple.get(k); + curToTuple.add(toDesc); + addFlowEdge(getFlowNode(curFromTuple), getFlowNode(curToTuple), fromDescTuple, toDescTuple); + } } - int toTupleSize = toDescTuple.size(); - curTuple = new NTuple(); - for (int i = 0; i < toTupleSize; i++) { - Descriptor desc = toDescTuple.get(i); - curTuple.add(desc); - addFlowEdge(fromNode, getFlowNode(curTuple), fromDescTuple, toDescTuple); - } + // int fromTupleSize = fromDescTuple.size(); + // NTuple curTuple = new NTuple(); + // for (int i = 0; i < fromTupleSize; i++) { + // Descriptor desc = fromDescTuple.get(i); + // curTuple.add(desc); + // addFlowEdge(getFlowNode(curTuple), toNode, fromDescTuple, toDescTuple); + // } + // + // int toTupleSize = toDescTuple.size(); + // curTuple = new NTuple(); + // for (int i = 0; i < toTupleSize; i++) { + // Descriptor desc = toDescTuple.get(i); + // curTuple.add(desc); + // addFlowEdge(fromNode, getFlowNode(curTuple), fromDescTuple, toDescTuple); + // } } @@ -188,17 +210,15 @@ public class FlowGraph { FlowEdge edge = new FlowEdge(fromNode, toNode, initTuple, endTuple); fromNode.addOutEdge(edge); - // System.out.println("add a new edge=" + edge); - + System.out.println("add a new edge=" + edge); } public FlowNode getFlowNode(NTuple descTuple) { - if (mapDescTupleToInferNode.containsKey(descTuple)) { - return mapDescTupleToInferNode.get(descTuple); - } else { + if (!mapDescTupleToInferNode.containsKey(descTuple)) { FlowNode node = createNewFlowNode(descTuple); - return node; + mapDescTupleToInferNode.put(descTuple, node); } + return mapDescTupleToInferNode.get(descTuple); } public FlowNode getThisVarNode() { @@ -206,14 +226,9 @@ public class FlowGraph { } public FlowNode createNewFlowNode(NTuple tuple) { - return createNewFlowNode(tuple, false); - } - - public FlowNode createNewFlowNode(NTuple tuple, boolean isIntermediate) { if (!mapDescTupleToInferNode.containsKey(tuple)) { - FlowNode node = new FlowNode(tuple, isParameter(tuple)); - node.setIntermediate(isIntermediate); + FlowNode node = new FlowNode(tuple); mapDescTupleToInferNode.put(tuple, node); nodeSet.add(node); @@ -248,30 +263,67 @@ public class FlowGraph { return returnNodeSet; } - public Set getReachableFlowNodeSet(FlowNode fn) { + public Set getLocalReachFlowNodeSetFrom(FlowNode fn) { Set set = new HashSet(); - getReachableFlowNodeSet(fn, set); + recurLocalReachFlowNodeSet(fn, set); return set; } - private void getReachableFlowNodeSet(FlowNode fn, Set visited) { + private void recurLocalReachFlowNodeSet(FlowNode fn, Set visited) { for (Iterator iterator = fn.getOutEdgeSet().iterator(); iterator.hasNext();) { FlowEdge edge = (FlowEdge) iterator.next(); + FlowNode dstNode = edge.getDst(); - if (fn.equals(getFlowNode(edge.getInitTuple()))) { + if (!visited.contains(dstNode)) { + visited.add(dstNode); + recurLocalReachFlowNodeSet(dstNode, visited); + } + } - FlowNode dstNode = getFlowNode(edge.getEndTuple()); + } + 
+ private void getReachFlowNodeSetFrom(FlowNode fn, Set visited) { + + for (Iterator iterator = fn.getOutEdgeSet().iterator(); iterator.hasNext();) { + FlowEdge edge = (FlowEdge) iterator.next(); + if (fn.equals(getFlowNode(edge.getInitTuple()))) { + FlowNode dstNode = getFlowNode(edge.getEndTuple()); if (!visited.contains(dstNode)) { visited.add(dstNode); - getReachableFlowNodeSet(dstNode, visited); + getReachFlowNodeSetFrom(dstNode, visited); } } } } + public Set getReachFlowNodeSetFrom(FlowNode fn) { + Set set = new HashSet(); + getReachFlowNodeSetFrom(fn, set); + return set; + } + + // private void getReachFlowNodeSetFrom(FlowNode fn, Set visited) { + // + // for (Iterator iterator = fn.getOutEdgeSet().iterator(); + // iterator.hasNext();) { + // FlowEdge edge = (FlowEdge) iterator.next(); + // + // if (fn.equals(getFlowNode(edge.getInitTuple()))) { + // + // FlowNode dstNode = getFlowNode(edge.getEndTuple()); + // + // if (!visited.contains(dstNode)) { + // visited.add(dstNode); + // getReachFlowNodeSetFrom(dstNode, visited); + // } + // } + // } + // + // } + public Set> getReachableFlowTupleSet(Set> visited, FlowNode fn) { for (Iterator iterator = fn.getOutEdgeSet().iterator(); iterator.hasNext();) { FlowEdge edge = (FlowEdge) iterator.next(); @@ -407,11 +459,6 @@ public class FlowGraph { return mapParamDescToIdx.containsKey(firstIdxDesc); } - public int getParamIdx(NTuple tuple) { - Descriptor firstDesc = tuple.get(0); - return mapParamDescToIdx.get(firstDesc).intValue(); - } - private void drawEdges(FlowNode node, BufferedWriter bw, Set addedNodeSet, Set addedEdgeSet) throws IOException { diff --git a/Robust/src/Analysis/SSJava/FlowNode.java b/Robust/src/Analysis/SSJava/FlowNode.java index 7aea5dea..b71789ae 100644 --- a/Robust/src/Analysis/SSJava/FlowNode.java +++ b/Robust/src/Analysis/SSJava/FlowNode.java @@ -17,9 +17,6 @@ public class FlowNode { // this set contains fields of the base type private Set fieldNodeSet; - // set true if this node is driven from a paramter - private boolean isParameter; - // set true if this node stores a return value private boolean isReturn; @@ -27,6 +24,10 @@ public class FlowNode { private boolean isIntermediate; + private CompositeLocation compLoc; + + private boolean isSkeleton; + public boolean isIntermediate() { return isIntermediate; } @@ -41,9 +42,10 @@ public class FlowNode { private Set outEdgeSet; - public FlowNode(NTuple tuple, boolean isParam) { + public FlowNode(NTuple tuple) { - this.isParameter = isParam; + this.isSkeleton = false; + this.isIntermediate = false; NTuple base = null; Descriptor desc = null; @@ -62,14 +64,19 @@ public class FlowNode { descTuple.add(desc); } outEdgeSet = new HashSet(); + } - public void addFieldNode(FlowNode node) { - fieldNodeSet.add(node); + public void setCompositeLocation(CompositeLocation in) { + compLoc = in; } - public boolean isParameter() { - return isParameter; + public CompositeLocation getCompositeLocation() { + return compLoc; + } + + public void addFieldNode(FlowNode node) { + fieldNodeSet.add(node); } public NTuple getDescTuple() { @@ -100,8 +107,8 @@ public class FlowNode { public String toString() { String rtr = "[FlowNode]:"; - if (isParameter()) { - rtr += "param:"; + if (isSkeleton()) { + rtr += "SKELETON:"; } rtr += ":" + descTuple; return rtr; @@ -153,6 +160,11 @@ public class FlowNode { id += descTuple.get(i).getSymbol(); } id += ">"; + + if (compLoc != null) { + id += " " + compLoc; + } + return id; } @@ -163,4 +175,27 @@ public class FlowNode { public boolean isDeclaratonNode() 
{ return isDeclarationNode; } + + public NTuple getCurrentDescTuple() { + + if (compLoc == null) { + return descTuple; + } + + NTuple curDescTuple = new NTuple(); + for (int i = 0; i < compLoc.getSize(); i++) { + Location locElement = compLoc.get(i); + curDescTuple.add(locElement.getLocDescriptor()); + } + return curDescTuple; + } + + public boolean isSkeleton() { + return isSkeleton; + } + + public void setSkeleton(boolean isSkeleton) { + this.isSkeleton = isSkeleton; + } + } diff --git a/Robust/src/Analysis/SSJava/HNode.java b/Robust/src/Analysis/SSJava/HNode.java new file mode 100644 index 00000000..47d2c720 --- /dev/null +++ b/Robust/src/Analysis/SSJava/HNode.java @@ -0,0 +1,85 @@ +package Analysis.SSJava; + +import IR.Descriptor; + +public class HNode { + + private String name; + private Descriptor desc; + + private boolean isSkeleton; + private boolean isCombinationNode; + private boolean isSharedNode; + + public HNode() { + this.isSkeleton = false; + this.isCombinationNode = false; + this.isSharedNode = false; + } + + public HNode(String name) { + this(); + this.name = name; + } + + public HNode(Descriptor d) { + this(); + this.desc = d; + this.name = d.getSymbol(); + } + + public boolean isSharedNode() { + return isSharedNode; + } + + public void setSharedNode(boolean b) { + this.isSharedNode = b; + } + + public boolean isSkeleton() { + return isSkeleton; + } + + public void setSkeleton(boolean isSkeleton) { + this.isSkeleton = isSkeleton; + } + + public boolean isCombinationNode() { + return isCombinationNode; + } + + public void setCombinationNode(boolean b) { + isCombinationNode = b; + } + + public String getName() { + return name; + } + + public boolean equals(Object o) { + if (o instanceof HNode) { + HNode in = (HNode) o; + if (getName().equals(in.getName())) { + return true; + } + } + return false; + } + + public String toString() { + String isShared = ""; + if (isSharedNode()) { + isShared = "*"; + } + return "[Node::" + name + isShared + "]"; + } + + public Descriptor getDescriptor() { + return desc; + } + + public int hashCode() { + return 7 + name.hashCode(); + } + +} diff --git a/Robust/src/Analysis/SSJava/HierarchyGraph.java b/Robust/src/Analysis/SSJava/HierarchyGraph.java new file mode 100644 index 00000000..02625d92 --- /dev/null +++ b/Robust/src/Analysis/SSJava/HierarchyGraph.java @@ -0,0 +1,731 @@ +package Analysis.SSJava; + +import java.io.BufferedWriter; +import java.io.FileWriter; +import java.io.IOException; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Map; +import java.util.Set; + +import IR.Descriptor; +import IR.FieldDescriptor; + +public class HierarchyGraph { + + Descriptor desc; + + String name; + Map mapDescToHNode; + Map> mapHNodeToDescSet; + Map> mapHNodeToIncomingSet; + Map> mapHNodeToOutgoingSet; + Map, HNode> mapSkeletonNodeSetToCombinationNode; + Map> mapCombinationNodeToCombineNodeSet; + Map, HNode> mapCombineNodeSetToCombinationNode; + Map, Set> mapCombineNodeSetToOutgoingNodeSet; + + Set nodeSet; + + public static int seed = 0; + + public HierarchyGraph() { + mapHNodeToIncomingSet = new HashMap>(); + mapHNodeToOutgoingSet = new HashMap>(); + mapHNodeToDescSet = new HashMap>(); + mapDescToHNode = new HashMap(); + mapSkeletonNodeSetToCombinationNode = new HashMap, HNode>(); + mapCombinationNodeToCombineNodeSet = new HashMap>(); + mapCombineNodeSetToOutgoingNodeSet = new HashMap, Set>(); + mapCombineNodeSetToCombinationNode = new HashMap, HNode>(); + nodeSet = new HashSet(); + } + + public 
Descriptor getDesc() { + return desc; + } + + public void setDesc(Descriptor desc) { + this.desc = desc; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public HierarchyGraph(Descriptor d) { + this(); + desc = d; + name = d.toString(); + } + + public Map> getMapHNodeToDescSet() { + return mapHNodeToDescSet; + } + + public void setMapHNodeToDescSet(Map> map) { + mapHNodeToDescSet.putAll(map); + } + + public Map getMapDescToHNode() { + return mapDescToHNode; + } + + public void setMapDescToHNode(Map map) { + mapDescToHNode.putAll(map); + } + + public Set getNodeSet() { + return nodeSet; + } + + public void addEdge(HNode srcHNode, HNode dstHNode) { + + if (!nodeSet.contains(srcHNode)) { + nodeSet.add(srcHNode); + } + + if (!nodeSet.contains(dstHNode)) { + nodeSet.add(dstHNode); + } + + Set possibleCycleSet = getPossibleCycleNodes(srcHNode, dstHNode); + + System.out.println("src=" + srcHNode + " dstHNode=" + dstHNode + " possibleCycleSet=" + + possibleCycleSet); + + if (possibleCycleSet.size() > 0) { + HNode newMergeNode = mergeNodes(possibleCycleSet, false); + newMergeNode.setSharedNode(true); + System.out.println("### CYCLIC VALUE FLOW: " + srcHNode + " -> " + dstHNode); + System.out.println("### INTRODUCE A NEW MERGE NODE: " + newMergeNode); + } else { + getIncomingNodeSet(dstHNode).add(srcHNode); + getOutgoingNodeSet(srcHNode).add(dstHNode); + System.out.println("add an edge " + srcHNode + " -> " + dstHNode); + } + + } + + public void addNode(HNode node) { + nodeSet.add(node); + } + + public void addEdge(Descriptor src, Descriptor dst) { + HNode srcHNode = getHNode(src); + HNode dstHNode = getHNode(dst); + + addEdge(srcHNode, dstHNode); + + } + + public void setParamHNode(Descriptor d) { + getHNode(d).setSkeleton(true); + } + + public HNode getHNode(Descriptor d) { + if (!mapDescToHNode.containsKey(d)) { + HNode newNode = new HNode(d); + if (d instanceof FieldDescriptor) { + newNode.setSkeleton(true); + } + mappingDescriptorToHNode(d, newNode); + nodeSet.add(newNode); + } + return mapDescToHNode.get(d); + } + + private void mappingDescriptorToHNode(Descriptor desc, HNode node) { + mapDescToHNode.put(desc, node); + if (!mapHNodeToDescSet.containsKey(node)) { + mapHNodeToDescSet.put(node, new HashSet()); + } + mapHNodeToDescSet.get(node).add(desc); + } + + public HierarchyGraph generateSkeletonGraph() { + + // compose a skeleton graph that only consists of fields or parameters + HierarchyGraph skeletonGraph = new HierarchyGraph(desc); + skeletonGraph.setName(desc + "_SKELETON"); + + for (Iterator iterator = nodeSet.iterator(); iterator.hasNext();) { + HNode src = (HNode) iterator.next(); + if (src.isSkeleton()) { + Set reachSet = getDirectlyReachSkeletonSet(src); + if (reachSet.size() > 0) { + for (Iterator iterator2 = reachSet.iterator(); iterator2.hasNext();) { + HNode dst = (HNode) iterator2.next(); + skeletonGraph.addEdge(src, dst); + } + } else { + skeletonGraph.addNode(src); + } + } + } + + skeletonGraph.setMapDescToHNode(getMapDescToHNode()); + skeletonGraph.setMapHNodeToDescSet(getMapHNodeToDescSet()); + + return skeletonGraph; + + } + + private Set getDirectlyReachSkeletonSet(HNode node) { + + Set visited = new HashSet(); + Set connected = new HashSet(); + recurReachSkeletonSet(node, connected, visited); + + return connected; + } + + private void removeRedundantEdges() { + + for (Iterator iterator = nodeSet.iterator(); iterator.hasNext();) { + HNode src = (HNode) iterator.next(); + Set connectedSet = 
getOutgoingNodeSet(src); + Set toberemovedSet = new HashSet(); + for (Iterator iterator2 = connectedSet.iterator(); iterator2.hasNext();) { + HNode dst = (HNode) iterator2.next(); + Set otherNeighborSet = new HashSet(); + otherNeighborSet.addAll(connectedSet); + otherNeighborSet.remove(dst); + for (Iterator iterator3 = otherNeighborSet.iterator(); iterator3.hasNext();) { + HNode neighbor = (HNode) iterator3.next(); + if (reachTo(neighbor, dst, new HashSet())) { + toberemovedSet.add(dst); + } + } + } + if (toberemovedSet.size() > 0) { + connectedSet.removeAll(toberemovedSet); + + for (Iterator iterator2 = toberemovedSet.iterator(); iterator2.hasNext();) { + HNode node = (HNode) iterator2.next(); + getIncomingNodeSet(node).remove(src); + } + + } + } + + } + + public void simplifyHierarchyGraph() { + removeRedundantEdges(); + combineRedundantNodes(false); + } + + public void simplifySkeletonCombinationHierarchyGraph() { + removeRedundantEdges(); + combineRedundantNodes(true); + } + + private void combineRedundantNodes(boolean onlyCombinationNodes) { + // Combine field/parameter nodes who have the same set of incoming/outgoing edges. + boolean isUpdated = false; + do { + isUpdated = combineTwoRedundatnNodes(onlyCombinationNodes); + } while (isUpdated); + } + + private Set getIncomingNodeSet(HNode node) { + if (!mapHNodeToIncomingSet.containsKey(node)) { + mapHNodeToIncomingSet.put(node, new HashSet()); + } + return mapHNodeToIncomingSet.get(node); + } + + private Set getOutgoingNodeSet(HNode node) { + if (!mapHNodeToOutgoingSet.containsKey(node)) { + mapHNodeToOutgoingSet.put(node, new HashSet()); + } + return mapHNodeToOutgoingSet.get(node); + } + + private boolean combineTwoRedundatnNodes(boolean onlyCombinationNodes) { + for (Iterator iterator = nodeSet.iterator(); iterator.hasNext();) { + HNode node1 = (HNode) iterator.next(); + + if ((onlyCombinationNodes && (!node1.isCombinationNode())) + || (!onlyCombinationNodes && (!node1.isSkeleton()))) { + continue; + } + + Set incomingNodeSet1 = getIncomingNodeSet(node1); + Set outgoingNodeSet1 = getOutgoingNodeSet(node1); + + for (Iterator iterator2 = nodeSet.iterator(); iterator2.hasNext();) { + HNode node2 = (HNode) iterator2.next(); + + if ((onlyCombinationNodes && (!node2.isCombinationNode())) + || (!onlyCombinationNodes && (!node2.isSkeleton()))) { + continue; + } + + if (!node1.equals(node2)) { + + Set incomingNodeSet2 = getIncomingNodeSet(node2); + Set outgoingNodeSet2 = getOutgoingNodeSet(node2); + + if (incomingNodeSet1.equals(incomingNodeSet2) + && outgoingNodeSet1.equals(outgoingNodeSet2)) { + // need to merge node1 and node2 + + Set mergeSet = new HashSet(); + mergeSet.add(node1); + mergeSet.add(node2); + mergeNodes(mergeSet, onlyCombinationNodes); + return true; + } + + } + } + + } + return false; + } + + private void addEdgeWithNoCycleCheck(HNode srcHNode, HNode dstHNode) { + getIncomingNodeSet(dstHNode).add(srcHNode); + getOutgoingNodeSet(srcHNode).add(dstHNode); + System.out.println("addEdgeWithNoCycleCheck src=" + srcHNode + " -> " + dstHNode); + } + + private HNode mergeNodes(Set set, boolean onlyCombinationNodes) { + + Set incomingNodeSet = new HashSet(); + Set outgoingNodeSet = new HashSet(); + + for (Iterator iterator = set.iterator(); iterator.hasNext();) { + HNode node = (HNode) iterator.next(); + incomingNodeSet.addAll(getIncomingNodeSet(node)); + outgoingNodeSet.addAll(getOutgoingNodeSet(node)); + } + + String nodeName; + if (onlyCombinationNodes) { + nodeName = "Comb" + (seed++); + } else { + nodeName = "Node" + (seed++); 
+ } + HNode newMergeNode = new HNode(nodeName); + + nodeSet.add(newMergeNode); + nodeSet.removeAll(set); + + // if the input set contains a skeleton node, need to set a new merge node as skeleton also + boolean hasSkeleton = false; + for (Iterator iterator = set.iterator(); iterator.hasNext();) { + HNode inNode = (HNode) iterator.next(); + if (inNode.isSkeleton()) { + hasSkeleton = true; + break; + } + } + newMergeNode.setSkeleton(hasSkeleton); + + for (Iterator iterator = set.iterator(); iterator.hasNext();) { + HNode node = (HNode) iterator.next(); + Set descSetOfNode = getDescSetOfNode(node); + for (Iterator iterator2 = descSetOfNode.iterator(); iterator2.hasNext();) { + Descriptor desc = (Descriptor) iterator2.next(); + mappingDescriptorToHNode(desc, newMergeNode); + } + } + + for (Iterator iterator = incomingNodeSet.iterator(); iterator.hasNext();) { + HNode inNode = (HNode) iterator.next(); + Set outSet = getOutgoingNodeSet(inNode); + outSet.removeAll(set); + if (!set.contains(inNode)) { + addEdgeWithNoCycleCheck(inNode, newMergeNode); + } + } + + for (Iterator iterator = outgoingNodeSet.iterator(); iterator.hasNext();) { + HNode outNode = (HNode) iterator.next(); + Set inSet = getIncomingNodeSet(outNode); + inSet.removeAll(set); + if (!set.contains(outNode)) { + addEdgeWithNoCycleCheck(newMergeNode, outNode); + } + } + + System.out.println("#MERGING NODE=" + set + " new node=" + newMergeNode); + return newMergeNode; + } + + private Set getDescSetOfNode(HNode node) { + if (!mapHNodeToDescSet.containsKey(node)) { + mapHNodeToDescSet.put(node, new HashSet()); + } + return mapHNodeToDescSet.get(node); + } + + private boolean reachTo(HNode src, HNode dst, Set visited) { + Set connectedSet = getOutgoingNodeSet(src); + for (Iterator iterator = connectedSet.iterator(); iterator.hasNext();) { + HNode n = iterator.next(); + if (n.equals(dst)) { + return true; + } + if (!visited.contains(n)) { + visited.add(n); + if (reachTo(n, dst, visited)) { + return true; + } + } + } + return false; + } + + private void recurReachSkeletonSet(HNode node, Set connected, Set visited) { + + Set outSet = getOutgoingNodeSet(node); + for (Iterator iterator = outSet.iterator(); iterator.hasNext();) { + HNode outNode = (HNode) iterator.next(); + + if (outNode.isSkeleton()) { + connected.add(outNode); + } else if (!visited.contains(outNode)) { + visited.add(outNode); + recurReachSkeletonSet(outNode, connected, visited); + } + } + + } + + public Set getPossibleCycleNodes(HNode src, HNode dst) { + // if an edge from src to dst introduces a new cycle flow, + // the method returns the set of elements consisting of the cycle + Set cycleNodeSet = new HashSet(); + // if the dst node reaches to the src node, the new relation + // introduces a cycle to the lattice + if (dst.equals(src)) { + cycleNodeSet.add(dst); + cycleNodeSet.add(src); + } else if (reachTo(dst, src)) { + cycleNodeSet.add(dst); + cycleNodeSet.add(src); + getInBetweenElements(dst, src, cycleNodeSet); + } + return cycleNodeSet; + } + + private void getInBetweenElements(HNode start, HNode end, Set nodeSet) { + Set connectedSet = getOutgoingNodeSet(start); + for (Iterator iterator = connectedSet.iterator(); iterator.hasNext();) { + HNode cur = (HNode) iterator.next(); + if ((!start.equals(cur)) && (!cur.equals(end)) && reachTo(cur, end)) { + nodeSet.add(cur); + getInBetweenElements(cur, end, nodeSet); + } + } + } + + public boolean reachTo(HNode node1, HNode node2) { + return reachTo(node1, node2, new HashSet()); + } + + public Set 
getCombineSetByCombinationNode(HNode node) { + if (!mapCombinationNodeToCombineNodeSet.containsKey(node)) { + mapCombinationNodeToCombineNodeSet.put(node, new HashSet()); + } + return mapCombinationNodeToCombineNodeSet.get(node); + } + + private HNode getCombinationNode(Set combineSet) { + if (!mapCombineNodeSetToCombinationNode.containsKey(combineSet)) { + String name = "COMB" + (seed++); + HNode node = new HNode(name); + node.setCombinationNode(true); + nodeSet.add(node); + mapCombineNodeSetToCombinationNode.put(combineSet, node); + mapCombinationNodeToCombineNodeSet.put(node, combineSet); + } + + return mapCombineNodeSetToCombinationNode.get(combineSet); + } + + public Set> getCombineNodeSet() { + return mapCombineNodeSetToOutgoingNodeSet.keySet(); + } + + public void insertCombinationNodesToGraph(HierarchyGraph hierarchyGraph) { + // add a new combination node where parameter/field flows are actually combined. + + hierarchyGraph.identifyCombinationNodes(); + + Set> keySet = hierarchyGraph.getCombineNodeSet(); + for (Iterator iterator = keySet.iterator(); iterator.hasNext();) { + Set combineSet = (Set) iterator.next(); + System.out.println("combineSet=" + combineSet); + HNode combinationNode = getCombinationNode(combineSet); + + // add an edge from a skeleton node to a combination node + for (Iterator iterator2 = combineSet.iterator(); iterator2.hasNext();) { + HNode inSkeletonNode = (HNode) iterator2.next(); + HNode srcNode = getHNode(inSkeletonNode.getDescriptor()); + System.out.println("inSkeletonNode=" + inSkeletonNode + " srcNode=" + srcNode); + addEdgeWithNoCycleCheck(srcNode, combinationNode); + } + + // add an edge from the combination node to outgoing nodes + Set outSet = hierarchyGraph.getOutgoingNodeSetByCombineSet(combineSet); + for (Iterator iterator2 = outSet.iterator(); iterator2.hasNext();) { + HNode curNode = (HNode) iterator2.next(); + if (curNode.isCombinationNode()) { + Set combineNode = hierarchyGraph.getCombineSetByCombinationNode(curNode); + HNode outNode = getCombinationNode(combineNode); + addEdgeWithNoCycleCheck(combinationNode, outNode); + } else if (curNode.isSkeleton()) { + addEdgeWithNoCycleCheck(combinationNode, curNode); + } + } + + } + + } + + private void addCombinationNode(HNode curNode, Set reachToSet, Set reachableSet) { + if (!mapSkeletonNodeSetToCombinationNode.containsKey(reachToSet)) { + // need to create a new combination node + String nodeName = "Comb" + (seed++); + HNode newCombinationNode = new HNode(nodeName); + newCombinationNode.setCombinationNode(true); + + nodeSet.add(newCombinationNode); + mapSkeletonNodeSetToCombinationNode.put(reachToSet, newCombinationNode); + + for (Iterator iterator = reachToSet.iterator(); iterator.hasNext();) { + HNode reachToNode = (HNode) iterator.next(); + addEdge(reachToNode, newCombinationNode); + } + + } + + HNode combinationNode = mapSkeletonNodeSetToCombinationNode.get(reachToSet); + for (Iterator iterator = reachableSet.iterator(); iterator.hasNext();) { + HNode reachableNode = (HNode) iterator.next(); + addEdge(combinationNode, reachableNode); + } + + } + + private Set getSkeleteNodeSetReachTo(HNode node) { + + Set reachToSet = new HashSet(); + Set visited = new HashSet(); + + recurSkeletonReachTo(node, reachToSet, visited); + + return reachToSet; + } + + private void recurSkeletonReachTo(HNode node, Set reachToSet, Set visited) { + + Set inSet = getIncomingNodeSet(node); + for (Iterator iterator = inSet.iterator(); iterator.hasNext();) { + HNode inNode = (HNode) iterator.next(); + + if 
(inNode.isSkeleton()) { + reachToSet.add(inNode); + } else if (!visited.contains(inNode)) { + visited.add(inNode); + recurSkeletonReachTo(inNode, reachToSet, visited); + } + } + + } + + public Map> getMapHNodeToOutgoingSet() { + return mapHNodeToOutgoingSet; + } + + public Map> getMapHNodeToIncomingSet() { + return mapHNodeToIncomingSet; + } + + public void setMapHNodeToOutgoingSet(Map> in) { + mapHNodeToOutgoingSet.clear(); + Set keySet = in.keySet(); + for (Iterator iterator = keySet.iterator(); iterator.hasNext();) { + HNode key = (HNode) iterator.next(); + Set inSet = in.get(key); + Set newSet = new HashSet(); + newSet.addAll(inSet); + mapHNodeToOutgoingSet.put(key, newSet); + } + } + + public void setMapHNodeToIncomingSet(Map> in) { + mapHNodeToIncomingSet.clear(); + Set keySet = in.keySet(); + for (Iterator iterator = keySet.iterator(); iterator.hasNext();) { + HNode key = (HNode) iterator.next(); + Set inSet = in.get(key); + Set newSet = new HashSet(); + newSet.addAll(inSet); + mapHNodeToIncomingSet.put(key, newSet); + } + } + + public void setNodeSet(Set inSet) { + nodeSet.clear(); + nodeSet.addAll(inSet); + } + + public HierarchyGraph clone() { + HierarchyGraph clone = new HierarchyGraph(); + clone.setDesc(getDesc()); + clone.setName(getName()); + clone.setNodeSet(getNodeSet()); + clone.setMapHNodeToIncomingSet(getMapHNodeToIncomingSet()); + clone.setMapHNodeToOutgoingSet(getMapHNodeToOutgoingSet()); + clone.setMapDescToHNode(getMapDescToHNode()); + clone.setMapHNodeToDescSet(getMapHNodeToDescSet()); + + return clone; + } + + public Set getOutgoingNodeSetByCombineSet(Set combineSet) { + + if (!mapCombineNodeSetToOutgoingNodeSet.containsKey(combineSet)) { + mapCombineNodeSetToOutgoingNodeSet.put(combineSet, new HashSet()); + } + return mapCombineNodeSetToOutgoingNodeSet.get(combineSet); + } + + public void identifyCombinationNodes() { + + // 1) set combination node flag if a node combines more than one skeleton node. + for (Iterator iterator = nodeSet.iterator(); iterator.hasNext();) { + HNode node = (HNode) iterator.next(); + if (!node.isSkeleton()) { + Set reachToSet = getSkeleteNodeSetReachTo(node); + if (reachToSet.size() > 1) { + node.setCombinationNode(true); + mapCombinationNodeToCombineNodeSet.put(node, reachToSet); + } + } + } + + // 2) compute the outgoing set that needs to be directly connected from the combination node + for (Iterator iterator = nodeSet.iterator(); iterator.hasNext();) { + HNode node = (HNode) iterator.next(); + if (node.isCombinationNode()) { + Set combineSet = mapCombinationNodeToCombineNodeSet.get(node); + Set outSet = getDirectlyReachableNodeSetFromCombinationNode(node); + addMapCombineSetToOutgoingSet(combineSet, outSet); + } + } + + } + + private void addMapCombineSetToOutgoingSet(Set combineSet, Set outSet) { + if (!mapCombineNodeSetToOutgoingNodeSet.containsKey(combineSet)) { + mapCombineNodeSetToOutgoingNodeSet.put(combineSet, new HashSet()); + } + mapCombineNodeSetToOutgoingNodeSet.get(combineSet).addAll(outSet); + } + + private Set getDirectlyReachableNodeSetFromCombinationNode(HNode node) { + // the method returns the set of nodes that are reachable from the current node + // and do not combine the same set of skeleton nodes... 
+ + Set visited = new HashSet(); + Set reachableSet = new HashSet(); + Set combineSet = mapCombinationNodeToCombineNodeSet.get(node); + + recurDirectlyReachableNodeSetFromCombinationNode(node, combineSet, reachableSet, visited); + + return reachableSet; + } + + private void recurDirectlyReachableNodeSetFromCombinationNode(HNode node, Set combineSet, + Set reachableSet, Set visited) { + + Set outSet = getOutgoingNodeSet(node); + for (Iterator iterator = outSet.iterator(); iterator.hasNext();) { + HNode outNode = (HNode) iterator.next(); + + if (outNode.isCombinationNode()) { + Set combineSetOfOutNode = mapCombinationNodeToCombineNodeSet.get(outNode); + if (combineSetOfOutNode.equals(combineSet)) { + recurDirectlyReachableNodeSetFromCombinationNode(outNode, combineSet, reachableSet, + visited); + } else { + reachableSet.add(outNode); + } + } else if (outNode.isSkeleton()) { + reachableSet.add(outNode); + } + + } + + } + + public void writeGraph() { + + String graphName = "hierarchy" + name; + graphName = graphName.replaceAll("[\\W]", ""); + + try { + BufferedWriter bw = new BufferedWriter(new FileWriter(graphName + ".dot")); + + bw.write("digraph " + graphName + " {\n"); + + Iterator iter = nodeSet.iterator(); + + Set addedNodeSet = new HashSet(); + + while (iter.hasNext()) { + HNode u = iter.next(); + + Set outSet = getOutgoingNodeSet(u); + + if (outSet.size() == 0) { + if (!addedNodeSet.contains(u)) { + drawNode(bw, u); + addedNodeSet.add(u); + } + } else { + for (Iterator iterator = outSet.iterator(); iterator.hasNext();) { + HNode v = (HNode) iterator.next(); + if (!addedNodeSet.contains(u)) { + drawNode(bw, u); + addedNodeSet.add(u); + } + if (!addedNodeSet.contains(v)) { + drawNode(bw, v); + addedNodeSet.add(v); + } + bw.write("" + u.getName() + " -> " + v.getName() + ";\n"); + } + } + + } + + bw.write("}\n"); + bw.close(); + + } catch (IOException e) { + e.printStackTrace(); + } + } + + private void drawNode(BufferedWriter bw, HNode node) throws IOException { + String shared = ""; + if (node.isSharedNode()) { + shared = "*"; + } + bw.write(node.getName() + " [label=\"" + node.getName() + shared + "\"]" + ";\n"); + } + +} diff --git a/Robust/src/Analysis/SSJava/Location.java b/Robust/src/Analysis/SSJava/Location.java index 1c90d577..98a05497 100644 --- a/Robust/src/Analysis/SSJava/Location.java +++ b/Robust/src/Analysis/SSJava/Location.java @@ -17,7 +17,15 @@ public class Location implements TypeExtension { public Location(Descriptor d, String loc) { this.d = d; this.loc = loc; - this.type = NORMAL; + + if (loc.equals(SSJavaAnalysis.TOP)) { + type = TOP; + } else if (loc.equals(SSJavaAnalysis.BOTTOM)) { + type = BOTTOM; + } else { + type = NORMAL; + } + } public Location(Descriptor d, int type) { diff --git a/Robust/src/Analysis/SSJava/LocationDescriptor.java b/Robust/src/Analysis/SSJava/LocationDescriptor.java new file mode 100644 index 00000000..ed2497d1 --- /dev/null +++ b/Robust/src/Analysis/SSJava/LocationDescriptor.java @@ -0,0 +1,12 @@ +package Analysis.SSJava; + +import IR.Descriptor; + +public class LocationDescriptor extends Descriptor { + + public LocationDescriptor(String name) { + super(name); + } + + +} diff --git a/Robust/src/Analysis/SSJava/LocationInference.java b/Robust/src/Analysis/SSJava/LocationInference.java index 9eea1427..31900f10 100644 --- a/Robust/src/Analysis/SSJava/LocationInference.java +++ b/Robust/src/Analysis/SSJava/LocationInference.java @@ -47,6 +47,7 @@ import IR.Tree.NameNode; import IR.Tree.OpNode; import IR.Tree.ReturnNode; import 
IR.Tree.SubBlockNode; +import IR.Tree.SwitchBlockNode; import IR.Tree.SwitchStatementNode; import IR.Tree.TertiaryNode; import IR.Tree.TreeNode; @@ -73,12 +74,28 @@ public class LocationInference { // map a method descriptor to a method lattice private Map> md2lattice; + // map a method/class descriptor to a hierarchy graph + private Map mapDescriptorToHierarchyGraph; + + // map a method/class descriptor to a skeleton hierarchy graph + private Map mapDescriptorToSkeletonHierarchyGraph; + + private Map mapDescriptorToSimpleHierarchyGraph; + + // map a method/class descriptor to a skeleton hierarchy graph with combination nodes + private Map mapDescriptorToCombineSkeletonHierarchyGraph; + + // map a method descriptor to a method summary + private Map mapMethodDescToMethodSummary; + // map a method descriptor to the set of method invocation nodes which are // invoked by the method descriptor private Map> mapMethodDescriptorToMethodInvokeNodeSet; private Map> mapMethodInvokeNodeToArgIdxMap; + private Map> mapMethodInvokeNodeToBaseTuple; + private Map mapMethodDescToMethodLocationInfo; private Map mapClassToLocationInfo; @@ -107,6 +124,8 @@ public class LocationInference { boolean debug = true; + private static int locSeed = 0; + public LocationInference(SSJavaAnalysis ssjava, State state) { this.ssjava = ssjava; this.state = state; @@ -128,6 +147,15 @@ public class LocationInference { this.mapDescToDefinitionLine = new HashMap(); this.mapMethodDescToParamNodeFlowsToReturnValue = new HashMap>(); + + this.mapDescriptorToHierarchyGraph = new HashMap(); + this.mapMethodDescToMethodSummary = new HashMap(); + this.mapMethodInvokeNodeToBaseTuple = new HashMap>(); + + this.mapDescriptorToSkeletonHierarchyGraph = new HashMap(); + this.mapDescriptorToCombineSkeletonHierarchyGraph = new HashMap(); + this.mapDescriptorToSimpleHierarchyGraph = new HashMap(); + } public void setupToAnalyze() { @@ -174,21 +202,257 @@ public class LocationInference { // 1) construct value flow graph constructFlowGraph(); + constructHierarchyGraph(); + + simplifyHierarchyGraph(); + + constructSkeletonHierarchyGraph(); + + insertCombinationNodes(); + + debug_writeHierarchyDotFile(); + + System.exit(0); + // 2) construct lattices inferLattices(); simplifyLattices(); - debug_writeLatticeDotFile(); - // 3) check properties checkLattices(); + // calculate RETURNLOC,PCLOC + calculateExtraLocations(); + + debug_writeLatticeDotFile(); + // 4) generate annotated source codes generateAnnoatedCode(); } + private void simplifyHierarchyGraph() { + Set keySet = mapDescriptorToHierarchyGraph.keySet(); + for (Iterator iterator = keySet.iterator(); iterator.hasNext();) { + Descriptor desc = (Descriptor) iterator.next(); + HierarchyGraph simpleHierarchyGraph = getHierarchyGraph(desc).clone(); + simpleHierarchyGraph.setName(desc + "_SIMPLE"); + simpleHierarchyGraph.simplifyHierarchyGraph(); + mapDescriptorToSimpleHierarchyGraph.put(desc, simpleHierarchyGraph); + } + } + + private void insertCombinationNodes() { + Set keySet = mapDescriptorToSkeletonHierarchyGraph.keySet(); + for (Iterator iterator = keySet.iterator(); iterator.hasNext();) { + Descriptor desc = (Descriptor) iterator.next(); + System.out.println("\nSSJAVA: Insering Combination Nodes:" + desc); + HierarchyGraph skeletonGraph = getSkeletonHierarchyGraph(desc); + HierarchyGraph skeletonGraphWithCombinationNode = skeletonGraph.clone(); + skeletonGraphWithCombinationNode.setName(desc + "_SC"); + + HierarchyGraph hierarchyGraph = getHierarchyGraph(desc); + 
skeletonGraphWithCombinationNode.insertCombinationNodesToGraph(hierarchyGraph); + skeletonGraphWithCombinationNode.simplifySkeletonCombinationHierarchyGraph(); + mapDescriptorToCombineSkeletonHierarchyGraph.put(desc, skeletonGraphWithCombinationNode); + } + } + + private void constructSkeletonHierarchyGraph() { + Set keySet = mapDescriptorToHierarchyGraph.keySet(); + for (Iterator iterator = keySet.iterator(); iterator.hasNext();) { + Descriptor desc = (Descriptor) iterator.next(); + HierarchyGraph simpleGraph = getSimpleHierarchyGraph(desc); + HierarchyGraph skeletonGraph = simpleGraph.generateSkeletonGraph(); + skeletonGraph.setMapDescToHNode(simpleGraph.getMapDescToHNode()); + skeletonGraph.setMapHNodeToDescSet(simpleGraph.getMapHNodeToDescSet()); + mapDescriptorToSkeletonHierarchyGraph.put(desc, skeletonGraph); + } + } + + private void debug_writeHierarchyDotFile() { + + Set keySet = mapDescriptorToHierarchyGraph.keySet(); + for (Iterator iterator = keySet.iterator(); iterator.hasNext();) { + Descriptor desc = (Descriptor) iterator.next(); + getHierarchyGraph(desc).writeGraph(); + getSimpleHierarchyGraph(desc).writeGraph(); + getSkeletonHierarchyGraph(desc).writeGraph(); + getSkeletonCombinationHierarchyGraph(desc).writeGraph(); + } + + } + + public HierarchyGraph getSimpleHierarchyGraph(Descriptor d) { + return mapDescriptorToSimpleHierarchyGraph.get(d); + } + + private HierarchyGraph getSkeletonHierarchyGraph(Descriptor d) { + if (!mapDescriptorToSkeletonHierarchyGraph.containsKey(d)) { + mapDescriptorToSkeletonHierarchyGraph.put(d, new HierarchyGraph(d)); + } + return mapDescriptorToSkeletonHierarchyGraph.get(d); + } + + private HierarchyGraph getSkeletonCombinationHierarchyGraph(Descriptor d) { + if (!mapDescriptorToCombineSkeletonHierarchyGraph.containsKey(d)) { + mapDescriptorToCombineSkeletonHierarchyGraph.put(d, new HierarchyGraph(d)); + } + return mapDescriptorToCombineSkeletonHierarchyGraph.get(d); + } + + private void constructHierarchyGraph() { + + // do fixed-point analysis + + ssjava.init(); + LinkedList descriptorListToAnalyze = ssjava.getSortedDescriptors(); + + // Collections.sort(descriptorListToAnalyze, new + // Comparator() { + // public int compare(MethodDescriptor o1, MethodDescriptor o2) { + // return o1.getSymbol().compareToIgnoreCase(o2.getSymbol()); + // } + // }); + + // current descriptors to visit in fixed-point interprocedural analysis, + // prioritized by dependency in the call graph + methodDescriptorsToVisitStack.clear(); + + Set methodDescriptorToVistSet = new HashSet(); + methodDescriptorToVistSet.addAll(descriptorListToAnalyze); + + while (!descriptorListToAnalyze.isEmpty()) { + MethodDescriptor md = descriptorListToAnalyze.removeFirst(); + methodDescriptorsToVisitStack.add(md); + } + + // analyze scheduled methods until there are no more to visit + while (!methodDescriptorsToVisitStack.isEmpty()) { + // start to analyze leaf node + MethodDescriptor md = methodDescriptorsToVisitStack.pop(); + + HierarchyGraph methodGraph = new HierarchyGraph(md); + MethodSummary methodSummary = new MethodSummary(); + + MethodLocationInfo methodInfo = new MethodLocationInfo(md); + curMethodInfo = methodInfo; + + System.out.println(); + System.out.println("SSJAVA: Construcing the hierarchy graph from " + md); + + constructHierarchyGraph(md, methodGraph, methodSummary); + + HierarchyGraph prevMethodGraph = getHierarchyGraph(md); + MethodSummary prevMethodSummary = getMethodSummary(md); + + if ((!methodGraph.equals(prevMethodGraph)) || 
(!methodSummary.equals(prevMethodSummary))) { + + mapDescriptorToHierarchyGraph.put(md, methodGraph); + mapMethodDescToMethodSummary.put(md, methodSummary); + + // results for callee changed, so enqueue dependents caller for + // further analysis + Iterator depsItr = ssjava.getDependents(md).iterator(); + while (depsItr.hasNext()) { + MethodDescriptor methodNext = depsItr.next(); + if (!methodDescriptorsToVisitStack.contains(methodNext) + && methodDescriptorToVistSet.contains(methodNext)) { + methodDescriptorsToVisitStack.add(methodNext); + } + } + + } + + } + + } + + private HierarchyGraph getHierarchyGraph(Descriptor d) { + if (!mapDescriptorToHierarchyGraph.containsKey(d)) { + mapDescriptorToHierarchyGraph.put(d, new HierarchyGraph(d)); + } + return mapDescriptorToHierarchyGraph.get(d); + } + + private void constructHierarchyGraph(MethodDescriptor md, HierarchyGraph methodGraph, + MethodSummary methodSummary) { + + // visit each node of method flow graph + FlowGraph fg = getFlowGraph(md); + Set nodeSet = fg.getNodeSet(); + + Set paramDescSet = fg.getMapParamDescToIdx().keySet(); + for (Iterator iterator = paramDescSet.iterator(); iterator.hasNext();) { + Descriptor desc = (Descriptor) iterator.next(); + methodGraph.getHNode(desc).setSkeleton(true); + } + + // for the method lattice, we need to look at the first element of + // NTuple + for (Iterator iterator = nodeSet.iterator(); iterator.hasNext();) { + FlowNode srcNode = (FlowNode) iterator.next(); + + Set outEdgeSet = srcNode.getOutEdgeSet(); + for (Iterator iterator2 = outEdgeSet.iterator(); iterator2.hasNext();) { + FlowEdge outEdge = (FlowEdge) iterator2.next(); + FlowNode dstNode = outEdge.getDst(); + + NTuple srcNodeTuple = srcNode.getDescTuple(); + NTuple dstNodeTuple = dstNode.getDescTuple(); + + if (outEdge.getInitTuple().equals(srcNodeTuple) + && outEdge.getEndTuple().equals(dstNodeTuple)) { + + NTuple srcCurTuple = srcNode.getCurrentDescTuple(); + NTuple dstCurTuple = dstNode.getCurrentDescTuple(); + + if ((srcCurTuple.size() > 1 && dstCurTuple.size() > 1) + && srcCurTuple.get(0).equals(dstCurTuple.get(0))) { + + // value flows between fields + Descriptor desc = srcCurTuple.get(0); + ClassDescriptor classDesc; + + if (desc.equals(GLOBALDESC)) { + classDesc = md.getClassDesc(); + } else { + VarDescriptor varDesc = (VarDescriptor) srcCurTuple.get(0); + classDesc = varDesc.getType().getClassDesc(); + } + extractFlowsBetweenFields(classDesc, srcNode, dstNode, 1); + + } else { + // value flow between local var - local var or local var - field + + Descriptor srcDesc = srcCurTuple.get(0); + Descriptor dstDesc = dstCurTuple.get(0); + + methodGraph.addEdge(srcDesc, dstDesc); + + if (fg.isParamDesc(srcDesc)) { + methodGraph.setParamHNode(srcDesc); + } + if (fg.isParamDesc(dstDesc)) { + methodGraph.setParamHNode(dstDesc); + } + + } + + } + } + } + + } + + private MethodSummary getMethodSummary(MethodDescriptor md) { + if (!mapMethodDescToMethodSummary.containsKey(md)) { + mapMethodDescToMethodSummary.put(md, new MethodSummary()); + } + return mapMethodDescToMethodSummary.get(md); + } + private void addMapClassDefinitionToLineNum(ClassDescriptor cd, String strLine, int lineNum) { String classSymbol = cd.getSymbol(); @@ -576,7 +840,28 @@ public class LocationInference { private void simplifyLattices() { - // generate lattice dot file + setupToAnalyze(); + + while (!toAnalyzeIsEmpty()) { + ClassDescriptor cd = toAnalyzeNext(); + setupToAnalazeMethod(cd); + + SSJavaLattice classLattice = cd2lattice.get(cd); + if (classLattice != null) { + 
System.out.println("@@@check lattice=" + cd); + checkLatticeProperty(cd, classLattice); + } + + while (!toAnalyzeMethodIsEmpty()) { + MethodDescriptor md = toAnalyzeMethodNext(); + SSJavaLattice methodLattice = md2lattice.get(md); + if (methodLattice != null) { + System.out.println("@@@check lattice=" + md); + checkLatticeProperty(md, methodLattice); + } + } + } + setupToAnalyze(); while (!toAnalyzeIsEmpty()) { @@ -600,6 +885,113 @@ public class LocationInference { } + private boolean checkLatticeProperty(Descriptor d, SSJavaLattice lattice) { + // if two elements has the same incoming node set, + // we need to merge two elements ... + + boolean isUpdated; + boolean isModified = false; + do { + isUpdated = removeNodeSharingSameIncomingNodes(d, lattice); + if (!isModified && isUpdated) { + isModified = true; + } + } while (isUpdated); + + return isModified; + } + + private boolean removeNodeSharingSameIncomingNodes(Descriptor d, SSJavaLattice lattice) { + LocationInfo locInfo = getLocationInfo(d); + Map> map = lattice.getIncomingElementMap(); + Set keySet = map.keySet(); + for (Iterator iterator = keySet.iterator(); iterator.hasNext();) { + String key = (String) iterator.next(); + Set incomingSetKey = map.get(key); + + // System.out.println("key=" + key + " incomingSetKey=" + + // incomingSetKey); + if (incomingSetKey.size() > 0) { + for (Iterator iterator2 = keySet.iterator(); iterator2.hasNext();) { + String cur = (String) iterator2.next(); + if (!cur.equals(key)) { + Set incomingSetCur = map.get(cur); + if (incomingSetCur.equals(incomingSetKey)) { + if (!(incomingSetCur.size() == 1 && incomingSetCur.contains(lattice.getTopItem()))) { + // NEED TO MERGE HERE!!!! + System.out.println("@@@Try merge=" + cur + " " + key); + + Set mergeSet = new HashSet(); + mergeSet.add(cur); + mergeSet.add(key); + + String newMergeLoc = "MLoc" + (SSJavaLattice.seed++); + + System.out.println("---ASSIGN NEW MERGE LOC=" + newMergeLoc + " to " + mergeSet); + lattice.mergeIntoNewLocation(mergeSet, newMergeLoc); + + for (Iterator miterator = mergeSet.iterator(); miterator.hasNext();) { + String oldLocSymbol = (String) miterator.next(); + + Set> inferLocSet = + locInfo.getRelatedInferLocSet(oldLocSymbol); + System.out.println("---update related locations=" + inferLocSet + + " oldLocSymbol=" + oldLocSymbol); + + for (Iterator miterator2 = inferLocSet.iterator(); miterator2.hasNext();) { + Pair pair = + (Pair) miterator2.next(); + Descriptor enclosingDesc = pair.getFirst(); + Descriptor desc = pair.getSecond(); + + System.out.println("---inferLoc pair=" + pair); + + CompositeLocation inferLoc = + getLocationInfo(enclosingDesc).getInferLocation(desc); + System.out.println("oldLoc=" + inferLoc); + // if (curMethodInfo.md.equals(enclosingDesc)) { + // inferLoc = curMethodInfo.getInferLocation(desc); + // } else { + // inferLoc = + // getLocationInfo(enclosingDesc).getInferLocation(desc); + // } + + Location locElement = inferLoc.get(inferLoc.getSize() - 1); + + locElement.setLocIdentifier(newMergeLoc); + locInfo.addMapLocSymbolToRelatedInferLoc(newMergeLoc, enclosingDesc, desc); + + // if (curMethodInfo.md.equals(enclosingDesc)) { + // inferLoc = curMethodInfo.getInferLocation(desc); + // } else { + // inferLoc = + // getLocationInfo(enclosingDesc).getInferLocation(desc); + // } + + inferLoc = getLocationInfo(enclosingDesc).getInferLocation(desc); + System.out.println("---New Infer Loc=" + inferLoc); + + } + + locInfo.removeRelatedInferLocSet(oldLocSymbol, newMergeLoc); + + } + + for (Iterator iterator3 = 
mergeSet.iterator(); iterator3.hasNext();) { + String oldLoc = (String) iterator3.next(); + lattice.remove(oldLoc); + } + return true; + } + } + } + } + } + + } + return false; + } + private void checkLattices() { LinkedList descriptorListToAnalyze = ssjava.getSortedDescriptors(); @@ -732,12 +1124,14 @@ public class LocationInference { } - descriptorListToAnalyze = ssjava.getSortedDescriptors(); + } + + private void calculateExtraLocations() { + LinkedList descriptorListToAnalyze = ssjava.getSortedDescriptors(); for (Iterator iterator = descriptorListToAnalyze.iterator(); iterator.hasNext();) { MethodDescriptor md = (MethodDescriptor) iterator.next(); calculateExtraLocations(md); } - } private void setMethodLocInfo(MethodDescriptor md, MethodLocationInfo methodInfo) { @@ -1192,6 +1586,327 @@ public class LocationInference { } + private void propagateFlowsFromCallee(MethodInvokeNode min, MethodDescriptor mdCaller, + MethodDescriptor mdCallee) { + + // the transformation for a call site propagates all relations between + // parameters from the callee + // if the method is virtual, it also grab all relations from any possible + // callees + + Set setPossibleCallees = new HashSet(); + if (mdCallee.isStatic()) { + setPossibleCallees.add(mdCallee); + } else { + Set calleeSet = ssjava.getCallGraph().getMethods(mdCallee); + // removes method descriptors that are not invoked by the caller + calleeSet.retainAll(mapMethodToCalleeSet.get(mdCaller)); + setPossibleCallees.addAll(calleeSet); + } + + for (Iterator iterator2 = setPossibleCallees.iterator(); iterator2.hasNext();) { + MethodDescriptor possibleMdCallee = (MethodDescriptor) iterator2.next(); + propagateFlowsToCaller(min, mdCaller, possibleMdCallee); + } + + } + + private void propagateFlowsToCaller(MethodInvokeNode min, MethodDescriptor mdCaller, + MethodDescriptor mdCallee) { + + // if the parameter A reaches to the parameter B + // then, add an edge the argument A -> the argument B to the caller's flow + // graph + + FlowGraph calleeFlowGraph = getFlowGraph(mdCallee); + FlowGraph callerFlowGraph = getFlowGraph(mdCaller); + int numParam = calleeFlowGraph.getNumParameters(); + + for (int i = 0; i < numParam; i++) { + for (int k = 0; k < numParam; k++) { + + if (i != k) { + + FlowNode paramNode1 = calleeFlowGraph.getParamFlowNode(i); + FlowNode paramNode2 = calleeFlowGraph.getParamFlowNode(k); + + NodeTupleSet tupleSetArg1 = getNodeTupleSetByArgIdx(min, i); + NodeTupleSet tupleSetArg2 = getNodeTupleSetByArgIdx(min, k); + + for (Iterator> iter1 = tupleSetArg1.iterator(); iter1.hasNext();) { + NTuple arg1Tuple = iter1.next(); + + for (Iterator> iter2 = tupleSetArg2.iterator(); iter2.hasNext();) { + NTuple arg2Tuple = iter2.next(); + + // check if the callee propagates an ordering constraints through + // parameters + + Set localReachSet = + calleeFlowGraph.getLocalReachFlowNodeSetFrom(paramNode1); + + if (localReachSet.contains(paramNode2)) { + // need to propagate an ordering relation s.t. 
arg1 is higher + // than arg2 + + if (!min.getMethod().isStatic()) { + // check if this is the case that values flow to/from the + // current object reference 'this' + + NTuple baseTuple = mapMethodInvokeNodeToBaseTuple.get(min); + Descriptor baseRef = baseTuple.get(baseTuple.size() - 1); + + // calculate the prefix of the argument + if (arg2Tuple.size() == 1 && arg2Tuple.get(0).equals(baseRef)) { + + if (!paramNode1.getCurrentDescTuple().startsWith(mdCallee.getThis())) { + + NTuple param1Prefix = + calculatePrefixForParam(callerFlowGraph, calleeFlowGraph, min, arg1Tuple, + paramNode1); + + if (param1Prefix != null && param1Prefix.startsWith(mdCallee.getThis())) { + // in this case, we need to create a new edge + // 'this.FIELD'->'this' + // but we couldn't... instead we assign the + // corresponding + // parameter a new composite location started with + // 'this' + // reference + + CompositeLocation compLocForParam1 = + generateCompositeLocation(mdCallee, param1Prefix); + + // System.out.println("set comp loc=" + compLocForParam1 + // + + // " to " + paramNode1); + paramNode1.setCompositeLocation(compLocForParam1); + continue; + } + } + + } else if (arg1Tuple.size() == 1 && arg1Tuple.get(0).equals(baseRef)) { + + if (!paramNode2.getCurrentDescTuple().startsWith(mdCallee.getThis())) { + + NTuple param2Prefix = + calculatePrefixForParam(callerFlowGraph, calleeFlowGraph, min, arg1Tuple, + paramNode1); + + if (param2Prefix != null && param2Prefix.startsWith(mdCallee.getThis())) { + // in this case, we need to create a new edge 'this' -> + // 'this.FIELD' + // but we couldn't... instead we assign the + // corresponding + // parameter a new composite location started with + // 'this' + // reference + + CompositeLocation compLocForParam2 = + generateCompositeLocation(mdCallee, param2Prefix); + + // System.out.println("set comp loc=" + compLocForParam2 + // + + // " to " + paramNode2); + paramNode1.setCompositeLocation(compLocForParam2); + continue; + } + } + + } + } + + // otherwise, flows between method/field locations... 
+ callerFlowGraph.addValueFlowEdge(arg1Tuple, arg2Tuple); + // System.out.println("arg1=" + arg1Tuple + " arg2=" + + // arg2Tuple); + + } + + } + + } + } + } + } + + } + + private CompositeLocation generateCompositeLocation(MethodDescriptor md, + NTuple param1Prefix) { + + CompositeLocation newCompLoc = convertToCompositeLocation(md, param1Prefix); + + LocationDescriptor newLocDescriptor = generateNewLocationDescriptor(); + + Descriptor enclosingDescriptor = param1Prefix.get(param1Prefix.size() - 1); + Location newLoc = new Location(enclosingDescriptor, newLocDescriptor.getSymbol()); + newLoc.setLocDescriptor(newLocDescriptor); + newCompLoc.addLocation(newLoc); + + System.out.println("newCompLoc=" + newCompLoc); + return newCompLoc; + } + + private NTuple calculatePrefixForParam(FlowGraph callerFlowGraph, + FlowGraph calleeFlowGraph, MethodInvokeNode min, NTuple arg1Tuple, + FlowNode paramNode1) { + + NTuple baseTuple = mapMethodInvokeNodeToBaseTuple.get(min); + Descriptor baseRef = baseTuple.get(baseTuple.size() - 1); + System.out.println("baseRef=" + baseRef); + + FlowNode flowNodeArg1 = callerFlowGraph.getFlowNode(arg1Tuple); + List> callerPrefixList = calculatePrefixList(callerFlowGraph, flowNodeArg1); + System.out.println("callerPrefixList=" + callerPrefixList); + + List> calleePrefixList = + translatePrefixListToCallee(baseRef, min.getMethod(), callerPrefixList); + + System.out.println("calleePrefixList=" + calleePrefixList); + + Set reachNodeSetFromParam1 = calleeFlowGraph.getReachFlowNodeSetFrom(paramNode1); + System.out.println("reachNodeSetFromParam1=" + reachNodeSetFromParam1); + + for (int i = 0; i < calleePrefixList.size(); i++) { + NTuple curPrefix = calleePrefixList.get(i); + Set> reachableCommonPrefixSet = new HashSet>(); + + for (Iterator iterator2 = reachNodeSetFromParam1.iterator(); iterator2.hasNext();) { + FlowNode reachNode = (FlowNode) iterator2.next(); + if (reachNode.getCurrentDescTuple().startsWith(curPrefix)) { + reachableCommonPrefixSet.add(reachNode.getCurrentDescTuple()); + } + } + + if (!reachableCommonPrefixSet.isEmpty()) { + System.out.println("###REACHABLECOMONPREFIX=" + reachableCommonPrefixSet + + " with curPreFix=" + curPrefix); + return curPrefix; + } + + } + + return null; + } + + private List> translatePrefixListToCallee(Descriptor baseRef, + MethodDescriptor mdCallee, List> callerPrefixList) { + + List> calleePrefixList = new ArrayList>(); + + for (int i = 0; i < callerPrefixList.size(); i++) { + NTuple prefix = callerPrefixList.get(i); + if (prefix.startsWith(baseRef)) { + NTuple calleePrefix = new NTuple(); + calleePrefix.add(mdCallee.getThis()); + for (int k = 1; k < prefix.size(); k++) { + calleePrefix.add(prefix.get(k)); + } + calleePrefixList.add(calleePrefix); + } + } + + return calleePrefixList; + + } + + private List> calculatePrefixList(FlowGraph flowGraph, FlowNode flowNode) { + + Set inNodeSet = flowGraph.getIncomingFlowNodeSet(flowNode); + inNodeSet.add(flowNode); + + List> prefixList = new ArrayList>(); + + for (Iterator iterator = inNodeSet.iterator(); iterator.hasNext();) { + FlowNode inNode = (FlowNode) iterator.next(); + + NTuple inNodeTuple = inNode.getCurrentDescTuple(); + + // CompositeLocation inNodeInferredLoc = + // generateInferredCompositeLocation(methodInfo, inNodeTuple); + // NTuple inNodeInferredLocTuple = inNodeInferredLoc.getTuple(); + + for (int i = 1; i < inNodeTuple.size(); i++) { + NTuple prefix = inNodeTuple.subList(0, i); + if (!prefixList.contains(prefix)) { + prefixList.add(prefix); + } + } + } + + 
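+ // sort candidate prefixes longest-first so that callers scanning this list
+ // pick up the most specific prefix shared with a reachable node; e.g. for an
+ // incoming tuple <this, f, g> the candidates <this, f> and <this> are tried
+ // in that order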
Collections.sort(prefixList, new Comparator>() { + public int compare(NTuple arg0, NTuple arg1) { + int s0 = arg0.size(); + int s1 = arg1.size(); + if (s0 > s1) { + return -1; + } else if (s0 == s1) { + return 0; + } else { + return 1; + } + } + }); + + return prefixList; + + } + + public CompositeLocation convertToCompositeLocation(MethodDescriptor md, NTuple tuple) { + + CompositeLocation compLoc = new CompositeLocation(); + + Descriptor enclosingDescriptor = md; + + for (int i = 0; i < tuple.size(); i++) { + Descriptor curDescriptor = tuple.get(i); + Location locElement = new Location(enclosingDescriptor, curDescriptor.getSymbol()); + locElement.setLocDescriptor(curDescriptor); + compLoc.addLocation(locElement); + + if (curDescriptor instanceof VarDescriptor) { + enclosingDescriptor = md.getClassDesc(); + } else if (curDescriptor instanceof NameDescriptor) { + // it is "GLOBAL LOC" case! + enclosingDescriptor = GLOBALDESC; + } else { + enclosingDescriptor = ((FieldDescriptor) curDescriptor).getClassDescriptor(); + } + + } + + System.out.println("convertToCompositeLocation from=" + tuple + " to " + compLoc); + + return compLoc; + } + + private LocationDescriptor generateNewLocationDescriptor() { + return new LocationDescriptor("Loc" + (locSeed++)); + } + + private int getPrefixIndex(NTuple tuple1, NTuple tuple2) { + + // return the index where the prefix shared by tuple1 and tuple2 is ended + // if there is no prefix shared by both of them, return -1 + + int minSize = tuple1.size(); + if (minSize > tuple2.size()) { + minSize = tuple2.size(); + } + + int idx = -1; + for (int i = 0; i < minSize; i++) { + if (!tuple1.get(i).equals(tuple2.get(i))) { + break; + } else { + idx++; + } + } + + return idx; + } + private void analyzeLatticeMethodInvocationNode(MethodDescriptor mdCaller, SSJavaLattice methodLattice, MethodLocationInfo methodInfo) throws CyclicFlowException { @@ -1501,7 +2216,7 @@ public class LocationInference { } Set inNodeSet = flowGraph.getIncomingFlowNodeSet(flowNode); - Set reachableNodeSet = flowGraph.getReachableFlowNodeSet(flowNode); + Set reachableNodeSet = flowGraph.getReachFlowNodeSetFrom(flowNode); Map, Set>> mapPrefixToIncomingLocTupleSet = new HashMap, Set>>(); @@ -1791,7 +2506,8 @@ public class LocationInference { String newSharedLoc = "SharedLoc" + (SSJavaLattice.seed++); System.out.println("---ASSIGN NEW SHARED LOC=" + newSharedLoc + " to " + cycleElementSet); - lattice.mergeIntoSharedLocation(cycleElementSet, newSharedLoc); + lattice.mergeIntoNewLocation(cycleElementSet, newSharedLoc); + lattice.addSharedLoc(newSharedLoc); for (Iterator iterator = cycleElementSet.iterator(); iterator.hasNext();) { String oldLocSymbol = (String) iterator.next(); @@ -1890,6 +2606,40 @@ public class LocationInference { md2lattice.put(md, lattice); } + private void extractFlowsBetweenFields(ClassDescriptor cd, FlowNode srcNode, FlowNode dstNode, + int idx) { + + NTuple srcCurTuple = srcNode.getCurrentDescTuple(); + NTuple dstCurTuple = dstNode.getCurrentDescTuple(); + + if (srcCurTuple.get(idx).equals(dstCurTuple.get(idx)) && srcCurTuple.size() > (idx + 1) + && dstCurTuple.size() > (idx + 1)) { + // value flow between fields: we don't need to add a binary relation + // for this case + + Descriptor desc = srcCurTuple.get(idx); + ClassDescriptor classDesc; + + if (idx == 0) { + classDesc = ((VarDescriptor) desc).getType().getClassDesc(); + } else { + classDesc = ((FieldDescriptor) desc).getType().getClassDesc(); + } + + extractFlowsBetweenFields(classDesc, srcNode, dstNode, idx + 1); + 
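+ // the source and destination tuples still share the descriptor at idx, so
+ // keep descending into that field's class; the first index where the two
+ // tuples differ is recorded as an edge in the field hierarchy graph by the
+ // else branch below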
+ } else { + + Descriptor srcFieldDesc = srcCurTuple.get(idx); + Descriptor dstFieldDesc = dstCurTuple.get(idx); + + // add a new edge + getHierarchyGraph(cd).addEdge(srcFieldDesc, dstFieldDesc); + + } + + } + private void extractRelationFromFieldFlows(ClassDescriptor cd, FlowNode srcNode, FlowNode dstNode, int idx) throws CyclicFlowException { @@ -1992,8 +2742,12 @@ public class LocationInference { public void constructFlowGraph() { + System.out.println(""); LinkedList methodDescList = computeMethodList(); + System.out.println("@@@methodDescList=" + methodDescList); + // System.exit(0); + while (!methodDescList.isEmpty()) { MethodDescriptor md = methodDescList.removeLast(); if (state.SSJAVADEBUG) { @@ -2017,12 +2771,90 @@ public class LocationInference { mapMethodDescriptorToFlowGraph.put(md, fg); analyzeMethodBody(md.getClassDesc(), md); + propagateFlowsFromCallees(md); + assignCompositeLocation(md); + } } _debug_printGraph(); } + private void assignCompositeLocation(MethodDescriptor md) { + + FlowGraph flowGraph = getFlowGraph(md); + + Set nodeSet = flowGraph.getNodeSet(); + + next: for (Iterator iterator = nodeSet.iterator(); iterator.hasNext();) { + FlowNode flowNode = (FlowNode) iterator.next(); + + // assign a composite location only to the local variable + if (flowNode.getCurrentDescTuple().size() == 1) { + + List> prefixList = calculatePrefixList(flowGraph, flowNode); + Set reachSet = flowGraph.getReachFlowNodeSetFrom(flowNode); + + for (int i = 0; i < prefixList.size(); i++) { + NTuple curPrefix = prefixList.get(i); + Set> reachableCommonPrefixSet = new HashSet>(); + + for (Iterator iterator2 = reachSet.iterator(); iterator2.hasNext();) { + FlowNode reachNode = (FlowNode) iterator2.next(); + if (reachNode.getCurrentDescTuple().startsWith(curPrefix)) { + reachableCommonPrefixSet.add(reachNode.getCurrentDescTuple()); + } + } + + if (!reachableCommonPrefixSet.isEmpty()) { + System.out.println("NEED TO ASSIGN COMP LOC TO " + flowNode + " with prefix=" + + curPrefix); + CompositeLocation newCompLoc = generateCompositeLocation(md, curPrefix); + flowNode.setCompositeLocation(newCompLoc); + continue next; + } + + } + } + + } + + } + + private void propagateFlowsFromCallees(MethodDescriptor mdCaller) { + + // the transformation for a call site propagates flows through parameters + // if the method is virtual, it also grab all relations from any possible + // callees + + Set setMethodInvokeNode = + mapMethodDescriptorToMethodInvokeNodeSet.get(mdCaller); + + if (setMethodInvokeNode != null) { + + for (Iterator iterator = setMethodInvokeNode.iterator(); iterator.hasNext();) { + MethodInvokeNode min = (MethodInvokeNode) iterator.next(); + MethodDescriptor mdCallee = min.getMethod(); + Set setPossibleCallees = new HashSet(); + if (mdCallee.isStatic()) { + setPossibleCallees.add(mdCallee); + } else { + Set calleeSet = ssjava.getCallGraph().getMethods(mdCallee); + // removes method descriptors that are not invoked by the caller + calleeSet.retainAll(mapMethodToCalleeSet.get(mdCaller)); + setPossibleCallees.addAll(calleeSet); + } + + for (Iterator iterator2 = setPossibleCallees.iterator(); iterator2.hasNext();) { + MethodDescriptor possibleMdCallee = (MethodDescriptor) iterator2.next(); + propagateFlowsToCaller(min, mdCaller, possibleMdCallee); + } + + } + } + + } + private void analyzeMethodBody(ClassDescriptor cd, MethodDescriptor md) { BlockNode bn = state.getMethodBody(md); NodeTupleSet implicitFlowTupleSet = new NodeTupleSet(); @@ -2072,16 +2904,48 @@ public class LocationInference { break; 
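// switch statements are now threaded through the same implicit-flow handling
// as if/loop bodies: the condition tuples are folded into the implicit flow
// set that every switch block sees (see analyzeSwitchStatementNode below)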
case Kind.SwitchStatementNode: - analyzeSwitchStatementNode(md, nametable, (SwitchStatementNode) bsn); + analyzeSwitchStatementNode(md, nametable, (SwitchStatementNode) bsn, implicitFlowTupleSet); break; } } + private void analyzeSwitchBlockNode(MethodDescriptor md, SymbolTable nametable, + SwitchBlockNode sbn, NodeTupleSet implicitFlowTupleSet) { + + analyzeFlowBlockNode(md, nametable, sbn.getSwitchBlockStatement(), implicitFlowTupleSet); + + } + private void analyzeSwitchStatementNode(MethodDescriptor md, SymbolTable nametable, - SwitchStatementNode bsn) { - // TODO Auto-generated method stub + SwitchStatementNode ssn, NodeTupleSet implicitFlowTupleSet) { + + NodeTupleSet condTupleNode = new NodeTupleSet(); + analyzeFlowExpressionNode(md, nametable, ssn.getCondition(), condTupleNode, null, + implicitFlowTupleSet, false); + + NodeTupleSet newImplicitTupleSet = new NodeTupleSet(); + + newImplicitTupleSet.addTupleSet(implicitFlowTupleSet); + newImplicitTupleSet.addTupleSet(condTupleNode); + + if (newImplicitTupleSet.size() > 1) { + // need to create an intermediate node for the GLB of conditional locations & implicit flows + NTuple interTuple = getFlowGraph(md).createIntermediateNode().getDescTuple(); + for (Iterator> idxIter = newImplicitTupleSet.iterator(); idxIter.hasNext();) { + NTuple tuple = idxIter.next(); + addFlowGraphEdge(md, tuple, interTuple); + } + newImplicitTupleSet.clear(); + newImplicitTupleSet.addTuple(interTuple); + } + + BlockNode sbn = ssn.getSwitchBody(); + for (int i = 0; i < sbn.size(); i++) { + analyzeSwitchBlockNode(md, nametable, (SwitchBlockNode) sbn.get(i), newImplicitTupleSet); + } + } private void analyzeFlowSubBlockNode(MethodDescriptor md, SymbolTable nametable, @@ -2096,19 +2960,46 @@ public class LocationInference { if (returnExp != null) { NodeTupleSet nodeSet = new NodeTupleSet(); + // if a return expression returns a literal value, nodeSet is empty analyzeFlowExpressionNode(md, nametable, returnExp, nodeSet, false); - FlowGraph fg = getFlowGraph(md); - // annotate the elements of the node set as the return location - for (Iterator iterator = nodeSet.iterator(); iterator.hasNext();) { - NTuple returnDescTuple = (NTuple) iterator.next(); - fg.addReturnFlowNode(returnDescTuple); - for (Iterator iterator2 = implicitFlowTupleSet.iterator(); iterator2.hasNext();) { - NTuple implicitFlowDescTuple = (NTuple) iterator2.next(); - fg.addValueFlowEdge(implicitFlowDescTuple, returnDescTuple); + // if (implicitFlowTupleSet.size() == 1 + // && fg.getFlowNode(implicitFlowTupleSet.iterator().next()).isIntermediate()) { + // + // // since there is already an intermediate node for the GLB of implicit flows + // // we don't need to create another intermediate node. + // // just re-use the intermediate node for implicit flows. 
+ // + // FlowNode meetNode = fg.getFlowNode(implicitFlowTupleSet.iterator().next()); + // + // for (Iterator iterator = nodeSet.iterator(); iterator.hasNext();) { + // NTuple returnNodeTuple = (NTuple) iterator.next(); + // fg.addValueFlowEdge(returnNodeTuple, meetNode.getDescTuple()); + // } + // + // } + + NodeTupleSet currentFlowTupleSet = new NodeTupleSet(); + + // add tuples from return node + currentFlowTupleSet.addTupleSet(nodeSet); + + // add tuples corresponding to the current implicit flows + currentFlowTupleSet.addTupleSet(implicitFlowTupleSet); + + if (currentFlowTupleSet.size() > 1) { + FlowNode meetNode = fg.createIntermediateNode(); + for (Iterator iterator = currentFlowTupleSet.iterator(); iterator.hasNext();) { + NTuple currentFlowTuple = (NTuple) iterator.next(); + fg.addValueFlowEdge(currentFlowTuple, meetNode.getDescTuple()); } + fg.addReturnFlowNode(meetNode.getDescTuple()); + } else if (currentFlowTupleSet.size() == 1) { + NTuple tuple = currentFlowTupleSet.iterator().next(); + fg.addReturnFlowNode(tuple); } + } } @@ -2121,10 +3012,49 @@ public class LocationInference { NodeTupleSet condTupleNode = new NodeTupleSet(); analyzeFlowExpressionNode(md, nametable, ln.getCondition(), condTupleNode, null, implicitFlowTupleSet, false); - condTupleNode.addTupleSet(implicitFlowTupleSet); + + NodeTupleSet newImplicitTupleSet = new NodeTupleSet(); + + newImplicitTupleSet.addTupleSet(implicitFlowTupleSet); + newImplicitTupleSet.addTupleSet(condTupleNode); + + if (newImplicitTupleSet.size() > 1) { + // need to create an intermediate node for the GLB of conditional locations & implicit flows + NTuple interTuple = getFlowGraph(md).createIntermediateNode().getDescTuple(); + for (Iterator> idxIter = newImplicitTupleSet.iterator(); idxIter + .hasNext();) { + NTuple tuple = idxIter.next(); + addFlowGraphEdge(md, tuple, interTuple); + } + newImplicitTupleSet.clear(); + newImplicitTupleSet.addTuple(interTuple); + + } + + // /////////// + // System.out.println("condTupleNode="+condTupleNode); + // NTuple interTuple = getFlowGraph(md).createIntermediateNode().getDescTuple(); + // + // for (Iterator> idxIter = condTupleNode.iterator(); idxIter.hasNext();) { + // NTuple tuple = idxIter.next(); + // addFlowGraphEdge(md, tuple, interTuple); + // } + + // for (Iterator> idxIter = implicitFlowTupleSet.iterator(); idxIter + // .hasNext();) { + // NTuple tuple = idxIter.next(); + // addFlowGraphEdge(md, tuple, interTuple); + // } + + // NodeTupleSet newImplicitSet = new NodeTupleSet(); + // newImplicitSet.addTuple(interTuple); + analyzeFlowBlockNode(md, nametable, ln.getBody(), newImplicitTupleSet); + // /////////// + + // condTupleNode.addTupleSet(implicitFlowTupleSet); // add edges from condNodeTupleSet to all nodes of conditional nodes - analyzeFlowBlockNode(md, nametable, ln.getBody(), condTupleNode); + // analyzeFlowBlockNode(md, nametable, ln.getBody(), condTupleNode); } else { // check 'for loop' case @@ -2138,10 +3068,33 @@ public class LocationInference { NodeTupleSet condTupleNode = new NodeTupleSet(); analyzeFlowExpressionNode(md, bn.getVarTable(), ln.getCondition(), condTupleNode, null, implicitFlowTupleSet, false); - condTupleNode.addTupleSet(implicitFlowTupleSet); - analyzeFlowBlockNode(md, bn.getVarTable(), ln.getUpdate(), condTupleNode); - analyzeFlowBlockNode(md, bn.getVarTable(), ln.getBody(), condTupleNode); + // /////////// + NTuple interTuple = getFlowGraph(md).createIntermediateNode().getDescTuple(); + + for (Iterator> idxIter = condTupleNode.iterator(); idxIter.hasNext();) { + 
NTuple tuple = idxIter.next(); + addFlowGraphEdge(md, tuple, interTuple); + } + + for (Iterator> idxIter = implicitFlowTupleSet.iterator(); idxIter + .hasNext();) { + NTuple tuple = idxIter.next(); + addFlowGraphEdge(md, tuple, interTuple); + } + + NodeTupleSet newImplicitSet = new NodeTupleSet(); + newImplicitSet.addTuple(interTuple); + analyzeFlowBlockNode(md, bn.getVarTable(), ln.getUpdate(), newImplicitSet); + analyzeFlowBlockNode(md, bn.getVarTable(), ln.getBody(), newImplicitSet); + // /////////// + + // condTupleNode.addTupleSet(implicitFlowTupleSet); + // + // analyzeFlowBlockNode(md, bn.getVarTable(), ln.getUpdate(), + // condTupleNode); + // analyzeFlowBlockNode(md, bn.getVarTable(), ln.getBody(), + // condTupleNode); } @@ -2150,35 +3103,37 @@ public class LocationInference { private void analyzeFlowIfStatementNode(MethodDescriptor md, SymbolTable nametable, IfStatementNode isn, NodeTupleSet implicitFlowTupleSet) { + System.out.println("analyzeFlowIfStatementNode=" + isn.printNode(0)); + NodeTupleSet condTupleNode = new NodeTupleSet(); analyzeFlowExpressionNode(md, nametable, isn.getCondition(), condTupleNode, null, implicitFlowTupleSet, false); -// NTuple interTuple = getFlowGraph(md).createIntermediateNode().getDescTuple(); -// for (Iterator> idxIter = condTupleNode.iterator(); idxIter.hasNext();) { -// NTuple tuple = idxIter.next(); -// addFlowGraphEdge(md, tuple, interTuple); -// } -// -// for (Iterator> idxIter = implicitFlowTupleSet.iterator(); idxIter.hasNext();) { -// NTuple tuple = idxIter.next(); -// addFlowGraphEdge(md, tuple, interTuple); -// } -// -// NodeTupleSet newImplicitSet = new NodeTupleSet(); -// newImplicitSet.addTuple(interTuple); -// analyzeFlowBlockNode(md, nametable, isn.getTrueBlock(), newImplicitSet); -// -// if (isn.getFalseBlock() != null) { -// analyzeFlowBlockNode(md, nametable, isn.getFalseBlock(), newImplicitSet); -// } - - // add edges from condNodeTupleSet to all nodes of conditional nodes - condTupleNode.addTupleSet(implicitFlowTupleSet); - analyzeFlowBlockNode(md, nametable, isn.getTrueBlock(), condTupleNode); + NodeTupleSet newImplicitTupleSet = new NodeTupleSet(); + + newImplicitTupleSet.addTupleSet(implicitFlowTupleSet); + newImplicitTupleSet.addTupleSet(condTupleNode); + + System.out.println("condTupleNode=" + condTupleNode); + System.out.println("implicitFlowTupleSet=" + implicitFlowTupleSet); + System.out.println("newImplicitTupleSet=" + newImplicitTupleSet); + + if (newImplicitTupleSet.size() > 1) { + + // need to create an intermediate node for the GLB of conditional locations & implicit flows + NTuple interTuple = getFlowGraph(md).createIntermediateNode().getDescTuple(); + for (Iterator> idxIter = newImplicitTupleSet.iterator(); idxIter.hasNext();) { + NTuple tuple = idxIter.next(); + addFlowGraphEdge(md, tuple, interTuple); + } + newImplicitTupleSet.clear(); + newImplicitTupleSet.addTuple(interTuple); + } + + analyzeFlowBlockNode(md, nametable, isn.getTrueBlock(), newImplicitTupleSet); if (isn.getFalseBlock() != null) { - analyzeFlowBlockNode(md, nametable, isn.getFalseBlock(), condTupleNode); + analyzeFlowBlockNode(md, nametable, isn.getFalseBlock(), newImplicitTupleSet); } } @@ -2195,14 +3150,25 @@ public class LocationInference { if (dn.getExpression() != null) { - NodeTupleSet tupleSetRHS = new NodeTupleSet(); - analyzeFlowExpressionNode(md, nametable, dn.getExpression(), tupleSetRHS, null, + NodeTupleSet nodeSetRHS = new NodeTupleSet(); + analyzeFlowExpressionNode(md, nametable, dn.getExpression(), nodeSetRHS, null, 
implicitFlowTupleSet, false); - // add a new flow edge from rhs to lhs - for (Iterator> iter = tupleSetRHS.iterator(); iter.hasNext();) { - NTuple from = iter.next(); - addFlowGraphEdge(md, from, tupleLHS); + // creates edges from RHS to LHS + NTuple interTuple = null; + if (nodeSetRHS.size() > 1) { + interTuple = getFlowGraph(md).createIntermediateNode().getDescTuple(); + } + + for (Iterator> iter = nodeSetRHS.iterator(); iter.hasNext();) { + NTuple fromTuple = iter.next(); + addFlowGraphEdge(md, fromTuple, interTuple, tupleLHS); + } + + // creates edges from implicitFlowTupleSet to LHS + for (Iterator> iter = implicitFlowTupleSet.iterator(); iter.hasNext();) { + NTuple implicitTuple = iter.next(); + addFlowGraphEdge(md, implicitTuple, tupleLHS); } } @@ -2361,8 +3327,6 @@ public class LocationInference { nodeSet = new NodeTupleSet(); } - addMapCallerMethodDescToMethodInvokeNodeSet(md, min); - MethodDescriptor calleeMethodDesc = min.getMethod(); NameDescriptor baseName = min.getBaseName(); @@ -2374,15 +3338,22 @@ public class LocationInference { if (!ssjava.isSSJavaUtil(calleeMethodDesc.getClassDesc()) && !ssjava.isTrustMethod(calleeMethodDesc) && !isSystemout) { + addMapCallerMethodDescToMethodInvokeNodeSet(md, min); + FlowGraph calleeFlowGraph = getFlowGraph(calleeMethodDesc); Set calleeReturnSet = calleeFlowGraph.getReturnNodeSet(); + System.out.println("#calleeReturnSet=" + calleeReturnSet); + if (min.getExpression() != null) { NodeTupleSet baseNodeSet = new NodeTupleSet(); analyzeFlowExpressionNode(md, nametable, min.getExpression(), baseNodeSet, null, implicitFlowTupleSet, false); + assert (baseNodeSet.size() == 1); + mapMethodInvokeNodeToBaseTuple.put(min, baseNodeSet.iterator().next()); + if (!min.getMethod().isStatic()) { addArgIdxMap(min, 0, baseNodeSet); @@ -2410,6 +3381,7 @@ public class LocationInference { } } } + } // analyze parameter flows @@ -2427,7 +3399,7 @@ public class LocationInference { ExpressionNode en = min.getArg(i); int idx = i + offset; NodeTupleSet argTupleSet = new NodeTupleSet(); - analyzeFlowExpressionNode(md, nametable, en, argTupleSet, true); + analyzeFlowExpressionNode(md, nametable, en, argTupleSet, false); // if argument is liternal node, argTuple is set to NULL. 
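// the per-index tuple sets recorded by addArgIdxMap below are what
// propagateFlowsToCaller later reads back through getNodeTupleSetByArgIdx
// when it maps orderings between callee parameters onto the caller's arguments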
addArgIdxMap(min, idx, argTupleSet); FlowNode paramNode = calleeFlowGraph.getParamFlowNode(idx); @@ -2440,13 +3412,15 @@ public class LocationInference { } + // propagateFlowsFromCallee(min, md, min.getMethod()); + } } private boolean hasInFlowTo(FlowGraph fg, FlowNode inNode, Set nodeSet) { // return true if inNode has in-flows to nodeSet - Set reachableSet = fg.getReachableFlowNodeSet(inNode); + Set reachableSet = fg.getReachFlowNodeSetFrom(inNode); for (Iterator iterator = reachableSet.iterator(); iterator.hasNext();) { FlowNode fn = (FlowNode) iterator.next(); if (nodeSet.contains(fn)) { @@ -2600,6 +3574,8 @@ public class LocationInference { private NTuple analyzeFlowNameNode(MethodDescriptor md, SymbolTable nametable, NameNode nn, NodeTupleSet nodeSet, NTuple base, NodeTupleSet implicitFlowTupleSet) { + // System.out.println("analyzeFlowNameNode=" + nn.printNode(0)); + if (base == null) { base = new NTuple(); } @@ -2650,7 +3626,7 @@ public class LocationInference { } else if (d == null) { // access static field base.add(GLOBALDESC); - // base.add(nn.getField()); + base.add(nn.getField()); return base; // FieldDescriptor fd = nn.getField();addFlowGraphEdge @@ -2736,7 +3712,6 @@ public class LocationInference { getFlowGraph(md).addValueFlowEdge(idxTuple, flowFieldTuple); } } - return flowFieldTuple; } @@ -2772,11 +3747,11 @@ public class LocationInference { analyzeFlowExpressionNode(md, nametable, an.getSrc(), nodeSetRHS, null, implicitFlowTupleSet, false); - // System.out.println("-analyzeFlowAssignmentNode=" + an.printNode(0)); - // System.out.println("-nodeSetLHS=" + nodeSetLHS); - // System.out.println("-nodeSetRHS=" + nodeSetRHS); - // System.out.println("-implicitFlowTupleSet=" + implicitFlowTupleSet); - // System.out.println("-"); + System.out.println("-analyzeFlowAssignmentNode=" + an.printNode(0)); + System.out.println("-nodeSetLHS=" + nodeSetLHS); + System.out.println("-nodeSetRHS=" + nodeSetRHS); + System.out.println("-implicitFlowTupleSet=" + implicitFlowTupleSet); + System.out.println("-"); if (an.getOperation().getOp() >= 2 && an.getOperation().getOp() <= 12) { // if assignment contains OP+EQ operator, creates edges from LHS to LHS diff --git a/Robust/src/Analysis/SSJava/LocationInfo.java b/Robust/src/Analysis/SSJava/LocationInfo.java index ed90ed50..c7cedcdf 100644 --- a/Robust/src/Analysis/SSJava/LocationInfo.java +++ b/Robust/src/Analysis/SSJava/LocationInfo.java @@ -45,7 +45,7 @@ public class LocationInfo { public Map getMapDescToInferLocation() { return mapDescToInferCompositeLocation; } - + public void addMapLocSymbolToRelatedInferLoc(String locSymbol, Descriptor enclosingDesc, Descriptor desc) { if (!mapLocSymbolToRelatedInferLocSet.containsKey(locSymbol)) { @@ -53,11 +53,15 @@ public class LocationInfo { } mapLocSymbolToRelatedInferLocSet.get(locSymbol).add( new Pair(enclosingDesc, desc)); - + addMapLocSymbolToDescSet(locSymbol, desc); } public Set> getRelatedInferLocSet(String locSymbol) { + + if (!mapLocSymbolToRelatedInferLocSet.containsKey(locSymbol)) { + mapLocSymbolToRelatedInferLocSet.put(locSymbol, new HashSet>()); + } return mapLocSymbolToRelatedInferLocSet.get(locSymbol); } @@ -80,7 +84,7 @@ public class LocationInfo { newInferLoc.addLocation(loc); mapDescToInferCompositeLocation.put(desc, newInferLoc); -// addMapLocSymbolToDescSet(desc.getSymbol(), desc); + // addMapLocSymbolToDescSet(desc.getSymbol(), desc); addMapLocSymbolToRelatedInferLoc(desc.getSymbol(), enclosingDesc, desc); } return mapDescToInferCompositeLocation.get(desc); diff --git 
a/Robust/src/Analysis/SSJava/MethodSummary.java b/Robust/src/Analysis/SSJava/MethodSummary.java new file mode 100644 index 00000000..ebe588b0 --- /dev/null +++ b/Robust/src/Analysis/SSJava/MethodSummary.java @@ -0,0 +1,5 @@ +package Analysis.SSJava; + +public class MethodSummary { + +} diff --git a/Robust/src/Analysis/SSJava/NodeTupleSet.java b/Robust/src/Analysis/SSJava/NodeTupleSet.java index 805a210d..62c313b7 100644 --- a/Robust/src/Analysis/SSJava/NodeTupleSet.java +++ b/Robust/src/Analysis/SSJava/NodeTupleSet.java @@ -58,4 +58,8 @@ public class NodeTupleSet { public int size() { return list.size(); } + + public void clear() { + list.clear(); + } } diff --git a/Robust/src/Analysis/SSJava/SSJavaLattice.java b/Robust/src/Analysis/SSJava/SSJavaLattice.java index 2611d9f7..4343f2de 100644 --- a/Robust/src/Analysis/SSJava/SSJavaLattice.java +++ b/Robust/src/Analysis/SSJava/SSJavaLattice.java @@ -1,7 +1,9 @@ package Analysis.SSJava; +import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; +import java.util.Map; import java.util.Set; import Util.Lattice; @@ -27,17 +29,17 @@ public class SSJavaLattice extends Lattice { public boolean isSharedLoc(T loc) { return sharedLocSet.contains(loc); } - - public Set getElementSet(){ - Set set=new HashSet(); - - Set keySet=getKeySet(); + + public Set getElementSet() { + Set set = new HashSet(); + + Set keySet = getKeySet(); for (Iterator iterator = keySet.iterator(); iterator.hasNext();) { T key = (T) iterator.next(); set.add(key); set.addAll(getTable().get(key)); } - + set.remove(getTopItem()); set.remove(getBottomItem()); return set; @@ -108,11 +110,10 @@ public class SSJavaLattice extends Lattice { } } - public void mergeIntoSharedLocation(Set cycleSet, T newLoc) { + public void mergeIntoNewLocation(Set cycleSet, T newLoc) { - // add a new shared loc + // add a new loc put(newLoc); - addSharedLoc(newLoc); Set keySet = getKeySet(); @@ -297,4 +298,25 @@ public class SSJavaLattice extends Lattice { } return false; } + + public Map> getIncomingElementMap() { + Map> map = new HashMap>(); + + Set keySet = getKeySet(); + for (Iterator iterator = keySet.iterator(); iterator.hasNext();) { + T key = (T) iterator.next(); + + Set incomingSet = new HashSet(); + + for (Iterator iterator2 = keySet.iterator(); iterator2.hasNext();) { + T in = (T) iterator2.next(); + if (!in.equals(key) && get(in).contains(key)) { + incomingSet.add(in); + } + } + map.put(key, incomingSet); + } + + return map; + } }
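Note on the recurring pattern in the LocationInference hunks above: each new statement handler (if, loop, switch, return, declaration) creates a fresh intermediate node whenever more than one tuple flows into a single target, which is the "combination node" idea from the commit message. Below is a minimal, self-contained sketch of that pattern; the class and node names are invented for illustration, and it is a simplified model rather than code from this repository.

import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

// Hypothetical, simplified model of the "combination node" pattern: when more
// than one source flows into a target, route every source through one fresh
// intermediate node and let only that node flow into the target.
public class CombinationNodeSketch {

  private final List<String[]> edges = new ArrayList<String[]>();
  private int locSeed = 0;

  // every call returns a fresh node name, analogous to createIntermediateNode()
  private String createIntermediateNode() {
    return "INTERLOC" + (locSeed++);
  }

  public void addCombinedFlow(Set<String> sources, String target) {
    if (sources.size() > 1) {
      // several sources: combine them first, then flow to the target
      String inter = createIntermediateNode();
      for (String src : sources) {
        edges.add(new String[] { src, inter });
      }
      edges.add(new String[] { inter, target });
    } else if (sources.size() == 1) {
      // a single source needs no combination node
      edges.add(new String[] { sources.iterator().next(), target });
    }
  }

  public static void main(String[] args) {
    CombinationNodeSketch g = new CombinationNodeSketch();
    Set<String> sources = new LinkedHashSet<String>();
    sources.add("outerCond"); // implicit flow from an enclosing branch
    sources.add("loopCond");  // condition of the current loop
    g.addCombinedFlow(sources, "x");
    for (String[] e : g.edges) {
      System.out.println(e[0] + " -> " + e[1]);
    }
  }
}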