2 Lonestar Benchmark Suite for irregular applications that exhibit
3 amorphous data-parallelism.
5 Center for Grid and Distributed Computing
6 The University of Texas at Austin
8 Copyright (C) 2007, 2008, 2009 The University of Texas at Austin
10 Licensed under the Eclipse Public License, Version 1.0 (the "License");
11 you may not use this file except in compliance with the License.
12 You may obtain a copy of the License at
14 http://www.eclipse.org/legal/epl-v10.html
16 Unless required by applicable law or agreed to in writing, software
17 distributed under the License is distributed on an "AS IS" BASIS,
18 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
19 See the License for the specific language governing permissions and
20 limitations under the License.
22 File: SerialDelaunayRefinement.java
// NOTE(review): every line below begins with a fused original source line
// number (e.g. "26 ", "40 ") and many intermediate lines are missing
// (closing braces, the assignment that defines 'runtime', else-branches,
// return statements — the fused numbers jump 28->32, 44->47, 163->168, ...).
// This chunk is a corrupted extraction and will NOT compile as-is; comments
// only were added here, code bytes are untouched.
//
// Serial driver for the Lonestar Delaunay Mesh Refinement benchmark:
// repeatedly re-triangulates "bad" triangles (minimum angle < 30 degrees,
// per the banner text below) until the worklist is empty.
26 public class SerialDelaunayRefinement {
// Presumably lets expensive verification run only on the first timed run
// (see the TODO near the end of run()) — the assignment site is not visible
// in this chunk; TODO confirm.
27 public boolean isFirstRun;
28 public SerialDelaunayRefinement() {
// Entry point; constructs the driver and presumably delegates to
// sdr.runMain(args) on a line missing from this chunk — TODO confirm.
32 public static void main(String args[]) {
33 SerialDelaunayRefinement sdr = new SerialDelaunayRefinement();
// Benchmark harness: repeats run(args) until timings stabilize and reports
// the minimum runtime. NOTE(review): 'runtime' used in the loop condition
// below is assigned on a missing line (presumably runtime = run(args)).
37 public void runMain(String args[]) {
39 // Both trackers start at Long.MAX_VALUE so the first real measurement wins.
40 long lasttime = 0x7fffffffffffffffL;
41 long mintime = 0x7fffffffffffffffL;
// At least 3 and at most 7 iterations; stops early once consecutive
// runtimes agree to within 1/64 (~1.6%) of the smaller of the two.
42 for (long run = 0; ((run < 3) || Math.abs(lasttime - runtime) * 64 > Math.min(lasttime, runtime)) && run < 7; run++) {
// Track the best run; the then-branch (presumably mintime = runtime;) is on
// a missing line.
44 if (runtime < mintime) {
// Explicit GC between runs to keep allocation noise out of the next timing.
47 System.out.println( "Post-run garbage collection started..." );
49 System.out.println( "done gc" );
52 System.out.println("minimum runtime: " + mintime + " ms");
53 System.out.println("");
// One timed refinement pass. args[0] = input mesh file, args[1] = worker
// count. Returns the elapsed time in ms — presumably 'time' computed below;
// the return statement itself is on a missing line.
57 public long run(String args[]) {
60 System.out.println("Lonestar Benchmark Suite v2.1");
61 System.out.println("Copyright (C) 2007, 2008, 2009 The University of Texas at Austin");
62 System.out.println("http://iss.ices.utexas.edu/lonestar/");
64 System.out.println("application: Delaunay Mesh Refinement (serial version)");
65 System.out.println("Refines a Delaunay triangulation mesh such that no angle");
66 System.out.println("in the mesh is less than 30 degrees");
67 System.out.println("http://iss.ices.utexas.edu/lonestar/delaunayrefinement.html");
// Usage check; the early exit after the message is on a missing line.
70 if (args.length < 2) {
71 System.out.println("Arguments: <input file> <num workers> [verify]");
75 EdgeGraph mesh = new UndirectedEdgeGraph();
// NOTE(review): 'm' (presumably a Mesh reader/helper) is declared on a
// missing line — confirm against the original file.
78 m.read(mesh, args[0]);
80 int numWorkers = Integer.parseInt( args[1] );
// Raw java.util.Stack of Node: the worklist of bad triangles to refine.
82 Stack worklist = new Stack();
84 // REPLACE worklist.addAll(Mesh.getBad(mesh)); WITH...
85 HashMapIterator it = m.getBad(mesh).iterator();
86 while (it.hasNext()) {
87 worklist.push(it.next());
91 System.out.println("configuration: " +
92 mesh.getNumNodes() + " total triangles, " +
93 worklist.size() + " bad triangles");
// GC once more so setup garbage does not pollute the timed region below.
97 System.out.println( "Post-config garbage collection started..." );
99 System.out.println( "done gc" );
102 // long id = Time.getNewTimeId();
103 long startTime = System.currentTimeMillis();
// Main refinement loop: drain the bad-triangle worklist in batches of
// numWorkers, processed in three phases per batch.
104 while (!worklist.isEmpty()) {
106 // Phase 1, grab enough work from list for
107 // each worker in the parent
108 Node[] nodesForBadTris = new Node[numWorkers];
109 for(int i=0;i<numWorkers;i++) {
// Pad with nulls when the worklist runs dry mid-batch; the else keyword
// pairing these two assignments is on a missing line.
110 if(worklist.isEmpty()) {
111 nodesForBadTris[i] = null;
113 nodesForBadTris[i] = (Node) worklist.pop();
117 // Phase 2, read mesh and compute cavities in parallel
118 Cavity[] cavities = new Cavity[numWorkers];
119 for(int i=0;i<numWorkers;i++) {
121 Node nodeForBadTri = nodesForBadTris[i];
// Skip slots that are null or whose triangle was already removed from the
// mesh by an earlier cavity in a previous batch.
122 if (nodeForBadTri != null
123 //&& mesh.containsNode(nodeForBadTri)
124 && nodeForBadTri.inGraph
129 Cavity cavity = new Cavity(mesh);
131 // Appears to only call getters (only possible conflict is inherent in HashMap)
132 cavity.initialize(nodeForBadTri);
134 // Takes up 15% of computation
135 // Problem: build() recurses neighbor upon neighbor upon neighbor;
136 // this could be a problem for deep cavities — TODO revisit.
137 // TODO check dimensions problem
140 // Takes up a whopping 50% of computation time and changes NOTHING but cavity
145 cavities[i] = cavity;
150 // Phase 3, apply cavities to mesh, if still applicable
151 // this phase can proceed in parallel when a cavity's
152 // start nodes are still present
153 for(int i=0;i<numWorkers;i++) {
155 Cavity cavity = cavities[i];
// Stays null when the cavity applied cleanly; set below when it must be
// rescheduled.
157 Node nodeForBadTri = null;
161 // go ahead with applying cavity when all of its
162 // pre-nodes are still in
163 if( cavity.getPre().allNodesStillInCompleteGraph() ) {
168 // Takes up 8.9% of runtime
// Remove the old (pre-cavity) triangles from the mesh.
169 for (Iterator iterator = cavity.getPre().getNodes().iterator(); iterator.hasNext();) {
170 node = (Node) iterator.next();
171 mesh.removeNode(node);
175 // Takes up 1.7% of runtime
// Insert the re-triangulated (post-cavity) nodes; the addNode call is on a
// missing line.
176 for (Iterator iterator1 = cavity.getPost().getNodes().iterator(); iterator1.hasNext();) {
177 node = (Node) iterator1.next();
181 // Takes up 7.8% of runtime
// Insert the post-cavity edges; the addEdge call is on a missing line.
183 for (Iterator iterator2 = cavity.getPost().getEdges().iterator(); iterator2.hasNext();) {
184 edge = (Edge_d) iterator2.next();
189 // otherwise forget the cavity and reschedule the bad triangle
190 nodeForBadTri = nodesForBadTris[i];
// NOTE(review): 'sese' is not standard Java — it looks like an
// OoOJava/Bamboo single-entry-single-exit task annotation; confirm the
// intended compiler toolchain.
195 sese scheduleMoreBad {
197 // if we couldn't even apply this cavity, just
198 // throw it back on the worklist
199 if( nodeForBadTri != null ) {
201 if( nodeForBadTri.inGraph ) {
202 worklist.push( nodeForBadTri );
206 // otherwise we did apply the cavity, but we
207 // may have introduced new bad triangles
208 HashMapIterator it2 = cavity.getPost().newBad(mesh).iterator();
209 while (it2.hasNext()) {
210 worklist.push((Node)it2.next());
213 } // end scheduleMoreBad
216 } // end while( !worklist.isEmpty() )
218 long time = System.currentTimeMillis() - startTime;
219 System.out.println("runtime: " + time + " ms");
220 //TODO note how we only verify on first run...
221 //TODO verify is outside timing, do it each time
222 // since we MIGHT compute something different when
// Checks structural validity and that no bad triangles remain. NOTE: prints
// failure messages instead of throwing (the throw at the next line is
// commented out), so a failed verification does not abort the benchmark.
233 public void verify(EdgeGraph result) {
234 // Separate instance method kept because of static-context issues.
236 if (!m.verify(result)) {
237 // throw new IllegalStateException("refinement failed.");
238 System.out.println("Refinement Failed.");
// A successful structural check can still leave bad triangles; count them.
242 int size = m.getBad(result).size();
244 System.out.println("refinement failed\nstill have "+size+" bad triangles left.\n");
247 System.out.println("OK");