*/
bool CycleGraph::checkReachable(CycleNode *from, CycleNode *to) {
std::vector<CycleNode *, MyAlloc<CycleNode *> > queue;
- HashTable<CycleNode *, CycleNode *, uintptr_t, 4, model_malloc, model_calloc, MYFREE> discovered;
+ HashTable<CycleNode *, CycleNode *, uintptr_t, 4, model_malloc, model_calloc, model_free> discovered;
queue.push_back(from);
discovered.put(from, from);
}
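/*
 * Aside: the hunk above only shows the set-up of the reachability check: a
 * worklist plus a `discovered` table so that no node is visited twice. A
 * minimal, self-contained sketch of that search using standard containers
 * follows; `GraphNode` and its `edges` vector are illustrative stand-ins for
 * CycleNode and its successor list, not the model checker's own types.
 */
#include <vector>
#include <set>

struct GraphNode {
	std::vector<GraphNode *> edges;	/* outgoing edges */
};

static bool reachable(GraphNode *from, GraphNode *to)
{
	std::vector<GraphNode *> queue;
	std::set<GraphNode *> discovered;

	queue.push_back(from);
	discovered.insert(from);
	while (!queue.empty()) {
		GraphNode *node = queue.back();
		queue.pop_back();
		if (node == to)
			return true;
		for (unsigned int i = 0; i < node->edges.size(); i++) {
			GraphNode *next = node->edges[i];
			/* enqueue only nodes not seen before */
			if (discovered.insert(next).second)
				queue.push_back(next);
		}
	}
	return false;
}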
/** Non-snapshotting free for our use. */
-void MYFREE(void *ptr) {
+void model_free(void *ptr) {
#if USE_MPROTECT_SNAPSHOT
static void (*freep)(void *);
char *error;
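/*
 * The comment above calls this a non-snapshotting free, and the cached
 * `freep` pointer together with the `error` string suggest the usual
 * dlsym(RTLD_NEXT, ...) interposition pattern: resolve the next (libc)
 * free() once, then forward to it so the block bypasses the snapshotting
 * allocator. A self-contained sketch of that pattern, under that assumption
 * (link with -ldl); it is illustrative, not the exact body of model_free():
 */
#ifndef _GNU_SOURCE
#define _GNU_SOURCE	/* RTLD_NEXT needs this on glibc; define before any include */
#endif
#include <dlfcn.h>
#include <stdio.h>
#include <stdlib.h>

static void passthrough_free(void *ptr)
{
	static void (*freep)(void *);	/* looked up once, then cached */
	char *error;

	if (!freep) {
		freep = (void (*)(void *))dlsym(RTLD_NEXT, "free");
		if ((error = dlerror()) != NULL) {
			fputs(error, stderr);
			exit(EXIT_FAILURE);
		}
	}
	freep(ptr);	/* hand the block back to the real allocator */
}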
return model_malloc(size);\
}\
void operator delete(void *p, size_t size) { \
- MYFREE( p ); \
+ model_free( p ); \
}\
void * operator new[](size_t size) { \
return model_malloc(size);\
}\
void operator delete[](void *p, size_t size) {\
- MYFREE(p);\
+ model_free(p);\
}
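/*
 * For context: the macro body above pastes class-scope operator new/delete
 * overloads into a class, so that `new`/`delete` of its instances route
 * through model_malloc/model_free rather than the default heap. A
 * stand-alone illustration of the same technique, with my_alloc/my_release
 * as hypothetical placeholders for those functions:
 */
#include <stdlib.h>

static void *my_alloc(size_t size) { return malloc(size); }
static void my_release(void *p) { free(p); }

class Tracked {
public:
	void *operator new(size_t size) { return my_alloc(size); }
	void operator delete(void *p) { my_release(p); }
	void *operator new[](size_t size) { return my_alloc(size); }
	void operator delete[](void *p) { my_release(p); }
private:
	int payload;
};

/* `new Tracked` now allocates via my_alloc, and `delete t` releases via my_release. */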
/** SNAPSHOTALLOC declares the allocators for a class to allocate memory in the snapshotting heap. */
void *model_malloc(size_t size);
void *model_calloc(size_t count, size_t size);
-void MYFREE(void *ptr);
+void model_free(void *ptr);
static inline void * snapshot_malloc(size_t size) {
return malloc(size);
// deallocate storage p of deleted elements
void deallocate (pointer p, size_type num) {
- MYFREE((void*)p);
+ model_free((void*)p);
}
};
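/*
 * MyAlloc appears to be a standard-library allocator whose allocate/deallocate
 * forward to the model checker's own malloc/free, presumably so containers
 * such as the std::vector in checkReachable() are backed by the
 * non-snapshotting heap. A self-contained sketch of such an allocator,
 * written against the simpler C++11 minimal-allocator interface
 * (arena_malloc/arena_free and ArenaAllocator are hypothetical stand-ins,
 * not the project's names):
 */
#include <cstddef>
#include <new>
#include <stdlib.h>
#include <vector>

static void *arena_malloc(size_t size) { return malloc(size); }
static void arena_free(void *p) { free(p); }

template <class T>
struct ArenaAllocator {
	typedef T value_type;

	ArenaAllocator() {}
	template <class U> ArenaAllocator(const ArenaAllocator<U> &) {}

	T *allocate(size_t n) {
		void *p = arena_malloc(n * sizeof(T));
		if (!p)
			throw std::bad_alloc();
		return static_cast<T *>(p);
	}
	/* deallocate storage p of deleted elements, mirroring MyAlloc above */
	void deallocate(T *p, size_t) {
		arena_free(p);
	}
};

template <class T, class U>
bool operator==(const ArenaAllocator<T> &, const ArenaAllocator<U> &) { return true; }
template <class T, class U>
bool operator!=(const ArenaAllocator<T> &, const ArenaAllocator<U> &) { return false; }

/* usage, in the style of the vector declared in checkReachable():
 *	std::vector<int, ArenaAllocator<int> > v;
 *	v.push_back(42);
 */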
if (action)
delete action;
if (enabled_array)
- MYFREE(enabled_array);
+ model_free(enabled_array);
}
/** Prints debugging info for the ModelAction associated with this Node */
}
struct stackEntry *tmp=stack;
stack=stack->next;
- MYFREE(tmp);
+ model_free(tmp);
}
}
*/
void rollBack( snapshot_id theID ){
#if USE_MPROTECT_SNAPSHOT
- HashTable< void *, bool, uintptr_t, 4, model_malloc, model_calloc, MYFREE> duplicateMap;
+ HashTable< void *, bool, uintptr_t, 4, model_malloc, model_calloc, model_free> duplicateMap;
for(unsigned int region=0; region<snapshotrecord->lastRegion;region++) {
if( mprotect(snapshotrecord->regionsToSnapShot[region].basePtr, snapshotrecord->regionsToSnapShot[region].sizeInPages*sizeof(struct SnapShotPage), PROT_READ | PROT_WRITE ) == -1 ){
perror("mprotect");