Rename .data.cacheline_aligned to .data..cacheline_aligned.
author      Tim Abbott <tabbott@ksplice.com>
            Sat, 20 Feb 2010 00:03:34 +0000 (01:03 +0100)
committer   Michal Marek <mmarek@suse.cz>
            Wed, 3 Mar 2010 10:25:58 +0000 (11:25 +0100)
Signed-off-by: Tim Abbott <tabbott@ksplice.com>
Cc: Sam Ravnborg <sam@ravnborg.org>
Signed-off-by: Denys Vlasenko <vda.linux@googlemail.com>
Signed-off-by: Michal Marek <mmarek@suse.cz>
arch/powerpc/kernel/vmlinux.lds.S
arch/x86/kernel/init_task.c
include/asm-generic/vmlinux.lds.h
include/linux/cache.h

diff --git a/arch/powerpc/kernel/vmlinux.lds.S b/arch/powerpc/kernel/vmlinux.lds.S
index dcd01c82e7013c8e08f797eaa40c9543d5f2fc01..3229c06221611d315386a2e8c316e47120c07f5d 100644
--- a/arch/powerpc/kernel/vmlinux.lds.S
+++ b/arch/powerpc/kernel/vmlinux.lds.S
@@ -231,7 +231,7 @@ SECTIONS
                PAGE_ALIGNED_DATA(PAGE_SIZE)
        }
 
-       .data.cacheline_aligned : AT(ADDR(.data.cacheline_aligned) - LOAD_OFFSET) {
+       .data..cacheline_aligned : AT(ADDR(.data..cacheline_aligned) - LOAD_OFFSET) {
                CACHELINE_ALIGNED_DATA(L1_CACHE_BYTES)
        }
 
diff --git a/arch/x86/kernel/init_task.c b/arch/x86/kernel/init_task.c
index 3a54dcb9cd0e6c82b8b9231e88afd7305be6e5da..43e9ccf449471286fd0013ae1bc28ce83778b9a6 100644
--- a/arch/x86/kernel/init_task.c
+++ b/arch/x86/kernel/init_task.c
@@ -34,7 +34,7 @@ EXPORT_SYMBOL(init_task);
 /*
  * per-CPU TSS segments. Threads are completely 'soft' on Linux,
  * no more per-task TSS's. The TSS size is kept cacheline-aligned
- * so they are allowed to end up in the .data.cacheline_aligned
+ * so they are allowed to end up in the .data..cacheline_aligned
  * section. Since TSS's are completely CPU-local, we want them
  * on exact cacheline boundaries, to eliminate cacheline ping-pong.
  */
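(Illustrative sketch, not part of the patch; the struct and variable names below are hypothetical. With the <linux/cache.h> fallback changed in the include/linux/cache.h hunk further down, a variable tagged __cacheline_aligned is aligned to SMP_CACHE_BYTES and emitted into the renamed .data..cacheline_aligned input section, which the CACHELINE_ALIGNED_DATA() rule in the linker scripts then collects; that is the same placement the TSS comment above relies on.)

	#include <linux/cache.h>

	/* Hypothetical hot data that should start on its own cache line
	 * to avoid cacheline ping-pong with unrelated neighbours. */
	struct hot_counters {
		unsigned long hits;
		unsigned long misses;
	};

	static struct hot_counters example_counters __cacheline_aligned;
	/* example_counters now ends up in .data..cacheline_aligned. */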
diff --git a/include/asm-generic/vmlinux.lds.h b/include/asm-generic/vmlinux.lds.h
index 67e652068e0e45b8031cafd2fb236be5e692c7c4..78450aaab9ef35f7f590ad1d91250f0b1baebd3b 100644
--- a/include/asm-generic/vmlinux.lds.h
+++ b/include/asm-generic/vmlinux.lds.h
 
 #define CACHELINE_ALIGNED_DATA(align)                                  \
        . = ALIGN(align);                                               \
-       *(.data.cacheline_aligned)
+       *(.data..cacheline_aligned)
 
 #define INIT_TASK_DATA(align)                                          \
        . = ALIGN(align);                                               \
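(For reference, a rough sketch of how the renamed macro expands: substituting the powerpc use from the first hunk, .data..cacheline_aligned : AT(ADDR(.data..cacheline_aligned) - LOAD_OFFSET) { CACHELINE_ALIGNED_DATA(L1_CACHE_BYTES) } becomes approximately the following linker-script fragment.)

	.data..cacheline_aligned : AT(ADDR(.data..cacheline_aligned) - LOAD_OFFSET) {
		. = ALIGN(L1_CACHE_BYTES);	/* from CACHELINE_ALIGNED_DATA(align) */
		*(.data..cacheline_aligned)	/* gather all input sections of that name */
	}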
diff --git a/include/linux/cache.h b/include/linux/cache.h
index 97e24881c4c6f477496130c982e1962e770df9eb..4c570653ab84f9822ba9e1e1f19a41242c16b74f 100644
--- a/include/linux/cache.h
+++ b/include/linux/cache.h
@@ -31,7 +31,7 @@
 #ifndef __cacheline_aligned
 #define __cacheline_aligned                                    \
   __attribute__((__aligned__(SMP_CACHE_BYTES),                 \
-                __section__(".data.cacheline_aligned")))
+                __section__(".data..cacheline_aligned")))
 #endif /* __cacheline_aligned */
 
 #ifndef __cacheline_aligned_in_smp
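
(Continuing the hypothetical example from the init_task.c hunk above: under this fallback definition, "static struct hot_counters example_counters __cacheline_aligned;" expands to roughly the following, giving the variable SMP_CACHE_BYTES alignment and placing it in the renamed section.)

	static struct hot_counters example_counters
		__attribute__((__aligned__(SMP_CACHE_BYTES),
			       __section__(".data..cacheline_aligned")));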