/*
 * Copyright (c) 2005 Topspin Communications.  All rights reserved.
 *
 * This software is available to you under a choice of one of two
 * licenses.  You may choose to be licensed under the terms of the GNU
 * General Public License (GPL) Version 2, available from the file
 * COPYING in the main directory of this source tree, or the
 * OpenIB.org BSD license below:
 *
 *     Redistribution and use in source and binary forms, with or
 *     without modification, are permitted provided that the following
 *     conditions are met:
 *
 *      - Redistributions of source code must retain the above
 *        copyright notice, this list of conditions and the following
 *        disclaimer.
 *
 *      - Redistributions in binary form must reproduce the above
 *        copyright notice, this list of conditions and the following
 *        disclaimer in the documentation and/or other materials
 *        provided with the distribution.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
 * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
 * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

#ifndef INFINIBAND_ARCH_H
#define INFINIBAND_ARCH_H

#include <stdint.h>
#include <endian.h>
#include <byteswap.h>

/*
 * Some platforms define htonll/ntohll as macros; undefine them so the
 * inline versions below take effect.
 */
#ifdef htonll
#undef htonll
#endif

#ifdef ntohll
#undef ntohll
#endif

#if __BYTE_ORDER == __LITTLE_ENDIAN
static inline uint64_t htonll(uint64_t x) { return bswap_64(x); }
static inline uint64_t ntohll(uint64_t x) { return bswap_64(x); }
#elif __BYTE_ORDER == __BIG_ENDIAN
static inline uint64_t htonll(uint64_t x) { return x; }
static inline uint64_t ntohll(uint64_t x) { return x; }
#else
#error __BYTE_ORDER is neither __LITTLE_ENDIAN nor __BIG_ENDIAN
#endif

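/*
 * Usage sketch (illustrative only, not part of this header's API):
 * InfiniBand wire formats are big-endian, so 64-bit fields such as
 * GUIDs or remote addresses are converted at the host boundary:
 *
 *	uint64_t wire = htonll(host_val);	// host -> network order
 *	uint64_t host = ntohll(wire);		// network -> host order
 *
 * On big-endian hosts both calls compile away to nothing.
 */
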
/*
 * Architecture-specific defines.  Currently, an architecture is
 * required to implement the following operations:
 *
 * mb() - memory barrier.  No loads or stores may be reordered across
 *     this macro by either the compiler or the CPU.
 * rmb() - read memory barrier.  No loads may be reordered across this
 *     macro by either the compiler or the CPU.
 * wmb() - write memory barrier.  No stores may be reordered across
 *     this macro by either the compiler or the CPU.
 * wc_wmb() - flush write combine buffers.  No write-combined writes
 *     will be reordered across this macro by either the compiler or
 *     the CPU.
 * nc_wmb() - write memory barrier for non-cached (non-coherent)
 *     memory.  No writes to non-cacheable mappings may be reordered
 *     across this macro by either the compiler or the CPU.
 */

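/*
 * Typical use (a sketch, not code from this file): a provider builds
 * a work request in host memory and only then rings the doorbell, so
 * the descriptor must be visible to the device before the MMIO write:
 *
 *	wqe->ctrl = ...;			// fill descriptor in memory
 *	wmb();					// descriptor before doorbell
 *	*(volatile uint32_t *) db_reg = idx;	// MMIO doorbell write
 */
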
#if defined(__i386__)

#define mb()	 asm volatile("lock; addl $0,0(%%esp)" ::: "memory")
#define rmb()	 mb()
#define wmb()	 asm volatile("" ::: "memory")
#define wc_wmb() mb()
#define nc_wmb() wmb()

#elif defined(__x86_64__)

/*
 * x86-64 (TSO) never reorders stores with other stores, so wmb()
 * only needs to stop compiler reordering.  mb(), as documented
 * above, must also keep the CPU from reordering stores with later
 * loads, so it needs a real fence.
 */
#define mb()	 asm volatile("mfence" ::: "memory")
#define rmb()	 mb()
#define wmb()	 asm volatile("" ::: "memory")
#define wc_wmb() asm volatile("sfence" ::: "memory")
#define nc_wmb() wmb()
#define WC_AUTO_EVICT_SIZE 64

#elif defined(__PPC64__)

#define mb()	 asm volatile("sync" ::: "memory")
#define rmb()	 asm volatile("lwsync" ::: "memory")
#define wmb()	 asm volatile("lwsync" ::: "memory")
#define wc_wmb() mb()
#define nc_wmb() mb()
#define WC_AUTO_EVICT_SIZE 64

#elif defined(__ia64__)

#define mb()	 asm volatile("mf" ::: "memory")
#define rmb()	 mb()
#define wmb()	 mb()
#define wc_wmb() asm volatile("fwb" ::: "memory")
#define nc_wmb() wmb()

#elif defined(__PPC__)

#define mb()	 asm volatile("sync" ::: "memory")
#define rmb()	 mb()
#define wmb()	 mb()
#define wc_wmb() wmb()
#define nc_wmb() wmb()

#elif defined(__sparc_v9__)

#define mb()	 asm volatile("membar #LoadLoad | #LoadStore | #StoreStore | #StoreLoad" ::: "memory")
#define rmb()	 asm volatile("membar #LoadLoad" ::: "memory")
#define wmb()	 asm volatile("membar #StoreStore" ::: "memory")
#define wc_wmb() wmb()
#define nc_wmb() wmb()

#elif defined(__sparc__)

#define mb()	 asm volatile("" ::: "memory")
#define rmb()	 mb()
#define wmb()	 mb()
#define wc_wmb() wmb()
#define nc_wmb() wmb()

#elif defined(__aarch64__)

/* Perhaps dmb would be sufficient? Let us be conservative for now. */
#define mb()	asm volatile("dsb sy" ::: "memory")
#define rmb()	asm volatile("dsb ld" ::: "memory")
#define wmb()	asm volatile("dsb st" ::: "memory")
#define wc_wmb() wmb()
#define nc_wmb() wmb()

#elif defined(__s390x__)

#define mb()     asm volatile("" ::: "memory")
#define rmb()    mb()
#define wmb()    mb()
#define wc_wmb() wmb()
#define nc_wmb() wmb()

#else

#error No architecture specific memory barrier defines found!

#endif

/*
 * Size (in bytes) at which the CPU evicts filled write-combining
 * buffers on its own, or 0 when no such guarantee exists.
 */
#ifdef WC_AUTO_EVICT_SIZE
static inline int wc_auto_evict_size(void) { return WC_AUTO_EVICT_SIZE; }
#else
static inline int wc_auto_evict_size(void) { return 0; }
#endif

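/*
 * Usage sketch (hypothetical caller, not part of this header):
 *
 *	memcpy(bf_page, wqe, 64);	// copy through a WC mapping
 *	if (!wc_auto_evict_size())
 *		wc_wmb();		// no auto-eviction; flush by hand
 */
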
#endif /* INFINIBAND_ARCH_H */