Subversion Repositories shark

Rev

Rev 92 | Details | Compare with Previous | Last modification | View Log | RSS feed

Rev Author Line No. Line
42 pj 1
/* Project:     OSLib
2
 * Description: The OS Construction Kit
3
 * Date:                1.6.2000
4
 * Idea by:             Luca Abeni & Gerardo Lamastra
5
 *
6
 * OSLib is an SO project aimed at developing a common, easy-to-use
7
 * low-level infrastructure for developing OS kernels and Embedded
8
 * Applications; it partially derives from the HARTIK project but it
9
 * currently is independently developed.
10
 *
11
 * OSLib is distributed under GPL License, and some of its code has
12
 * been derived from the Linux kernel source; also some important
13
 * ideas come from studying the DJGPP go32 extender.
14
 *
15
 * We acknowledge the Linux Community, Free Software Foundation,
16
 * D.J. Delorie and all the other developers who believe in the
17
 * freedom of software and ideas.
18
 *
19
 * For legalese, check out the included GPL license.
20
 */
21
 
22
/*      Memory manipulation functions...
23
        Some of them are derived from Linux     */
24
 
25
#ifndef __LL_I386_MEM_H__
26
#define __LL_I386_MEM_H__
27
 
28
#include <ll/i386/defs.h>
29
BEGIN_DEF
30
 
31
/* Various string manipulation functions */
32
 
33
/* Assembler low level routines         */
34
/* File: Mem.S                          */
35
 
36
#ifndef NULL
37
    #define NULL 0L
38
#endif
39
 
40
#include <ll/sys/types.h>
41
#include <ll/i386/hw-data.h>
42
 
43
/*
44
#ifndef __HW_DEP_H__
45
    #include "hw_dep.h"
46
#endif
47
*/
48
 
49
/* Copy n bytes from 'from' to 'to' with rep movsl (n/4 dwords), then
 * the trailing word/byte selected by bits 1 and 0 of n.  Regions must
 * not overlap (forward copy only).  Returns 'to'. */
extern inline void * __memcpy(void * to, const void * from, size_t n)
{
int d0, d1, d2;         /* dummy outputs: mark ECX/EDI/ESI as clobbered */
__asm__ __volatile__(
        "cld\n\t"               /* forward direction */
        "rep ; movsl\n\t"       /* copy n/4 dwords */
        "testb $2,%b4\n\t"      /* trailing word? (bit 1 of n) */
        "je 1f\n\t"
        "movsw\n"
        "1:\ttestb $1,%b4\n\t"  /* trailing byte? (bit 0 of n) */
        "je 2f\n\t"
        "movsb\n"
        "2:"
        : "=&c" (d0), "=&D" (d1), "=&S" (d2)
        :"0" (n/4), "q" (n),"1" ((long) to),"2" ((long) from)
        : "memory");
return (to);
}
67
 
68
/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
/* memcpy for a compile-time-constant byte count: common small sizes are
 * fully unrolled into direct loads/stores; any other size falls through
 * to rep movsl plus a tail chosen by n % 4.  Returns 'to'.
 * NOTE(review): the unrolled cases access the buffers through
 * short/long pointers, as the original Linux code did. */
extern inline void * __constant_memcpy(void * to, const void * from, size_t n)
{
        switch (n) {
                case 0:
                        return to;
                case 1:
                        *(unsigned char *)to = *(const unsigned char *)from;
                        return to;
                case 2:
                        *(unsigned short *)to = *(const unsigned short *)from;
                        return to;
                case 3:
                        *(unsigned short *)to = *(const unsigned short *)from;
                        *(2+(unsigned char *)to) = *(2+(const unsigned char *)from);
                        return to;
                case 4:
                        *(unsigned long *)to = *(const unsigned long *)from;
                        return to;
                case 6: /* for Ethernet addresses */
                        *(unsigned long *)to = *(const unsigned long *)from;
                        *(2+(unsigned short *)to) = *(2+(const unsigned short *)from);
                        return to;
                case 8:
                        *(unsigned long *)to = *(const unsigned long *)from;
                        *(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
                        return to;
                case 12:
                        *(unsigned long *)to = *(const unsigned long *)from;
                        *(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
                        *(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
                        return to;
                case 16:
                        *(unsigned long *)to = *(const unsigned long *)from;
                        *(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
                        *(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
                        *(3+(unsigned long *)to) = *(3+(const unsigned long *)from);
                        return to;
                case 20:
                        *(unsigned long *)to = *(const unsigned long *)from;
                        *(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
                        *(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
                        *(3+(unsigned long *)to) = *(3+(const unsigned long *)from);
                        *(4+(unsigned long *)to) = *(4+(const unsigned long *)from);
                        return to;
        }
/* COMMON(x): copy n/4 dwords with rep movsl, then the extra
 * instruction(s) in x handle the remaining 1-3 bytes. */
#define COMMON(x) \
__asm__ __volatile__( \
        "cld\n\t" \
        "rep ; movsl" \
        x \
        : "=&c" (d0), "=&D" (d1), "=&S" (d2) \
        : "0" (n/4),"1" ((long) to),"2" ((long) from) \
        : "memory");
{
        int d0, d1, d2; /* dummy outputs: ECX/EDI/ESI clobbered */
        switch (n % 4) {
                case 0: COMMON(""); return to;
                case 1: COMMON("\n\tmovsb"); return to;
                case 2: COMMON("\n\tmovsw"); return to;
                default: COMMON("\n\tmovsw\n\tmovsb"); return to;
        }
}

#undef COMMON
}
137
 
138
#define __HAVE_ARCH_MEMCPY

/* memcpy(t, f, n): use the fully-unrolled constant-size copy when n is
 * a compile-time constant, otherwise the generic rep-movsl copy. */
#define memcpy(t, f, n) \
(__builtin_constant_p(n) ? \
 __constant_memcpy((t),(f),(n)) : \
 __memcpy((t),(f),(n)))
144
 
145
/* memcpy between two linear addresses: convert both LIN_ADDR values to
 * plain pointers and delegate to memcpy().  Returns the destination as
 * a pointer. */
extern inline void *lmemcpy(LIN_ADDR t, LIN_ADDR f, size_t n)
{
        void *dst = (void *)(t);
        void *src = (void *)(f);

        return memcpy(dst, src, n);
}
154
 
155
#define __HAVE_ARCH_MEMMOVE
/* Overlap-safe copy: forward byte copy when dest is below src, else a
 * backward byte copy (std, starting from the last byte).  The trailing
 * cld restores the direction flag.  Returns dest. */
extern inline void * memmove(void * dest,const void * src, size_t n)
{
int d0, d1, d2;         /* dummy outputs: ECX/ESI/EDI clobbered */
if (dest<src)
__asm__ __volatile__(
        "cld\n\t"
        "rep\n\t"
        "movsb"
        : "=&c" (d0), "=&S" (d1), "=&D" (d2)
        :"0" (n),"1" (src),"2" (dest)
        : "memory");
else
__asm__ __volatile__(
        "std\n\t"               /* copy backwards to cope with overlap */
        "rep\n\t"
        "movsb\n\t"
        "cld"
        : "=&c" (d0), "=&S" (d1), "=&D" (d2)
        :"0" (n),
         "1" (n-1+(const char *)src),   /* last source byte */
         "2" (n-1+(char *)dest)         /* last destination byte */
        :"memory");
return dest;
}
180
 
181
/* memcmp: let the compiler expand it. */
#define memcmp __builtin_memcmp

#define __HAVE_ARCH_MEMCHR
/* Scan the first 'count' bytes of 'cs' for byte value 'c' using repne
 * scasb.  Returns a pointer to the first match, or NULL when the byte
 * is absent (count == 0 also yields NULL). */
extern inline void * memchr(const void * cs,int c,size_t count)
{
int d0;                 /* dummy output: ECX clobbered */
register void * __res;
if (!count)
        return NULL;
__asm__ __volatile__(
        "cld\n\t"
        "repne\n\t"
        "scasb\n\t"
        "je 1f\n\t"             /* ZF set: byte was found */
        "movl $1,%0\n"          /* not found: 1 - 1 = 0 (NULL) below */
        "1:\tdecl %0"           /* found: EDI is one past the match */
        :"=D" (__res), "=&c" (d0) : "a" (c),"0" (cs),"1" (count));
return __res;
}
200
 
201
/* Fill 'count' bytes at 's' with byte 'c' via rep stosb.  Returns s. */
extern inline void * __memset_generic(void * s, char c,size_t count)
{
int d0, d1;             /* dummy outputs: ECX/EDI clobbered */
__asm__ __volatile__(
        "cld\n\t"
        "rep\n\t"
        "stosb"
        : "=&c" (d0), "=&D" (d1)
        :"a" (c),"1" (s),"0" (count)
        :"memory");
return s;
}
213
 
214
/* we might want to write optimized versions of these later */
/* Constant-count memset: currently just the generic byte-wise fill. */
#define __constant_count_memset(s,c,count) __memset_generic((s),(c),(count))
216
 
217
/*
 * memset(x,0,y) is a reasonably common thing to do, so we want to fill
 * things 32 bits at a time even when we don't know the size of the
 * area at compile-time..
 */
/* Fill with pattern 'c' (the byte already replicated into all 4 bytes
 * of a long by the memset() macro): count/4 dwords via rep stosl, then
 * the trailing word/byte selected by bits 1 and 0 of count.
 * Returns s. */
extern inline void * __constant_c_memset(void * s, unsigned long c, size_t count)
{
int d0, d1;             /* dummy outputs: ECX/EDI clobbered */
__asm__ __volatile__(
        "cld\n\t"
        "rep ; stosl\n\t"
        "testb $2,%b3\n\t"      /* trailing word? (bit 1 of count) */
        "je 1f\n\t"
        "stosw\n"
        "1:\ttestb $1,%b3\n\t"  /* trailing byte? (bit 0 of count) */
        "je 2f\n\t"
        "stosb\n"
        "2:"
        : "=&c" (d0), "=&D" (d1)
        :"a" (c), "q" (count), "0" (count/4), "1" ((long) s)
        :"memory");
return (s);
}
240
 
241
/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as we by now know that both pattern and count is constant..
 */
/* memset where both the replicated pattern and the count are
 * compile-time constants: tiny sizes are fully unrolled; anything else
 * uses rep stosl plus a tail chosen by count % 4.  Returns s. */
extern inline void * __constant_c_and_count_memset(void * s, unsigned long pattern, size_t count)
{
        switch (count) {
                case 0:
                        return s;
                case 1:
                        *(unsigned char *)s = pattern;
                        return s;
                case 2:
                        *(unsigned short *)s = pattern;
                        return s;
                case 3:
                        *(unsigned short *)s = pattern;
                        *(2+(unsigned char *)s) = pattern;
                        return s;
                case 4:
                        *(unsigned long *)s = pattern;
                        return s;
        }
/* COMMON(x): store count/4 dwords with rep stosl, then the extra
 * instruction(s) in x handle the remaining 1-3 bytes. */
#define COMMON(x) \
__asm__  __volatile__("cld\n\t" \
        "rep ; stosl" \
        x \
        : "=&c" (d0), "=&D" (d1) \
        : "a" (pattern),"0" (count/4),"1" ((long) s) \
        : "memory")
{
        int d0, d1;     /* dummy outputs: ECX/EDI clobbered */
        switch (count % 4) {
                case 0: COMMON(""); return s;
                case 1: COMMON("\n\tstosb"); return s;
                case 2: COMMON("\n\tstosw"); return s;
                default: COMMON("\n\tstosw\n\tstosb"); return s;
        }
}

#undef COMMON
}
283
 
284
/* Dispatch for a constant fill byte: use the fully-unrolled version
 * when the count is also a compile-time constant. */
#define __constant_c_x_memset(s, c, count) \
(__builtin_constant_p(count) ? \
 __constant_c_and_count_memset((s),(c),(count)) : \
 __constant_c_memset((s),(c),(count)))

/* Dispatch for a run-time fill byte. */
#define __memset(s, c, count) \
(__builtin_constant_p(count) ? \
 __constant_count_memset((s),(c),(count)) : \
 __memset_generic((s),(c),(count)))

#define __HAVE_ARCH_MEMSET
/* memset(s, c, count): when the fill byte is a compile-time constant,
 * replicate it into all four bytes of a long (0x01010101 * byte) so the
 * fill can proceed 32 bits at a time.
 * FIX: parenthesize the argument 'c' before the cast.  Without the
 * parentheses an expression argument such as (a + b) expanded to
 * (unsigned char)a + b -- the cast bound only to the first operand,
 * producing a wrong replicated pattern.  (Mainline Linux uses
 * (unsigned char)(c) here.) */
#define memset(s, c, count) \
(__builtin_constant_p(c) ? \
 __constant_c_x_memset((s),(0x01010101UL*(unsigned char)(c)),(count)) : \
 __memset((s),(c),(count)))
299
 
300
/*
 * find the first occurrence of byte 'c', or 1 past the area if none
 */
#define __HAVE_ARCH_MEMSCAN
extern inline void * memscan(void * addr, int c, size_t size)
{
        if (!size)
                return addr;
        /* repnz scasb stops with EDI one past the match (ZF set), so
         * dec backs it up; on no match EDI is already one past the
         * area, matching the documented contract.
         * NOTE(review): unlike the other routines here, no "memory"
         * clobber is declared - verify this is safe with the project's
         * compiler before reusing the pattern. */
        __asm__("cld\n\t"
                "repnz; scasb\n\t"
                "jnz 1f\n\t"
                "dec %%edi\n\t"
                "1:\n\t"
                : "=D" (addr), "=c" (size)
                : "0" (addr), "1" (size), "a" (c));
        return addr;
}
317
 
318
/* Copy n bytes between two segment:offset pairs (destination ds:dof,
 * source ss:sof).  Implemented in assembly (see "File: Mem.S" above);
 * only the prototype lives here. */
void fmemcpy(unsigned short ds,unsigned long dof,unsigned short ss,unsigned long sof,unsigned n);
#if 0
/* NOTE(review): abandoned draft of an inline-asm fmemcpy.  It is not
 * valid C (raw assembly outside string literals, references to
 * undeclared d0/d1/d2/to/from) and is permanently disabled by #if 0.
 * Candidate for deletion. */
extern inline void fmemcpy(unsigned short ds,unsigned long dof,unsigned short ss,unsigned long sof,unsigned n)
{
        /* Build the standard stack frame */
__asm__ __volatile__(
                /* Get parms into register */
                movl    8(%ebp),%eax
                movw    %ax,%es
                movl    12(%ebp),%edi
                movl    16(%ebp),%eax
                movw    %ax,%ds
                movl    20(%ebp),%esi
                movl    24(%ebp),%ecx
                cld
                rep
                "movsb"

        "2:"
        : "=&c" (d0), "=&D" (d1), "=&S" (d2)
        :"0" (n), "q" (n),"1" ((long) to),"2" ((long) from)
        : "memory");

);


                popw    %es
                popw    %ds
                popl    %edi
                popl    %esi
                leave
                ret

#endif
352
 
353
END_DEF
354
 
355
#endif