/* Project:     OSLib
 * Description: The OS Construction Kit
 * Date:        1.6.2000
 * Idea by:     Luca Abeni & Gerardo Lamastra
 *
 * OSLib is an SO project aimed at developing a common, easy-to-use
 * low-level infrastructure for developing OS kernels and Embedded
 * Applications; it partially derives from the HARTIK project but is
 * currently developed independently.
 *
 * OSLib is distributed under the GPL license, and some of its code
 * has been derived from the Linux kernel source; some important
 * ideas also come from studying the DJGPP go32 extender.
 *
 * We acknowledge the Linux Community, the Free Software Foundation,
 * D.J. Delorie and all the other developers who believe in the
 * freedom of software and ideas.
 *
 * For legalese, check out the included GPL license.
 */

/*      Memory manipulation functions...
        Some of them are derived from Linux     */

#ifndef __LL_I386_MEM_H__
#define __LL_I386_MEM_H__

#include <ll/i386/defs.h>
BEGIN_DEF

/* Various string manipulation functions */

/* Assembler low level routines         */
/* File: Mem.S                          */

#ifndef NULL
    #define NULL 0L
#endif

#include <ll/sys/types.h>
#include <ll/i386/hw-data.h>

/*
#ifndef __HW_DEP_H__
    #include "hw_dep.h"
#endif
*/

extern inline void * __memcpy(void * to, const void * from, size_t n)
{
int d0, d1, d2;
__asm__ __volatile__(
        "cld\n\t"
        "rep ; movsl\n\t"       /* copy n/4 dwords                   */
        "testb $2,%b4\n\t"      /* two odd bytes left (bit 1 of n)?  */
        "je 1f\n\t"
        "movsw\n"
        "1:\ttestb $1,%b4\n\t"  /* one odd byte left (bit 0 of n)?   */
        "je 2f\n\t"
        "movsb\n"
        "2:"
        : "=&c" (d0), "=&D" (d1), "=&S" (d2)
        :"0" (n/4), "q" (n),"1" ((long) to),"2" ((long) from)
        : "memory");
return (to);
}
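
/*
 * C-level sketch of what the asm above does (illustrative, not part of
 * the original header):
 *
 *     unsigned long *d = to;
 *     const unsigned long *s = from;
 *     size_t i;
 *     for (i = 0; i < n / 4; i++)      // "rep ; movsl"
 *             d[i] = s[i];
 *     // "movsw"/"movsb" then copy the remaining n % 4 bytes
 */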

/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
extern inline void * __constant_memcpy(void * to, const void * from, size_t n)
{
        switch (n) {
                case 0:
                        return to;
                case 1:
                        *(unsigned char *)to = *(const unsigned char *)from;
                        return to;
                case 2:
                        *(unsigned short *)to = *(const unsigned short *)from;
                        return to;
                case 3:
                        *(unsigned short *)to = *(const unsigned short *)from;
                        *(2+(unsigned char *)to) = *(2+(const unsigned char *)from);
                        return to;
                case 4:
                        *(unsigned long *)to = *(const unsigned long *)from;
                        return to;
                case 6: /* for Ethernet addresses */
                        *(unsigned long *)to = *(const unsigned long *)from;
                        *(2+(unsigned short *)to) = *(2+(const unsigned short *)from);
                        return to;
                case 8:
                        *(unsigned long *)to = *(const unsigned long *)from;
                        *(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
                        return to;
                case 12:
                        *(unsigned long *)to = *(const unsigned long *)from;
                        *(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
                        *(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
                        return to;
                case 16:
                        *(unsigned long *)to = *(const unsigned long *)from;
                        *(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
                        *(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
                        *(3+(unsigned long *)to) = *(3+(const unsigned long *)from);
                        return to;
                case 20:
                        *(unsigned long *)to = *(const unsigned long *)from;
                        *(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
                        *(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
                        *(3+(unsigned long *)to) = *(3+(const unsigned long *)from);
                        *(4+(unsigned long *)to) = *(4+(const unsigned long *)from);
                        return to;
        }
#define COMMON(x) \
__asm__ __volatile__( \
        "cld\n\t" \
        "rep ; movsl" \
        x \
        : "=&c" (d0), "=&D" (d1), "=&S" (d2) \
        : "0" (n/4),"1" ((long) to),"2" ((long) from) \
        : "memory");
{
        int d0, d1, d2;
        switch (n % 4) {
                case 0: COMMON(""); return to;
                case 1: COMMON("\n\tmovsb"); return to;
                case 2: COMMON("\n\tmovsw"); return to;
                default: COMMON("\n\tmovsw\n\tmovsb"); return to;
        }
}

#undef COMMON
}

#define __HAVE_ARCH_MEMCPY
#define memcpy(t, f, n) \
(__builtin_constant_p(n) ? \
 __constant_memcpy((t),(f),(n)) : \
 __memcpy((t),(f),(n)))
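
/*
 * Usage sketch (illustrative, not from the original source): with a
 * compile-time constant length the macro expands to __constant_memcpy(),
 * otherwise to the rep/movsl-based __memcpy():
 *
 *     char eth[6];
 *     memcpy(eth, src, 6);     // constant n: hits the "case 6" path
 *     memcpy(dst, src, len);   // variable n: calls __memcpy()
 */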

/* memcpy() on linear addresses (assumes a LIN_ADDR casts directly to a pointer) */
extern inline void *lmemcpy(LIN_ADDR t, LIN_ADDR f, size_t n)
{
        void *p1;
        void *p2;

        p1 = (void *)(t);
        p2 = (void *)(f);
        return memcpy(p1, p2, n);
}

#define __HAVE_ARCH_MEMMOVE
extern inline void * memmove(void * dest, const void * src, size_t n)
{
int d0, d1, d2;
if (dest<src)
        /* destination below source: a forward copy is safe */
__asm__ __volatile__(
        "cld\n\t"
        "rep\n\t"
        "movsb"
        : "=&c" (d0), "=&S" (d1), "=&D" (d2)
        :"0" (n),"1" (src),"2" (dest)
        : "memory");
else
        /* regions may overlap the other way: copy backwards from the last byte */
__asm__ __volatile__(
        "std\n\t"
        "rep\n\t"
        "movsb\n\t"
        "cld"
        : "=&c" (d0), "=&S" (d1), "=&D" (d2)
        :"0" (n),
         "1" (n-1+(const char *)src),
         "2" (n-1+(char *)dest)
        :"memory");
return dest;
}
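
/*
 * Overlap example (illustrative): when dest > src the backward copy above
 * reads each source byte before it is overwritten:
 *
 *     char b[8] = "abcdefg";
 *     memmove(b + 1, b, 6);    // b becomes "aabcdef"; memcpy() could not
 *                              // be trusted with this overlap
 */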

#define memcmp __builtin_memcmp

#define __HAVE_ARCH_MEMCHR
extern inline void * memchr(const void * cs, int c, size_t count)
{
int d0;
register void * __res;
if (!count)
        return NULL;
__asm__ __volatile__(
        "cld\n\t"
        "repne\n\t"
        "scasb\n\t"
        "je 1f\n\t"
        "movl $1,%0\n"
        "1:\tdecl %0"
        :"=D" (__res), "=&c" (d0) : "a" (c),"0" (cs),"1" (count));
return __res;
}
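
/*
 * Note on the asm above: after "repne scasb", %edi points one byte past
 * the match, so the final "decl %0" backs it up; on a miss %edi is first
 * set to 1, so the same "decl" yields NULL (0).
 */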

extern inline void * __memset_generic(void * s, char c, size_t count)
{
int d0, d1;
__asm__ __volatile__(
        "cld\n\t"
        "rep\n\t"
        "stosb"
        : "=&c" (d0), "=&D" (d1)
        :"a" (c),"1" (s),"0" (count)
        :"memory");
return s;
}

/* we might want to write optimized versions of these later */
#define __constant_count_memset(s,c,count) __memset_generic((s),(c),(count))

/*
 * memset(x,0,y) is a reasonably common thing to do, so we want to fill
 * things 32 bits at a time even when we don't know the size of the
 * area at compile time.
 */
extern inline void * __constant_c_memset(void * s, unsigned long c, size_t count)
{
int d0, d1;
__asm__ __volatile__(
        "cld\n\t"
        "rep ; stosl\n\t"       /* store count/4 dwords                  */
        "testb $2,%b3\n\t"      /* two odd bytes left (bit 1 of count)?  */
        "je 1f\n\t"
        "stosw\n"
        "1:\ttestb $1,%b3\n\t"  /* one odd byte left (bit 0 of count)?   */
        "je 2f\n\t"
        "stosb\n"
        "2:"
        : "=&c" (d0), "=&D" (d1)
        :"a" (c), "q" (count), "0" (count/4), "1" ((long) s)
        :"memory");
return (s);
}
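
/*
 * Note: here c must already be a byte pattern replicated to all 32 bits
 * (e.g. 0xABABABAB), since "rep stosl" stores the whole of %eax at a
 * time; the memset() macro below takes care of that with the
 * 0x01010101UL multiply.
 */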

/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as by now we know that both pattern and count are constant.
 */
extern inline void * __constant_c_and_count_memset(void * s, unsigned long pattern, size_t count)
{
        switch (count) {
                case 0:
                        return s;
                case 1:
                        *(unsigned char *)s = pattern;
                        return s;
                case 2:
                        *(unsigned short *)s = pattern;
                        return s;
                case 3:
                        *(unsigned short *)s = pattern;
                        *(2+(unsigned char *)s) = pattern;
                        return s;
                case 4:
                        *(unsigned long *)s = pattern;
                        return s;
        }
#define COMMON(x) \
__asm__  __volatile__("cld\n\t" \
        "rep ; stosl" \
        x \
        : "=&c" (d0), "=&D" (d1) \
        : "a" (pattern),"0" (count/4),"1" ((long) s) \
        : "memory")
{
        int d0, d1;
        switch (count % 4) {
                case 0: COMMON(""); return s;
                case 1: COMMON("\n\tstosb"); return s;
                case 2: COMMON("\n\tstosw"); return s;
                default: COMMON("\n\tstosw\n\tstosb"); return s;
        }
}

#undef COMMON
}

#define __constant_c_x_memset(s, c, count) \
(__builtin_constant_p(count) ? \
 __constant_c_and_count_memset((s),(c),(count)) : \
 __constant_c_memset((s),(c),(count)))

#define __memset(s, c, count) \
(__builtin_constant_p(count) ? \
 __constant_count_memset((s),(c),(count)) : \
 __memset_generic((s),(c),(count)))

#define __HAVE_ARCH_MEMSET
#define memset(s, c, count) \
(__builtin_constant_p(c) ? \
 __constant_c_x_memset((s),(0x01010101UL*(unsigned char)c),(count)) : \
 __memset((s),(c),(count)))
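
/*
 * Byte replication sketch (illustrative): multiplying by 0x01010101UL
 * copies the fill byte into every byte of a 32-bit word, e.g.
 *
 *     (unsigned char)0xAB * 0x01010101UL == 0xABABABABUL
 *
 * so the constant-c paths can fill four bytes per stosl.
 */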

/*
 * find the first occurrence of byte 'c', or 1 past the area if none
 */
#define __HAVE_ARCH_MEMSCAN
extern inline void * memscan(void * addr, int c, size_t size)
{
        if (!size)
                return addr;
        __asm__("cld\n\t"
                "repnz; scasb\n\t"
                "jnz 1f\n\t"
                "dec %%edi\n"
                "1:"
                : "=D" (addr), "=c" (size)
                : "0" (addr), "1" (size), "a" (c));
        return addr;
}
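
/*
 * Usage note (illustrative): unlike memchr(), memscan() never returns
 * NULL; on a miss it returns addr + size (one past the scanned area):
 *
 *     char *p = memscan(buf, 0, len);
 *     if (p == (char *)buf + len)
 *             ;  // no zero byte in buf
 */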

/* Far copy between segment:offset pairs (assembly routine, see Mem.S) */
void fmemcpy(unsigned short ds, unsigned long dof, unsigned short ss, unsigned long sof, unsigned n);
#if 0
/* Unfinished inline draft of fmemcpy(), kept disabled */
extern inline void fmemcpy(unsigned short ds, unsigned long dof, unsigned short ss, unsigned long sof, unsigned n)
{
        /* Build the standard stack frame */
__asm__ __volatile__(
                /* Get parms into register */
                movl    8(%ebp),%eax
                movw    %ax,%es
                movl    12(%ebp),%edi
                movl    16(%ebp),%eax
                movw    %ax,%ds
                movl    20(%ebp),%esi
                movl    24(%ebp),%ecx
                cld
                rep
                "movsb"

        "2:"
        : "=&c" (d0), "=&D" (d1), "=&S" (d2)
        :"0" (n), "q" (n),"1" ((long) to),"2" ((long) from)
        : "memory");

);

                popw    %es
                popw    %ds
                popl    %edi
                popl    %esi
                leave
                ret

#endif

END_DEF

#endif /* __LL_I386_MEM_H__ */