Go to the documentation of this file.
27 #ifndef IFXCPU_INTRINSICSGNUC_H
28 #define IFXCPU_INTRINSICSGNUC_H
31 #if defined(SCTB_EMBEDDED)
32 # define IFXCPU_INTRINSICSGNUC_USE_MACHINE_INTRINSICS 0
34 # define IFXCPU_INTRINSICSGNUC_USE_MACHINE_INTRINSICS 1
40 #if IFXCPU_INTRINSICSGNUC_USE_MACHINE_INTRINSICS
41 #include "machine/intrinsics.h"
/* Expand a macro argument into a string literal (single expansion level). */
#define STRINGIFY(x) #x
/* Tail-jump to 'fun' via TriCore "ji" (jump indirect): control transfers to
 * the target and does NOT return to the caller's call site.
 * NOTE(review): 'fun' must be a function address placed in an A-register. */
#define __non_return_call(fun) __asm__ volatile ("ji %0"::"a"(fun))
56 __asm__ volatile (
"jli %0"::
"a"(fun));
/* Type-agnostic min/max/clamp helpers.
 * WARNING: macro arguments may be evaluated more than once -- never pass
 * expressions with side effects. */
#define __minX(A,B) ( ((B) > (A)) ? (A) : (B) )
#define __maxX(A,B) ( ((B) < (A)) ? (A) : (B) )
/* Clamp V into the inclusive range [Lo,Hi]. */
#define __saturateX(V,Lo,Hi) ( __minX(__maxX(V, Lo), Hi) )
/* Nonzero iff V lies within [Lo,Hi] inclusive. */
#define __checkrangeX(V,Lo,Hi) (((V) >= (Lo)) && ((V) <= (Hi)))
/* Signed / unsigned clamps built on the __min/__max intrinsics defined
 * elsewhere in this header. */
#define __saturate(V,Lo,Hi) ( __min(__max(V, Lo), Hi) )
#define __saturateu(V,Lo,Hi) ( __minu(__maxu(V, Lo), Hi) )
97 __asm__ volatile (
"max %0, %1, %2":
"=d" (res) :
"d" (a),
"d" (b));
106 __asm__ volatile (
"max.h %0, %1, %2":
"=d" (res) :
"d" (a),
"d" (b));
114 __asm__ volatile (
"max.u %0, %1, %2":
"=d" (res) :
"d" (a),
"d" (b));
123 __asm__ volatile (
"min %0, %1, %2":
"=d" (res) :
"d" (a),
"d" (b));
132 __asm__ volatile (
"min.h %0, %1, %2":
"=d" (res) :
"d" (a),
"d" (b));
141 __asm__ volatile (
"min.u %0, %1, %2":
"=d" (res) :
"d" (a),
"d" (b));
/* Float / "stdreal" helper macros.
 * WARNING: arguments may be evaluated more than once -- keep them free of
 * side effects. */
#define __sqrf(V) ((V) * (V))
#define __sqrtf(V) sqrtf(V)
#define __checkrange(V,Lo,Hi) (((V) >= (Lo)) && ((V) <= (Hi)))
/* Round to nearest integer via truncation plus a 0.5 threshold.
 * NOTE(review): for negative V this truncates toward zero instead of
 * rounding to nearest -- confirm this matches the intended contract. */
#define __roundf(V) ((((V) - (sint32)(V)) > 0.5) ? (1 + (sint32)(V)) : ((sint32)(V)))
#define __absf(V) ( ((V) < 0.0) ? -(V) : (V) )
#define __minf(A,B) ( ((B) > (A)) ? (A) : (B) )
#define __maxf(A,B) ( ((B) < (A)) ? (A) : (B) )
#define __saturatef(V,Lo,Hi) ( __minf(__maxf(V, Lo), Hi) )
#define __checkrangef(V,Lo,Hi) (((V) >= (Lo)) && ((V) <= (Hi)))
/* "stdreal" variants. */
#define __abs_stdreal(V) ( ((V) > 0.0) ? (V) : -(V) )
#define __min_stdreal(A,B) ( ((B) > (A)) ? (A) : (B) )
#define __max_stdreal(A,B) ( ((B) < (A)) ? (A) : (B) )
#define __saturate_stdreal(V,Lo,Hi) ( __min_stdreal(__max_stdreal(V, Lo), Hi) )
/* Comparisons built only from < and > so that a NaN operand yields
 * "not unequal" / "leq true" / "geq true", exactly as the original forms. */
#define __neqf(A,B) ( ((A) < (B)) || ((A) > (B)) )
#define __leqf(A,B) ( !((A) > (B)) )
#define __geqf(A,B) ( !((A) < (B)) )
187 __asm__ volatile (
"cls %0,%1":
"=d"(res):
"d"(a):
"memory");
196 __asm__ volatile (
"q31tof %0,%1,%2":
"=d"(res):
"d"(a),
"d"(0):
"memory");
205 __asm__ volatile (
"ftoq31 %0,%1,%2":
"=d"(res):
"d"(a),
"d"(0):
"memory");
214 __asm__ volatile (
"dextr %0,%H1,%L1,0x11":
"=&d" (res):
"d" (a):
"memory");
223 __asm__ volatile (
"maddrs.q %0,%1,%2U,%3U,1":
"=d"(res):
"d"(a),
"d"(b),
"d"(c):
"memory");
232 __asm__ volatile (
"madds.q %0,%1,%2U,%3U,1":
"=d"(res):
"d"(a),
"d"(b),
"d"(c):
"memory");
241 __asm__ volatile (
"mul.q %0,%1,%2,1":
"=d"(res):
"d"(a),
"d"(b):
"memory");
250 __asm__ volatile (
"mul.q %0,%1,%2,1":
"=d"(res):
"d"(a),
"d"(b):
"memory");
259 __asm__ volatile (
"mov.u %0,0x8000 \n\
261 insert %0,%0,0,0,0x10 "
262 :
"=&d"(res):
"d"(a):
"memory");
271 __asm__ volatile (
"sh %0,%1,16":
"=d"(res):
"d"(a):
"memory");
280 __asm__ volatile (
"sh %0,%1,-16":
"=d"(res):
"d"(a):
"memory");
289 __asm__ volatile (
"sh %0,%1,-16":
"=d"(res):
"d"(a):
"memory");
298 __asm__ volatile (
"jge %2,0,0f \n\
301 dextr %L0,%H1,%L1,%2 \n\
303 0:dextr %H0,%H1,%L1,%2 \n\
306 :
"=d"(res):
"d"(a),
"d"(b):
"memory");
315 __asm__ volatile (
"shas %0,%1,%2":
"=d"(res):
"d"(a),
"d"(b):
"memory");
324 __asm__ volatile (
"shas %0,%1,%2":
"=d"(res):
"d"(a),
"d"(b):
"memory");
333 __asm__ volatile (
"sh %0,%1,16":
"=d"(res):
"d"(a):
"memory");
349 __asm__ volatile (
"mov %%d14,%2 \n\
352 :
"=d" (res) :
"d" (a),
"d" (p),
"d" (w):
"d14",
"d15");
361 __asm__ volatile (
"mov %%d14,%2 \n\
364 :
"=d" (res) :
"d" (a),
"d" (p),
"d" (w):
"d14",
"d15");
/* Read one bit: yields 1 if bit 'bitoffset' of the word pointed to by
 * 'address' is set, 0 otherwise. Each argument is evaluated once. */
#define __getbit(address, bitoffset) (((*(address)) & (1U << (bitoffset))) ? 1 : 0)
375 #define __imaskldmst(address, value, bitoffset, bits) \
377 __asm__("imask %A0,%1,%2,%3"\
378 :"=d"((long long)tmp)\
379 :"d"(value),"d"(bitoffset),"i"(bits): "memory");\
380 __asm__("ldmst [%0]0,%A1"::"a"(address),"d"(tmp): "memory");}
387 __asm__ volatile (
"ins.t %0,%1,%2,%3,%4":
"=d"(res):
"d"(trg),
"i"(trgbit),
"d"(src),
"i"(srcbit));
396 __asm__ volatile (
"mov %%d14,%2 \n\
398 insert %0,%1,%2,%%e14"
399 :
"=d"(res):
"d"(a),
"d"(b),
"d"(p),
"d"(w):
"d14",
"d15");
408 __asm__ volatile (
"insn.t %0,%1,%2,%3,%4":
"=d"(res):
"d"(trg),
"i"(trgbit),
"d"(src),
"i"(srcbit));
/* Atomically write a single bit: stores the low bit of 'value' at position
 * 'bitoffset' of the 32-bit word at 'address', using the imask/ldmst
 * read-modify-write sequence provided by __imaskldmst. */
#define __putbit(value,address,bitoffset ) __imaskldmst(address, value, bitoffset,1)
424 #if !IFXCPU_INTRINSICSGNUC_USE_MACHINE_INTRINSICS
/* Execute TriCore "bisr" (begin interrupt service routine) with the
 * compile-time interrupt level 'intlvl'; intlvl is pasted into the opcode
 * string, so it must be a literal. NOTE(review): exact side effects (ICR
 * update, interrupt re-enable) are per the TriCore ISA -- confirm there. */
#define __bisr(intlvl) __asm__ volatile ("bisr "#intlvl : : : "memory")
/* Globally disable interrupts (TriCore "disable" instruction). */
#define __disable() __asm__ volatile ("disable" : : : "memory")
439 __asm__ volatile(
"disable %0":
"=d"(res));
/* Globally enable interrupts (TriCore "enable" instruction). */
#define __enable() __asm__ volatile ("enable" : : : "memory")
451 __asm__ volatile (
"restore %0"::
"d"(ie));
454 #if !IFXCPU_INTRINSICSGNUC_USE_MACHINE_INTRINSICS
/* Raise a system-call trap with service number 'svcno' (TriCore "syscall").
 * svcno is stringified into the opcode, so it must be a compile-time
 * literal, not a runtime value. */
#define __syscall(svcno) __tric_syscall(svcno)
#define __tric_syscall(svcno) __asm__ volatile ("syscall "STRINGIFY(svcno) : : : "memory")
472 __asm__ volatile(
"cachea.wi [%0]0"::
"a"(p));
478 __asm__ volatile(
"cachei.wi [%0]0"::
"a"(p));
486 __asm__ volatile(
"cachea.wi [%0+]0"::
"a"(p));
497 __asm__ volatile(
"mul %%e12,%1,%2 \n\
498 dextr %0,%%d13,%%d12,%3"
499 :
"=d"(res):
"d"(a),
"d"(b),
"d"(offset):
"d12",
"d13");
508 __asm__ volatile(
"dextr %0,%1,%1,%2":
"=d"(res):
"d"(operand),
"d"(count):
"memory");
517 __asm__ volatile(
"rsub %2,%2,0 \n\
519 :
"=d"(res):
"d"(operand),
"d"(count):
"memory");
535 __asm__ volatile (
"abs.b %0,%1"
536 :
"=d"(res):
"d"(a):
"memory");
545 __asm__ volatile (
"abs.h %0,%1"
546 :
"=d"(res):
"d"(a):
"memory");
555 __asm__ volatile (
"abss.h %0,%1"
556 :
"=d"(res):
"d"(a):
"memory");
565 __asm__ volatile (
"extr %0,%1,0,8"
566 :
"=d"(res):
"d"(a):
"memory");
575 __asm__ volatile (
"extr %0,%1,8,8"
576 :
"=d"(res):
"d"(a):
"memory");
585 __asm__ volatile (
"extr %0,%1,16,8"
586 :
"=d"(res):
"d"(a):
"memory");
595 __asm__ volatile (
"extr %0,%1,24,8"
596 :
"=d"(res):
"d"(a):
"memory");
605 __asm__ volatile (
"extr %0,%1,0,16"
606 :
"=d"(res):
"d"(a):
"memory");
615 __asm__ volatile (
"extr %0,%1,16,16"
616 :
"=d"(res):
"d"(a):
"memory");
625 __asm__ volatile (
"extr %0,%1,0,8"
626 :
"=d"(res):
"d"(a):
"memory");
635 __asm__ volatile (
"extr %0,%1,8,8"
636 :
"=d"(res):
"d"(a):
"memory");
645 __asm__ volatile (
"extr %0,%1,16,8"
646 :
"=d"(res):
"d"(a):
"memory");
655 __asm__ volatile (
"extr %0,%1,24,8"
656 :
"=d"(res):
"d"(a):
"memory");
665 __asm__ volatile (
"extr %0,%1,0,16"
666 :
"=d"(res):
"d"(a):
"memory");
675 __asm__ volatile (
"extr %0,%1,16,16"
676 :
"=d"(res):
"d"(a):
"memory");
685 __asm__ volatile (
"ld.w %0,[%1]0 \n\
687 :
"=d"(res):
"a"(a):
"memory");
697 __asm__ volatile (
"ld.w %0,[%1]0 \n\
699 :
"=d"(res):
"a"(a):
"memory");
709 __asm__ volatile (
"ld.w %0,[%1]0 \n\
711 :
"=d"(res):
"a"(a):
"memory");
721 __asm__ volatile (
"ld.w %0,[%1]0 \n\
723 :
"=d"(res):
"a"(a):
"memory");
733 __asm__ volatile (
"ld.w %0,[%1]0 \n\
735 :
"=d"(res):
"a"(a):
"memory");
743 __asm__ volatile (
"ld.w %0,[%1]0 \n\
745 :
"=d"(res):
"a"(a):
"memory");
754 __asm__ volatile (
"ld.w %0,[%1]0 \n\
756 :
"=d"(res):
"a"(a):
"memory");
765 __asm__ volatile (
"ld.w %0,[%1]0 \n\
767 :
"=d"(res):
"a"(a):
"memory");
776 __asm__ volatile (
"ld.w %0,[%1]0 \n\
778 :
"=d"(res):
"a"(a):
"memory");
787 __asm__ volatile (
"ld.w %0,[%1]0 \n\
789 :
"=d"(res):
"a"(a):
"memory");
798 __asm__ volatile (
"ld.w %0,[%1]0 \n\
800 :
"=d"(res):
"a"(a):
"memory");
809 __asm__ volatile (
"ld.w %0,[%1]0 \n\
811 :
"=d"(res):
"a"(a):
"memory");
820 __asm__ volatile (
"insert %3,%3,%4,8,8 \n\
821 insert %4,%1,%2,8,8 \n\
822 insert %0,%4,%3,16,16 "
823 :
"=d"(res):
"d"(a),
"d"(b),
"d"(c),
"d"(d):
"memory");
839 __asm__ volatile (
"insert %0,%1,%2,16,16"
840 :
"=d"(res):
"d"(a),
"d"(b):
"memory");
856 __asm__ volatile (
"insert %3,%3,%4,8,8 \n\
857 insert %1,%1,%2,8,8 \n\
858 insert %0,%1,%3,16,16"
859 :
"=d"(res):
"d"(a),
"d"(b),
"d"(c),
"d"(d):
"memory");
868 __asm__ volatile (
"insert %0,%1,%2,16,16"
869 :
"=d"(res):
"d"(a),
"d"(b):
"memory");
878 __asm__ volatile (
"insert %0,%1,%2,0,8"
879 :
"=d"(res):
"d"(a),
"d"(b):
"memory");
888 __asm__ volatile (
"insert %0,%1,%2,8,8"
889 :
"=d"(res):
"d"(a),
"d"(b):
"memory");
898 __asm__ volatile (
"insert %0,%1,%2,16,8"
899 :
"=d"(res):
"d"(a),
"d"(b):
"memory");
908 __asm__ volatile (
"insert %0,%1,%2,24,8"
909 :
"=d"(res):
"d"(a),
"d"(b):
"memory");
918 __asm__ volatile (
"insert %0,%1,%2,0,8"
919 :
"=d"(res):
"d"(a),
"d"(b):
"memory");
928 __asm__ volatile (
"insert %0,%1,%2,8,8"
929 :
"=d"(res):
"d"(a),
"d"(b):
"memory");
938 __asm__ volatile (
"insert %0,%1,%2,16,8"
939 :
"=d"(res):
"d"(a),
"d"(b):
"memory");
948 __asm__ volatile (
"insert %0,%1,%2,24,8"
949 :
"=d"(res):
"d"(a),
"d"(b):
"memory");
958 __asm__ volatile (
"insert %0,%1,%2,0,16"
959 :
"=d"(res):
"d"(a),
"d"(b):
"memory");
968 __asm__ volatile (
"insert %0,%1,%2,16,16"
969 :
"=d"(res):
"d"(a),
"d"(b):
"memory");
978 __asm__ volatile (
"insert %0,%1,%2,0,16"
979 :
"=d"(res):
"d"(a),
"d"(b):
"memory");
988 __asm__ volatile (
"insert %0,%1,%2,16,16"
989 :
"=d"(res):
"d"(a),
"d"(b):
"memory");
998 __asm__ volatile (
"min.b %0,%1,%2"
999 :
"=d"(res):
"d"(a),
"d"(b):
"memory");
1008 __asm__ volatile (
"min.bu %0,%1,%2"
1009 :
"=d"(res):
"d"(a),
"d"(b):
"memory");
1018 __asm__ volatile (
"min.h %0,%1,%2"
1019 :
"=d"(res):
"d"(a),
"d"(b):
"memory");
1028 __asm__ volatile (
"min.hu %0,%1,%2"
1029 :
"=d"(res):
"d"(a),
"d"(b):
"memory");
1037 __asm__ volatile (
"ld.w %%d15,[%0] \n\
1038 insert %%d15,%%d15,%1,0,8 \n\
1040 ::
"a"(a),
"d"(b):
"d15",
"memory");
1047 __asm__ volatile (
"ld.w %%d15,[%0] \n\
1048 insert %%d15,%%d15,%1,8,8 \n\
1050 ::
"a"(a),
"d"(b):
"d15",
"memory");
1057 __asm__ volatile (
"ld.w %%d15,[%0] \n\
1058 insert %%d15,%%d15,%1,16,8 \n\
1060 ::
"a"(a),
"d"(b):
"d15",
"memory");
1067 __asm__ volatile (
"ld.w %%d15,[%0] \n\
1068 insert %%d15,%%d15,%1,24,8 \n\
1070 ::
"a"(a),
"d"(b):
"d15",
"memory");
1077 __asm__ volatile (
"ld.w %%d15,[%0] \n\
1078 insert %%d15,%%d15,%1,0,16 \n\
1080 ::
"a"(a),
"d"(b):
"d15",
"memory");
1087 __asm__ volatile (
"ld.w %%d15,[%0] \n\
1088 insert %%d15,%%d15,%1,16,16 \n\
1090 ::
"a"(a),
"d"(b):
"d15",
"memory");
1097 __asm__ volatile (
"ld.w %%d15,[%0] \n\
1098 insert %%d15,%%d15,%1,0,8 \n\
1100 ::
"a"(a),
"d"(b):
"d15",
"memory");
1107 __asm__ volatile (
"ld.w %%d15,[%0] \n\
1108 insert %%d15,%%d15,%1,8,8 \n\
1110 ::
"a"(a),
"d"(b):
"d15",
"memory");
1117 __asm__ volatile (
"ld.w %%d15,[%0] \n\
1118 insert %%d15,%%d15,%1,16,8 \n\
1120 ::
"a"(a),
"d"(b):
"d15",
"memory");
1127 __asm__ volatile (
"ld.w %%d15,[%0] \n\
1128 insert %%d15,%%d15,%1,24,8 \n\
1130 ::
"a"(a),
"d"(b):
"d15",
"memory");
1137 __asm__ volatile (
"ld.w %%d15,[%0] \n\
1138 insert %%d15,%%d15,%1,0,16 \n\
1140 ::
"a"(a),
"d"(b):
"d15",
"memory");
1147 __asm__ volatile (
"ld.w %%d15,[%0] \n\
1148 insert %%d15,%%d15,%1,16,16 \n\
1150 ::
"a"(a),
"d"(b):
"d15",
"memory");
/* Absolute value of a signed int via the compiler builtin.
 * NOTE(review): like abs(), __builtin_abs(INT_MIN) overflows (UB). */
#define __abs(a) __builtin_abs(a)
1170 __asm__ volatile (
"absdif %0, %1, %2":
"=d" (res) :
"d" (a),
"d" (b));
1179 __asm__ volatile (
"abss %0, %1":
"=d" (res) :
"d" (a));
1188 __asm__ volatile (
"clo %0,%1":
"=d"(res):
"d"(a));
1197 __asm__ volatile (
"cls %0,%1":
"=d"(res):
"d"(a));
/* Count leading zero bits. NOTE(review): __builtin_clz(0) is undefined --
 * callers must guarantee a nonzero argument. */
#define __clz(a) __builtin_clz(a)
1210 __asm__ volatile (
"insert %0,%1,0,31,1":
"=d" (res) :
"d" (d):
"memory");
1219 __asm__ volatile (
"insert %0,%1,0,31,1":
"=d" (res) :
"d" (f):
"memory");
1223 #if !IFXCPU_INTRINSICSGNUC_USE_MACHINE_INTRINSICS
/* Read the core special-function register at compile-time address 'regaddr'
 * via "mfcr" and yield its value (statement expression, type sint32). */
#define __mfcr(regaddr) \
({ sint32 res; __asm__ volatile ("mfcr %0,%1": "=d" (res) :"i"(regaddr): "memory"); res; })
/* Write 'val' to the core SFR at 'regaddr' via "mtcr", then "isync" so the
 * new setting is guaranteed effective before subsequent instructions. */
#define __mtcr(regaddr,val) __asm__ volatile ("mtcr %0,%1\n\tisync"::"i"(regaddr),"d"(val):"memory")
1241 __asm__ volatile (
"parity %0,%1":
"=d" (res) :
"d" (a):
"memory");
1250 __asm__ volatile (
"sat.b %0,%1":
"=d"(res):
"d"(a));
1259 __asm__ volatile (
"sat.bu %0,%1":
"=d"(res):
"d"(a));
1268 __asm__ volatile (
"sat.h %0,%1":
"=d"(res):
"d"(a));
1277 __asm__ volatile (
"sat.hu %0,%1":
"=d"(res):
"d"(a));
1294 __asm__ volatile (
"adds %0, %1, %2":
"=d" (res) :
"d" (a),
"d" (b));
1303 __asm__ volatile (
"adds.u %0, %1, %2":
"=d" (res) :
"d" (a),
"d" (b));
1312 __asm__ volatile (
"subs %0, %1, %2":
"=d" (res) :
"d" (a),
"d" (b));
1321 __asm__ volatile (
"subs.u %0, %1, %2":
"=d" (res) :
"d" (a),
"d" (b));
1339 __asm__ volatile (
"debug" : : :
"memory");
1346 __asm__ volatile (
"dsync" : : :
"memory");
1353 __asm__ volatile (
"isync" : : :
"memory");
1360 __asm__ volatile(
"mov %H2,%1 \n\
1362 ::
"a"(address),
"d"(mask),
"d"((
long long)value));
1369 __asm__ volatile (
"nop" : : :
"memory");
1378 ::
"a"(((
sint8*)cnt)-1));
1385 __asm__ volatile (
"rslcx" : : :
"memory");
1392 __asm__ volatile (
"svlcx" : : :
"memory");
1400 __asm__ volatile(
"swap.w [%1]0,%2":
"=d"(res):
"a"(place),
"0"(value));
/* Emit exactly n "nop" instructions using the assembler .rept directive;
 * n must be a compile-time literal. */
#define NOP(n) __asm(".rept " #n "\n\tnop\n\t.endr\n")
/* Extract 'size' bits of 'src' starting at bit 'start', zero-extended
 * (TriCore "extr.u"); 'start' and 'size' must be compile-time constants
 * (immediate operands). Yields the extracted field as sint32. */
#define __extru(src,start,size) \
({ sint32 res; asm volatile (" extr.u\t %0,%1,%2,%3" : "=d" (res) : \
"d" (src),"i" (start),"i" (size) : "memory"); res; })
/* Load 'val' into the named address register, e.g. __setareg(a14, x)
 * ("mov.a" moves a data-register value into an A-register). */
#define __setareg(areg,val) \
{ uint32 reg_val= (uint32)val; \
asm volatile (" mov.a\t %%"#areg",%0"::"d"(reg_val)); }
1424 __asm__ volatile(
"mov %%d0,0\n\
1437 unsigned int value,
unsigned int condition)
1440 #ifdef IFX_USE_GNUC_TRICORE_1_6
1441 __extension__
unsigned long long reg64
1442 = value | (
unsigned long long) condition << 32;
1444 __asm__ __volatile__ (
"cmpswap.w [%[addr]]0, %A[reg]"
1445 : [reg]
"+d" (reg64)
1446 : [addr]
"a" (address)
1453 if (condition == *address)
1455 __swap((
void *)address,value );
1478 __asm__ volatile(
"q31tof %0, %1, %2":
"=d" (result) :
"d" (value),
"d" (shift));
1485 __asm__ volatile (
"mov.aa %0, %%a11":
"=a" (res) : :
"a11");
1491 __asm__ volatile (
"mov.aa %%a10, %0": :
"a" (stackAddr) :
"a10");