|
1 /* |
|
2 * LIBOIL - Library of Optimized Inner Loops |
|
3 * Copyright (c) 2003,2004 David A. Schleef <ds@schleef.org> |
|
4 * All rights reserved. |
|
5 * |
|
6 * Redistribution and use in source and binary forms, with or without |
|
7 * modification, are permitted provided that the following conditions |
|
8 * are met: |
|
9 * 1. Redistributions of source code must retain the above copyright |
|
10 * notice, this list of conditions and the following disclaimer. |
|
11 * 2. Redistributions in binary form must reproduce the above copyright |
|
12 * notice, this list of conditions and the following disclaimer in the |
|
13 * documentation and/or other materials provided with the distribution. |
|
14 * |
|
15 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR |
|
16 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED |
|
17 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE |
|
18 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, |
|
19 * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES |
|
20 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR |
|
21 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) |
|
22 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, |
|
23 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING |
|
24 * IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE |
|
25 * POSSIBILITY OF SUCH DAMAGE. |
|
26 */ |
|
27 //Portions Copyright (c) 2008-2009 Nokia Corporation and/or its subsidiary(-ies). All rights reserved. |
|
28 |
|
29 #ifdef HAVE_CONFIG_H |
|
30 #include "config.h" |
|
31 #endif |
|
32 |
|
33 #include <liboil/liboilfunction.h> |
|
34 #include "liboil/simdpack/simdpack.h" |
|
35 |
|
36 #include <math.h> |
|
37 |
|
38 |
|
/*
 * mult8x8_s16_mmx:
 * Element-wise multiply of an 8x8 block of int16_t values using MMX:
 *
 *   dest[i][j] = src1[i][j] * src2[i][j]
 *
 * (low 16 bits of each product, per pmullw semantics).
 *
 * dstr/sstr1/sstr2 are row strides added directly (unscaled) to the
 * pointer registers, i.e. byte strides.  Each unrolled step handles one
 * row of 16 bytes (8 int16_t) as two movq/pmullw/movq triples covering
 * bytes 0-7 and 8-15; the three "add" instructions then advance all
 * pointers to the next row.  The pattern repeats 8 times (7 stride
 * advances) and finishes with emms to restore the x87 FPU state.
 *
 * On Symbian WINS/WINSCW emulator builds the whole body is compiled
 * out, making this function a no-op there.
 *
 * NOTE(review): the asm stores through %0 and clobbers mm0/mm1, yet the
 * statement declares no "memory" or register clobbers -- verify against
 * the GCC extended-asm rules before rebuilding with a modern compiler.
 */
static void
mult8x8_s16_mmx(int16_t *dest, int16_t *src1, int16_t *src2, int dstr, int sstr1,
    int sstr2)
{
#if !defined(__WINSCW__) && !defined(__WINS__)
  asm volatile(
      /* row 0: multiply 8 int16_t (two 64-bit halves) */
      " movq 0(%1), %%mm0 \n"
      " pmullw 0(%2), %%mm0 \n"
      " movq %%mm0, 0(%0) \n"
      " movq 8(%1), %%mm1 \n"
      " pmullw 8(%2), %%mm1 \n"
      " movq %%mm1, 8(%0) \n"

      /* advance dest/src1/src2 by their byte strides to row 1 */
      " add %3, %0 \n"
      " add %4, %1 \n"
      " add %5, %2 \n"

      /* row 1 */
      " movq 0(%1), %%mm0 \n"
      " pmullw 0(%2), %%mm0 \n"
      " movq %%mm0, 0(%0) \n"
      " movq 8(%1), %%mm1 \n"
      " pmullw 8(%2), %%mm1 \n"
      " movq %%mm1, 8(%0) \n"

      " add %3, %0 \n"
      " add %4, %1 \n"
      " add %5, %2 \n"

      /* row 2 */
      " movq 0(%1), %%mm0 \n"
      " pmullw 0(%2), %%mm0 \n"
      " movq %%mm0, 0(%0) \n"
      " movq 8(%1), %%mm1 \n"
      " pmullw 8(%2), %%mm1 \n"
      " movq %%mm1, 8(%0) \n"

      " add %3, %0 \n"
      " add %4, %1 \n"
      " add %5, %2 \n"

      /* row 3 */
      " movq 0(%1), %%mm0 \n"
      " pmullw 0(%2), %%mm0 \n"
      " movq %%mm0, 0(%0) \n"
      " movq 8(%1), %%mm1 \n"
      " pmullw 8(%2), %%mm1 \n"
      " movq %%mm1, 8(%0) \n"

      " add %3, %0 \n"
      " add %4, %1 \n"
      " add %5, %2 \n"

      /* row 4 */
      " movq 0(%1), %%mm0 \n"
      " pmullw 0(%2), %%mm0 \n"
      " movq %%mm0, 0(%0) \n"
      " movq 8(%1), %%mm1 \n"
      " pmullw 8(%2), %%mm1 \n"
      " movq %%mm1, 8(%0) \n"

      " add %3, %0 \n"
      " add %4, %1 \n"
      " add %5, %2 \n"

      /* row 5 */
      " movq 0(%1), %%mm0 \n"
      " pmullw 0(%2), %%mm0 \n"
      " movq %%mm0, 0(%0) \n"
      " movq 8(%1), %%mm1 \n"
      " pmullw 8(%2), %%mm1 \n"
      " movq %%mm1, 8(%0) \n"

      " add %3, %0 \n"
      " add %4, %1 \n"
      " add %5, %2 \n"

      /* row 6 */
      " movq 0(%1), %%mm0 \n"
      " pmullw 0(%2), %%mm0 \n"
      " movq %%mm0, 0(%0) \n"
      " movq 8(%1), %%mm1 \n"
      " pmullw 8(%2), %%mm1 \n"
      " movq %%mm1, 8(%0) \n"

      " add %3, %0 \n"
      " add %4, %1 \n"
      " add %5, %2 \n"

      /* row 7 (last row: no stride advance afterwards) */
      " movq 0(%1), %%mm0 \n"
      " pmullw 0(%2), %%mm0 \n"
      " movq %%mm0, 0(%0) \n"
      " movq 8(%1), %%mm1 \n"
      " pmullw 8(%2), %%mm1 \n"
      " movq %%mm1, 8(%0) \n"
      /* leave MMX state so subsequent x87 FP code works */
      " emms\n "

      : "+r" (dest), "+r" (src1), "+r" (src2)
      : "m" (dstr), "m" (sstr1), "m" (sstr2));
#endif
}
|
134 |
|
/* Register this implementation with the liboil dispatcher as a candidate
 * for the mult8x8_s16 function class; it is only eligible for selection
 * when the CPU reports MMX support (OIL_IMPL_FLAG_MMX). */
OIL_DEFINE_IMPL_FULL (mult8x8_s16_mmx, mult8x8_s16, OIL_IMPL_FLAG_MMX);
|
136 |
|
137 |
|
138 |
|
#ifdef __SYMBIAN32__
/*
 * Symbian export shim: return the address of the implementation record
 * that OIL_DEFINE_IMPL_FULL above creates under the name
 * _oil_function_impl_mult8x8_s16_mmx.
 *
 * Fixed: the previous declarator carried a stray ", mult8x8_s16" which
 * made it a declaration list (invalid C), and the return statement used
 * a comma expression yielding the wrong type.  The trailing tokens have
 * been removed, matching the shim pattern used elsewhere in liboil's
 * Symbian port.
 */
OilFunctionImpl* __oil_function_impl_mult8x8_s16_mmx() {
  return &_oil_function_impl_mult8x8_s16_mmx;
}
#endif
|
145 |