/* strchr (str, ch) -- Return pointer to first occurrence of CH in STR.
   For Motorola 68000.
   Copyright (C) 1999-2022 Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library.  If not, see
   <https://www.gnu.org/licenses/>.  */

#include <sysdep.h>
#include "asm-syntax.h"
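/* sysdep.h supplies the ENTRY/END and cfi_* markup; asm-syntax.h supplies
   the R() and MEM*() wrappers that hide the differences between the MIT
   and Motorola assembler syntaxes.  */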

	TEXT
ENTRY(strchr)
	/* Save the callee-saved registers we use.  */
	movel	R(d2),MEM_PREDEC(sp)
	cfi_adjust_cfa_offset (4)
	movel	R(d3),MEM_PREDEC(sp)
	cfi_adjust_cfa_offset (4)
	cfi_rel_offset (R(d2),4)
	cfi_rel_offset (R(d3),0)

	/* Get string pointer and character.  */
	movel	MEM_DISP(sp,12),R(a0)
	moveb	MEM_DISP(sp,19),R(d0)
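	/* After the two register saves above, the string pointer sits at
	   offset 12 from the stack pointer and the int-typed character
	   argument at offset 16; offset 19 is its least significant byte
	   on this big-endian machine.  */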

	/* Distribute the character to all bytes of a longword.  */
	movel	R(d0),R(d1)
	lsll	#8,R(d1)
	moveb	R(d0),R(d1)
	movel	R(d1),R(d0)
	swap	R(d0)
	movew	R(d1),R(d0)
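	/* In rough C terms the sequence above performs (illustrative
	   sketch only, not code that is assembled here):

	       unsigned long rep = (unsigned char) c;
	       rep |= rep << 8;
	       rep |= rep << 16;

	   leaving the character replicated into all four bytes of d0.  */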

	/* First search for the character one byte at a time until the
	   pointer is aligned to a longword boundary.  */
	movel	R(a0),R(d1)
#ifdef __mcoldfire__
	andl	#3,R(d1)
#else
	andw	#3,R(d1)
#endif
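	/* ColdFire only implements the longword forms of these immediate
	   operations, hence the __mcoldfire__ variants here and below.  */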
	beq	L(L1)
	moveb	MEM(a0),R(d2)
	cmpb	R(d0),R(d2)
	beq	L(L9)
	tstb	R(d2)
	beq	L(L3)
	addql	#1,R(a0)

#ifdef __mcoldfire__
	subql	#3,R(d1)
#else
	subqw	#3,R(d1)
#endif
	beq	L(L1)
	moveb	MEM(a0),R(d2)
	cmpb	R(d0),R(d2)
	beq	L(L9)
	tstb	R(d2)
	beq	L(L3)
	addql	#1,R(a0)

#ifdef __mcoldfire__
	addql	#1,R(d1)
#else
	addqw	#1,R(d1)
#endif
	beq	L(L1)
	moveb	MEM(a0),R(d2)
	cmpb	R(d0),R(d2)
	beq	L(L9)
	tstb	R(d2)
	beq	L(L3)
	addql	#1,R(a0)

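	/* The unrolled byte checks above behave roughly like this C sketch
	   (illustrative names only):

	       while ((unsigned long) s & 3)
	         {
	           if (*s == c)
	             return (char *) s;
	           if (*s == '\0')
	             return NULL;
	           s++;
	         }

	   using d1 as a counter so that at most three bytes are examined
	   before the aligned longword loop is entered.  */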
L(L1:)
	/* Load the magic bits.  Unlike the generic implementation we can
	   use the carry bit as the fourth hole.  */
	movel	#0xfefefeff,R(d3)

	/* We exit the loop if adding MAGIC_BITS to LONGWORD fails to
	   change any of the hole bits of LONGWORD.

	   1) Is this safe?  Will it catch all the zero bytes?
	   Suppose there is a byte with all zeros.  Any carry bits
	   propagating from its left will fall into the hole at its
	   least significant bit and stop.  Since there will be no
	   carry from its most significant bit, the LSB of the
	   byte to the left will be unchanged, and the zero will be
	   detected.

	   2) Is this worthwhile?  Will it ignore everything except
	   zero bytes?  Suppose every byte of LONGWORD has a bit set
	   somewhere.  There will be a carry into bit 8.  If bit 8
	   is set, this will carry into bit 16.  If bit 8 is clear,
	   one of bits 9-15 must be set, so there will be a carry
	   into bit 16.  Similarly, there will be a carry into bit
	   24.  If one of bits 24-31 is set, there will be a carry
	   into bit 32 (=carry flag), so all of the hole bits will
	   be changed.

	   3) But wait!  Aren't we looking for C, not zero?
	   Good point.  So what we do is XOR LONGWORD with a longword,
	   each of whose bytes is C.  This turns each byte that is C
	   into a zero.  */

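	/* The test in the loop below corresponds roughly to this C model
	   (a sketch only, with illustrative names and 32-bit unsigned
	   arithmetic; the real code keeps the fourth hole in the hardware
	   carry flag, which C cannot express directly):

	       unsigned long xored = longword ^ rep;
	       unsigned long sum = xored + 0xfefefeff;
	       if (sum >= xored)
	         goto found;
	       if ((((sum ^ xored) | 0xfefefeff) + 1) != 0)
	         goto found;

	   The first test fires when the add produced no carry out of bit
	   31, the second when one of the lower hole bits was left
	   unchanged; either way some byte of the longword equals C.  The
	   same test is then repeated on the unXORed longword to look for
	   the terminating NUL byte.  */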
L(L2:)
	/* Get the longword in question.  */
	movel	MEM_POSTINC(a0),R(d1)
	/* XOR with the byte we search for.  */
	eorl	R(d0),R(d1)

	/* Add the magic value.  We get carry bits reported for each byte
	   which is not C.  */
	movel	R(d3),R(d2)
	addl	R(d1),R(d2)

	/* Check the fourth carry bit before it is clobbered by the next
	   XOR.  If it is not set we have a hit.  */
	bcc	L(L8)

	/* We are only interested in carry bits that change due to the
	   previous add, so remove original bits.  */
	eorl	R(d1),R(d2)

	/* Now test for the other three overflow bits.
	   Set all non-carry bits.  */
	orl	R(d3),R(d2)
	/* Add 1 to get zero if all carry bits were set.  */
	addql	#1,R(d2)

	/* If we don't get zero then at least one byte of the word equals
	   C.  */
	bne	L(L8)

	/* Next look for a NUL byte.
	   Restore original longword without reload.  */
	eorl	R(d0),R(d1)
	/* Add the magic value.  We get carry bits reported for each byte
	   which is not NUL.  */
	movel	R(d3),R(d2)
	addl	R(d1),R(d2)

	/* Check the fourth carry bit before it is clobbered by the next
	   XOR.  If it is not set we have a hit, and return NULL.  */
	bcc	L(L3)

	/* We are only interested in carry bits that change due to the
	   previous add, so remove original bits.  */
	eorl	R(d1),R(d2)

	/* Now test for the other three overflow bits.
	   Set all non-carry bits.  */
	orl	R(d3),R(d2)
	/* Add 1 to get zero if all carry bits were set.  */
	addql	#1,R(d2)

	/* If we don't get zero then at least one byte of the word was NUL
	   and we return NULL.  Otherwise continue with the next longword.  */
	bne	L(L3)

	/* Get the longword in question.  */
	movel	MEM_POSTINC(a0),R(d1)
	/* XOR with the byte we search for.  */
	eorl	R(d0),R(d1)

	/* Add the magic value.  We get carry bits reported for each byte
	   which is not C.  */
	movel	R(d3),R(d2)
	addl	R(d1),R(d2)

	/* Check the fourth carry bit before it is clobbered by the next
	   XOR.  If it is not set we have a hit.  */
	bcc	L(L8)

	/* We are only interested in carry bits that change due to the
	   previous add, so remove original bits.  */
	eorl	R(d1),R(d2)

	/* Now test for the other three overflow bits.
	   Set all non-carry bits.  */
	orl	R(d3),R(d2)
	/* Add 1 to get zero if all carry bits were set.  */
	addql	#1,R(d2)

	/* If we don't get zero then at least one byte of the word equals
	   C.  */
	bne	L(L8)

	/* Next look for a NUL byte.
	   Restore original longword without reload.  */
	eorl	R(d0),R(d1)
	/* Add the magic value.  We get carry bits reported for each byte
	   which is not NUL.  */
	movel	R(d3),R(d2)
	addl	R(d1),R(d2)

	/* Check the fourth carry bit before it is clobbered by the next
	   XOR.  If it is not set we have a hit, and return NULL.  */
	bcc	L(L3)

	/* We are only interested in carry bits that change due to the
	   previous add, so remove original bits.  */
	eorl	R(d1),R(d2)

	/* Now test for the other three overflow bits.
	   Set all non-carry bits.  */
	orl	R(d3),R(d2)
	/* Add 1 to get zero if all carry bits were set.  */
	addql	#1,R(d2)

	/* If we don't get zero then at least one byte of the word was NUL
	   and we return NULL.  Otherwise continue with the next longword.  */
	beq	L(L2)

L(L3:)
	/* Return NULL.  */
	clrl	R(d0)
	movel	R(d0),R(a0)
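	/* The return value is left in a0 as well as d0 (here and at L9),
	   presumably for callers that expect pointer results in an
	   address register.  */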
	movel	MEM_POSTINC(sp),R(d3)
	cfi_remember_state
	cfi_adjust_cfa_offset (-4)
	cfi_restore (R(d3))
	movel	MEM_POSTINC(sp),R(d2)
	cfi_adjust_cfa_offset (-4)
	cfi_restore (R(d2))
	rts

	cfi_restore_state
L(L8:)
	/* We have a hit.  Check to see which byte it was.  First
	   compensate for the autoincrement in the loop.  */
	subql	#4,R(a0)

	moveb	MEM(a0),R(d1)
	cmpb	R(d0),R(d1)
	beq	L(L9)
	tstb	R(d1)
	beq	L(L3)
	addql	#1,R(a0)

	moveb	MEM(a0),R(d1)
	cmpb	R(d0),R(d1)
	beq	L(L9)
	tstb	R(d1)
	beq	L(L3)
	addql	#1,R(a0)

	moveb	MEM(a0),R(d1)
	cmpb	R(d0),R(d1)
	beq	L(L9)
	tstb	R(d1)
	beq	L(L3)
	addql	#1,R(a0)

	/* Otherwise the fourth byte must equal C, since this code is only
	   reached when some byte of the longword matched.  */
L(L9:)
	movel	R(a0),R(d0)
	movel	MEM_POSTINC(sp),R(d3)
	cfi_adjust_cfa_offset (-4)
	cfi_restore (R(d3))
	movel	MEM_POSTINC(sp),R(d2)
	cfi_adjust_cfa_offset (-4)
	cfi_restore (R(d2))
	rts
END(strchr)

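/* index is the traditional BSD name for this function.  */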
weak_alias (strchr, index)
libc_hidden_builtin_def (strchr)
