1 | /* Vector optimized 32/64 bit S/390 version of stpncpy. |
2 | Copyright (C) 2015-2022 Free Software Foundation, Inc. |
3 | This file is part of the GNU C Library. |
4 | |
5 | The GNU C Library is free software; you can redistribute it and/or |
6 | modify it under the terms of the GNU Lesser General Public |
7 | License as published by the Free Software Foundation; either |
8 | version 2.1 of the License, or (at your option) any later version. |
9 | |
10 | The GNU C Library is distributed in the hope that it will be useful, |
11 | but WITHOUT ANY WARRANTY; without even the implied warranty of |
12 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
13 | Lesser General Public License for more details. |
14 | |
15 | You should have received a copy of the GNU Lesser General Public |
16 | License along with the GNU C Library; if not, see |
17 | <https://www.gnu.org/licenses/>. */ |
18 | |
19 | #include <ifunc-stpncpy.h> |
20 | |
21 | #if HAVE_STPNCPY_Z13 |
22 | |
23 | # include "sysdep.h" |
24 | # include "asm-syntax.h" |
25 | |
26 | .text |
27 | |
/* char * stpncpy (char *dest, const char *src, size_t n)
   Copies at most n characters of string src to dest,
   returning a pointer to the terminating null byte written
   to dest, or to dest + n if strlen (src) >= n.
32 | |
33 | Register usage: |
34 | -%r0 = return value |
35 | -%r1 = zero byte index |
36 | -%r2 = curr dst pointer |
37 | -%r3 = curr src pointer |
38 | -%r4 = n |
39 | -%r5 = current_len |
40 | -%r6 = loaded bytes |
41 | -%r7 = border, tmp |
42 | */ |
ENTRY(STPNCPY_Z13)
	.machine "z13"
	.machinemode "zarch_nohighgprs"

# if !defined __s390x__
	llgfr	%r4,%r4		/* 31-bit mode: zero-extend n to 64 bit so the
				   64-bit compares below see a clean value.  */
# endif /* !defined __s390x__ */

	clgfi	%r4,0
	ber	%r14		/* Nothing to do, if n == 0.  */

	la	%r0,0(%r4,%r2)	/* Save destination pointer + n for return.  */
	vlvgp	%v31,%r6,%r7	/* Save registers %r6/%r7 in a vreg instead
				   of spilling them to the stack.  */

	/* vlbb/lcbb never cross a 4k-byte boundary, so this first load
	   cannot fault even if the string ends before the boundary.  */
	vlbb	%v16,0(%r3),6	/* Load s until next 4k-byte boundary.  */
	lcbb	%r6,0(%r3),6	/* Get bytes to 4k-byte boundary or 16.  */
	llgfr	%r6,%r6		/* Convert 32bit to 64bit.  */

	lghi	%r5,0		/* current_len = 0.  */

	clgrjle	%r4,%r6,.Lremaining_v16 /* If n <= loaded-bytes
					   -> process remaining.  */

	/* n > loaded-byte-count.  */
	vfenezb	%v17,%v16,%v16	/* Find element not equal with zero search.  */
	vlgvb	%r1,%v17,7	/* Load zero index or 16 if not found.  */
	clrjl	%r1,%r6,.Lfound_v16_store /* Found zero within loaded bytes,
					     copy and return.  */

	/* Align s to 16 byte.  */
	risbgn	%r7,%r3,60,128+63,0 /* %r7 = bits 60-63 of %r3 'and' 15.  */
	lghi	%r5,15		/* current_len = 15.  */
	slr	%r5,%r7		/* Compute highest index to 16byte boundary.  */

	/* Zero not found and n > loaded-byte-count.  */
	vstl	%v16,%r5,0(%r2)	/* Copy loaded characters - no zero.  */
	ahi	%r5,1		/* Start loop at next character.  */

	/* Now we are 16byte aligned, so we can load a full vreg
	   without page fault.  */
	lgr	%r1,%r5		/* If %r5 + 64 < maxlen? -> loop64.  */
	aghi	%r1,64
	clgrjl	%r1,%r4,.Lloop64

	vl	%v16,0(%r5,%r3)	/* Load s.  */
	clgijl	%r4,17,.Lremaining_v16 /* If n <= 16, process remaining
					  bytes.  */
.Llt64:
	/* Fewer than 64 bytes may remain; step in 16-byte chunks while
	   staying strictly below border = n - 16 so a full vector load
	   of s never reads past the n-byte window.  */
	lgr	%r7,%r4
	slgfi	%r7,16		/* border_len = n - 16.  */

	clgrjhe	%r5,%r7,.Lremaining_v16 /* If current_len >= border
					   then process remaining bytes.  */
	vfenezbs %v17,%v16,%v16	/* Find element not equal with zero search.  */
	je	.Lfound_v16	/* Jump away if zero was found.  */
	vl	%v18,16(%r5,%r3) /* Load next part of s.  */
	vst	%v16,0(%r5,%r2)	/* Save previous part without zero to dst.  */
	aghi	%r5,16

	clgrjhe	%r5,%r7,.Lremaining_v18
	vfenezbs %v17,%v18,%v18
	je	.Lfound_v18
	vl	%v16,16(%r5,%r3)
	vst	%v18,0(%r5,%r2)
	aghi	%r5,16

	clgrjhe	%r5,%r7,.Lremaining_v16
	vfenezbs %v17,%v16,%v16
	je	.Lfound_v16
	vl	%v18,16(%r5,%r3)
	vst	%v16,0(%r5,%r2)
	aghi	%r5,16

.Lremaining_v18:
	vlr	%v16,%v18	/* Unify: remaining bytes are always in v16.  */
.Lremaining_v16:
	/* v16 contains the remaining bytes [1...16].
	   Store remaining bytes and append string-termination.  */
	vfenezb	%v17,%v16,%v16	/* Find element not equal with zero search.  */
	slgrk	%r7,%r4,%r5	/* Remaining bytes = maxlen - current_len.  */
	aghi	%r7,-1		/* vstl needs highest index.  */
	la	%r2,0(%r5,%r2)	/* vstl has no index register.  */
	vlgvb	%r1,%v17,7	/* Load zero index or 16 if not found.  */
	/* Zero in remaining bytes? -> jump away (zero-index <= max-index).  */
	clrjle	%r1,%r7,.Lfound_v16_store
	vstl	%v16,%r7,0(%r2)	/* Store remaining bytes without null
				   termination!  */
.Lend:
	/* Restore saved registers.  */
	vlgvg	%r6,%v31,0
	vlgvg	%r7,%v31,1
	lgr	%r2,%r0		/* Load saved dest-ptr.  */
	br	%r14

	/* Adjust current_len for the loop64 iteration in which the zero
	   was detected, then fall through to the common found-handling.  */
.Lfound_v16_32:
	aghi	%r5,32
	j	.Lfound_v16
.Lfound_v18_48:
	aghi	%r5,32
.Lfound_v18_16:
	aghi	%r5,16
.Lfound_v18:
	vlr	%v16,%v18
.Lfound_v16:
	/* v16 contains a zero. Store remaining bytes to zero. current_len
	   has not reached border, thus checking for n is not needed!  */
	vlgvb	%r1,%v17,7	/* Load byte index of zero.  */
	la	%r2,0(%r5,%r2)	/* vstl has no support for index-register.  */
.Lfound_v16_store:
	vstl	%v16,%r1,0(%r2)	/* Copy characters including zero.  */
	/* Fill remaining bytes with zero - remaining count always > 0.  */
	algr	%r5,%r1		/* Remaining bytes (=%r4) = ...  */
	slgr	%r4,%r5		/* = maxlen - (currlen + zero_index + 1).  */
	la	%r2,0(%r1,%r2)	/* Pointer to zero. start filling beyond.  */
	lgr	%r0,%r2		/* Save return-pointer to found zero.  */
	clgije	%r4,1,.Lend	/* Skip zero-filling, if found zero is last
				   possible character.
				   (1 is subtracted from r4 below!)  */
	aghi	%r4,-2		/* mvc with exrl needs count - 1.
				   (additional -1, see remaining bytes above)  */
	srlg	%r6,%r4,8	/* Split into 256 byte blocks.  */
	ltgr	%r6,%r6
	je	.Lzero_lt256
.Lzero_loop256:
	/* Overlapping mvc propagates the zero at 0(%r2) byte by byte,
	   i.e. a 256-byte memset to zero per iteration.  */
	mvc	1(256,%r2),0(%r2) /* Fill 256 zeros at once.  */
	la	%r2,256(%r2)
	brctg	%r6,.Lzero_loop256 /* Loop until all blocks are processed.  */
.Lzero_lt256:
	exrl	%r4,.Lmvc_lt256	/* Execute mvc below with length %r4 + 1
				   to zero-fill the final partial block.  */
	j	.Lend
.Lmvc_lt256:
	mvc	1(1,%r2),0(%r2)	/* Execute target only; length patched by exrl.  */

.Lloop64:
	/* Main loop: copy 64 bytes per iteration; each vfenezbs checks a
	   16-byte chunk for the terminating zero before it is stored.  */
	vl	%v16,0(%r5,%r3)
	vfenezbs %v17,%v16,%v16	/* Find element not equal with zero search.  */
	je	.Lfound_v16	/* Jump away if zero was found.  */
	vl	%v18,16(%r5,%r3) /* Load next part of s.  */
	vst	%v16,0(%r5,%r2)	/* Save previous part without zero to dst.  */
	vfenezbs %v17,%v18,%v18
	je	.Lfound_v18_16
	vl	%v16,32(%r5,%r3)
	vst	%v18,16(%r5,%r2)
	vfenezbs %v17,%v16,%v16
	je	.Lfound_v16_32
	vl	%v18,48(%r5,%r3)
	vst	%v16,32(%r5,%r2)
	vfenezbs %v17,%v18,%v18
	je	.Lfound_v18_48
	vst	%v18,48(%r5,%r2)

	aghi	%r5,64
	lgr	%r1,%r5		/* If %r5 + 64 < maxlen? -> loop64.  */
	aghi	%r1,64
	clgrjl	%r1,%r4,.Lloop64

	vl	%v16,0(%r5,%r3)	/* Load s.  */
	j	.Llt64
END(STPNCPY_Z13)
202 | |
203 | # if ! HAVE_STPNCPY_IFUNC |
204 | strong_alias (STPNCPY_Z13, __stpncpy) |
205 | weak_alias (__stpncpy, stpncpy) |
206 | # endif |
207 | |
208 | # if ! HAVE_STPNCPY_C && defined SHARED && IS_IN (libc) |
209 | strong_alias (STPNCPY_Z13, __GI___stpncpy) |
210 | # endif |
211 | #endif |
212 | |