/*
 * This file is part of the Micro Python project, http://micropython.org/
 *
 * The MIT License (MIT)
 *
 * Copyright (c) 2013, 2014 Damien P. George
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#if defined(__x86_64__) && !MICROPY_NLR_SETJMP

// We only need the functions here if we are on x86-64, and we are not
// using setjmp/longjmp.
//
// For reference, x86-64 callee save regs are:
//      rbx, rbp, rsp, r12, r13, r14, r15

// the offset of nlr_top within mp_state_ctx_t
#define NLR_TOP_OFFSET (2 * 8)
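// (This hard-codes the assumption that nlr_top is the third machine word of
// mp_state_ctx_t; the authoritative layout is in py/mpstate.h and must be
// kept in sync with this offset.)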

#if defined(__APPLE__) && defined(__MACH__)
#define NLR_TOP (_mp_state_ctx + NLR_TOP_OFFSET)
#define MP_THREAD_GET_STATE _mp_thread_get_state
#else
#define NLR_TOP (mp_state_ctx + NLR_TOP_OFFSET)
#define MP_THREAD_GET_STATE mp_thread_get_state
#endif

// offset of nlr_top within mp_state_thread_t structure
#define NLR_TOP_TH_OFF (0)
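
// When MICROPY_PY_THREAD is enabled, nlr_top instead lives at this offset in
// the per-thread mp_state_thread_t returned by mp_thread_get_state(), so
// each function below has a threaded variant that calls MP_THREAD_GET_STATE.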

#if defined(_WIN32) || defined(__CYGWIN__)
#define NLR_OS_WINDOWS
#endif

    .file   "nlr.s"
    .text

#if !defined(NLR_OS_WINDOWS)

/******************************************************************************/
//
// Functions for *nix and OSX.
// OSX needs _ prefix for binding to C, and doesn't support some directives.
//
/******************************************************************************/

/**************************************/
// mp_uint_t nlr_push(rdi=nlr_buf_t *nlr)
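//
// The offsets used below imply the following nlr_buf_t layout (a sketch
// inferred from this file; the authoritative definition is in py/nlr.h):
//
//      typedef struct _nlr_buf_t {
//          struct _nlr_buf_t *prev;    // 0(%rdi): link to outer nlr_buf
//          void *ret_val;              // 8(%rdi): value passed to nlr_jump
//          void *regs[8];              // 16..72(%rdi): rip, rbp, rsp, rbx,
//      } nlr_buf_t;                    //               r12, r13, r14, r15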

#if !(defined(__APPLE__) && defined(__MACH__))
    .globl  nlr_push
    .type   nlr_push, @function
nlr_push:
#else
    .globl  _nlr_push
_nlr_push:
#endif
    movq    (%rsp), %rax            # load return %rip
    movq    %rax, 16(%rdi)          # store %rip into nlr_buf
    movq    %rbp, 24(%rdi)          # store %rbp into nlr_buf
    movq    %rsp, 32(%rdi)          # store %rsp into nlr_buf
    movq    %rbx, 40(%rdi)          # store %rbx into nlr_buf
    movq    %r12, 48(%rdi)          # store %r12 into nlr_buf
    movq    %r13, 56(%rdi)          # store %r13 into nlr_buf
    movq    %r14, 64(%rdi)          # store %r14 into nlr_buf
    movq    %r15, 72(%rdi)          # store %r15 into nlr_buf

#if !MICROPY_PY_THREAD
    movq    NLR_TOP(%rip), %rax     # get last nlr_buf
    movq    %rax, (%rdi)            # store it
    movq    %rdi, NLR_TOP(%rip)     # store new nlr_buf (to make linked list)
#else
    movq    %rdi, %rbp              # rbp is callee-save: use it to preserve rdi across the call
    callq   MP_THREAD_GET_STATE     # get mp_state_thread ptr into rax
    movq    NLR_TOP_TH_OFF(%rax), %rsi # get thread.nlr_top (last nlr_buf)
    movq    %rsi, (%rbp)            # store it
    movq    %rbp, NLR_TOP_TH_OFF(%rax) # store new nlr_buf (to make linked list)
    movq    24(%rbp), %rbp          # restore rbp
#endif

    xorq    %rax, %rax              # return 0, normal return
    ret                             # return
#if !(defined(__APPLE__) && defined(__MACH__))
    .size   nlr_push, .-nlr_push
#endif
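
// Typical usage from C (a sketch of the nlr protocol; see py/nlr.h for the
// real declarations and any convenience macros):
//
//      nlr_buf_t nlr;
//      if (nlr_push(&nlr) == 0) {
//          // protected code, which may raise via nlr_jump(val)
//          nlr_pop();                  // balance the push on the normal path
//      } else {
//          // non-local return: nlr.ret_val holds the val given to nlr_jump
//      }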

/**************************************/
// void nlr_pop()

#if !(defined(__APPLE__) && defined(__MACH__))
    .globl  nlr_pop
    .type   nlr_pop, @function
nlr_pop:
#else
    .globl  _nlr_pop
_nlr_pop:
#endif

#if !MICROPY_PY_THREAD
    movq    NLR_TOP(%rip), %rax     # get nlr_top into %rax
    movq    (%rax), %rax            # load prev nlr_buf
    movq    %rax, NLR_TOP(%rip)     # store prev nlr_buf (to unlink list)
#else
    callq   MP_THREAD_GET_STATE     # get mp_state_thread ptr into rax
    movq    NLR_TOP_TH_OFF(%rax), %rdi # get thread.nlr_top (last nlr_buf)
    movq    (%rdi), %rdi            # load prev nlr_buf
    movq    %rdi, NLR_TOP_TH_OFF(%rax) # store prev nlr_buf (to unlink list)
#endif

    ret                             # return
#if !(defined(__APPLE__) && defined(__MACH__))
    .size   nlr_pop, .-nlr_pop
#endif

/**************************************/
// void nlr_jump(rdi=mp_uint_t val)
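//
// nlr_jump never returns to its caller: it restores the registers and %rsp
// captured by the matching nlr_push, rewrites the saved return address onto
// the restored stack, and executes ret with %rax = 1, so control resumes as
// if that nlr_push call had just returned 1.  In C-like pseudocode (a
// sketch, assuming the nlr_buf_t layout described above):
//
//      void nlr_jump(mp_uint_t val) {
//          nlr_buf_t *top = <current nlr_top>;
//          if (top == NULL) {
//              nlr_jump_fail((void *)val); // declared NORETURN in py/nlr.h
//          }
//          top->ret_val = (void *)val;
//          <current nlr_top> = top->prev;  // unlink from the list
//          <restore regs and "return" 1 from the original nlr_push>
//      }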

#if !(defined(__APPLE__) && defined(__MACH__))
    .globl  nlr_jump
    .type   nlr_jump, @function
nlr_jump:
#else
    .globl  _nlr_jump
_nlr_jump:
#endif

#if !MICROPY_PY_THREAD
    movq    %rdi, %rax              # put return value in %rax
    movq    NLR_TOP(%rip), %rdi     # get nlr_top into %rdi
    test    %rdi, %rdi              # check for nlr_top being NULL
    je      .fail                   # fail if nlr_top is NULL
    movq    %rax, 8(%rdi)           # store return value
    movq    (%rdi), %rax            # load prev nlr_buf
    movq    %rax, NLR_TOP(%rip)     # store prev nlr_buf (to unlink list)
#else
    movq    %rdi, %rbp              # put return value in rbp
    callq   MP_THREAD_GET_STATE     # get thread ptr in rax
    movq    %rax, %rsi              # put thread ptr in rsi
    movq    %rbp, %rax              # put return value in rax (also needed on the .fail path)
    movq    NLR_TOP_TH_OFF(%rsi), %rdi # get thread.nlr_top in rdi
    test    %rdi, %rdi              # check for nlr_top being NULL
    je      .fail                   # fail if nlr_top is NULL
    movq    %rax, 8(%rdi)           # store return value
    movq    (%rdi), %rax            # load prev nlr_buf
    movq    %rax, NLR_TOP_TH_OFF(%rsi) # store prev nlr_buf (to unlink list)
#endif

    movq    72(%rdi), %r15          # load saved %r15
    movq    64(%rdi), %r14          # load saved %r14
    movq    56(%rdi), %r13          # load saved %r13
    movq    48(%rdi), %r12          # load saved %r12
    movq    40(%rdi), %rbx          # load saved %rbx
    movq    32(%rdi), %rsp          # load saved %rsp
    movq    24(%rdi), %rbp          # load saved %rbp
    movq    16(%rdi), %rax          # load saved %rip
    movq    %rax, (%rsp)            # store saved %rip to stack
    xorq    %rax, %rax              # clear return register
    inc     %al                     # increment to 1 to indicate non-local return
    ret                             # return
.fail:
    movq    %rax, %rdi              # put argument back in first-arg register
#if !(defined(__APPLE__) && defined(__MACH__))
    je      nlr_jump_fail           # always taken: ZF from the test above survives the movq
    .size   nlr_jump, .-nlr_jump
#else
    je      _nlr_jump_fail          # always taken: ZF from the test above survives the movq
#endif

#else // !defined(NLR_OS_WINDOWS)

/******************************************************************************/
//
// Functions for Windows
//
/******************************************************************************/

/**************************************/
// mp_uint_t nlr_push(rcx=nlr_buf_t *nlr)
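//
// The Windows x64 calling convention passes the first argument in %rcx and,
// unlike the SysV ABI above, also treats %rdi and %rsi as callee-save, so
// nlr_push saves them and nlr_jump restores them in addition to the usual set.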

    .globl  nlr_push
nlr_push:
    movq    (%rsp), %rax            # load return %rip
    movq    %rax, 16(%rcx)          # store %rip into nlr_buf
    movq    %rbp, 24(%rcx)          # store %rbp into nlr_buf
    movq    %rsp, 32(%rcx)          # store %rsp into nlr_buf
    movq    %rbx, 40(%rcx)          # store %rbx into nlr_buf
    movq    %r12, 48(%rcx)          # store %r12 into nlr_buf
    movq    %r13, 56(%rcx)          # store %r13 into nlr_buf
    movq    %r14, 64(%rcx)          # store %r14 into nlr_buf
    movq    %r15, 72(%rcx)          # store %r15 into nlr_buf
    movq    %rdi, 80(%rcx)          # store %rdi into nlr_buf (callee-save on Windows)
    movq    %rsi, 88(%rcx)          # store %rsi into nlr_buf (callee-save on Windows)
    movq    NLR_TOP(%rip), %rax     # get last nlr_buf
    movq    %rax, (%rcx)            # store it
    movq    %rcx, NLR_TOP(%rip)     # store new nlr_buf (to make linked list)
    xorq    %rax, %rax              # return 0, normal return
    ret                             # return

/**************************************/
// void nlr_pop()

    .globl  nlr_pop
nlr_pop:
    movq    NLR_TOP(%rip), %rax     # get nlr_top into %rax
    movq    (%rax), %rax            # load prev nlr_buf
    movq    %rax, NLR_TOP(%rip)     # store prev nlr_buf (to unlink list)
    ret                             # return

/**************************************/
// void nlr_jump(rcx=mp_uint_t val)

    .globl  nlr_jump
nlr_jump:
    movq    %rcx, %rax              # put return value in %rax
    movq    NLR_TOP(%rip), %rcx     # get nlr_top into %rcx
    test    %rcx, %rcx              # check for nlr_top being NULL
    je      .fail                   # fail if nlr_top is NULL
    movq    %rax, 8(%rcx)           # store return value
    movq    (%rcx), %rax            # load prev nlr_buf
    movq    %rax, NLR_TOP(%rip)     # store prev nlr_buf (to unlink list)
    movq    72(%rcx), %r15          # load saved %r15
    movq    64(%rcx), %r14          # load saved %r14
    movq    56(%rcx), %r13          # load saved %r13
    movq    48(%rcx), %r12          # load saved %r12
    movq    40(%rcx), %rbx          # load saved %rbx
    movq    32(%rcx), %rsp          # load saved %rsp
    movq    24(%rcx), %rbp          # load saved %rbp
    movq    16(%rcx), %rax          # load saved %rip
    movq    80(%rcx), %rdi          # load saved %rdi (callee-save on Windows)
    movq    88(%rcx), %rsi          # load saved %rsi (callee-save on Windows)
    movq    %rax, (%rsp)            # store saved %rip to stack
    xorq    %rax, %rax              # clear return register
    inc     %al                     # increment to 1 to indicate non-local return
    ret                             # return
.fail:
    movq    %rax, %rcx              # put argument back in first-arg register
    je      nlr_jump_fail           # always taken: ZF from the test above survives the movq

#endif // !defined(NLR_OS_WINDOWS)

#endif // defined(__x86_64__) && !MICROPY_NLR_SETJMP