/home/travis/build/MoarVM/MoarVM/src/core/exceptions.c
Line | Count | Source (jump to first uncovered line) |
1 | | #include "moar.h" |
2 | | |
3 | | #ifdef _MSC_VER |
4 | | #define snprintf _snprintf |
5 | | #define vsnprintf _vsnprintf |
6 | | #endif |
7 | | |
/* When non-zero, unhandled exceptions abort() (producing a core dump)
 * instead of exit(1). Never written in this chunk — presumably toggled
 * elsewhere (e.g. from an environment/debug setting); confirm before
 * relying on it. */
static int crash_on_error = 0;
9 | | |
10 | | /* Function for getting effective (specialized or not) frame handlers. */ |
11 | 1.13M | MVM_STATIC_INLINE MVMFrameHandler * MVM_frame_effective_handlers(MVMFrame *f) { |
12 | 1.13M | MVMSpeshCandidate *spesh_cand = f->spesh_cand; |
13 | 1.03M | return spesh_cand ? spesh_cand->handlers : f->static_info->body.handlers; |
14 | 1.13M | } |
15 | | |
16 | | /* Maps ID of exception category to its name. */ |
17 | 0 | static const char * cat_name(MVMThreadContext *tc, MVMint32 cat) { |
18 | 0 | switch (cat) { |
19 | 0 | case MVM_EX_CAT_CATCH: |
20 | 0 | return "catch"; |
21 | 0 | case MVM_EX_CAT_CONTROL: |
22 | 0 | return "control"; |
23 | 0 | case MVM_EX_CAT_NEXT: |
24 | 0 | return "next"; |
25 | 0 | case MVM_EX_CAT_REDO: |
26 | 0 | return "redo"; |
27 | 0 | case MVM_EX_CAT_LAST: |
28 | 0 | return "last"; |
29 | 0 | case MVM_EX_CAT_RETURN: |
30 | 0 | return "return"; |
31 | 0 | case MVM_EX_CAT_TAKE: |
32 | 0 | return "take"; |
33 | 0 | case MVM_EX_CAT_WARN: |
34 | 0 | return "warn"; |
35 | 0 | case MVM_EX_CAT_SUCCEED: |
36 | 0 | return "succeed"; |
37 | 0 | case MVM_EX_CAT_PROCEED: |
38 | 0 | return "proceed"; |
39 | 0 | case MVM_EX_CAT_NEXT | MVM_EX_CAT_LABELED: |
40 | 0 | return "next_label"; |
41 | 0 | case MVM_EX_CAT_REDO | MVM_EX_CAT_LABELED: |
42 | 0 | return "redo_label"; |
43 | 0 | case MVM_EX_CAT_LAST | MVM_EX_CAT_LABELED: |
44 | 0 | return "last_label"; |
45 | 0 | default: |
46 | 0 | return "unknown"; |
47 | 0 | } |
48 | 0 | } |
49 | | |
50 | | /* Checks if an exception handler is already on the active handler stack, |
51 | | * so we don't re-trigger the same exception handler. Note: We have static |
52 | | * handlers that get reused, so also check for the same handler being in |
53 | | * the same frame, otherwise we consider the handler as being another one. */ |
54 | 464k | static MVMuint8 in_handler_stack(MVMThreadContext *tc, MVMFrameHandler *fh, MVMFrame *f) { |
55 | 464k | if (tc->active_handlers) { |
56 | 19 | MVMActiveHandler *ah = tc->active_handlers; |
57 | 40 | while (ah) { |
58 | 33 | if (ah->handler == fh && ah->frame == f) |
59 | 12 | return 1; |
60 | 21 | ah = ah->next_handler; |
61 | 21 | } |
62 | 19 | } |
63 | 464k | return 0; |
64 | 464k | } |
65 | | |
66 | | /* Checks if a frame is still active. Naively, we could scan the call stack |
67 | | * for it, but since we always clean up ->work when a frame is removed from |
68 | | * the call stack we can do it in O(1) that way. */ |
69 | 330k | static MVMuint8 in_caller_chain(MVMThreadContext *tc, MVMFrame *f_maybe) { |
70 | 330k | return f_maybe->work ? 1 : 0; |
71 | 330k | } |
72 | | |
73 | | |
74 | | /* Information about a located handler. */ |
/* Information about a located handler. */
typedef struct {
    MVMFrame *frame;                       /* Frame the handler belongs to; NULL when no handler was found. */
    MVMFrameHandler *handler;              /* The matched entry in the frame's handler table. */
    MVMJitHandler *jit_handler;            /* Matching JIT handler entry, when the frame runs JIT code. */
    MVMint32 handler_out_of_dynamic_scope; /* Set when a lexical match's frame is no longer on the call stack. */
} LocatedHandler;
81 | | |
82 | 1.27M | static MVMint32 handler_can_handle(MVMFrame *f, MVMFrameHandler *fh, MVMint32 cat, MVMObject *payload) { |
83 | 1.27M | MVMuint32 category_mask = fh->category_mask; |
84 | 1.27M | MVMuint64 block_has_label = category_mask & MVM_EX_CAT_LABELED; |
85 | 1.27M | MVMuint64 block_label = block_has_label ? (MVMuint64)(f->work[fh->label_reg].o) : 0; |
86 | 1.27M | MVMuint64 thrown_label = payload ? (MVMuint64)payload : 0; |
87 | 1.27M | MVMuint64 identical_label_found = thrown_label == block_label; |
88 | 617k | return ((cat & category_mask) == cat && (!(cat & MVM_EX_CAT_LABELED) || identical_label_found)) |
89 | 655k | || ((category_mask & MVM_EX_CAT_CONTROL) && cat != MVM_EX_CAT_CATCH); |
90 | 1.27M | } |
91 | | |
92 | | /* Looks through the handlers of a particular frame, including inlines in |
93 | | * dynamic scope, and sees if one will match what we're looking for. Returns |
94 | | * 1 to it if so, and 0 if not; in the case 1 is returned the *lh will be |
95 | | * populated with details of the located handler. Since upon inlining, the |
96 | | * dynamic scope becomes lexical so far as the optimized bytecode is |
97 | | * concerned, then this just needs a scan of the table without any further |
98 | | * checks being needed. */ |
static MVMint32 search_frame_handlers_dyn(MVMThreadContext *tc, MVMFrame *f,
                                          MVMuint32 cat, MVMObject *payload,
                                          LocatedHandler *lh) {
    MVMuint32 i;
    /* When the frame is running JIT-compiled code, the set of live handlers
     * is derived from the current machine-code position rather than from a
     * bytecode program counter. */
    if (f->spesh_cand && f->spesh_cand->jitcode && f->jit_entry_label) {
        MVMJitCode *jitcode = f->spesh_cand->jitcode;
        void *current_position = MVM_jit_code_get_current_position(tc, jitcode, f);
        MVMJitHandler *jhs = f->spesh_cand->jitcode->handlers;
        MVMFrameHandler *fhs = MVM_frame_effective_handlers(f);
        /* Iterate only the handlers active at the current JIT position. */
        for (i = MVM_jit_code_get_active_handlers(tc, jitcode, current_position, 0);
             i < jitcode->num_handlers;
             i = MVM_jit_code_get_active_handlers(tc, jitcode, current_position, i+1)) {
            /* Take the first handler that matches the category and is not
             * already running (see in_handler_stack). */
            if (handler_can_handle(f, &fhs[i], cat, payload) &&
                    !in_handler_stack(tc, &fhs[i], f)) {
                lh->handler = &fhs[i];
                lh->jit_handler = &jhs[i];
                return 1;
            }
        }
    } else {
        /* Interpreted (possibly specialized but unjitted) code: scan the
         * whole handler table, matching on the current program counter. */
        MVMint32 num_handlers = f->spesh_cand
            ? f->spesh_cand->num_handlers
            : f->static_info->body.num_handlers;
        MVMint32 pc;
        /* For the currently executing frame the pc comes from the
         * interpreter registers; for callers, from the return address. */
        if (f == tc->cur_frame)
            pc = (MVMuint32)(*tc->interp_cur_op - *tc->interp_bytecode_start);
        else
            pc = (MVMuint32)(f->return_address - MVM_frame_effective_bytecode(f));
        for (i = 0; i < num_handlers; i++) {
            MVMFrameHandler *fh = &(MVM_frame_effective_handlers(f)[i]);
            if (!handler_can_handle(f, fh, cat, payload))
                continue;
            /* The handler's protected region must cover pc, and it must not
             * already be on the active handler stack. */
            if (pc >= fh->start_offset && pc <= fh->end_offset && !in_handler_stack(tc, fh, f)) {
                lh->handler = fh;
                return 1;
            }
        }
    }
    return 0;
}
139 | | |
140 | | /* Looks for lexically applicable handlers in the current frame, accounting |
141 | | * for any inlines. The skip_first_inlinee flag indicates that we should skip |
142 | | * looking until we have encountered an inline boundary indicator saying that |
143 | | * we have crossed from an inlinee to its inliner's handlers; this is used to |
144 | | * handle the THROW_LEX_CALLER mode. If we never encounter an inline boundary |
145 | | * when skip_first_inlinee is true then we'll always return 0. |
146 | | * |
147 | | * If skip_first_inlinee is false or we already saw an inline boundary, then |
148 | | * we start looking for a matching handler. If one is found before seeing |
149 | | * another inline boundary, then it is applicable; the data pointed to by lh |
150 | | * will be updated with the frame handler details and 1 will be returned. |
151 | | * |
152 | | * Upon reaching an (or, in the case of skip_first_inlinee, a second) inline |
153 | | * boundary indicator, there are two cases that apply. We take the inline |
154 | | * that we are leaving, and look up the code ref using the code_ref_reg in |
155 | | * the inline. We know that we can never inline a frame that was closed over |
156 | | * (due to capturelex or takeclosure being marked :noinline). Thus, either: |
157 | | * 1. The outer of the inlinee is actually the current frame f. In this case, |
158 | | * we skip all inlined handlers and just consider those of f itself. |
159 | | * 2. The next frame we should search in is the ->outer of the inlinee, and |
160 | | * thus all the rest of the handlers in this frame should be ignored. In |
161 | | * this case, the MVMFrame **next_outer will be populated with a pointer |
162 | | * to that frame. |
163 | | * |
164 | | * The skip_all_inlinees flag is set once we are below the frame on the stack |
165 | | * to where the search started. Again, this is because a frame that did a |
166 | | * lexical capture may not be inlined, so we only need to consider the topmost |
167 | | * frame's handlers, not anything it might have inlined into it. |
168 | | */ |
static MVMint32 search_frame_handlers_lex(MVMThreadContext *tc, MVMFrame *f,
                                          MVMuint32 cat, MVMObject *payload,
                                          LocatedHandler *lh,
                                          MVMuint32 *skip_first_inlinee,
                                          MVMuint32 skip_all_inlinees,
                                          MVMFrame **next_outer) {
    MVMuint32 i;
    /* Local copy; cleared once we cross the first inline boundary. */
    MVMuint32 skipping = *skip_first_inlinee;
    MVMFrameHandler *fhs = MVM_frame_effective_handlers(f);
    if (f->spesh_cand && f->spesh_cand->jitcode && f->jit_entry_label) {
        /* JIT-compiled frame: walk only the handlers active at the current
         * machine-code position. */
        MVMJitCode *jitcode = f->spesh_cand->jitcode;
        void *current_position = MVM_jit_code_get_current_position(tc, jitcode, f);
        MVMJitHandler *jhs = jitcode->handlers;
        for (i = MVM_jit_code_get_active_handlers(tc, jitcode, current_position, 0);
             i < jitcode->num_handlers;
             i = MVM_jit_code_get_active_handlers(tc, jitcode, current_position, i+1)) {
            MVMFrameHandler *fh = &(fhs[i]);
            /* Below the starting frame we only consider the frame's own
             * handlers, never those of its inlinees (see header comment). */
            if (skip_all_inlinees && fh->inlinee >= 0)
                continue;
            if (fh->category_mask == MVM_EX_INLINE_BOUNDARY) {
                if (skipping) {
                    /* Crossed from the inlinee to the inliner's handlers;
                     * stop skipping (THROW_LEX_CALLER semantics). */
                    skipping = 0;
                    *skip_first_inlinee = 0;
                }
                else {
                    /* Leaving an inlinee: resolve its outer via the code
                     * ref register (case 1/2 in the header comment). */
                    MVMuint16 cr_reg = f->spesh_cand->inlines[fh->inlinee].code_ref_reg;
                    MVMFrame *inline_outer = ((MVMCode *)f->work[cr_reg].o)->body.outer;
                    if (inline_outer == f) {
                        skip_all_inlinees = 1;
                    }
                    else {
                        *next_outer = inline_outer;
                        return 0;
                    }
                }
            }
            /* NOTE(review): unlike the interpreted branch below, there is no
             * `continue` after handling a boundary entry here, so a boundary
             * falls through to the match test. Presumably harmless because
             * MVM_EX_INLINE_BOUNDARY should not match a real category —
             * confirm against the category mask definitions. */
            if (skipping || !handler_can_handle(f, fh, cat, payload))
                continue;
            if (!in_handler_stack(tc, fh, f)) {
                /* NOTE(review): `skipping` is always 0 here (the check above
                 * continues when it is set), so this thunk guard looks
                 * unreachable — confirm. */
                if (skipping && f->static_info->body.is_thunk)
                    return 0;
                lh->handler = fh;
                lh->jit_handler = &jhs[i];
                return 1;
            }
        }
    }
    else {
        /* Interpreted code: scan the handler table, matching the current
         * program counter against each handler's protected region. */
        MVMint32 num_handlers = f->spesh_cand
            ? f->spesh_cand->num_handlers
            : f->static_info->body.num_handlers;
        MVMint32 pc;
        if (f == tc->cur_frame)
            pc = (MVMuint32)(*tc->interp_cur_op - *tc->interp_bytecode_start);
        else
            pc = (MVMuint32)(f->return_address - MVM_frame_effective_bytecode(f));
        for (i = 0; i < num_handlers; i++) {
            MVMFrameHandler *fh = &(fhs[i]);
            if (skip_all_inlinees && fh->inlinee >= 0)
                continue;
            if (fh->category_mask == MVM_EX_INLINE_BOUNDARY) {
                /* Boundary markers only apply when pc lies within them. */
                if (pc >= fh->start_offset && pc <= fh->end_offset) {
                    if (skipping) {
                        skipping = 0;
                        *skip_first_inlinee = 0;
                    }
                    else {
                        MVMuint16 cr_reg = f->spesh_cand->inlines[fh->inlinee].code_ref_reg;
                        MVMFrame *inline_outer = ((MVMCode *)f->work[cr_reg].o)->body.outer;
                        if (inline_outer == f) {
                            skip_all_inlinees = 1;
                        }
                        else {
                            *next_outer = inline_outer;
                            return 0;
                        }
                    }
                }
                continue;
            }
            if (skipping || !handler_can_handle(f, fh, cat, payload))
                continue;
            if (pc >= fh->start_offset &&
                pc <= fh->end_offset &&
                !in_handler_stack(tc, fh, f)) {
                /* NOTE(review): as in the JIT branch, `skipping` cannot be
                 * set at this point; this guard looks dead — confirm. */
                if (skipping && f->static_info->body.is_thunk)
                    return 0;
                lh->handler = fh;
                return 1;
            }
        }
    }
    return 0;
}
263 | | |
264 | | /* Searches for a handler of the specified category, relative to the given |
265 | | * starting frame, searching according to the chosen mode. */ |
static LocatedHandler search_for_handler_from(MVMThreadContext *tc, MVMFrame *f,
        MVMuint8 mode, MVMuint32 cat, MVMObject *payload) {
    MVMuint32 skip_first_inlinee = 0;
    LocatedHandler lh;
    lh.frame = NULL;
    lh.handler = NULL;
    lh.jit_handler = NULL;
    lh.handler_out_of_dynamic_scope = 0;
    switch (mode) {
        case MVM_EX_THROW_LEX_CALLER:
            /* Same as LEX, except handlers of the throwing frame (or its
             * first inlinee) are skipped; see search_frame_handlers_lex. */
            skip_first_inlinee = 1;
            /* fallthrough */
        case MVM_EX_THROW_LEX: {
            MVMint32 skip_all_inlinees = 0;
            while (f != NULL) {
                MVMFrame *outer_from_inlinee = NULL;
                if (search_frame_handlers_lex(tc, f, cat, payload, &lh, &skip_first_inlinee,
                            skip_all_inlinees, &outer_from_inlinee)) {
                    /* Lexically matched; only usable if the frame is still
                     * on the call stack (in dynamic scope). */
                    if (in_caller_chain(tc, f))
                        lh.frame = f;
                    else
                        lh.handler_out_of_dynamic_scope = 1;
                    return lh;
                }
                if (skip_first_inlinee) {
                    /* If this is still set, it means that the topmost frame
                     * had no inlines, so we didn't already reach a chain of
                     * outers to traverse. In this case, skip over any thunks
                     * and continue the search. */
                    skip_first_inlinee = 0;
                    f = f->caller;
                    while (f && f->static_info->body.is_thunk)
                        f = f->caller;
                }
                else {
                    /* Continue in the lexical outer — either the outer of
                     * an inlinee we crossed, or this frame's own outer. */
                    f = outer_from_inlinee ? outer_from_inlinee : f->outer;
                    skip_all_inlinees = 1;
                }
            }
            return lh;
        }
        case MVM_EX_THROW_DYN:
            /* Walk the dynamic caller chain. */
            while (f != NULL) {
                if (search_frame_handlers_dyn(tc, f, cat, payload, &lh)) {
                    lh.frame = f;
                    return lh;
                }
                f = f->caller;
            }
            return lh;
        case MVM_EX_THROW_LEXOTIC:
            /* A lexical search attempted from each frame of the dynamic
             * caller chain in turn. */
            while (f != NULL) {
                lh = search_for_handler_from(tc, f, MVM_EX_THROW_LEX, cat, payload);
                if (lh.frame != NULL)
                    return lh;
                f = f->caller;
            }
            return lh;
        default:
            MVM_panic(1, "Unhandled exception throw mode %d", (int)mode);
    }
}
328 | | |
/* Runs an exception handler (which really means updating interpreter state
 * so that when we return to the runloop, we're in the handler). If there is
 * an exception object already, it will be used; NULL can be passed if there
 * is not one, meaning it will be created if needed (based on the category
 * parameter; if ex_obj is passed, the category is not used). */
static void unwind_after_handler(MVMThreadContext *tc, void *sr_data);
static void cleanup_active_handler(MVMThreadContext *tc, void *sr_data);
static void run_handler(MVMThreadContext *tc, LocatedHandler lh, MVMObject *ex_obj,
                        MVMuint32 category, MVMObject *payload) {
    switch (lh.handler->action) {
        case MVM_EX_ACTION_GOTO_WITH_PAYLOAD:
            /* Stash the payload for the handler to pick up: an explicit
             * payload wins, then the exception object's payload, then
             * VMNull. */
            if (payload)
                tc->last_payload = payload;
            else if (ex_obj && ((MVMException *)ex_obj)->body.payload)
                tc->last_payload = ((MVMException *)ex_obj)->body.payload;
            else
                tc->last_payload = tc->instance->VMNull;
            /* Deliberate fallthrough to unwind below. */

        case MVM_EX_ACTION_GOTO:
            if (lh.jit_handler) {
                /* Unwind directly to the handler's JIT label. */
                void **labels = lh.frame->spesh_cand->jitcode->labels;
                MVMuint8 *pc = lh.frame->spesh_cand->jitcode->bytecode;
                MVM_frame_unwind_to(tc, lh.frame, pc, 0, NULL, labels[lh.jit_handler->goto_label]);
            } else {
                /* Unwind to the handler's bytecode offset. */
                MVM_frame_unwind_to(tc, lh.frame, NULL, lh.handler->goto_offset, NULL, NULL);
            }

            break;

        case MVM_EX_ACTION_INVOKE: {
            /* Create active handler record. */
            MVMActiveHandler *ah = MVM_malloc(sizeof(MVMActiveHandler));
            MVMFrame *cur_frame = tc->cur_frame;
            MVMObject *handler_code;

            /* Ensure we have an exception object. */
            if (ex_obj == NULL) {
                /* Root GC-managed locals across the allocation. */
                MVMROOT3(tc, cur_frame, lh.frame, payload, {
                    ex_obj = MVM_repr_alloc_init(tc, tc->instance->boot_types.BOOTException);
                });
                ((MVMException *)ex_obj)->body.category = category;
                MVM_ASSIGN_REF(tc, &(ex_obj->header), ((MVMException *)ex_obj)->body.payload, payload);
            }

            /* Find frame to invoke. */
            handler_code = MVM_frame_find_invokee(tc, lh.frame->work[lh.handler->block_reg].o, NULL);

            /* Install active handler record. */
            ah->frame = lh.frame;
            ah->handler = lh.handler;
            ah->jit_handler = lh.jit_handler;
            ah->ex_obj = ex_obj;
            ah->next_handler = tc->active_handlers;
            tc->active_handlers = ah;

            /* Set up special return to unwinding after running the
             * handler; cleanup_active_handler fires instead if something
             * unwinds over us first. */
            cur_frame->return_value = (MVMRegister *)&tc->last_handler_result;
            cur_frame->return_type = MVM_RETURN_OBJ;
            MVM_frame_special_return(tc, cur_frame, unwind_after_handler, cleanup_active_handler,
                ah, NULL);

            /* Invoke the handler frame and return to runloop. */
            STABLE(handler_code)->invoke(tc, handler_code, MVM_callsite_get_common(tc, MVM_CALLSITE_ID_NULL_ARGS),
                cur_frame->args);
            break;
        }
        default:
            MVM_panic(1, "Unimplemented handler action");
    }
}
401 | | |
/* Unwinds after a handler has finished running (installed as the special
 * return of the frame that invoked it; see run_handler). */
static void unwind_after_handler(MVMThreadContext *tc, void *sr_data) {
    MVMFrame *frame;
    MVMException *exception;
    MVMuint32 goto_offset;
    MVMuint8 *abs_address;
    void *jit_return_label;

    /* Get active handler; sanity check (though it's possible other cases
     * should be supported). */
    MVMActiveHandler *ah = (MVMActiveHandler *)sr_data;
    if (tc->active_handlers != ah)
        MVM_panic(1, "Trying to unwind from wrong handler");

    /* Grab info we'll need to unwind. */
    frame = ah->frame;
    exception = (MVMException *)ah->ex_obj;
    if (ah->jit_handler) {
        /* Resume at the handler's JIT label; the bytecode address/offset
         * pair is just a placeholder in this case. */
        void **labels = frame->spesh_cand->jitcode->labels;
        jit_return_label = labels[ah->jit_handler->goto_label];
        abs_address = frame->spesh_cand->jitcode->bytecode;
        goto_offset = 0;
    }
    else {
        goto_offset = ah->handler->goto_offset;
        abs_address = NULL;
        jit_return_label = NULL;
    }
    /* Clean up. */
    tc->active_handlers = ah->next_handler;
    MVM_free(ah);

    /* Do the unwinding as needed. */
    if (exception && exception->body.return_after_unwind) {
        /* Return the handler result to the caller; we can't very well
         * return to the unwound JIT address. */
        MVM_frame_unwind_to(tc, frame->caller, NULL, 0, tc->last_handler_result, NULL);
    }
    else {
        MVM_frame_unwind_to(tc, frame, abs_address, goto_offset, NULL, jit_return_label);
    }
}
443 | | |
/* Cleans up an active handler record if we unwind over it. */
static void cleanup_active_handler(MVMThreadContext *tc, void *sr_data) {
    /* Get active handler; sanity check (though it's possible other cases
     * should be supported). */
    MVMActiveHandler *ah = (MVMActiveHandler *)sr_data;
    if (tc->active_handlers != ah)
        MVM_panic(1, "Trying to unwind over wrong handler");

    /* Pop the record off the active handler chain and free it. */
    tc->active_handlers = ah->next_handler;
    MVM_free(ah);
}
456 | | |
/* Renders a single backtrace line ("  at file:line (...)" for the top
 * frame, "from file:line (...)" for callers) into a freshly allocated
 * buffer. Caller owns the returned string and must MVM_free it. */
char * MVM_exception_backtrace_line(MVMThreadContext *tc, MVMFrame *cur_frame,
        MVMuint16 not_top, MVMuint8 *throw_address) {
    MVMString *filename = cur_frame->static_info->body.cu->body.filename;
    MVMString *name = cur_frame->static_info->body.name;
    /* XXX TODO: make the caller pass in a char ** and a length pointer so
     * we can update it if necessary, and the caller can cache it. */
    char *o = MVM_malloc(1024);
    /* The top frame reports the throw address; outer frames report the
     * address they will return to. */
    MVMuint8 *cur_op = not_top ? cur_frame->return_address : throw_address;
    MVMuint32 offset = cur_op - MVM_frame_effective_bytecode(cur_frame);
    MVMBytecodeAnnotation *annot = MVM_bytecode_resolve_annotation(tc, &cur_frame->static_info->body,
                                                                  offset > 0 ? offset - 1 : 0);

    /* Fall back to line 1 / heap string 0 when no annotation resolves. */
    MVMuint32 line_number = annot ? annot->line_number : 1;
    MVMuint16 string_heap_index = annot ? annot->filename_string_heap_index : 0;
    char *tmp1 = annot && string_heap_index < cur_frame->static_info->body.cu->body.num_strings
        ? MVM_string_utf8_encode_C_string(tc, MVM_cu_string(tc,
            cur_frame->static_info->body.cu, string_heap_index))
        : NULL;

    /* When filename/name are absent these point at string literals, which
     * is why the frees below are guarded on the MVMString being set. */
    char *filename_c = filename
        ? MVM_string_utf8_encode_C_string(tc, filename)
        : "<ephemeral file>";
    char *name_c = name
        ? MVM_string_utf8_encode_C_string(tc, name)
        : "<anonymous frame>";

    snprintf(o, 1024, " %s %s:%u (%s:%s)",
        not_top ? "from" : " at",
        tmp1 ? tmp1 : "<unknown>",
        line_number,
        filename_c,
        name_c
    );
    if (filename)
        MVM_free(filename_c);
    if (name)
        MVM_free(name_c);

    if (tmp1)
        MVM_free(tmp1);
    if (annot)
        MVM_free(annot);

    return o;
}
502 | | |
503 | | /* Returns a list of hashes containing file, line, sub and annotations. */ |
504 | 0 | MVMObject * MVM_exception_backtrace(MVMThreadContext *tc, MVMObject *ex_obj) { |
505 | 0 | MVMFrame *cur_frame; |
506 | 0 | MVMObject *arr = NULL, *annotations = NULL, *row = NULL, *value = NULL; |
507 | 0 | MVMuint32 count = 0; |
508 | 0 | MVMString *k_file = NULL, *k_line = NULL, *k_sub = NULL, *k_anno = NULL; |
509 | 0 | MVMuint8 *throw_address; |
510 | 0 |
|
511 | 0 | if (IS_CONCRETE(ex_obj) && REPR(ex_obj)->ID == MVM_REPR_ID_MVMException) { |
512 | 0 | cur_frame = ((MVMException *)ex_obj)->body.origin; |
513 | 0 | throw_address = ((MVMException *)ex_obj)->body.throw_address; |
514 | 0 | } |
515 | 0 | else { |
516 | 0 | MVM_exception_throw_adhoc(tc, "Op 'backtrace' needs an exception object"); |
517 | 0 | } |
518 | 0 |
|
519 | 0 | MVM_gc_root_temp_push(tc, (MVMCollectable **)&arr); |
520 | 0 | MVM_gc_root_temp_push(tc, (MVMCollectable **)&annotations); |
521 | 0 | MVM_gc_root_temp_push(tc, (MVMCollectable **)&row); |
522 | 0 | MVM_gc_root_temp_push(tc, (MVMCollectable **)&value); |
523 | 0 | MVM_gc_root_temp_push(tc, (MVMCollectable **)&k_file); |
524 | 0 | MVM_gc_root_temp_push(tc, (MVMCollectable **)&k_line); |
525 | 0 | MVM_gc_root_temp_push(tc, (MVMCollectable **)&k_sub); |
526 | 0 | MVM_gc_root_temp_push(tc, (MVMCollectable **)&k_anno); |
527 | 0 | MVM_gc_root_temp_push(tc, (MVMCollectable **)&cur_frame); |
528 | 0 |
|
529 | 0 | k_file = MVM_string_ascii_decode_nt(tc, tc->instance->VMString, "file"); |
530 | 0 | k_line = MVM_string_ascii_decode_nt(tc, tc->instance->VMString, "line"); |
531 | 0 | k_sub = MVM_string_ascii_decode_nt(tc, tc->instance->VMString, "sub"); |
532 | 0 | k_anno = MVM_string_ascii_decode_nt(tc, tc->instance->VMString, "annotations"); |
533 | 0 |
|
534 | 0 | arr = MVM_repr_alloc_init(tc, tc->instance->boot_types.BOOTArray); |
535 | 0 |
|
536 | 0 | while (cur_frame != NULL) { |
537 | 0 | MVMuint8 *cur_op = count ? cur_frame->return_address : throw_address; |
538 | 0 | MVMuint32 offset = cur_op - MVM_frame_effective_bytecode(cur_frame); |
539 | 0 | MVMBytecodeAnnotation *annot = MVM_bytecode_resolve_annotation(tc, &cur_frame->static_info->body, |
540 | 0 | offset > 0 ? offset - 1 : 0); |
541 | 0 | MVMint32 fshi = annot ? (MVMint32)annot->filename_string_heap_index : -1; |
542 | 0 | char *line_number = MVM_malloc(16); |
543 | 0 | MVMString *filename_str; |
544 | 0 | snprintf(line_number, 16, "%d", annot ? annot->line_number : 1); |
545 | 0 |
|
546 | 0 | /* annotations hash will contain "file" and "line" */ |
547 | 0 | annotations = MVM_repr_alloc_init(tc, tc->instance->boot_types.BOOTHash); |
548 | 0 |
|
549 | 0 | /* file */ |
550 | 0 | filename_str = fshi >= 0 && fshi < cur_frame->static_info->body.cu->body.num_strings |
551 | 0 | ? MVM_cu_string(tc, cur_frame->static_info->body.cu, fshi) |
552 | 0 | : cur_frame->static_info->body.cu->body.filename; |
553 | 0 | value = MVM_repr_box_str(tc, MVM_hll_current(tc)->str_box_type, |
554 | 0 | filename_str ? filename_str : tc->instance->str_consts.empty); |
555 | 0 | MVM_repr_bind_key_o(tc, annotations, k_file, value); |
556 | 0 |
|
557 | 0 | /* line */ |
558 | 0 | value = (MVMObject *)MVM_string_ascii_decode_nt(tc, tc->instance->VMString, line_number); |
559 | 0 | value = MVM_repr_box_str(tc, MVM_hll_current(tc)->str_box_type, (MVMString *)value); |
560 | 0 | MVM_repr_bind_key_o(tc, annotations, k_line, value); |
561 | 0 | MVM_free(line_number); |
562 | 0 |
|
563 | 0 | /* row will contain "sub" and "annotations" */ |
564 | 0 | row = MVM_repr_alloc_init(tc, tc->instance->boot_types.BOOTHash); |
565 | 0 | MVM_repr_bind_key_o(tc, row, k_sub, cur_frame->code_ref); |
566 | 0 | MVM_repr_bind_key_o(tc, row, k_anno, annotations); |
567 | 0 |
|
568 | 0 | MVM_repr_push_o(tc, arr, row); |
569 | 0 | MVM_free(annot); |
570 | 0 |
|
571 | 0 | cur_frame = cur_frame->caller; |
572 | 0 | while (cur_frame && cur_frame->static_info->body.is_thunk) |
573 | 0 | cur_frame = cur_frame->caller; |
574 | 0 | count++; |
575 | 0 | } |
576 | 0 |
|
577 | 0 | MVM_gc_root_temp_pop_n(tc, 9); |
578 | 0 |
|
579 | 0 | return arr; |
580 | 0 | } |
581 | | |
/* Returns the lines (backtrace) of an exception-object as an array of
 * boxed strings, one per (non-top) frame. */
MVMObject * MVM_exception_backtrace_strings(MVMThreadContext *tc, MVMObject *ex_obj) {
    MVMException *ex;
    MVMFrame *cur_frame;
    MVMObject *arr;

    /* Only a concrete exception object carries an origin frame to walk. */
    if (IS_CONCRETE(ex_obj) && REPR(ex_obj)->ID == MVM_REPR_ID_MVMException)
        ex = (MVMException *)ex_obj;
    else
        MVM_exception_throw_adhoc(tc, "Op 'backtracestrings' needs an exception object");

    arr = MVM_repr_alloc_init(tc, tc->instance->boot_types.BOOTArray);
    cur_frame = ex->body.origin;

    /* Root the array and walked frame; each iteration allocates strings. */
    MVMROOT2(tc, arr, cur_frame, {
        MVMuint32 count = 0;
        while (cur_frame != NULL) {
            char *line = MVM_exception_backtrace_line(tc, cur_frame, count++,
                ex->body.throw_address);
            MVMString *line_str = MVM_string_utf8_decode(tc, tc->instance->VMString, line, strlen(line));
            MVMObject *line_obj = MVM_repr_box_str(tc, tc->instance->boot_types.BOOTStr, line_str);
            MVM_repr_push_o(tc, arr, line_obj);
            cur_frame = cur_frame->caller;
            MVM_free(line);
        }
    });

    return arr;
}
611 | | |
/* Dumps a backtrace relative to the current frame to stderr, using the
 * current interpreter op as the throw address for the top frame. */
void MVM_dump_backtrace(MVMThreadContext *tc) {
    MVMFrame *cur_frame = tc->cur_frame;
    MVMuint32 count = 0;
    /* Root the walked frame; line rendering may allocate. */
    MVMROOT(tc, cur_frame, {
        while (cur_frame != NULL) {
            char *line = MVM_exception_backtrace_line(tc, cur_frame, count++,
                *(tc->interp_cur_op));
            fprintf(stderr, "%s\n", line);
            MVM_free(line);
            cur_frame = cur_frame->caller;
        }
    });
}
626 | | |
627 | | /* Panic over an unhandled exception throw by category. */ |
628 | 0 | static void panic_unhandled_cat(MVMThreadContext *tc, MVMuint32 cat) { |
629 | 0 | /* If it's a control exception, try promoting it to a catch one. */ |
630 | 0 | if (cat != MVM_EX_CAT_CATCH) { |
631 | 0 | MVM_exception_throw_adhoc(tc, "No exception handler located for %s", |
632 | 0 | cat_name(tc, cat)); |
633 | 0 | } |
634 | 0 | else { |
635 | 0 | fprintf(stderr, "No exception handler located for %s\n", cat_name(tc, cat)); |
636 | 0 | MVM_dump_backtrace(tc); |
637 | 0 | if (crash_on_error) |
638 | 0 | abort(); |
639 | 0 | else |
640 | 0 | exit(1); |
641 | 0 | } |
642 | 0 | } |
643 | | |
644 | | /* Panic over an unhandled exception object. */ |
645 | 0 | static void panic_unhandled_ex(MVMThreadContext *tc, MVMException *ex) { |
646 | 0 | char *backtrace; |
647 | 0 |
|
648 | 0 | /* If a debug session is running, notify the client. */ |
649 | 0 | MVM_debugserver_notify_unhandled_exception(tc, ex); |
650 | 0 |
|
651 | 0 | /* If it's a control exception, try promoting it to a catch one; use |
652 | 0 | * the category name. */ |
653 | 0 | if (ex->body.category != MVM_EX_CAT_CATCH) |
654 | 0 | panic_unhandled_cat(tc, ex->body.category); |
655 | 0 |
|
656 | 0 | /* If there's no message, fall back to category also. */ |
657 | 0 | if (!ex->body.message) |
658 | 0 | panic_unhandled_cat(tc, ex->body.category); |
659 | 0 |
|
660 | 0 | /* Otherwise, dump message and a backtrace. */ |
661 | 0 | backtrace = MVM_string_utf8_encode_C_string(tc, ex->body.message); |
662 | 0 | fprintf(stderr, "Unhandled exception: %s\n", backtrace); |
663 | 0 | MVM_free(backtrace); |
664 | 0 | MVM_dump_backtrace(tc); |
665 | 0 | if (crash_on_error) |
666 | 0 | abort(); |
667 | 0 | else |
668 | 0 | exit(1); |
669 | 0 | } |
670 | | |
671 | | /* Checks if we're throwing lexically, and - if yes - if the current HLL has |
672 | | * a handler for unlocated lexical handlers. */ |
673 | 2 | static MVMint32 use_lexical_handler_hll_error(MVMThreadContext *tc, MVMuint8 mode) { |
674 | 2 | return (mode == MVM_EX_THROW_LEX || mode == MVM_EX_THROW_LEX_CALLER) && |
675 | 2 | !MVM_is_null(tc, MVM_hll_current(tc)->lexical_handler_not_found_error); |
676 | 2 | } |
677 | | |
/* Invokes the HLL's handler for unresolved lexical throws, passing the
 * category and whether a handler was found but out of dynamic scope. */
static void invoke_lexical_handler_hll_error(MVMThreadContext *tc, MVMint64 cat, LocatedHandler lh) {
    MVMObject *handler = MVM_hll_current(tc)->lexical_handler_not_found_error;
    MVMCallsite *callsite = MVM_callsite_get_common(tc, MVM_CALLSITE_ID_INT_INT);
    handler = MVM_frame_find_invokee(tc, handler, NULL);
    /* NOTE(review): the args are written only after MVM_args_setup_thunk,
     * which presumably prepares tc->cur_frame's args area — keep this
     * ordering; confirm against the args API before reordering. */
    MVM_args_setup_thunk(tc, NULL, MVM_RETURN_VOID, callsite);
    tc->cur_frame->args[0].i64 = cat;
    tc->cur_frame->args[1].i64 = lh.handler_out_of_dynamic_scope;
    STABLE(handler)->invoke(tc, handler, callsite, tc->cur_frame->args);
}
688 | | |
689 | | /* Throws an exception by category, searching for a handler according to |
690 | | * the specified mode. If the handler resumes, the resumption result will |
691 | | * be put into resume_result. Leaves the interpreter in a state where it |
692 | | * will next run the instruction of the handler. If there is no handler, |
693 | | * it will panic and exit with a backtrace. */ |
694 | 132k | void MVM_exception_throwcat(MVMThreadContext *tc, MVMuint8 mode, MVMuint32 cat, MVMRegister *resume_result) { |
695 | 132k | LocatedHandler lh = search_for_handler_from(tc, tc->cur_frame, mode, cat, NULL); |
696 | 132k | if (lh.frame == NULL) { |
697 | 0 | if (use_lexical_handler_hll_error(tc, mode)) { |
698 | 0 | invoke_lexical_handler_hll_error(tc, cat, lh); |
699 | 0 | return; |
700 | 0 | } |
701 | 0 | panic_unhandled_cat(tc, cat); |
702 | 0 | } |
703 | 132k | run_handler(tc, lh, NULL, cat, NULL); |
704 | 132k | } |
705 | | |
/* Creates an exception object from the boot exception type, attaches the
 * given message string, and throws it dynamically as a catch-category
 * exception. */
void MVM_exception_die(MVMThreadContext *tc, MVMString *str, MVMRegister *rr) {
    MVMException *ex;
    /* Root str across the allocation, which may trigger GC. */
    MVMROOT(tc, str, {
        ex = (MVMException *)MVM_repr_alloc_init(tc, tc->instance->boot_types.BOOTException);
    });
    ex->body.category = MVM_EX_CAT_CATCH;
    MVM_ASSIGN_REF(tc, &(ex->common.header), ex->body.message, str);
    MVM_exception_throwobj(tc, MVM_EX_THROW_DYN, (MVMObject *)ex, rr);
}
715 | | |
/* Throws the specified exception object, taking the category from it. If
 * the handler resumes, the resumption result will be put into resume_result.
 * Leaves the interpreter in a state where it will next run the instruction of
 * the handler. If there is no handler, it will panic and exit with a backtrace. */
void MVM_exception_throwobj(MVMThreadContext *tc, MVMuint8 mode, MVMObject *ex_obj, MVMRegister *resume_result) {
    LocatedHandler lh;
    MVMException *ex;

    /* The current frame will be assigned as the thrower of the exception, so
     * force it onto the heap before we begin (promoting it later would mean
     * outer handler search result would be outdated). */
    MVMROOT(tc, ex_obj, {
        MVM_frame_force_to_heap(tc, tc->cur_frame);
    });

    if (IS_CONCRETE(ex_obj) && REPR(ex_obj)->ID == MVM_REPR_ID_MVMException)
        ex = (MVMException *)ex_obj;
    else
        MVM_exception_throw_adhoc(tc, "Can only throw an exception object");

    /* Uncategorized exceptions default to the catch category. */
    if (!ex->body.category)
        ex->body.category = MVM_EX_CAT_CATCH;
    if (resume_result) {
        /* Record the current op so a handler can resume right after the
         * throwing instruction. */
        ex->body.resume_addr = *tc->interp_cur_op;
        /* Ensure that we store label where the JIT should return, if any */
        if (tc->jit_return_address != NULL) {
            ex->body.jit_resume_label = MVM_jit_code_get_current_position(tc, tc->cur_frame->spesh_cand->jitcode, tc->cur_frame);
        }
    }
    lh = search_for_handler_from(tc, tc->cur_frame, mode, ex->body.category, ex->body.payload);
    if (lh.frame == NULL) {
        /* No handler found; give the HLL a chance to report an unlocated
         * lexical throw before panicking. */
        if (use_lexical_handler_hll_error(tc, mode)) {
            invoke_lexical_handler_hll_error(tc, ex->body.category, lh);
            return;
        }
        panic_unhandled_ex(tc, ex);
    }

    /* Record the throwing frame and address the first time this particular
     * exception object is thrown (rethrows keep the original origin). */
    if (!ex->body.origin) {
        MVM_ASSIGN_REF(tc, &(ex->common.header), ex->body.origin, tc->cur_frame);
        ex->body.throw_address = *(tc->interp_cur_op);
    }

    run_handler(tc, lh, ex_obj, 0, NULL);
}
761 | | |
762 | | /* Throws an exception of the specified category and with the specified payload. |
763 | | * If a goto or payload handler exists, then no exception object will be created. */ |
764 | 330k | void MVM_exception_throwpayload(MVMThreadContext *tc, MVMuint8 mode, MVMuint32 cat, MVMObject *payload, MVMRegister *resume_result) { |
765 | 330k | LocatedHandler lh = search_for_handler_from(tc, tc->cur_frame, mode, cat, NULL); |
766 | 330k | if (lh.frame == NULL) { |
767 | 2 | if (use_lexical_handler_hll_error(tc, mode)) { |
768 | 2 | invoke_lexical_handler_hll_error(tc, cat, lh); |
769 | 2 | return; |
770 | 2 | } |
771 | 0 | panic_unhandled_cat(tc, cat); |
772 | 0 | } |
773 | 330k | run_handler(tc, lh, NULL, cat, payload); |
774 | 330k | } |
775 | | |
/* Resumes execution at the resume address recorded in the given exception
 * object, unwinding back to the frame that threw it. Validates first that
 * the exception is resumable and is the one currently being handled. */
void MVM_exception_resume(MVMThreadContext *tc, MVMObject *ex_obj) {
    MVMException *ex;
    MVMFrame *target;
    MVMActiveHandler *ah;

    if (IS_CONCRETE(ex_obj) && REPR(ex_obj)->ID == MVM_REPR_ID_MVMException)
        ex = (MVMException *)ex_obj;
    else
        MVM_exception_throw_adhoc(tc, "Can only resume an exception object");

    /* Check that everything is in place to do the resumption. */
    if (!ex->body.resume_addr)
        MVM_exception_throw_adhoc(tc, "This exception is not resumable");
    target = ex->body.origin;
    if (!target)
        MVM_exception_throw_adhoc(tc, "This exception is not resumable");
    /* The throwing frame must still be waiting on unwind_after_handler;
     * otherwise the handler already completed and unwound. */
    if (!target->extra || target->extra->special_return != unwind_after_handler)
        MVM_exception_throw_adhoc(tc, "This exception is not resumable");
    /* The throwing frame must still be on our caller chain. */
    if (!in_caller_chain(tc, target))
        MVM_exception_throw_adhoc(tc, "Too late to resume this exception");

    /* Check that this is the exception we're currently handling. */
    if (!tc->active_handlers)
        MVM_exception_throw_adhoc(tc, "Can only resume an exception in its handler");
    if (tc->active_handlers->ex_obj != ex_obj)
        MVM_exception_throw_adhoc(tc, "Can only resume the current exception");

    /* Clear special return handler; we'll do its work here. */
    MVM_frame_clear_special_return(tc, target);

    /* Clear the current active handler. */
    ah = tc->active_handlers;
    tc->active_handlers = ah->next_handler;
    MVM_free(ah);

    /* Unwind to the thrower of the exception; set PC and jit entry label. */
    MVM_frame_unwind_to(tc, target, ex->body.resume_addr, 0, NULL, ex->body.jit_resume_label);
}
814 | | |
815 | | /* Panics and shuts down the VM. Don't do this unless it's something quite |
816 | | * unrecoverable, and a thread context is either not available or stands a |
817 | | * good chance of being too corrupt to print (or is not relevant information). |
818 | | * Use MVM_oops in the case a thread context is available. |
819 | | * TODO: Some hook for embedders. |
820 | | */ |
821 | 0 | MVM_NO_RETURN void MVM_panic(MVMint32 exitCode, const char *messageFormat, ...) { |
822 | 0 | va_list args; |
823 | 0 | fprintf(stderr, "MoarVM panic: "); |
824 | 0 | va_start(args, messageFormat); |
825 | 0 | vfprintf(stderr, messageFormat, args); |
826 | 0 | va_end(args); |
827 | 0 | fwrite("\n", 1, 1, stderr); |
828 | 0 | if (crash_on_error) |
829 | 0 | abort(); |
830 | 0 | else |
831 | 0 | exit(exitCode); |
832 | 0 | } |
833 | | |
/* Panics with a standard out-of-memory message naming the requested size. */
MVM_NO_RETURN void MVM_panic_allocation_failed(size_t len) {
    MVM_panic(1, "Memory allocation failed; could not allocate %"MVM_PRSz" bytes", len);
}
837 | | |
838 | | /* A kinder MVM_panic() that doesn't assume our memory is corrupted (but does kill the |
839 | | * process to indicate that we've made an error */ |
840 | 0 | MVM_NO_RETURN void MVM_oops(MVMThreadContext *tc, const char *messageFormat, ...) { |
841 | 0 | va_list args; |
842 | 0 | fprintf(stderr, "MoarVM oops: "); |
843 | 0 | va_start(args, messageFormat); |
844 | 0 | vfprintf(stderr, messageFormat, args); |
845 | 0 | va_end(args); |
846 | 0 | fprintf(stderr, "\n"); |
847 | 0 | MVM_dump_backtrace(tc); |
848 | 0 | fprintf(stderr, "\n"); |
849 | 0 | if (tc->instance->jit_log_fh) |
850 | 0 | fflush(tc->instance->jit_log_fh); |
851 | 0 | exit(1); |
852 | 0 | } |
853 | | |
/* Throws an ad-hoc (untyped) exception. */
MVM_NO_RETURN void MVM_exception_throw_adhoc(MVMThreadContext *tc, const char *messageFormat, ...) {
    va_list args;
    va_start(args, messageFormat);
    MVM_exception_throw_adhoc_free_va(tc, NULL, messageFormat, args);
    /* NOTE(review): the callee longjmps back into the interpreter (or
     * exits), so this va_end appears unreachable; kept for formal pairing
     * with va_start. */
    va_end(args);
}
861 | | |
/* Throws an ad-hoc (untyped) exception, with the format arguments already
 * collected into a va_list by the caller. Does not return. */
MVM_NO_RETURN void MVM_exception_throw_adhoc_va(MVMThreadContext *tc, const char *messageFormat, va_list args) {
    MVM_exception_throw_adhoc_free_va(tc, NULL, messageFormat, args);
}
866 | | |
/* Throws an ad-hoc (untyped) exception, taking a NULL-terminated array of
 * char pointers to deallocate after message construction. */
MVM_NO_RETURN void MVM_exception_throw_adhoc_free(MVMThreadContext *tc, char **waste, const char *messageFormat, ...) {
    va_list args;
    va_start(args, messageFormat);
    MVM_exception_throw_adhoc_free_va(tc, waste, messageFormat, args);
    /* NOTE(review): the callee longjmps back into the interpreter (or
     * exits), so this va_end appears unreachable; kept for formal pairing
     * with va_start. */
    va_end(args);
}
875 | | |
876 | | /* Throws an ad-hoc (untyped) exception, taking a NULL-terminated array of |
877 | | * char pointers to deallocate after message construction. */ |
878 | 164 | MVM_NO_RETURN void MVM_exception_throw_adhoc_free_va(MVMThreadContext *tc, char **waste, const char *messageFormat, va_list args) { |
879 | 164 | LocatedHandler lh; |
880 | 164 | MVMException *ex; |
881 | 164 | /* The current frame will be assigned as the thrower of the exception, so |
882 | 164 | * force it onto the heap before we begin. */ |
883 | 164 | if (tc->cur_frame) |
884 | 164 | MVM_frame_force_to_heap(tc, tc->cur_frame); |
885 | 164 | |
886 | 164 | /* Create and set up an exception object. */ |
887 | 164 | ex = (MVMException *)MVM_repr_alloc_init(tc, tc->instance->boot_types.BOOTException); |
888 | 164 | MVMROOT(tc, ex, { |
889 | 164 | char *c_message = MVM_malloc(1024); |
890 | 164 | int bytes = vsnprintf(c_message, 1024, messageFormat, args); |
891 | 164 | int to_encode = bytes > 1024 ? 1024 : bytes; |
892 | 164 | MVMString *message = MVM_string_utf8_decode(tc, tc->instance->VMString, c_message, to_encode); |
893 | 164 | MVM_free(c_message); |
894 | 164 | |
895 | 164 | /* Clean up after ourselves to avoid leaking C strings. */ |
896 | 164 | if (waste) { |
897 | 164 | while(*waste) |
898 | 164 | MVM_free(*waste++); |
899 | 164 | } |
900 | 164 | |
901 | 164 | MVM_ASSIGN_REF(tc, &(ex->common.header), ex->body.message, message); |
902 | 164 | if (tc->cur_frame) { |
903 | 164 | ex->body.origin = tc->cur_frame; |
904 | 164 | ex->body.throw_address = *(tc->interp_cur_op); |
905 | 164 | } |
906 | 164 | else { |
907 | 164 | ex->body.origin = NULL; |
908 | 164 | } |
909 | 164 | ex->body.category = MVM_EX_CAT_CATCH; |
910 | 164 | }); |
911 | 164 | |
912 | 164 | /* Try to locate a handler, so long as we're in the interpreter. */ |
913 | 164 | if (tc->interp_cur_op) |
914 | 164 | lh = search_for_handler_from(tc, tc->cur_frame, MVM_EX_THROW_DYN, ex->body.category, NULL); |
915 | 164 | else |
916 | 0 | lh.frame = NULL; |
917 | 164 | |
918 | 164 | /* Do we have a handler to unwind to? */ |
919 | 164 | if (lh.frame == NULL) { |
920 | 0 | /* No handler. Should we crash on these? */ |
921 | 0 | if (crash_on_error) { |
922 | 0 | /* Yes, abort. */ |
923 | 0 | vfprintf(stderr, messageFormat, args); |
924 | 0 | fwrite("\n", 1, 1, stderr); |
925 | 0 | MVM_dump_backtrace(tc); |
926 | 0 | abort(); |
927 | 0 | } |
928 | 0 | else { |
929 | 0 | /* No, just the usual panic. */ |
930 | 0 | panic_unhandled_ex(tc, ex); |
931 | 0 | } |
932 | 0 | } |
933 | 164 | |
934 | 164 | /* Run the handler, which doesn't actually run it but rather sets up the |
935 | 164 | * interpreter so that when we return to it, we'll be at the handler. */ |
936 | 164 | run_handler(tc, lh, (MVMObject *)ex, MVM_EX_CAT_CATCH, NULL); |
937 | 164 | |
938 | 164 | /* Clear any C stack temporaries that code may have pushed before throwing |
939 | 164 | * the exception, and release any needed mutex. */ |
940 | 164 | MVM_gc_root_temp_pop_all(tc); |
941 | 164 | MVM_tc_release_ex_release_mutex(tc); |
942 | 164 | |
943 | 164 | /* Jump back into the interpreter. */ |
944 | 164 | longjmp(tc->interp_jump, 1); |
945 | 164 | } |
946 | | |
/* Switches error handling to crash mode: panics and unhandled exceptions
 * will abort() rather than exit cleanly. */
void MVM_crash_on_error(void) {
    crash_on_error = 1;
}
950 | | |
951 | 458 | MVMint32 MVM_get_exception_category(MVMThreadContext *tc, MVMObject *ex) { |
952 | 458 | if (IS_CONCRETE(ex) && REPR(ex)->ID == MVM_REPR_ID_MVMException) |
953 | 458 | return ((MVMException *)ex)->body.category; |
954 | 458 | else |
955 | 0 | MVM_exception_throw_adhoc(tc, "getexcategory needs a VMException, got %s (%s)", REPR(ex)->name, MVM_6model_get_debug_name(tc, ex)); |
956 | 458 | } |
957 | | |
958 | 18 | MVMObject * MVM_get_exception_payload(MVMThreadContext *tc, MVMObject *ex) { |
959 | 18 | MVMObject *result; |
960 | 18 | if (IS_CONCRETE(ex) && REPR(ex)->ID == MVM_REPR_ID_MVMException) |
961 | 18 | result = ((MVMException *)ex)->body.payload; |
962 | 18 | else |
963 | 0 | MVM_exception_throw_adhoc(tc, "getexpayload needs a VMException, got %s (%s)", REPR(ex)->name, MVM_6model_get_debug_name(tc, ex)); |
964 | 18 | if (!result) |
965 | 2 | result = tc->instance->VMNull; |
966 | 18 | return result; |
967 | 18 | } |
968 | | |
969 | 45 | void MVM_bind_exception_payload(MVMThreadContext *tc, MVMObject *ex, MVMObject *payload) { |
970 | 45 | if (IS_CONCRETE(ex) && REPR(ex)->ID == MVM_REPR_ID_MVMException) { |
971 | 45 | MVM_ASSIGN_REF(tc, &(ex->header), ((MVMException *)ex)->body.payload, |
972 | 45 | payload); |
973 | 45 | } |
974 | 0 | else { |
975 | 0 | MVM_exception_throw_adhoc(tc, "bindexpayload needs a VMException, got %s (%s)", REPR(ex)->name, MVM_6model_get_debug_name(tc, ex)); |
976 | 0 | } |
977 | 45 | } |
978 | | |
979 | 36 | void MVM_bind_exception_category(MVMThreadContext *tc, MVMObject *ex, MVMint32 category) { |
980 | 36 | if (IS_CONCRETE(ex) && REPR(ex)->ID == MVM_REPR_ID_MVMException) |
981 | 36 | ((MVMException *)ex)->body.category = category; |
982 | 36 | else |
983 | 0 | MVM_exception_throw_adhoc(tc, "bindexcategory needs a VMException, got %s (%s)", REPR(ex)->name, MVM_6model_get_debug_name(tc, ex)); |
984 | 36 | } |
985 | 0 | void MVM_exception_returnafterunwind(MVMThreadContext *tc, MVMObject *ex) { |
986 | 0 | if (IS_CONCRETE(ex) && REPR(ex)->ID == MVM_REPR_ID_MVMException) |
987 | 0 | ((MVMException *)ex)->body.return_after_unwind = 1; |
988 | 0 | else |
989 | 0 | MVM_exception_throw_adhoc(tc, "exreturnafterunwind needs a VMException, got %s (%s)", REPR(ex)->name, MVM_6model_get_debug_name(tc, ex)); |
990 | 0 | } |