@@ -10,54 +10,59 @@
 /*
  * Arguments passed in:
  *
- * r0 function pntr
+ * r0 function ptr
  * r1 argv
  * r2 argc
  */

 invokeNative:
     stmfd sp!, {r4, r5, r6, r7, lr}
-    mov ip, r0 /* get function ptr */
-    mov r4, r1 /* get argv */
-    mov r5, r2 /* get argc */
+    mov ip, r0 /* ip = function ptr */
+    mov r4, r1 /* r4 = argv */
+    mov r5, r2 /* r5 = argc */

-    cmp r5, #2 /* is argc < 2 ? */
+    cmp r5, #1 /* at least one argument required: module_inst */
     blt return

-    ldr r0, [r4], #4 /* argv[0] */
-    ldr r1, [r4], #4 /* argv[1] */
+    mov r6, #0 /* increased stack size */

-    mov r6, #0
+    ldr r0, [r4], #4 /* r0 = argv[0] = module_inst */
+    cmp r5, #1
+    beq call_func

+    ldr r1, [r4], #4 /* r1 = argv[1] */
     cmp r5, #2
     beq call_func

-    ldr r2, [r4], #4
+
+    ldr r2, [r4], #4 /* r2 = argv[2] */
     cmp r5, #3
     beq call_func

-    ldr r3, [r4], #4
-    subs r5, r5, #4 /* now we have r0 ~ r3 */
+    ldr r3, [r4], #4 /* r3 = argv[3] */
+    cmp r5, #4
+    beq call_func
+
+    sub r5, r5, #4 /* argc -= 4, now we have r0 ~ r3 */

     /* Ensure address is 8 byte aligned */
-    mov r6, r5, lsl#2
-    add r6, r6, #7
+    mov r6, r5, lsl#2 /* r6 = argc * 4 */
+    add r6, r6, #7 /* r6 = (r6 + 7) & ~7 */
     bic r6, r6, #7
-    add r6, r6, #4 /* +4 because only odd(5) registers are in stack */
-    subs sp, sp, r6 /* for stacked args */
+    add r6, r6, #4 /* +4 because odd(5) registers are in stack */
+    sub sp, sp, r6 /* reserved stack space for left arguments */
     mov r7, sp

-loop_args:
+loop_args: /* copy left arguments to stack */
     cmp r5, #0
     beq call_func
     ldr lr, [r4], #4
     str lr, [r7], #4
-    subs r5, r5, #1
+    sub r5, r5, #1
     b loop_args

 call_func:
     blx ip
-
-    add sp, sp, r6 /* recover sp */
+    add sp, sp, r6 /* restore sp */

 return:
     ldmfd sp!, {r4, r5, r6, r7, lr}
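
For reference, a minimal C-side sketch of how a caller might drive the revised routine, assuming the invokeNative(func, argv, argc) prototype implied by the register comments above (r0 = function ptr, r1 = argv, r2 = argc); GenericFunctionPointer, native_add, and example_call are hypothetical names used only for illustration and may not match the runtime's actual declarations.

    #include <stdint.h>
    #include <stdio.h>

    /* Implemented by the assembly above: r0 = func, r1 = argv, r2 = argc. */
    typedef void (*GenericFunctionPointer)(void);
    void invokeNative(GenericFunctionPointer func, uint32_t argv[], uint32_t argc);

    /* Hypothetical native function: first parameter is always the module instance. */
    static void native_add(void *module_inst, int32_t a, int32_t b)
    {
        (void)module_inst;
        printf("%d\n", a + b);
    }

    void example_call(void *module_inst)
    {
        uint32_t argv[3];

        argv[0] = (uint32_t)(uintptr_t)module_inst; /* argv[0] = module_inst, so argc >= 1 */
        argv[1] = 1u;                               /* loaded into r1 */
        argv[2] = 2u;                               /* loaded into r2 */

        invokeNative((GenericFunctionPointer)native_add, argv, 3);
    }

When argc exceeds 4, the remaining argv cells are copied by loop_args into the stack block reserved by "sub sp, sp, r6", whose size is (((argc - 4) * 4 + 7) & ~7) + 4 bytes; the extra 4 bytes keep sp 8-byte aligned because the prologue pushes an odd number (5) of registers.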