Index: configure
===================================================================
RCS file: /caml/ocaml/configure,v
retrieving revision 1.266
diff -u -r1.266 configure
--- configure 6 Oct 2008 13:31:47 -0000 1.266
+++ configure 21 Oct 2008 02:15:29 -0000
@@ -256,6 +256,14 @@
# Almost the same as NeXTStep
bytecccompopts="-fno-defer-pop $gcc_warnings -DSHRINKED_GNUC"
mathlib="";;
+ *,x86_64-*-darwin*)
+ bytecc="$bytecc -arch x86_64"
+ bytecccompopts="-fno-defer-pop -no-cpp-precomp $gcc_warnings"
+ # Tell gcc that we can use 32-bit code addresses for threaded code
+ # unless we are compiled for a shared library (-fPIC option)
+ echo "#ifndef __PIC__" >> m.h
+ echo "# define ARCH_CODE32" >> m.h
+ echo "#endif" >> m.h;;
*,*-*-darwin*)
bytecccompopts="-fno-defer-pop -no-cpp-precomp $gcc_warnings"
mathlib=""
@@ -566,16 +574,17 @@
mksharedlibrpath="-rpath "
shared_libraries_supported=true;;
i[3456]86-*-darwin*)
- dyld=ld
- if test -f /usr/bin/ld_classic; then
- # The new linker in Mac OS X 10.5 does not support read_only_relocs
- # dyld=/usr/bin/ld_classic XXX FIXME incompatible with X11 libs
- :
- fi
+ dyld=gcc
mksharedlib="$dyld -bundle -flat_namespace -undefined suppress -read_only_relocs suppress"
bytecccompopts="$dl_defs $bytecccompopts"
dl_needs_underscore=false
shared_libraries_supported=true;;
+ x86_64-*-darwin*)
+ dyld=gcc
+ mksharedlib="$dyld -m64 -bundle -flat_namespace -undefined suppress -read_only_relocs suppress"
+ bytecccompopts="$dl_defs $bytecccompopts"
+ dl_needs_underscore=false
+ shared_libraries_supported=true;;
*-apple-darwin*)
mksharedlib="$bytecc -bundle -flat_namespace -undefined suppress"
bytecccompopts="$dl_defs $bytecccompopts"
@@ -647,6 +656,7 @@
x86_64-*-freebsd*) arch=amd64; system=freebsd;;
x86_64-*-netbsd*) arch=amd64; system=netbsd;;
x86_64-*-openbsd*) arch=amd64; system=openbsd;;
+ x86_64-*-darwin9.5) arch=amd64; system=macosx;;
esac

# Some platforms exist both in 32-bit and 64-bit variants, not distinguished
@@ -685,6 +695,7 @@
*,*,rhapsody,*) nativecccompopts="$gcc_warnings -DDARWIN_VERSION_6 $dl_defs"
if $arch64; then partialld="ld -r -arch ppc64"; fi;;
*,gcc*,cygwin,*) nativecccompopts="$gcc_warnings -U_WIN32";;
+ amd64,gcc*,macosx,*) partialld="ld -r -arch x86_64";;
*,gcc*,*,*) nativecccompopts="$gcc_warnings";;
esac

@@ -696,6 +707,8 @@
asppprofflags='-pg -DPROFILING';;
alpha,*,*) as='as'
aspp='gcc -c';;
+ amd64,*,macosx) as='as -arch x86_64'
+ aspp='gcc -arch x86_64 -c';;
amd64,*,*) as='as'
aspp='gcc -c';;
arm,*,*) as='as';
@@ -734,6 +747,7 @@
i386,*,linux_elf) profiling='prof';;
i386,*,gnu) profiling='prof';;
i386,*,bsd_elf) profiling='prof';;
+ amd64,*,macosx) profiling='prof';;
i386,*,macosx) profiling='prof';;
sparc,*,solaris)
profiling='prof'
@@ -1092,13 +1106,20 @@
# Determine if system stack overflows can be detected

case "$arch,$system" in
- i386,linux_elf|amd64,linux|power,rhapsody|i386,macosx)
+ i386,linux_elf|amd64,linux|power,rhapsody|amd64,macosx|i386,macosx)
echo "System stack overflow can be detected."
echo "#define HAS_STACK_OVERFLOW_DETECTION" >> s.h;;
*)
echo "Cannot detect system stack overflow.";;
esac

+# Remap MAP_ANONYMOUS for macosx amd64
+case "$arch,$system" in
+ amd64,macosx)
+ echo "#define MAP_ANONYMOUS MAP_ANON" >> s.h;;
+ *);;
+esac
+
# Determine the target architecture for the "num" library

case "$host" in
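
The MAP_ANONYMOUS remap above is needed because Darwin's <sys/mman.h> defines MAP_ANON but, at the time, not MAP_ANONYMOUS. A minimal C sketch of how code built against the generated s.h allocates anonymous memory either way; the fallback guard below stands in for the generated #define, and the program is illustrative rather than OCaml runtime source:

#include <stdio.h>
#include <sys/mman.h>

/* The generated s.h would contain: #define MAP_ANONYMOUS MAP_ANON
   This guard stands in for it so the sketch is self-contained. */
#ifndef MAP_ANONYMOUS
#define MAP_ANONYMOUS MAP_ANON
#endif

int main(void)
{
  /* Anonymous, zero-filled page; no file descriptor involved. */
  void *p = mmap(NULL, 4096, PROT_READ | PROT_WRITE,
                 MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
  if (p == MAP_FAILED) { perror("mmap"); return 1; }
  printf("mapped at %p\n", p);
  munmap(p, 4096);
  return 0;
}
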
Index: asmcomp/amd64/emit.mlp
===================================================================
RCS file: /caml/ocaml/asmcomp/amd64/emit.mlp,v
retrieving revision 1.16
diff -u -r1.16 emit.mlp
--- asmcomp/amd64/emit.mlp 1 Aug 2008 08:04:57 -0000 1.16
+++ asmcomp/amd64/emit.mlp 21 Oct 2008 02:15:29 -0000
@@ -23,6 +23,14 @@
open Linearize
open Emitaux

+(* The things that MacOSX needs *)
+let setcnt = ref (-1)
+let macosx =
+ match Config.system with
+ | "macosx" -> true
+ | _ -> false
+
+
(* Tradeoff between code size and code speed *)

let fastcode_flag = ref true
@@ -54,15 +62,16 @@
(* Symbols *)

let emit_symbol s =
- Emitaux.emit_symbol '$' s
+ if macosx then emit_string "_";
+ Emitaux.emit_symbol '$' s

let emit_call s =
- if !Clflags.dlcode
+ if !Clflags.dlcode && not macosx
then `call {emit_symbol s}@PLT`
else `call {emit_symbol s}`

let emit_jump s =
- if !Clflags.dlcode
+ if !Clflags.dlcode && not macosx
then `jmp {emit_symbol s}@PLT`
else `jmp {emit_symbol s}`

@@ -82,6 +91,7 @@
(* Output a .align directive. *)

let emit_align n =
+ let n = if macosx then Misc.log2 n else n in
` .align {emit_int n}\n`

let emit_Llabel fallthrough lbl =
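
The Misc.log2 call above compensates for differing directive semantics: Apple's assembler reads ".align n" as "align to 2^n bytes", whereas GNU as on x86 ELF treats the operand as a byte count. A small self-contained C check of that conversion; ilog2 is a hypothetical stand-in for Misc.log2 and, like it, assumes a power-of-two argument:

#include <assert.h>

/* Stand-in for Misc.log2: exponent of a power of two (assumed exact). */
static int ilog2(int n)
{
  int k = 0;
  while (n > 1) { n >>= 1; k++; }
  return k;
}

int main(void)
{
  assert(ilog2(8)  == 3);   /* ".align 8"  becomes ".align 3" on Darwin */
  assert(ilog2(16) == 4);   /* ".align 16" becomes ".align 4" on Darwin */
  return 0;
}
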
@@ -588,7 +598,8 @@
end else begin
` jmp *{emit_label lbl}(, {emit_reg i.arg.(0)}, 8)\n`
end;
- ` .section .rodata\n`;
+ if macosx then ` .section .rodata,\"\"\n`
+ else ` .section .rodata\n`;
emit_align 8;
`{emit_label lbl}:`;
for i = 0 to Array.length jumptbl - 1 do
@@ -670,7 +681,8 @@
List.iter emit_call_gc !call_gc_sites;
emit_call_bound_errors ();
if !float_constants <> [] then begin
- ` .section .rodata.cst8,\"a\",@progbits\n`;
+ if macosx then ` .section .rodata.cst8,\"a\"\n`
+ else ` .section .rodata.cst8,\"a\",@progbits\n`;
List.iter emit_float_constant !float_constants
end

@@ -715,11 +727,20 @@
let begin_assembly() =
if !Clflags.dlcode then begin
(* from amd64.S; could emit these constants on demand *)
- ` .section .rodata.cst8,\"a\",@progbits\n`;
- ` .align 16\n`;
- `caml_negf_mask: .quad 0x8000000000000000, 0\n`;
- ` .align 16\n`;
- `caml_absf_mask: .quad 0x7FFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF\n`;
+ if macosx then begin
+ ` .section .rodata.cst8,\"a\"\n`;
+ ` .align 4\n`;
+ `caml_negf_mask: .quad 0x8000000000000000, 0\n`;
+ ` .align 4\n`;
+ `caml_absf_mask: .quad 0x7FFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF\n`;
+ end else begin
+ (* from amd64.S; could emit these constants on demand *)
+ ` .section .rodata.cst8,\"a\",@progbits\n`;
+ ` .align 16\n`;
+ `caml_negf_mask: .quad 0x8000000000000000, 0\n`;
+ ` .align 16\n`;
+ `caml_absf_mask: .quad 0x7FFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF\n`;
+ end;
end;
let lbl_begin = Compilenv.make_symbol (Some "data_begin") in
` .data\n`;
@@ -750,7 +771,12 @@
efa_word = (fun n -> ` .quad {emit_int n}\n`);
efa_align = emit_align;
efa_label_rel = (fun lbl ofs ->
- ` .long ({emit_label lbl} - .) + {emit_int32 ofs}\n`);
+ incr setcnt;
+ if macosx then
+ ` .set L$set${emit_int !setcnt},({emit_label lbl} - .) + {emit_int32 ofs}\n .long L$set${emit_int !setcnt}\n`
+ else
+ ` .long ({emit_label lbl} - .) + {emit_int32 ofs}\n`
+ );
efa_def_label = (fun l -> `{emit_label l}:\n`);
efa_string = (fun s -> emit_string_directive " .asciz " s) };
if Config.system = "linux" then
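
Two Mach-O conventions drive the emit.mlp changes above: C-visible symbols carry a leading underscore (hence emit_symbol printing "_" first), and there is no @PLT relocation syntax, since dyld binds calls through lazy stubs instead (hence the "&& not macosx" guards). A hedged C illustration of the naming rule only; the caml_demo_* names are invented for the example:

#include <stdio.h>

long caml_demo_counter = 0;   /* assembler symbol: _caml_demo_counter on
                                 Mach-O, caml_demo_counter on ELF */

long caml_demo_bump(long n)   /* likewise _caml_demo_bump on Mach-O */
{
  caml_demo_counter += n;
  return caml_demo_counter;
}

int main(void)
{
  printf("%ld\n", caml_demo_bump(3));   /* prints 3 */
  return 0;
}
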
Index: asmrun/amd64.S
===================================================================
RCS file: /caml/ocaml/asmrun/amd64.S,v
retrieving revision 1.12
diff -u -r1.12 amd64.S
--- asmrun/amd64.S 1 Aug 2008 08:04:57 -0000 1.12
+++ asmrun/amd64.S 21 Oct 2008 02:15:29 -0000
@@ -16,11 +16,20 @@
/* Asm part of the runtime system, AMD64 processor */
/* Must be preprocessed by cpp */

+#ifdef SYS_macosx
+#define FUNCTION_ALIGN 2
+#define EIGHT 3
+#define SIXTEEN 4
+#define PREPARE_SYMBOL(r) _##r
+#else
#define FUNCTION_ALIGN 4
+#define PREPARE_SYMBOL(r) r
+#define EIGHT 8
+#define SIXTEEN 16
+#endif

#define FUNCTION(name) \
.globl name; \
- .type name,@function; \
.align FUNCTION_ALIGN; \
name:

@@ -28,17 +37,17 @@

/* Allocation */

-FUNCTION(caml_call_gc)
+FUNCTION(PREPARE_SYMBOL(caml_call_gc))
/* Record lowest stack address and return address */
movq 0(%rsp), %rax
- movq %rax, caml_last_return_address(%rip)
+ movq %rax, PREPARE_SYMBOL(caml_last_return_address)(%rip)
leaq 8(%rsp), %rax
- movq %rax, caml_bottom_of_stack(%rip)
+ movq %rax, PREPARE_SYMBOL(caml_bottom_of_stack)(%rip)
.L105:
- /* Save caml_young_ptr, caml_exception_pointer */
- movq %r15, caml_young_ptr(%rip)
- movq %r14, caml_exception_pointer(%rip)
- /* Build array of registers, save it into caml_gc_regs */
+ /* Save PREPARE_SYMBOL(caml_young_ptr), PREPARE_SYMBOL(caml_exception_pointer) */
+ movq %r15, PREPARE_SYMBOL(caml_young_ptr)(%rip)
+ movq %r14, PREPARE_SYMBOL(caml_exception_pointer)(%rip)
+ /* Build array of registers, save it into PREPARE_SYMBOL(caml_gc_regs) */
pushq %r13
pushq %r12
pushq %rbp
@@ -52,7 +61,7 @@
pushq %rdi
pushq %rbx
pushq %rax
- movq %rsp, caml_gc_regs(%rip)
+ movq %rsp, PREPARE_SYMBOL(caml_gc_regs)(%rip)
/* Save floating-point registers */
subq $(16*8), %rsp
movlpd %xmm0, 0*8(%rsp)
@@ -72,7 +81,7 @@
movlpd %xmm14, 14*8(%rsp)
movlpd %xmm15, 15*8(%rsp)
/* Call the garbage collector */
- call caml_garbage_collection
+ call PREPARE_SYMBOL(caml_garbage_collection)
/* Restore all regs used by the code generator */
movlpd 0*8(%rsp), %xmm0
movlpd 1*8(%rsp), %xmm1
@@ -104,93 +113,93 @@
popq %rbp
popq %r12
popq %r13
- /* Restore caml_young_ptr, caml_exception_pointer */
- movq caml_young_ptr(%rip), %r15
- movq caml_exception_pointer(%rip), %r14
+ /* Restore PREPARE_SYMBOL(caml_young_ptr), PREPARE_SYMBOL(caml_exception_pointer) */
+ movq PREPARE_SYMBOL(caml_young_ptr)(%rip), %r15
+ movq PREPARE_SYMBOL(caml_exception_pointer)(%rip), %r14
/* Return to caller */
ret

-FUNCTION(caml_alloc1)
+FUNCTION(PREPARE_SYMBOL(caml_alloc1))
subq $16, %r15
- cmpq caml_young_limit(%rip), %r15
+ cmpq PREPARE_SYMBOL(caml_young_limit)(%rip), %r15
jb .L100
ret
.L100:
movq 0(%rsp), %rax
- movq %rax, caml_last_return_address(%rip)
+ movq %rax, PREPARE_SYMBOL(caml_last_return_address)(%rip)
leaq 8(%rsp), %rax
- movq %rax, caml_bottom_of_stack(%rip)
+ movq %rax, PREPARE_SYMBOL(caml_bottom_of_stack)(%rip)
subq $8, %rsp
call .L105
addq $8, %rsp
- jmp caml_alloc1
+ jmp PREPARE_SYMBOL(caml_alloc1)

-FUNCTION(caml_alloc2)
+FUNCTION(PREPARE_SYMBOL(caml_alloc2))
subq $24, %r15
- cmpq caml_young_limit(%rip), %r15
+ cmpq PREPARE_SYMBOL(caml_young_limit)(%rip), %r15
jb .L101
ret
.L101:
movq 0(%rsp), %rax
- movq %rax, caml_last_return_address(%rip)
+ movq %rax, PREPARE_SYMBOL(caml_last_return_address)(%rip)
leaq 8(%rsp), %rax
- movq %rax, caml_bottom_of_stack(%rip)
+ movq %rax, PREPARE_SYMBOL(caml_bottom_of_stack)(%rip)
subq $8, %rsp
call .L105
addq $8, %rsp
- jmp caml_alloc2
+ jmp PREPARE_SYMBOL(caml_alloc2)

-FUNCTION(caml_alloc3)
+FUNCTION(PREPARE_SYMBOL(caml_alloc3))
subq $32, %r15
- cmpq caml_young_limit(%rip), %r15
+ cmpq PREPARE_SYMBOL(caml_young_limit)(%rip), %r15
jb .L102
ret
.L102:
movq 0(%rsp), %rax
- movq %rax, caml_last_return_address(%rip)
+ movq %rax, PREPARE_SYMBOL(caml_last_return_address)(%rip)
leaq 8(%rsp), %rax
- movq %rax, caml_bottom_of_stack(%rip)
+ movq %rax, PREPARE_SYMBOL(caml_bottom_of_stack)(%rip)
subq $8, %rsp
call .L105
addq $8, %rsp
- jmp caml_alloc3
+ jmp PREPARE_SYMBOL(caml_alloc3)

-FUNCTION(caml_allocN)
+FUNCTION(PREPARE_SYMBOL(caml_allocN))
subq %rax, %r15
- cmpq caml_young_limit(%rip), %r15
+ cmpq PREPARE_SYMBOL(caml_young_limit)(%rip), %r15
jb .L103
ret
.L103:
pushq %rax /* save desired size */
movq 8(%rsp), %rax
- movq %rax, caml_last_return_address(%rip)
+ movq %rax, PREPARE_SYMBOL(caml_last_return_address)(%rip)
leaq 16(%rsp), %rax
- movq %rax, caml_bottom_of_stack(%rip)
+ movq %rax, PREPARE_SYMBOL(caml_bottom_of_stack)(%rip)
call .L105
popq %rax /* recover desired size */
- jmp caml_allocN
+ jmp PREPARE_SYMBOL(caml_allocN)

/* Call a C function from Caml */

-FUNCTION(caml_c_call)
+FUNCTION(PREPARE_SYMBOL(caml_c_call))
/* Record lowest stack address and return address */
popq %r12
- movq %r12, caml_last_return_address(%rip)
- movq %rsp, caml_bottom_of_stack(%rip)
+ movq %r12, PREPARE_SYMBOL(caml_last_return_address)(%rip)
+ movq %rsp, PREPARE_SYMBOL(caml_bottom_of_stack)(%rip)
/* Make the exception handler and alloc ptr available to the C code */
- movq %r15, caml_young_ptr(%rip)
- movq %r14, caml_exception_pointer(%rip)
+ movq %r15, PREPARE_SYMBOL(caml_young_ptr)(%rip)
+ movq %r14, PREPARE_SYMBOL(caml_exception_pointer)(%rip)
/* Call the function (address in %rax) */
call *%rax
/* Reload alloc ptr */
- movq caml_young_ptr(%rip), %r15
+ movq PREPARE_SYMBOL(caml_young_ptr)(%rip), %r15
/* Return to caller */
pushq %r12
ret

/* Start the Caml program */

-FUNCTION(caml_start_program)
+FUNCTION(PREPARE_SYMBOL(caml_start_program))
/* Save callee-save registers */
pushq %rbx
pushq %rbp
@@ -199,18 +208,18 @@
pushq %r14
pushq %r15
subq $8, %rsp /* stack 16-aligned */
- /* Initial entry point is caml_program */
- leaq caml_program(%rip), %r12
- /* Common code for caml_start_program and caml_callback* */
+ /* Initial entry point is PREPARE_SYMBOL(caml_program) */
+ leaq PREPARE_SYMBOL(caml_program)(%rip), %r12
+ /* Common code for PREPARE_SYMBOL(caml_start_program) and PREPARE_SYMBOL(caml_callback)* */
.L106:
/* Build a callback link */
subq $8, %rsp /* stack 16-aligned */
- pushq caml_gc_regs(%rip)
- pushq caml_last_return_address(%rip)
- pushq caml_bottom_of_stack(%rip)
+ pushq PREPARE_SYMBOL(caml_gc_regs)(%rip)
+ pushq PREPARE_SYMBOL(caml_last_return_address)(%rip)
+ pushq PREPARE_SYMBOL(caml_bottom_of_stack)(%rip)
/* Setup alloc ptr and exception ptr */
- movq caml_young_ptr(%rip), %r15
- movq caml_exception_pointer(%rip), %r14
+ movq PREPARE_SYMBOL(caml_young_ptr)(%rip), %r15
+ movq PREPARE_SYMBOL(caml_exception_pointer)(%rip), %r14
/* Build an exception handler */
lea .L108(%rip), %r13
pushq %r13
@@ -224,12 +233,12 @@
popq %r12 /* dummy register */
.L109:
/* Update alloc ptr and exception ptr */
- movq %r15, caml_young_ptr(%rip)
- movq %r14, caml_exception_pointer(%rip)
+ movq %r15, PREPARE_SYMBOL(caml_young_ptr)(%rip)
+ movq %r14, PREPARE_SYMBOL(caml_exception_pointer)(%rip)
/* Pop the callback link, restoring the global variables */
- popq caml_bottom_of_stack(%rip)
- popq caml_last_return_address(%rip)
- popq caml_gc_regs(%rip)
+ popq PREPARE_SYMBOL(caml_bottom_of_stack)(%rip)
+ popq PREPARE_SYMBOL(caml_last_return_address)(%rip)
+ popq PREPARE_SYMBOL(caml_gc_regs)(%rip)
addq $8, %rsp
/* Restore callee-save registers. */
addq $8, %rsp
@@ -249,8 +258,8 @@

/* Raise an exception from Caml */

-FUNCTION(caml_raise_exn)
- testl $1, caml_backtrace_active(%rip)
+FUNCTION(PREPARE_SYMBOL(caml_raise_exn))
+ testl $1, PREPARE_SYMBOL(caml_backtrace_active)(%rip)
jne .L110
movq %r14, %rsp
popq %r14
@@ -261,7 +270,7 @@
movq 0(%rsp), %rsi /* arg 2: pc of raise */
leaq 8(%rsp), %rdx /* arg 3: sp of raise */
movq %r14, %rcx /* arg 4: sp of handler */
- call caml_stash_backtrace
+ call PREPARE_SYMBOL(caml_stash_backtrace)
movq %r12, %rax /* Recover exception bucket */
movq %r14, %rsp
popq %r14
@@ -269,30 +278,30 @@

/* Raise an exception from C */

-FUNCTION(caml_raise_exception)
- testl $1, caml_backtrace_active(%rip)
+FUNCTION(PREPARE_SYMBOL(caml_raise_exception))
+ testl $1, PREPARE_SYMBOL(caml_backtrace_active)(%rip)
jne .L111
movq %rdi, %rax
- movq caml_exception_pointer(%rip), %rsp
+ movq PREPARE_SYMBOL(caml_exception_pointer)(%rip), %rsp
popq %r14 /* Recover previous exception handler */
- movq caml_young_ptr(%rip), %r15 /* Reload alloc ptr */
+ movq PREPARE_SYMBOL(caml_young_ptr)(%rip), %r15 /* Reload alloc ptr */
ret
.L111:
movq %rdi, %r12 /* Save exception bucket */
/* arg 1: exception bucket */
- movq caml_last_return_address(%rip), %rsi /* arg 2: pc of raise */
- movq caml_bottom_of_stack(%rip), %rdx /* arg 3: sp of raise */
- movq caml_exception_pointer(%rip), %rcx /* arg 4: sp of handler */
- call caml_stash_backtrace
+ movq PREPARE_SYMBOL(caml_last_return_address)(%rip), %rsi /* arg 2: pc of raise */
+ movq PREPARE_SYMBOL(caml_bottom_of_stack)(%rip), %rdx /* arg 3: sp of raise */
+ movq PREPARE_SYMBOL(caml_exception_pointer)(%rip), %rcx /* arg 4: sp of handler */
+ call PREPARE_SYMBOL(caml_stash_backtrace)
movq %r12, %rax /* Recover exception bucket */
- movq caml_exception_pointer(%rip), %rsp
+ movq PREPARE_SYMBOL(caml_exception_pointer)(%rip), %rsp
popq %r14 /* Recover previous exception handler */
- movq caml_young_ptr(%rip), %r15 /* Reload alloc ptr */
+ movq PREPARE_SYMBOL(caml_young_ptr)(%rip), %r15 /* Reload alloc ptr */
ret

/* Callback from C to Caml */

-FUNCTION(caml_callback_exn)
+FUNCTION(PREPARE_SYMBOL(caml_callback_exn))
/* Save callee-save registers */
pushq %rbx
pushq %rbp
@@ -307,7 +316,7 @@
movq 0(%rbx), %r12 /* code pointer */
jmp .L106

-FUNCTION(caml_callback2_exn)
+FUNCTION(PREPARE_SYMBOL(caml_callback2_exn))
/* Save callee-save registers */
pushq %rbx
pushq %rbp
@@ -320,10 +329,10 @@
/* closure stays in %rdi */
movq %rsi, %rax /* first argument */
movq %rdx, %rbx /* second argument */
- leaq caml_apply2(%rip), %r12 /* code pointer */
+ leaq PREPARE_SYMBOL(caml_apply2)(%rip), %r12 /* code pointer */
jmp .L106

-FUNCTION(caml_callback3_exn)
+FUNCTION(PREPARE_SYMBOL(caml_callback3_exn))
/* Save callee-save registers */
pushq %rbx
pushq %rbp
@@ -337,34 +346,35 @@
movq %rdx, %rbx /* second argument */
movq %rdi, %rsi /* closure */
movq %rcx, %rdi /* third argument */
- leaq caml_apply3(%rip), %r12 /* code pointer */
+ leaq PREPARE_SYMBOL(caml_apply3)(%rip), %r12 /* code pointer */
jmp .L106

-FUNCTION(caml_ml_array_bound_error)
- leaq caml_array_bound_error(%rip), %rax
- jmp caml_c_call
+FUNCTION(PREPARE_SYMBOL(caml_ml_array_bound_error))
+ leaq PREPARE_SYMBOL(caml_array_bound_error)(%rip), %rax
+ jmp PREPARE_SYMBOL(caml_c_call)

.data
- .globl caml_system__frametable
- .type caml_system__frametable,@object
- .align 8
-caml_system__frametable:
+ .globl PREPARE_SYMBOL(caml_system__frametable)
+ .align EIGHT
+PREPARE_SYMBOL(caml_system__frametable):
.quad 1 /* one descriptor */
.quad .L107 /* return address into callback */
.value -1 /* negative frame size => use callback link */
.value 0 /* no roots here */
- .align 8
+ .align EIGHT

- .section .rodata.cst8,"a",@progbits
- .globl caml_negf_mask
- .type caml_negf_mask,@object
- .align 16
-caml_negf_mask:
+#ifdef SYS_macosx
+ .section __TEXT,__eh_frame,coalesced,no_toc+strip_static_syms+live_support
+#else
+ .section .rodata.cst8,"a",@progbits
+#endif
+ .globl PREPARE_SYMBOL(caml_negf_mask)
+ .align SIXTEEN
+PREPARE_SYMBOL(caml_negf_mask):
.quad 0x8000000000000000, 0
- .globl caml_absf_mask
- .type caml_absf_mask,@object
- .align 16
-caml_absf_mask:
+ .globl PREPARE_SYMBOL(caml_absf_mask)
+ .align SIXTEEN
+PREPARE_SYMBOL(caml_absf_mask):
.quad 0x7FFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF

#if defined(SYS_linux)
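
A minimal sketch of the PREPARE_SYMBOL token pasting used throughout the file above, compiled as plain C for brevity; in amd64.S the same expansion is performed by cpp on the assembly source before as runs, and caml_demo is a hypothetical name. Building with -DSYS_macosx yields the identifier _caml_demo; without the flag it stays caml_demo:

#include <stdio.h>

#ifdef SYS_macosx
#define PREPARE_SYMBOL(r) _##r   /* Mach-O: paste a leading underscore */
#else
#define PREPARE_SYMBOL(r) r      /* ELF: keep the name unchanged */
#endif

/* Expands to either "int _caml_demo = 42;" or "int caml_demo = 42;" */
int PREPARE_SYMBOL(caml_demo) = 42;

int main(void)
{
  printf("%d\n", PREPARE_SYMBOL(caml_demo));
  return 0;
}
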
Index: asmrun/signals_osdep.h
===================================================================
RCS file: /caml/ocaml/asmrun/signals_osdep.h,v
retrieving revision 1.11
diff -u -r1.11 signals_osdep.h
--- asmrun/signals_osdep.h 11 Jan 2008 16:13:11 -0000 1.11
+++ asmrun/signals_osdep.h 21 Oct 2008 02:15:30 -0000
@@ -34,8 +34,6 @@

/****************** AMD64, Linux */

-#elif defined(TARGET_amd64) && defined (SYS_linux)
-
#define DECLARE_SIGNAL_HANDLER(name) \
static void name(int sig, siginfo_t * info, ucontext_t * context)

@@ -49,6 +47,29 @@
#define CONTEXT_YOUNG_PTR (context->uc_mcontext.gregs[REG_R15])
#define CONTEXT_FAULTING_ADDRESS ((char *) context->uc_mcontext.gregs[REG_CR2])

+/****************** AMD64, MacOSX */
+#elif defined(TARGET_amd64) && defined (SYS_macosx)
+
+ #define DECLARE_SIGNAL_HANDLER(name) \
+ static void name(int sig, siginfo_t * info, void * context)
+
+ #define SET_SIGACT(sigact,name) \
+ sigact.sa_sigaction = (name); \
+ sigact.sa_flags = SA_SIGINFO
+
+ #include <sys/ucontext.h>
+ #include <AvailabilityMacros.h>
+
+#if !defined(MAC_OS_X_VERSION_10_5) || MAC_OS_X_VERSION_MIN_REQUIRED < MAC_OS_X_VERSION_10_5
+ #define CONTEXT_REG(r) r
+ #else
+ #define CONTEXT_REG(r) __##r
+ #endif
+
+ #define CONTEXT_STATE (((ucontext_t *)context)->uc_mcontext->CONTEXT_REG(ss))
+ #define CONTEXT_PC (CONTEXT_STATE.CONTEXT_REG(rip))
+ #define CONTEXT_FAULTING_ADDRESS ((char *) info->si_addr)
+
/****************** I386, Linux */

#elif defined(TARGET_i386) && defined(SYS_linux_elf)
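
The CONTEXT_REG macro above absorbs an SDK change: starting with the Mac OS X 10.5 headers, the machine-context fields carry double-underscore names (ss becomes __ss, rip becomes __rip). A hedged sketch of a handler reading the faulting program counter through the same macros; it compiles only on x86-64 Mac OS X, and demo_handler is illustrative rather than the runtime's actual signal code:

#include <signal.h>
#include <sys/ucontext.h>
#include <AvailabilityMacros.h>

#if !defined(MAC_OS_X_VERSION_10_5) || \
    MAC_OS_X_VERSION_MIN_REQUIRED < MAC_OS_X_VERSION_10_5
#define CONTEXT_REG(r) r        /* pre-10.5 SDKs: plain field names */
#else
#define CONTEXT_REG(r) __##r    /* 10.5+ SDKs: double-underscore names */
#endif

static void demo_handler(int sig, siginfo_t *info, void *context)
{
  ucontext_t *uc = (ucontext_t *) context;
  /* Program counter at the fault, whichever field spelling the SDK uses. */
  unsigned long long pc = uc->uc_mcontext->CONTEXT_REG(ss).CONTEXT_REG(rip);
  (void) sig; (void) info; (void) pc;
}

int main(void)
{
  struct sigaction act;           /* mirrors the SET_SIGACT macro above */
  act.sa_sigaction = demo_handler;
  act.sa_flags = SA_SIGINFO;
  sigemptyset(&act.sa_mask);
  sigaction(SIGSEGV, &act, NULL);
  return 0;
}
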