@@ -466,6 +466,146 @@ TLI_DEFINE_VECFUNC("__exp2f_finite", "__svml_exp2f4", FIXED(4))
TLI_DEFINE_VECFUNC("__exp2f_finite", "__svml_exp2f8", FIXED(8))
TLI_DEFINE_VECFUNC("__exp2f_finite", "__svml_exp2f16", FIXED(16))

+#elif defined(TLI_DEFINE_SLEEFGNUABI_VF2_VECFUNCS)
+
+TLI_DEFINE_VECFUNC("acos", "_ZGVnN2v_acos", FIXED(2))
+TLI_DEFINE_VECFUNC("llvm.acos.f64", "_ZGVnN2v_acos", FIXED(2))
+
+TLI_DEFINE_VECFUNC("asin", "_ZGVnN2v_asin", FIXED(2))
+TLI_DEFINE_VECFUNC("llvm.asin.f64", "_ZGVnN2v_asin", FIXED(2))
+
+TLI_DEFINE_VECFUNC("atan", "_ZGVnN2v_atan", FIXED(2))
+TLI_DEFINE_VECFUNC("llvm.atan.f64", "_ZGVnN2v_atan", FIXED(2))
+
+TLI_DEFINE_VECFUNC("atan2", "_ZGVnN2vv_atan2", FIXED(2))
+TLI_DEFINE_VECFUNC("llvm.atan2.f64", "_ZGVnN2vv_atan2", FIXED(2))
+TLI_DEFINE_VECFUNC("llvm.atan2.v2f64", "_ZGVnN2vv_atan2", FIXED(2))
+
+TLI_DEFINE_VECFUNC("atanh", "_ZGVnN2v_atanh", FIXED(2))
+TLI_DEFINE_VECFUNC("llvm.atanh.f64", "_ZGVnN2v_atanh", FIXED(2))
+
+TLI_DEFINE_VECFUNC("cos", "_ZGVnN2v_cos", FIXED(2))
+TLI_DEFINE_VECFUNC("llvm.cos.f64", "_ZGVnN2v_cos", FIXED(2))
+
+TLI_DEFINE_VECFUNC("cosh", "_ZGVnN2v_cosh", FIXED(2))
+TLI_DEFINE_VECFUNC("llvm.cosh.f64", "_ZGVnN2v_cosh", FIXED(2))
+
+TLI_DEFINE_VECFUNC("exp", "_ZGVnN2v_exp", FIXED(2))
+TLI_DEFINE_VECFUNC("llvm.exp.f64", "_ZGVnN2v_exp", FIXED(2))
+TLI_DEFINE_VECFUNC("llvm.exp.v2f64", "_ZGVnN2v_exp", FIXED(2))
+
+TLI_DEFINE_VECFUNC("exp2", "_ZGVnN2v_exp2", FIXED(2))
+TLI_DEFINE_VECFUNC("llvm.exp2.f64", "_ZGVnN2v_exp2", FIXED(2))
+TLI_DEFINE_VECFUNC("llvm.exp2.v2f64", "_ZGVnN2v_exp2", FIXED(2))
+
+TLI_DEFINE_VECFUNC("exp10", "_ZGVnN2v_exp10", FIXED(2))
+TLI_DEFINE_VECFUNC("llvm.exp10.f64", "_ZGVnN2v_exp10", FIXED(2))
+TLI_DEFINE_VECFUNC("llvm.exp10.v2f64", "_ZGVnN2v_exp10", FIXED(2))
+
+TLI_DEFINE_VECFUNC("lgamma", "_ZGVnN2v_lgamma", FIXED(2))
+TLI_DEFINE_VECFUNC("llvm.lgamma.f64", "_ZGVnN2v_lgamma", FIXED(2))
+
+TLI_DEFINE_VECFUNC("log", "_ZGVnN2v_log", FIXED(2))
+TLI_DEFINE_VECFUNC("llvm.log.f64", "_ZGVnN2v_log", FIXED(2))
+
+TLI_DEFINE_VECFUNC("log2", "_ZGVnN2v_log2", FIXED(2))
+TLI_DEFINE_VECFUNC("llvm.log2.f64", "_ZGVnN2v_log2", FIXED(2))
+
+TLI_DEFINE_VECFUNC("log10", "_ZGVnN2v_log10", FIXED(2))
+TLI_DEFINE_VECFUNC("llvm.log10.f64", "_ZGVnN2v_log10", FIXED(2))
+
+TLI_DEFINE_VECFUNC("pow", "_ZGVnN2vv_pow", FIXED(2))
+TLI_DEFINE_VECFUNC("llvm.pow.f64", "_ZGVnN2vv_pow", FIXED(2))
+TLI_DEFINE_VECFUNC("llvm.pow.v2f64", "_ZGVnN2vv_pow", FIXED(2))
+
+TLI_DEFINE_VECFUNC("sin", "_ZGVnN2v_sin", FIXED(2))
+TLI_DEFINE_VECFUNC("llvm.sin.f64", "_ZGVnN2v_sin", FIXED(2))
+
+TLI_DEFINE_VECFUNC("sinh", "_ZGVnN2v_sinh", FIXED(2))
+TLI_DEFINE_VECFUNC("llvm.sinh.f64", "_ZGVnN2v_sinh", FIXED(2))
+
+TLI_DEFINE_VECFUNC("sqrt", "_ZGVnN2v_sqrt", FIXED(2))
+TLI_DEFINE_VECFUNC("llvm.sqrt.f64", "_ZGVnN2v_sqrt", FIXED(2))
+
+TLI_DEFINE_VECFUNC("tan", "_ZGVnN2v_tan", FIXED(2))
+TLI_DEFINE_VECFUNC("llvm.tan.f64", "_ZGVnN2v_tan", FIXED(2))
+
+TLI_DEFINE_VECFUNC("tanh", "_ZGVnN2v_tanh", FIXED(2))
+TLI_DEFINE_VECFUNC("llvm.tanh.f64", "_ZGVnN2v_tanh", FIXED(2))
+
+TLI_DEFINE_VECFUNC("tgamma", "_ZGVnN2v_tgamma", FIXED(2))
+TLI_DEFINE_VECFUNC("llvm.tgamma.f64", "_ZGVnN2v_tgamma", FIXED(2))
+
+#elif defined(TLI_DEFINE_SLEEFGNUABI_VF4_VECFUNCS)
+
+TLI_DEFINE_VECFUNC("acosf", "_ZGVnN4v_acosf", FIXED(4))
+TLI_DEFINE_VECFUNC("llvm.acos.f32", "_ZGVnN4v_acosf", FIXED(4))
+
+TLI_DEFINE_VECFUNC("asinf", "_ZGVnN4v_asinf", FIXED(4))
+TLI_DEFINE_VECFUNC("llvm.asin.f32", "_ZGVnN4v_asinf", FIXED(4))
+
+TLI_DEFINE_VECFUNC("atanf", "_ZGVnN4v_atanf", FIXED(4))
+TLI_DEFINE_VECFUNC("llvm.atan.f32", "_ZGVnN4v_atanf", FIXED(4))
+
+TLI_DEFINE_VECFUNC("atan2f", "_ZGVnN4vv_atan2f", FIXED(4))
+TLI_DEFINE_VECFUNC("llvm.atan2.f32", "_ZGVnN4vv_atan2f", FIXED(4))
+TLI_DEFINE_VECFUNC("llvm.atan2.v4f32", "_ZGVnN4vv_atan2f", FIXED(4))
+
+TLI_DEFINE_VECFUNC("atanhf", "_ZGVnN4v_atanhf", FIXED(4))
+TLI_DEFINE_VECFUNC("llvm.atanh.f32", "_ZGVnN4v_atanhf", FIXED(4))
+
+TLI_DEFINE_VECFUNC("cosf", "_ZGVnN4v_cosf", FIXED(4))
+TLI_DEFINE_VECFUNC("llvm.cos.f32", "_ZGVnN4v_cosf", FIXED(4))
+
+TLI_DEFINE_VECFUNC("coshf", "_ZGVnN4v_coshf", FIXED(4))
+TLI_DEFINE_VECFUNC("llvm.cosh.f32", "_ZGVnN4v_coshf", FIXED(4))
+
+TLI_DEFINE_VECFUNC("expf", "_ZGVnN4v_expf", FIXED(4))
+TLI_DEFINE_VECFUNC("llvm.exp.f32", "_ZGVnN4v_expf", FIXED(4))
+TLI_DEFINE_VECFUNC("llvm.exp.v4f32", "_ZGVnN4v_expf", FIXED(4))
+
+TLI_DEFINE_VECFUNC("exp2f", "_ZGVnN4v_exp2f", FIXED(4))
+TLI_DEFINE_VECFUNC("llvm.exp2.f32", "_ZGVnN4v_exp2f", FIXED(4))
+TLI_DEFINE_VECFUNC("llvm.exp2.v4f32", "_ZGVnN4v_exp2f", FIXED(4))
+
+TLI_DEFINE_VECFUNC("exp10f", "_ZGVnN4v_exp10f", FIXED(4))
+TLI_DEFINE_VECFUNC("llvm.exp10.f32", "_ZGVnN4v_exp10f", FIXED(4))
+TLI_DEFINE_VECFUNC("llvm.exp10.v4f32", "_ZGVnN4v_exp10f", FIXED(4))
+
+TLI_DEFINE_VECFUNC("lgammaf", "_ZGVnN4v_lgammaf", FIXED(4))
+TLI_DEFINE_VECFUNC("llvm.lgamma.f32", "_ZGVnN4v_lgammaf", FIXED(4))
+
+TLI_DEFINE_VECFUNC("logf", "_ZGVnN4v_logf", FIXED(4))
+TLI_DEFINE_VECFUNC("llvm.log.f32", "_ZGVnN4v_logf", FIXED(4))
+
+TLI_DEFINE_VECFUNC("log2f", "_ZGVnN4v_log2f", FIXED(4))
+TLI_DEFINE_VECFUNC("llvm.log2.f32", "_ZGVnN4v_log2f", FIXED(4))
+
+TLI_DEFINE_VECFUNC("log10f", "_ZGVnN4v_log10f", FIXED(4))
+TLI_DEFINE_VECFUNC("llvm.log10.f32", "_ZGVnN4v_log10f", FIXED(4))
+
+TLI_DEFINE_VECFUNC("powf", "_ZGVnN4vv_powf", FIXED(4))
+TLI_DEFINE_VECFUNC("llvm.pow.f32", "_ZGVnN4vv_powf", FIXED(4))
+TLI_DEFINE_VECFUNC("llvm.pow.v4f32", "_ZGVnN4vv_powf", FIXED(4))
+
+TLI_DEFINE_VECFUNC("sinf", "_ZGVnN4v_sinf", FIXED(4))
+TLI_DEFINE_VECFUNC("llvm.sin.f32", "_ZGVnN4v_sinf", FIXED(4))
+
+TLI_DEFINE_VECFUNC("sinhf", "_ZGVnN4v_sinhf", FIXED(4))
+TLI_DEFINE_VECFUNC("llvm.sinh.f32", "_ZGVnN4v_sinhf", FIXED(4))
+
+TLI_DEFINE_VECFUNC("sqrtf", "_ZGVnN4v_sqrtf", FIXED(4))
+TLI_DEFINE_VECFUNC("llvm.sqrt.f32", "_ZGVnN4v_sqrtf", FIXED(4))
+
+TLI_DEFINE_VECFUNC("tanf", "_ZGVnN4v_tanf", FIXED(4))
+TLI_DEFINE_VECFUNC("llvm.tan.f32", "_ZGVnN4v_tanf", FIXED(4))
+
+TLI_DEFINE_VECFUNC("tanhf", "_ZGVnN4v_tanhf", FIXED(4))
+TLI_DEFINE_VECFUNC("llvm.tanh.f32", "_ZGVnN4v_tanhf", FIXED(4))
+
+TLI_DEFINE_VECFUNC("tgammaf", "_ZGVnN4v_tgammaf", FIXED(4))
+TLI_DEFINE_VECFUNC("llvm.tgamma.f32", "_ZGVnN4v_tgammaf", FIXED(4))
+
#else
#error "Must choose which vector library functions are to be defined."
#endif
@@ -476,4 +616,6 @@ TLI_DEFINE_VECFUNC("__exp2f_finite", "__svml_exp2f16", FIXED(16))
#undef TLI_DEFINE_LIBMVEC_X86_VECFUNCS
#undef TLI_DEFINE_MASSV_VECFUNCS
#undef TLI_DEFINE_SVML_VECFUNCS
+#undef TLI_DEFINE_SLEEFGNUABI_VF2_VECFUNCS
+#undef TLI_DEFINE_SLEEFGNUABI_VF4_VECFUNCS
#undef TLI_DEFINE_MASSV_VECFUNCS_NAMES
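For context, VecFuncs.def is an X-macro list: a consumer defines exactly one selector macro (here TLI_DEFINE_SLEEFGNUABI_VF2_VECFUNCS or TLI_DEFINE_SLEEFGNUABI_VF4_VECFUNCS) together with TLI_DEFINE_VECFUNC before including the file, the matching #elif branch above then expands into a table of scalar-to-vector-call mappings, and the trailing #undef lines reset the selectors for the next include. The sketch below only illustrates that expansion pattern under stated assumptions: the VecEntry struct, its field names, and the inlined entries are illustrative stand-ins, not LLVM's actual VecDesc type or a real include of the header.

// Minimal, self-contained sketch of the X-macro expansion pattern used by
// VecFuncs.def. VecEntry and its fields are hypothetical stand-ins.
#include <cstdio>

struct VecEntry {
  const char *ScalarName;       // scalar libm call or LLVM intrinsic name
  const char *VectorName;       // SLEEF GNU ABI vector symbol
  unsigned VectorizationFactor; // number of lanes, i.e. FIXED(N)
};

// In LLVM the table would be produced by defining the selector macro and
// including VecFuncs.def; a few entries are written out here instead so the
// example compiles on its own.
#define FIXED(NL) NL
#define TLI_DEFINE_VECFUNC(SCAL, VEC, VF) {SCAL, VEC, VF},

static const VecEntry SleefGnuAbiVF2[] = {
    TLI_DEFINE_VECFUNC("cos", "_ZGVnN2v_cos", FIXED(2))
    TLI_DEFINE_VECFUNC("llvm.cos.f64", "_ZGVnN2v_cos", FIXED(2))
    TLI_DEFINE_VECFUNC("pow", "_ZGVnN2vv_pow", FIXED(2))
};

int main() {
  // Print each scalar-to-vector mapping in the table.
  for (const VecEntry &E : SleefGnuAbiVF2)
    std::printf("%s -> %s (VF=%u)\n", E.ScalarName, E.VectorName,
                E.VectorizationFactor);
  return 0;
}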