Diffstat (limited to 'compiler'): 291 files changed, 24622 insertions, 14365 deletions
diff --git a/compiler/rustc_abi/src/layout.rs b/compiler/rustc_abi/src/layout.rs index 58a7fcae9f6..80b44e432ee 100644 --- a/compiler/rustc_abi/src/layout.rs +++ b/compiler/rustc_abi/src/layout.rs @@ -432,7 +432,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> { align = align.min(AbiAlign::new(pack)); } // The unadjusted ABI alignment does not include repr(align), but does include repr(pack). - // See documentation on `LayoutS::unadjusted_abi_align`. + // See documentation on `LayoutData::unadjusted_abi_align`. let unadjusted_abi_align = align.abi; if let Some(repr_align) = repr.align { align = align.max(AbiAlign::new(repr_align)); @@ -602,10 +602,10 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> { dont_niche_optimize_enum: bool, ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> { // Until we've decided whether to use the tagged or - // niche filling LayoutS, we don't want to intern the + // niche filling LayoutData, we don't want to intern the // variant layouts, so we can't store them in the - // overall LayoutS. Store the overall LayoutS - // and the variant LayoutSs here until then. + // overall LayoutData. Store the overall LayoutData + // and the variant LayoutDatas here until then. struct TmpLayout<FieldIdx: Idx, VariantIdx: Idx> { layout: LayoutData<FieldIdx, VariantIdx>, variants: IndexVec<VariantIdx, LayoutData<FieldIdx, VariantIdx>>, @@ -1214,7 +1214,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> { match kind { StructKind::AlwaysSized | StructKind::MaybeUnsized => { - // Currently `LayoutS` only exposes a single niche so sorting is usually + // Currently `LayoutData` only exposes a single niche so sorting is usually // sufficient to get one niche into the preferred position. If it ever // supported multiple niches then a more advanced pick-and-pack approach could // provide better results. But even for the single-niche cache it's not @@ -1333,7 +1333,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> { } // The unadjusted ABI alignment does not include repr(align), but does include repr(pack). - // See documentation on `LayoutS::unadjusted_abi_align`. + // See documentation on `LayoutData::unadjusted_abi_align`. let unadjusted_abi_align = align.abi; if let Some(repr_align) = repr.align { align = align.max(AbiAlign::new(repr_align)); diff --git a/compiler/rustc_abi/src/layout/ty.rs b/compiler/rustc_abi/src/layout/ty.rs index bb880a58e52..18f0750aaa1 100644 --- a/compiler/rustc_abi/src/layout/ty.rs +++ b/compiler/rustc_abi/src/layout/ty.rs @@ -71,7 +71,7 @@ pub struct Layout<'a>(pub Interned<'a, LayoutData<FieldIdx, VariantIdx>>); impl<'a> fmt::Debug for Layout<'a> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - // See comment on `<LayoutS as Debug>::fmt` above. + // See comment on `<LayoutData as Debug>::fmt` above. self.0.0.fmt(f) } } diff --git a/compiler/rustc_abi/src/lib.rs b/compiler/rustc_abi/src/lib.rs index 4268e68b2e4..6d729b6919a 100644 --- a/compiler/rustc_abi/src/lib.rs +++ b/compiler/rustc_abi/src/lib.rs @@ -1785,7 +1785,7 @@ where { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { // This is how `Layout` used to print before it become - // `Interned<LayoutS>`. We print it like this to avoid having to update + // `Interned<LayoutData>`. We print it like this to avoid having to update // expected output in a lot of tests. 
let LayoutData { size, diff --git a/compiler/rustc_ast/src/ast_traits.rs b/compiler/rustc_ast/src/ast_traits.rs index 797ab297319..9d91f41d6c7 100644 --- a/compiler/rustc_ast/src/ast_traits.rs +++ b/compiler/rustc_ast/src/ast_traits.rs @@ -321,6 +321,13 @@ impl<Wrapped, Tag> AstNodeWrapper<Wrapped, Tag> { } } +// FIXME: remove after `stmt_expr_attributes` is stabilized. +impl<T, Tag> From<AstNodeWrapper<P<T>, Tag>> for AstNodeWrapper<T, Tag> { + fn from(value: AstNodeWrapper<P<T>, Tag>) -> Self { + AstNodeWrapper { wrapped: *value.wrapped, tag: value.tag } + } +} + impl<Wrapped: HasNodeId, Tag> HasNodeId for AstNodeWrapper<Wrapped, Tag> { fn node_id(&self) -> NodeId { self.wrapped.node_id() diff --git a/compiler/rustc_ast/src/attr/mod.rs b/compiler/rustc_ast/src/attr/mod.rs index 621e3042b62..44865c493b3 100644 --- a/compiler/rustc_ast/src/attr/mod.rs +++ b/compiler/rustc_ast/src/attr/mod.rs @@ -206,12 +206,24 @@ impl AttributeExt for Attribute { } } - fn style(&self) -> AttrStyle { - self.style + fn doc_resolution_scope(&self) -> Option<AttrStyle> { + match &self.kind { + AttrKind::DocComment(..) => Some(self.style), + AttrKind::Normal(normal) + if normal.item.path == sym::doc && normal.item.value_str().is_some() => + { + Some(self.style) + } + _ => None, + } } } impl Attribute { + pub fn style(&self) -> AttrStyle { + self.style + } + pub fn may_have_doc_links(&self) -> bool { self.doc_str().is_some_and(|s| comments::may_have_doc_links(s.as_str())) } @@ -806,7 +818,14 @@ pub trait AttributeExt: Debug { /// * `#[doc(...)]` returns `None`. fn doc_str_and_comment_kind(&self) -> Option<(Symbol, CommentKind)>; - fn style(&self) -> AttrStyle; + /// Returns outer or inner if this is a doc attribute or a sugared doc + /// comment, otherwise None. + /// + /// This is used in the case of doc comments on modules, to decide whether + /// to resolve intra-doc links against the symbols in scope within the + /// commented module (for inner doc) vs within its parent module (for outer + /// doc). + fn doc_resolution_scope(&self) -> Option<AttrStyle>; } // FIXME(fn_delegation): use function delegation instead of manually forwarding @@ -881,8 +900,4 @@ impl Attribute { pub fn doc_str_and_comment_kind(&self) -> Option<(Symbol, CommentKind)> { AttributeExt::doc_str_and_comment_kind(self) } - - pub fn style(&self) -> AttrStyle { - AttributeExt::style(self) - } } diff --git a/compiler/rustc_ast/src/expand/allocator.rs b/compiler/rustc_ast/src/expand/allocator.rs index dd8d5ae624a..7dee2ed17b4 100644 --- a/compiler/rustc_ast/src/expand/allocator.rs +++ b/compiler/rustc_ast/src/expand/allocator.rs @@ -22,7 +22,7 @@ pub fn alloc_error_handler_name(alloc_error_handler_kind: AllocatorKind) -> &'st } } -pub const NO_ALLOC_SHIM_IS_UNSTABLE: &str = "__rust_no_alloc_shim_is_unstable"; +pub const NO_ALLOC_SHIM_IS_UNSTABLE: &str = "__rust_no_alloc_shim_is_unstable_v2"; pub enum AllocatorTy { Layout, diff --git a/compiler/rustc_ast/src/format.rs b/compiler/rustc_ast/src/format.rs index b611ddea1d9..28d260419c5 100644 --- a/compiler/rustc_ast/src/format.rs +++ b/compiler/rustc_ast/src/format.rs @@ -50,6 +50,14 @@ pub struct FormatArgs { /// /// Generally only useful for lints that care about the raw bytes the user wrote. pub uncooked_fmt_str: (LitKind, Symbol), + /// Was the format literal written in the source? 
+ /// - `format!("boo")` => true, + /// - `format!(concat!("b", "o", "o"))` => false, + /// - `format!(include_str!("boo.txt"))` => false, + /// + /// If it wasn't written in the source then we have to be careful with spans pointing into it + /// and suggestions about rewriting it. + pub is_source_literal: bool, } /// A piece of a format template string. diff --git a/compiler/rustc_ast/src/mut_visit.rs b/compiler/rustc_ast/src/mut_visit.rs index 07fbe8045fc..3eae19f4daa 100644 --- a/compiler/rustc_ast/src/mut_visit.rs +++ b/compiler/rustc_ast/src/mut_visit.rs @@ -13,7 +13,7 @@ use std::panic; use rustc_data_structures::flat_map_in_place::FlatMapInPlace; use rustc_span::source_map::Spanned; use rustc_span::{Ident, Span}; -use smallvec::{Array, SmallVec, smallvec}; +use smallvec::{SmallVec, smallvec}; use thin_vec::ThinVec; use crate::ast::*; @@ -21,17 +21,6 @@ use crate::ptr::P; use crate::tokenstream::*; use crate::visit::{AssocCtxt, BoundKind, FnCtxt, VisitorResult, try_visit, visit_opt, walk_list}; -pub trait ExpectOne<A: Array> { - fn expect_one(self, err: &'static str) -> A::Item; -} - -impl<A: Array> ExpectOne<A> for SmallVec<A> { - fn expect_one(self, err: &'static str) -> A::Item { - assert!(self.len() == 1, "{}", err); - self.into_iter().next().unwrap() - } -} - mod sealed { use rustc_ast_ir::visit::VisitorResult; @@ -47,323 +36,6 @@ mod sealed { use sealed::MutVisitorResult; -pub trait MutVisitor: Sized + MutVisitorResult<Result = ()> { - // Methods in this trait have one of three forms: - // - // fn visit_t(&mut self, t: &mut T); // common - // fn flat_map_t(&mut self, t: T) -> SmallVec<[T; 1]>; // rare - // fn filter_map_t(&mut self, t: T) -> Option<T>; // rarest - // - // When writing these methods, it is better to use destructuring like this: - // - // fn visit_abc(&mut self, ABC { a, b, c: _ }: &mut ABC) { - // visit_a(a); - // visit_b(b); - // } - // - // than to use field access like this: - // - // fn visit_abc(&mut self, abc: &mut ABC) { - // visit_a(&mut abc.a); - // visit_b(&mut abc.b); - // // ignore abc.c - // } - // - // As well as being more concise, the former is explicit about which fields - // are skipped. Furthermore, if a new field is added, the destructuring - // version will cause a compile error, which is good. In comparison, the - // field access version will continue working and it would be easy to - // forget to add handling for it. 
- - fn visit_crate(&mut self, c: &mut Crate) { - walk_crate(self, c) - } - - fn visit_meta_list_item(&mut self, list_item: &mut MetaItemInner) { - walk_meta_list_item(self, list_item); - } - - fn visit_meta_item(&mut self, meta_item: &mut MetaItem) { - walk_meta_item(self, meta_item); - } - - fn visit_use_tree(&mut self, use_tree: &mut UseTree) { - walk_use_tree(self, use_tree); - } - - fn visit_foreign_item(&mut self, ni: &mut ForeignItem) { - walk_item(self, ni); - } - - fn flat_map_foreign_item(&mut self, ni: P<ForeignItem>) -> SmallVec<[P<ForeignItem>; 1]> { - walk_flat_map_foreign_item(self, ni) - } - - fn visit_item(&mut self, i: &mut Item) { - walk_item(self, i); - } - - fn flat_map_item(&mut self, i: P<Item>) -> SmallVec<[P<Item>; 1]> { - walk_flat_map_item(self, i) - } - - fn visit_fn_header(&mut self, header: &mut FnHeader) { - walk_fn_header(self, header); - } - - fn visit_field_def(&mut self, fd: &mut FieldDef) { - walk_field_def(self, fd); - } - - fn flat_map_field_def(&mut self, fd: FieldDef) -> SmallVec<[FieldDef; 1]> { - walk_flat_map_field_def(self, fd) - } - - fn visit_assoc_item(&mut self, i: &mut AssocItem, ctxt: AssocCtxt) { - walk_assoc_item(self, i, ctxt) - } - - fn flat_map_assoc_item( - &mut self, - i: P<AssocItem>, - ctxt: AssocCtxt, - ) -> SmallVec<[P<AssocItem>; 1]> { - walk_flat_map_assoc_item(self, i, ctxt) - } - - fn visit_contract(&mut self, c: &mut FnContract) { - walk_contract(self, c); - } - - fn visit_fn_decl(&mut self, d: &mut FnDecl) { - walk_fn_decl(self, d); - } - - /// `Span` and `NodeId` are mutated at the caller site. - fn visit_fn(&mut self, fk: FnKind<'_>, _: Span, _: NodeId) { - walk_fn(self, fk) - } - - fn visit_coroutine_kind(&mut self, a: &mut CoroutineKind) { - walk_coroutine_kind(self, a); - } - - fn visit_closure_binder(&mut self, b: &mut ClosureBinder) { - walk_closure_binder(self, b); - } - - fn visit_block(&mut self, b: &mut Block) { - walk_block(self, b); - } - - fn flat_map_stmt(&mut self, s: Stmt) -> SmallVec<[Stmt; 1]> { - walk_flat_map_stmt(self, s) - } - - fn visit_arm(&mut self, arm: &mut Arm) { - walk_arm(self, arm); - } - - fn flat_map_arm(&mut self, arm: Arm) -> SmallVec<[Arm; 1]> { - walk_flat_map_arm(self, arm) - } - - fn visit_pat(&mut self, p: &mut Pat) { - walk_pat(self, p); - } - - fn visit_anon_const(&mut self, c: &mut AnonConst) { - walk_anon_const(self, c); - } - - fn visit_expr(&mut self, e: &mut Expr) { - walk_expr(self, e); - } - - /// This method is a hack to workaround unstable of `stmt_expr_attributes`. - /// It can be removed once that feature is stabilized. 
- fn visit_method_receiver_expr(&mut self, ex: &mut P<Expr>) { - self.visit_expr(ex) - } - - fn filter_map_expr(&mut self, e: P<Expr>) -> Option<P<Expr>> { - walk_filter_map_expr(self, e) - } - - fn visit_generic_arg(&mut self, arg: &mut GenericArg) { - walk_generic_arg(self, arg); - } - - fn visit_ty(&mut self, t: &mut Ty) { - walk_ty(self, t); - } - - fn visit_ty_pat(&mut self, t: &mut TyPat) { - walk_ty_pat(self, t); - } - - fn visit_lifetime(&mut self, l: &mut Lifetime) { - walk_lifetime(self, l); - } - - fn visit_assoc_item_constraint(&mut self, c: &mut AssocItemConstraint) { - walk_assoc_item_constraint(self, c); - } - - fn visit_foreign_mod(&mut self, nm: &mut ForeignMod) { - walk_foreign_mod(self, nm); - } - - fn visit_variant(&mut self, v: &mut Variant) { - walk_variant(self, v); - } - - fn flat_map_variant(&mut self, v: Variant) -> SmallVec<[Variant; 1]> { - walk_flat_map_variant(self, v) - } - - fn visit_ident(&mut self, i: &mut Ident) { - self.visit_span(&mut i.span); - } - - fn visit_path(&mut self, p: &mut Path) { - walk_path(self, p); - } - - fn visit_path_segment(&mut self, p: &mut PathSegment) { - walk_path_segment(self, p) - } - - fn visit_qself(&mut self, qs: &mut Option<P<QSelf>>) { - walk_qself(self, qs); - } - - fn visit_generic_args(&mut self, p: &mut GenericArgs) { - walk_generic_args(self, p); - } - - fn visit_local(&mut self, l: &mut Local) { - walk_local(self, l); - } - - fn visit_mac_call(&mut self, mac: &mut MacCall) { - walk_mac(self, mac); - } - - fn visit_macro_def(&mut self, def: &mut MacroDef) { - walk_macro_def(self, def); - } - - fn visit_label(&mut self, label: &mut Label) { - walk_label(self, label); - } - - fn visit_attribute(&mut self, at: &mut Attribute) { - walk_attribute(self, at); - } - - fn visit_param(&mut self, param: &mut Param) { - walk_param(self, param); - } - - fn flat_map_param(&mut self, param: Param) -> SmallVec<[Param; 1]> { - walk_flat_map_param(self, param) - } - - fn visit_generics(&mut self, generics: &mut Generics) { - walk_generics(self, generics); - } - - fn visit_trait_ref(&mut self, tr: &mut TraitRef) { - walk_trait_ref(self, tr); - } - - fn visit_poly_trait_ref(&mut self, p: &mut PolyTraitRef) { - walk_poly_trait_ref(self, p); - } - - fn visit_variant_data(&mut self, vdata: &mut VariantData) { - walk_variant_data(self, vdata); - } - - fn visit_generic_param(&mut self, param: &mut GenericParam) { - walk_generic_param(self, param) - } - - fn flat_map_generic_param(&mut self, param: GenericParam) -> SmallVec<[GenericParam; 1]> { - walk_flat_map_generic_param(self, param) - } - - fn visit_param_bound(&mut self, tpb: &mut GenericBound, _ctxt: BoundKind) { - walk_param_bound(self, tpb); - } - - fn visit_precise_capturing_arg(&mut self, arg: &mut PreciseCapturingArg) { - walk_precise_capturing_arg(self, arg); - } - - fn visit_expr_field(&mut self, f: &mut ExprField) { - walk_expr_field(self, f); - } - - fn flat_map_expr_field(&mut self, f: ExprField) -> SmallVec<[ExprField; 1]> { - walk_flat_map_expr_field(self, f) - } - - fn flat_map_where_predicate( - &mut self, - where_predicate: WherePredicate, - ) -> SmallVec<[WherePredicate; 1]> { - walk_flat_map_where_predicate(self, where_predicate) - } - - fn visit_where_predicate_kind(&mut self, kind: &mut WherePredicateKind) { - walk_where_predicate_kind(self, kind) - } - - fn visit_vis(&mut self, vis: &mut Visibility) { - walk_vis(self, vis); - } - - fn visit_id(&mut self, _id: &mut NodeId) { - // Do nothing. 
- } - - // Span visiting is no longer used, but we keep it for now, - // in case it's needed for something like #127241. - fn visit_span(&mut self, _sp: &mut Span) { - // Do nothing. - } - - fn visit_pat_field(&mut self, fp: &mut PatField) { - walk_pat_field(self, fp) - } - - fn flat_map_pat_field(&mut self, fp: PatField) -> SmallVec<[PatField; 1]> { - walk_flat_map_pat_field(self, fp) - } - - fn visit_inline_asm(&mut self, asm: &mut InlineAsm) { - walk_inline_asm(self, asm) - } - - fn visit_inline_asm_sym(&mut self, sym: &mut InlineAsmSym) { - walk_inline_asm_sym(self, sym) - } - - fn visit_format_args(&mut self, fmt: &mut FormatArgs) { - walk_format_args(self, fmt) - } - - fn visit_capture_by(&mut self, capture_by: &mut CaptureBy) { - walk_capture_by(self, capture_by) - } - - fn visit_fn_ret_ty(&mut self, fn_ret_ty: &mut FnRetTy) { - walk_fn_ret_ty(self, fn_ret_ty) - } -} - super::common_visitor_and_walkers!((mut) MutVisitor); macro_rules! generate_flat_map_visitor_fns { @@ -398,22 +70,6 @@ generate_flat_map_visitor_fns! { visit_arms, Arm, flat_map_arm; } -#[inline] -fn visit_thin_vec<T, F>(elems: &mut ThinVec<T>, mut visit_elem: F) -where - F: FnMut(&mut T), -{ - for elem in elems { - visit_elem(elem); - } -} - -fn visit_attrs<T: MutVisitor>(vis: &mut T, attrs: &mut AttrVec) { - for attr in attrs.iter_mut() { - vis.visit_attribute(attr); - } -} - pub fn walk_flat_map_pat_field<T: MutVisitor>( vis: &mut T, mut fp: PatField, @@ -431,47 +87,26 @@ fn visit_nested_use_tree<V: MutVisitor>( vis.visit_use_tree(nested_tree); } -pub fn walk_flat_map_arm<T: MutVisitor>(vis: &mut T, mut arm: Arm) -> SmallVec<[Arm; 1]> { - vis.visit_arm(&mut arm); - smallvec![arm] -} - -pub fn walk_flat_map_variant<T: MutVisitor>( - vis: &mut T, - mut variant: Variant, -) -> SmallVec<[Variant; 1]> { - vis.visit_variant(&mut variant); - smallvec![variant] -} - -fn walk_meta_list_item<T: MutVisitor>(vis: &mut T, li: &mut MetaItemInner) { - match li { - MetaItemInner::MetaItem(mi) => vis.visit_meta_item(mi), - MetaItemInner::Lit(_lit) => {} - } -} - -fn walk_meta_item<T: MutVisitor>(vis: &mut T, mi: &mut MetaItem) { - let MetaItem { unsafety: _, path: _, kind, span } = mi; - match kind { - MetaItemKind::Word => {} - MetaItemKind::List(mis) => visit_thin_vec(mis, |mi| vis.visit_meta_list_item(mi)), - MetaItemKind::NameValue(_s) => {} - } - vis.visit_span(span); -} - -pub fn walk_flat_map_param<T: MutVisitor>(vis: &mut T, mut param: Param) -> SmallVec<[Param; 1]> { - vis.visit_param(&mut param); - smallvec![param] +macro_rules! generate_walk_flat_map_fns { + ($($fn_name:ident($Ty:ty$(,$extra_name:ident: $ExtraTy:ty)*) => $visit_fn_name:ident;)+) => {$( + pub fn $fn_name<V: MutVisitor>(vis: &mut V, mut value: $Ty$(,$extra_name: $ExtraTy)*) -> SmallVec<[$Ty; 1]> { + vis.$visit_fn_name(&mut value$(,$extra_name)*); + smallvec![value] + } + )+}; } -pub fn walk_flat_map_generic_param<T: MutVisitor>( - vis: &mut T, - mut param: GenericParam, -) -> SmallVec<[GenericParam; 1]> { - vis.visit_generic_param(&mut param); - smallvec![param] +generate_walk_flat_map_fns! 
{ + walk_flat_map_arm(Arm) => visit_arm; + walk_flat_map_variant(Variant) => visit_variant; + walk_flat_map_param(Param) => visit_param; + walk_flat_map_generic_param(GenericParam) => visit_generic_param; + walk_flat_map_where_predicate(WherePredicate) => visit_where_predicate; + walk_flat_map_field_def(FieldDef) => visit_field_def; + walk_flat_map_expr_field(ExprField) => visit_expr_field; + walk_flat_map_item(P<Item>) => visit_item; + walk_flat_map_foreign_item(P<ForeignItem>) => visit_foreign_item; + walk_flat_map_assoc_item(P<AssocItem>, ctxt: AssocCtxt) => visit_assoc_item; } fn walk_ty_alias_where_clauses<T: MutVisitor>(vis: &mut T, tawcs: &mut TyAliasWhereClauses) { @@ -482,63 +117,6 @@ fn walk_ty_alias_where_clauses<T: MutVisitor>(vis: &mut T, tawcs: &mut TyAliasWh vis.visit_span(span_after); } -pub fn walk_flat_map_where_predicate<T: MutVisitor>( - vis: &mut T, - mut pred: WherePredicate, -) -> SmallVec<[WherePredicate; 1]> { - walk_where_predicate(vis, &mut pred); - smallvec![pred] -} - -pub fn walk_flat_map_field_def<T: MutVisitor>( - vis: &mut T, - mut fd: FieldDef, -) -> SmallVec<[FieldDef; 1]> { - vis.visit_field_def(&mut fd); - smallvec![fd] -} - -pub fn walk_flat_map_expr_field<T: MutVisitor>( - vis: &mut T, - mut f: ExprField, -) -> SmallVec<[ExprField; 1]> { - vis.visit_expr_field(&mut f); - smallvec![f] -} - -pub fn walk_item_kind<K: WalkItemKind>( - kind: &mut K, - span: Span, - id: NodeId, - visibility: &mut Visibility, - ctxt: K::Ctxt, - vis: &mut impl MutVisitor, -) { - kind.walk(span, id, visibility, ctxt, vis) -} - -pub fn walk_flat_map_item(vis: &mut impl MutVisitor, mut item: P<Item>) -> SmallVec<[P<Item>; 1]> { - vis.visit_item(&mut item); - smallvec![item] -} - -pub fn walk_flat_map_foreign_item( - vis: &mut impl MutVisitor, - mut item: P<ForeignItem>, -) -> SmallVec<[P<ForeignItem>; 1]> { - vis.visit_foreign_item(&mut item); - smallvec![item] -} - -pub fn walk_flat_map_assoc_item( - vis: &mut impl MutVisitor, - mut item: P<AssocItem>, - ctxt: AssocCtxt, -) -> SmallVec<[P<AssocItem>; 1]> { - vis.visit_assoc_item(&mut item, ctxt); - smallvec![item] -} - pub fn walk_filter_map_expr<T: MutVisitor>(vis: &mut T, mut e: P<Expr>) -> Option<P<Expr>> { vis.visit_expr(&mut e); Some(e) @@ -576,35 +154,11 @@ fn walk_flat_map_stmt_kind<T: MutVisitor>(vis: &mut T, kind: StmtKind) -> SmallV StmtKind::Empty => smallvec![StmtKind::Empty], StmtKind::MacCall(mut mac) => { let MacCallStmt { mac: mac_, style: _, attrs, tokens: _ } = mac.deref_mut(); - visit_attrs(vis, attrs); + for attr in attrs { + vis.visit_attribute(attr); + } vis.visit_mac_call(mac_); smallvec![StmtKind::MacCall(mac)] } } } - -fn walk_capture_by<T: MutVisitor>(vis: &mut T, capture_by: &mut CaptureBy) { - match capture_by { - CaptureBy::Ref => {} - CaptureBy::Value { move_kw } => { - vis.visit_span(move_kw); - } - CaptureBy::Use { use_kw } => { - vis.visit_span(use_kw); - } - } -} - -#[derive(Debug)] -pub enum FnKind<'a> { - /// E.g., `fn foo()`, `fn foo(&self)`, or `extern "Abi" fn foo()`. - Fn(FnCtxt, &'a mut Visibility, &'a mut Fn), - - /// E.g., `|x, y| body`. 
- Closure( - &'a mut ClosureBinder, - &'a mut Option<CoroutineKind>, - &'a mut P<FnDecl>, - &'a mut P<Expr>, - ), -} diff --git a/compiler/rustc_ast/src/visit.rs b/compiler/rustc_ast/src/visit.rs index bd2ab34bfc1..d0c2b2bf68b 100644 --- a/compiler/rustc_ast/src/visit.rs +++ b/compiler/rustc_ast/src/visit.rs @@ -66,45 +66,6 @@ impl BoundKind { } #[derive(Copy, Clone, Debug)] -pub enum FnKind<'a> { - /// E.g., `fn foo()`, `fn foo(&self)`, or `extern "Abi" fn foo()`. - Fn(FnCtxt, &'a Visibility, &'a Fn), - - /// E.g., `|x, y| body`. - Closure(&'a ClosureBinder, &'a Option<CoroutineKind>, &'a FnDecl, &'a Expr), -} - -impl<'a> FnKind<'a> { - pub fn header(&self) -> Option<&'a FnHeader> { - match *self { - FnKind::Fn(_, _, Fn { sig, .. }) => Some(&sig.header), - FnKind::Closure(..) => None, - } - } - - pub fn ident(&self) -> Option<&Ident> { - match self { - FnKind::Fn(_, _, Fn { ident, .. }) => Some(ident), - _ => None, - } - } - - pub fn decl(&self) -> &'a FnDecl { - match self { - FnKind::Fn(_, _, Fn { sig, .. }) => &sig.decl, - FnKind::Closure(_, _, decl, _) => decl, - } - } - - pub fn ctxt(&self) -> Option<FnCtxt> { - match self { - FnKind::Fn(ctxt, ..) => Some(*ctxt), - FnKind::Closure(..) => None, - } - } -} - -#[derive(Copy, Clone, Debug)] pub enum LifetimeCtxt { /// Appears in a reference type. Ref, @@ -114,206 +75,405 @@ pub enum LifetimeCtxt { GenericArg, } -/// Each method of the `Visitor` trait is a hook to be potentially -/// overridden. Each method's default implementation recursively visits -/// the substructure of the input via the corresponding `walk` method; -/// e.g., the `visit_item` method by default calls `visit::walk_item`. -/// -/// If you want to ensure that your code handles every variant -/// explicitly, you need to override each method. (And you also need -/// to monitor future changes to `Visitor` in case a new method with a -/// new default implementation gets introduced.) -/// -/// Every `walk_*` method uses deconstruction to access fields of structs and -/// enums. This will result in a compile error if a field is added, which makes -/// it more likely the appropriate visit call will be added for it. -pub trait Visitor<'ast>: Sized { - /// The result type of the `visit_*` methods. Can be either `()`, - /// or `ControlFlow<T>`. - type Result: VisitorResult = (); - - fn visit_ident(&mut self, _ident: &'ast Ident) -> Self::Result { - Self::Result::output() - } - fn visit_foreign_mod(&mut self, nm: &'ast ForeignMod) -> Self::Result { - walk_foreign_mod(self, nm) - } - fn visit_foreign_item(&mut self, i: &'ast ForeignItem) -> Self::Result { - walk_item(self, i) - } - fn visit_item(&mut self, i: &'ast Item) -> Self::Result { - walk_item(self, i) - } - fn visit_local(&mut self, l: &'ast Local) -> Self::Result { - walk_local(self, l) - } - fn visit_block(&mut self, b: &'ast Block) -> Self::Result { - walk_block(self, b) - } - fn visit_stmt(&mut self, s: &'ast Stmt) -> Self::Result { - walk_stmt(self, s) - } - fn visit_param(&mut self, param: &'ast Param) -> Self::Result { - walk_param(self, param) - } - fn visit_arm(&mut self, a: &'ast Arm) -> Self::Result { - walk_arm(self, a) - } - fn visit_pat(&mut self, p: &'ast Pat) -> Self::Result { - walk_pat(self, p) - } - fn visit_anon_const(&mut self, c: &'ast AnonConst) -> Self::Result { - walk_anon_const(self, c) - } - fn visit_expr(&mut self, ex: &'ast Expr) -> Self::Result { - walk_expr(self, ex) - } - /// This method is a hack to workaround unstable of `stmt_expr_attributes`. 
- /// It can be removed once that feature is stabilized. - fn visit_method_receiver_expr(&mut self, ex: &'ast Expr) -> Self::Result { - self.visit_expr(ex) - } - fn visit_ty(&mut self, t: &'ast Ty) -> Self::Result { - walk_ty(self, t) - } - fn visit_ty_pat(&mut self, t: &'ast TyPat) -> Self::Result { - walk_ty_pat(self, t) - } - fn visit_generic_param(&mut self, param: &'ast GenericParam) -> Self::Result { - walk_generic_param(self, param) - } - fn visit_generics(&mut self, g: &'ast Generics) -> Self::Result { - walk_generics(self, g) - } - fn visit_closure_binder(&mut self, b: &'ast ClosureBinder) -> Self::Result { - walk_closure_binder(self, b) - } - fn visit_contract(&mut self, c: &'ast FnContract) -> Self::Result { - walk_contract(self, c) - } - fn visit_where_predicate(&mut self, p: &'ast WherePredicate) -> Self::Result { - walk_where_predicate(self, p) - } - fn visit_where_predicate_kind(&mut self, k: &'ast WherePredicateKind) -> Self::Result { - walk_where_predicate_kind(self, k) - } - fn visit_fn(&mut self, fk: FnKind<'ast>, _: Span, _: NodeId) -> Self::Result { - walk_fn(self, fk) - } - fn visit_assoc_item(&mut self, i: &'ast AssocItem, ctxt: AssocCtxt) -> Self::Result { - walk_assoc_item(self, i, ctxt) - } - fn visit_trait_ref(&mut self, t: &'ast TraitRef) -> Self::Result { - walk_trait_ref(self, t) - } - fn visit_param_bound(&mut self, bounds: &'ast GenericBound, _ctxt: BoundKind) -> Self::Result { - walk_param_bound(self, bounds) - } - fn visit_precise_capturing_arg(&mut self, arg: &'ast PreciseCapturingArg) -> Self::Result { - walk_precise_capturing_arg(self, arg) - } - fn visit_poly_trait_ref(&mut self, t: &'ast PolyTraitRef) -> Self::Result { - walk_poly_trait_ref(self, t) - } - fn visit_variant_data(&mut self, s: &'ast VariantData) -> Self::Result { - walk_variant_data(self, s) - } - fn visit_field_def(&mut self, s: &'ast FieldDef) -> Self::Result { - walk_field_def(self, s) - } - fn visit_variant(&mut self, v: &'ast Variant) -> Self::Result { - walk_variant(self, v) - } - fn visit_variant_discr(&mut self, discr: &'ast AnonConst) -> Self::Result { - self.visit_anon_const(discr) - } - fn visit_label(&mut self, label: &'ast Label) -> Self::Result { - walk_label(self, label) - } - fn visit_lifetime(&mut self, lifetime: &'ast Lifetime, _: LifetimeCtxt) -> Self::Result { - walk_lifetime(self, lifetime) - } - fn visit_mac_call(&mut self, mac: &'ast MacCall) -> Self::Result { - walk_mac(self, mac) - } - fn visit_id(&mut self, _id: NodeId) -> Self::Result { - Self::Result::output() - } - fn visit_macro_def(&mut self, macro_def: &'ast MacroDef) -> Self::Result { - walk_macro_def(self, macro_def) - } - fn visit_path(&mut self, path: &'ast Path) -> Self::Result { - walk_path(self, path) - } - fn visit_use_tree(&mut self, use_tree: &'ast UseTree) -> Self::Result { - walk_use_tree(self, use_tree) - } - fn visit_nested_use_tree(&mut self, use_tree: &'ast UseTree, id: NodeId) -> Self::Result { - try_visit!(self.visit_id(id)); - self.visit_use_tree(use_tree) - } - fn visit_path_segment(&mut self, path_segment: &'ast PathSegment) -> Self::Result { - walk_path_segment(self, path_segment) - } - fn visit_generic_args(&mut self, generic_args: &'ast GenericArgs) -> Self::Result { - walk_generic_args(self, generic_args) - } - fn visit_generic_arg(&mut self, generic_arg: &'ast GenericArg) -> Self::Result { - walk_generic_arg(self, generic_arg) - } - fn visit_assoc_item_constraint( - &mut self, - constraint: &'ast AssocItemConstraint, - ) -> Self::Result { - walk_assoc_item_constraint(self, 
constraint) - } - fn visit_attribute(&mut self, attr: &'ast Attribute) -> Self::Result { - walk_attribute(self, attr) - } - fn visit_vis(&mut self, vis: &'ast Visibility) -> Self::Result { - walk_vis(self, vis) - } - fn visit_fn_ret_ty(&mut self, ret_ty: &'ast FnRetTy) -> Self::Result { - walk_fn_ret_ty(self, ret_ty) - } - fn visit_fn_header(&mut self, header: &'ast FnHeader) -> Self::Result { - walk_fn_header(self, header) - } - fn visit_expr_field(&mut self, f: &'ast ExprField) -> Self::Result { - walk_expr_field(self, f) - } - fn visit_pat_field(&mut self, fp: &'ast PatField) -> Self::Result { - walk_pat_field(self, fp) - } - fn visit_crate(&mut self, krate: &'ast Crate) -> Self::Result { - walk_crate(self, krate) - } - fn visit_inline_asm(&mut self, asm: &'ast InlineAsm) -> Self::Result { - walk_inline_asm(self, asm) - } - fn visit_format_args(&mut self, fmt: &'ast FormatArgs) -> Self::Result { - walk_format_args(self, fmt) - } - fn visit_inline_asm_sym(&mut self, sym: &'ast InlineAsmSym) -> Self::Result { - walk_inline_asm_sym(self, sym) - } - fn visit_capture_by(&mut self, _capture_by: &'ast CaptureBy) -> Self::Result { - Self::Result::output() - } - fn visit_coroutine_kind(&mut self, coroutine_kind: &'ast CoroutineKind) -> Self::Result { - walk_coroutine_kind(self, coroutine_kind) - } - fn visit_fn_decl(&mut self, fn_decl: &'ast FnDecl) -> Self::Result { - walk_fn_decl(self, fn_decl) - } - fn visit_qself(&mut self, qs: &'ast Option<P<QSelf>>) -> Self::Result { - walk_qself(self, qs) - } -} - #[macro_export] macro_rules! common_visitor_and_walkers { ($(($mut: ident))? $Visitor:ident$(<$lt:lifetime>)?) => { + $(${ignore($lt)} + #[derive(Copy, Clone)] + )? + #[derive(Debug)] + pub enum FnKind<'a> { + /// E.g., `fn foo()`, `fn foo(&self)`, or `extern "Abi" fn foo()`. + Fn(FnCtxt, &'a $($mut)? Visibility, &'a $($mut)? Fn), + + /// E.g., `|x, y| body`. + Closure(&'a $($mut)? ClosureBinder, &'a $($mut)? Option<CoroutineKind>, &'a $($mut)? P<FnDecl>, &'a $($mut)? P<Expr>), + } + + impl<'a> FnKind<'a> { + pub fn header(&'a $($mut)? self) -> Option<&'a $($mut)? FnHeader> { + match *self { + FnKind::Fn(_, _, Fn { sig, .. }) => Some(&$($mut)? sig.header), + FnKind::Closure(..) => None, + } + } + + pub fn ident(&'a $($mut)? self) -> Option<&'a $($mut)? Ident> { + match self { + FnKind::Fn(_, _, Fn { ident, .. }) => Some(ident), + _ => None, + } + } + + pub fn decl(&'a $($mut)? self) -> &'a $($mut)? FnDecl { + match self { + FnKind::Fn(_, _, Fn { sig, .. }) => &$($mut)? sig.decl, + FnKind::Closure(_, _, decl, _) => decl, + } + } + + pub fn ctxt(&self) -> Option<FnCtxt> { + match self { + FnKind::Fn(ctxt, ..) => Some(*ctxt), + FnKind::Closure(..) => None, + } + } + } + + /// Each method of this trait is a hook to be potentially + /// overridden. Each method's default implementation recursively visits + /// the substructure of the input via the corresponding `walk` method; + #[doc = concat!(" e.g., the `visit_item` method by default calls `visit"$(, "_", stringify!($mut))?, "::walk_item`.")] + /// + /// If you want to ensure that your code handles every variant + /// explicitly, you need to override each method. (And you also need + /// to monitor future changes to this trait in case a new method with a + /// new default implementation gets introduced.) + /// + /// Every `walk_*` method uses deconstruction to access fields of structs and + /// enums. This will result in a compile error if a field is added, which makes + /// it more likely the appropriate visit call will be added for it. 
+ pub trait $Visitor<$($lt)?> : Sized $(${ignore($mut)} + MutVisitorResult<Result = ()>)? { + $( + ${ignore($lt)} + /// The result type of the `visit_*` methods. Can be either `()`, + /// or `ControlFlow<T>`. + type Result: VisitorResult = (); + )? + + // Methods in this trait have one of three forms, with the last two forms + // only occuring on `MutVisitor`: + // + // fn visit_t(&mut self, t: &mut T); // common + // fn flat_map_t(&mut self, t: T) -> SmallVec<[T; 1]>; // rare + // fn filter_map_t(&mut self, t: T) -> Option<T>; // rarest + // + // When writing these methods, it is better to use destructuring like this: + // + // fn visit_abc(&mut self, ABC { a, b, c: _ }: &mut ABC) { + // visit_a(a); + // visit_b(b); + // } + // + // than to use field access like this: + // + // fn visit_abc(&mut self, abc: &mut ABC) { + // visit_a(&mut abc.a); + // visit_b(&mut abc.b); + // // ignore abc.c + // } + // + // As well as being more concise, the former is explicit about which fields + // are skipped. Furthermore, if a new field is added, the destructuring + // version will cause a compile error, which is good. In comparison, the + // field access version will continue working and it would be easy to + // forget to add handling for it. + fn visit_ident(&mut self, Ident { name: _, span }: &$($lt)? $($mut)? Ident) -> Self::Result { + visit_span(self, span) + } + + fn visit_foreign_mod(&mut self, nm: &$($lt)? $($mut)? ForeignMod) -> Self::Result { + walk_foreign_mod(self, nm) + } + + fn visit_foreign_item(&mut self, i: &$($lt)? $($mut)? ForeignItem) -> Self::Result { + walk_item(self, i) + } + + fn visit_item(&mut self, i: &$($lt)? $($mut)? Item) -> Self::Result { + walk_item(self, i) + } + + fn visit_local(&mut self, l: &$($lt)? $($mut)? Local) -> Self::Result { + walk_local(self, l) + } + + fn visit_block(&mut self, b: &$($lt)? $($mut)? Block) -> Self::Result { + walk_block(self, b) + } + + fn visit_param(&mut self, param: &$($lt)? $($mut)? Param) -> Self::Result { + walk_param(self, param) + } + + fn visit_arm(&mut self, a: &$($lt)? $($mut)? Arm) -> Self::Result { + walk_arm(self, a) + } + + fn visit_pat(&mut self, p: &$($lt)? $($mut)? Pat) -> Self::Result { + walk_pat(self, p) + } + + fn visit_anon_const(&mut self, c: &$($lt)? $($mut)? AnonConst) -> Self::Result { + walk_anon_const(self, c) + } + + fn visit_expr(&mut self, ex: &$($lt)? $($mut)? Expr) -> Self::Result { + walk_expr(self, ex) + } + + /// This method is a hack to workaround unstable of `stmt_expr_attributes`. + /// It can be removed once that feature is stabilized. + fn visit_method_receiver_expr(&mut self, ex: &$($lt)? $($mut)? Expr) -> Self::Result { + self.visit_expr(ex) + } + + fn visit_ty(&mut self, t: &$($lt)? $($mut)? Ty) -> Self::Result { + walk_ty(self, t) + } + + fn visit_ty_pat(&mut self, t: &$($lt)? $($mut)? TyPat) -> Self::Result { + walk_ty_pat(self, t) + } + + fn visit_generic_param(&mut self, param: &$($lt)? $($mut)? GenericParam) -> Self::Result { + walk_generic_param(self, param) + } + + fn visit_generics(&mut self, g: &$($lt)? $($mut)? Generics) -> Self::Result { + walk_generics(self, g) + } + fn visit_closure_binder(&mut self, b: &$($lt)? $($mut)? ClosureBinder) -> Self::Result { + walk_closure_binder(self, b) + } + fn visit_contract(&mut self, c: &$($lt)? $($mut)? FnContract) -> Self::Result { + walk_contract(self, c) + } + + fn visit_where_predicate(&mut self, p: &$($lt)? $($mut)? WherePredicate) -> Self::Result { + walk_where_predicate(self, p) + } + + fn visit_where_predicate_kind(&mut self, k: &$($lt)? 
$($mut)? WherePredicateKind) -> Self::Result { + walk_where_predicate_kind(self, k) + } + + // for `MutVisitor`: `Span` and `NodeId` are mutated at the caller site. + fn visit_fn( + &mut self, + fk: FnKind<$($lt)? $(${ignore($mut)} '_)?>, + _: Span, + _: NodeId + ) -> Self::Result { + walk_fn(self, fk) + } + + fn visit_assoc_item(&mut self, i: &$($lt)? $($mut)? AssocItem, ctxt: AssocCtxt) -> Self::Result { + walk_assoc_item(self, i, ctxt) + } + + fn visit_trait_ref(&mut self, t: &$($lt)? $($mut)? TraitRef) -> Self::Result { + walk_trait_ref(self, t) + } + + fn visit_param_bound(&mut self, bounds: &$($lt)? $($mut)? GenericBound, _ctxt: BoundKind) -> Self::Result { + walk_param_bound(self, bounds) + } + + fn visit_precise_capturing_arg(&mut self, arg: &$($lt)? $($mut)? PreciseCapturingArg) -> Self::Result { + walk_precise_capturing_arg(self, arg) + } + + fn visit_poly_trait_ref(&mut self, t: &$($lt)? $($mut)? PolyTraitRef) -> Self::Result { + walk_poly_trait_ref(self, t) + } + + fn visit_variant_data(&mut self, s: &$($lt)? $($mut)? VariantData) -> Self::Result { + walk_variant_data(self, s) + } + + fn visit_field_def(&mut self, s: &$($lt)? $($mut)? FieldDef) -> Self::Result { + walk_field_def(self, s) + } + + fn visit_variant(&mut self, v: &$($lt)? $($mut)? Variant) -> Self::Result { + walk_variant(self, v) + } + + fn visit_label(&mut self, label: &$($lt)? $($mut)? Label) -> Self::Result { + walk_label(self, label) + } + + fn visit_lifetime(&mut self, lifetime: &$($lt)? $($mut)? Lifetime, $(${ignore($lt)} _: LifetimeCtxt )?) -> Self::Result { + walk_lifetime(self, lifetime) + } + + fn visit_mac_call(&mut self, mac: &$($lt)? $($mut)? MacCall) -> Self::Result { + walk_mac(self, mac) + } + + fn visit_id(&mut self, _id: $(&$mut)? NodeId) -> Self::Result { + Self::Result::output() + } + + fn visit_macro_def(&mut self, macro_def: &$($lt)? $($mut)? MacroDef) -> Self::Result { + walk_macro_def(self, macro_def) + } + + fn visit_path(&mut self, path: &$($lt)? $($mut)? Path) -> Self::Result { + walk_path(self, path) + } + + fn visit_use_tree(&mut self, use_tree: &$($lt)? $($mut)? UseTree) -> Self::Result { + walk_use_tree(self, use_tree) + } + + fn visit_path_segment(&mut self, path_segment: &$($lt)? $($mut)? PathSegment) -> Self::Result { + walk_path_segment(self, path_segment) + } + + fn visit_generic_args(&mut self, generic_args: &$($lt)? $($mut)? GenericArgs) -> Self::Result { + walk_generic_args(self, generic_args) + } + + fn visit_generic_arg(&mut self, generic_arg: &$($lt)? $($mut)? GenericArg) -> Self::Result { + walk_generic_arg(self, generic_arg) + } + + fn visit_assoc_item_constraint( + &mut self, + constraint: &$($lt)? $($mut)? AssocItemConstraint, + ) -> Self::Result { + walk_assoc_item_constraint(self, constraint) + } + + fn visit_attribute(&mut self, attr: &$($lt)? $($mut)? Attribute) -> Self::Result { + walk_attribute(self, attr) + } + + fn visit_vis(&mut self, vis: &$($lt)? $($mut)? Visibility) -> Self::Result { + walk_vis(self, vis) + } + + fn visit_fn_ret_ty(&mut self, ret_ty: &$($lt)? $($mut)? FnRetTy) -> Self::Result { + walk_fn_ret_ty(self, ret_ty) + } + + fn visit_fn_header(&mut self, header: &$($lt)? $($mut)? FnHeader) -> Self::Result { + walk_fn_header(self, header) + } + + fn visit_expr_field(&mut self, f: &$($lt)? $($mut)? ExprField) -> Self::Result { + walk_expr_field(self, f) + } + + fn visit_pat_field(&mut self, fp: &$($lt)? $($mut)? PatField) -> Self::Result { + walk_pat_field(self, fp) + } + + fn visit_crate(&mut self, krate: &$($lt)? $($mut)? 
Crate) -> Self::Result { + walk_crate(self, krate) + } + + fn visit_inline_asm(&mut self, asm: &$($lt)? $($mut)? InlineAsm) -> Self::Result { + walk_inline_asm(self, asm) + } + + fn visit_format_args(&mut self, fmt: &$($lt)? $($mut)? FormatArgs) -> Self::Result { + walk_format_args(self, fmt) + } + + fn visit_inline_asm_sym(&mut self, sym: &$($lt)? $($mut)? InlineAsmSym) -> Self::Result { + walk_inline_asm_sym(self, sym) + } + + fn visit_capture_by(&mut self, capture_by: &$($lt)? $($mut)? CaptureBy) -> Self::Result { + walk_capture_by(self, capture_by) + } + + fn visit_coroutine_kind(&mut self, coroutine_kind: &$($lt)? $($mut)? CoroutineKind) -> Self::Result { + walk_coroutine_kind(self, coroutine_kind) + } + + fn visit_fn_decl(&mut self, fn_decl: &$($lt)? $($mut)? FnDecl) -> Self::Result { + walk_fn_decl(self, fn_decl) + } + + fn visit_qself(&mut self, qs: &$($lt)? $($mut)? Option<P<QSelf>>) -> Self::Result { + walk_qself(self, qs) + } + + // (non-mut) `Visitor`-only methods + $( + fn visit_stmt(&mut self, s: &$lt Stmt) -> Self::Result { + walk_stmt(self, s) + } + + fn visit_nested_use_tree(&mut self, use_tree: &$lt UseTree, id: NodeId) -> Self::Result { + try_visit!(self.visit_id(id)); + self.visit_use_tree(use_tree) + } + )? + + // `MutVisitor`-only methods + $( + fn flat_map_foreign_item(&mut self, ni: P<ForeignItem>) -> SmallVec<[P<ForeignItem>; 1]> { + walk_flat_map_foreign_item(self, ni) + } + + fn flat_map_item(&mut self, i: P<Item>) -> SmallVec<[P<Item>; 1]> { + walk_flat_map_item(self, i) + } + + fn flat_map_field_def(&mut self, fd: FieldDef) -> SmallVec<[FieldDef; 1]> { + walk_flat_map_field_def(self, fd) + } + + fn flat_map_assoc_item( + &mut self, + i: P<AssocItem>, + ctxt: AssocCtxt, + ) -> SmallVec<[P<AssocItem>; 1]> { + walk_flat_map_assoc_item(self, i, ctxt) + } + + fn flat_map_stmt(&mut self, s: Stmt) -> SmallVec<[Stmt; 1]> { + walk_flat_map_stmt(self, s) + } + + fn flat_map_arm(&mut self, arm: Arm) -> SmallVec<[Arm; 1]> { + walk_flat_map_arm(self, arm) + } + + fn filter_map_expr(&mut self, e: P<Expr>) -> Option<P<Expr>> { + walk_filter_map_expr(self, e) + } + + fn flat_map_variant(&mut self, v: Variant) -> SmallVec<[Variant; 1]> { + walk_flat_map_variant(self, v) + } + + fn flat_map_param(&mut self, param: Param) -> SmallVec<[Param; 1]> { + walk_flat_map_param(self, param) + } + + fn flat_map_generic_param(&mut self, param: GenericParam) -> SmallVec<[GenericParam; 1]> { + walk_flat_map_generic_param(self, param) + } + + fn flat_map_expr_field(&mut self, f: ExprField) -> SmallVec<[ExprField; 1]> { + walk_flat_map_expr_field(self, f) + } + + fn flat_map_where_predicate( + &mut self, + where_predicate: WherePredicate, + ) -> SmallVec<[WherePredicate; 1]> { + walk_flat_map_where_predicate(self, where_predicate) + } + + // Span visiting is no longer used, but we keep it for now, + // in case it's needed for something like #127241. + fn visit_span(&mut self, _sp: &$mut Span) { + // Do nothing. + } + + fn flat_map_pat_field(&mut self, fp: PatField) -> SmallVec<[PatField; 1]> { + walk_flat_map_pat_field(self, fp) + } + )? + } + pub trait WalkItemKind { type Ctxt; fn walk<$($lt,)? V: $Visitor$(<$lt>)?>( @@ -409,6 +569,24 @@ macro_rules! common_visitor_and_walkers { V::Result::output() } + $(${ignore($lt)} + #[inline] + )? + fn walk_capture_by<$($lt,)? V: $Visitor$(<$lt>)?>( + vis: &mut V, + capture_by: &$($lt)? $($mut)? 
CaptureBy + ) -> V::Result { + match capture_by { + CaptureBy::Ref => { V::Result::output() } + CaptureBy::Value { move_kw } => { + visit_span(vis, move_kw) + } + CaptureBy::Use { use_kw } => { + visit_span(vis, use_kw) + } + } + } + fn visit_bounds<$($lt,)? V: $Visitor$(<$lt>)?>(visitor: &mut V, bounds: &$($lt)? $($mut)? GenericBounds, ctxt: BoundKind) -> V::Result { walk_list!(visitor, visit_param_bound, bounds, ctxt); V::Result::output() @@ -989,8 +1167,7 @@ macro_rules! common_visitor_and_walkers { try_visit!(vis.visit_vis(visibility)); try_visit!(vis.visit_ident(ident)); try_visit!(vis.visit_variant_data(data)); - $(${ignore($lt)} visit_opt!(vis, visit_variant_discr, disr_expr); )? - $(${ignore($mut)} visit_opt!(vis, visit_anon_const, disr_expr); )? + visit_opt!(vis, visit_anon_const, disr_expr); visit_span(vis, span) } @@ -1389,7 +1566,7 @@ macro_rules! common_visitor_and_walkers { // FIXME: visit the template exhaustively. pub fn walk_format_args<$($lt,)? V: $Visitor$(<$lt>)?>(vis: &mut V, fmt: &$($lt)? $($mut)? FormatArgs) -> V::Result { - let FormatArgs { span, template: _, arguments, uncooked_fmt_str: _ } = fmt; + let FormatArgs { span, template: _, arguments, uncooked_fmt_str: _, is_source_literal: _ } = fmt; let args = $(${ignore($mut)} arguments.all_args_mut())? $(${ignore($lt)} arguments.all_args())? ; for FormatArgument { kind, expr } in args { match kind { diff --git a/compiler/rustc_ast_lowering/messages.ftl b/compiler/rustc_ast_lowering/messages.ftl index 50eb7c7ae99..c6472fd45fa 100644 --- a/compiler/rustc_ast_lowering/messages.ftl +++ b/compiler/rustc_ast_lowering/messages.ftl @@ -172,9 +172,6 @@ ast_lowering_template_modifier = template modifier ast_lowering_this_not_async = this is not `async` -ast_lowering_underscore_array_length_unstable = - using `_` for array lengths is unstable - ast_lowering_underscore_expr_lhs_assign = in expressions, `_` can only be used on the left-hand side of an assignment .label = `_` not allowed here diff --git a/compiler/rustc_ast_lowering/src/expr.rs b/compiler/rustc_ast_lowering/src/expr.rs index 718edad0cc6..f297bf9f4cf 100644 --- a/compiler/rustc_ast_lowering/src/expr.rs +++ b/compiler/rustc_ast_lowering/src/expr.rs @@ -2289,12 +2289,12 @@ impl<'hir> LoweringContext<'_, 'hir> { span: Span, elements: &'hir [hir::Expr<'hir>], ) -> hir::Expr<'hir> { - let addrof = hir::ExprKind::AddrOf( - hir::BorrowKind::Ref, - hir::Mutability::Not, - self.arena.alloc(self.expr(span, hir::ExprKind::Array(elements))), - ); - self.expr(span, addrof) + let array = self.arena.alloc(self.expr(span, hir::ExprKind::Array(elements))); + self.expr_ref(span, array) + } + + pub(super) fn expr_ref(&mut self, span: Span, expr: &'hir hir::Expr<'hir>) -> hir::Expr<'hir> { + self.expr(span, hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Not, expr)) } pub(super) fn expr(&mut self, span: Span, kind: hir::ExprKind<'hir>) -> hir::Expr<'hir> { diff --git a/compiler/rustc_ast_lowering/src/format.rs b/compiler/rustc_ast_lowering/src/format.rs index 17b443b8ecc..943cde90dd2 100644 --- a/compiler/rustc_ast_lowering/src/format.rs +++ b/compiler/rustc_ast_lowering/src/format.rs @@ -1,12 +1,10 @@ -use core::ops::ControlFlow; use std::borrow::Cow; -use rustc_ast::visit::Visitor; use rustc_ast::*; use rustc_data_structures::fx::FxIndexMap; use rustc_hir as hir; use rustc_session::config::FmtDebug; -use rustc_span::{Ident, Span, Symbol, sym}; +use rustc_span::{DesugaringKind, Ident, Span, Symbol, sym}; use super::LoweringContext; @@ -16,6 +14,13 @@ impl<'hir> 
LoweringContext<'_, 'hir> { // format_args!() had any arguments _before_ flattening/inlining. let allow_const = fmt.arguments.all_args().is_empty(); let mut fmt = Cow::Borrowed(fmt); + + let sp = self.mark_span_with_reason( + DesugaringKind::FormatLiteral { source: fmt.is_source_literal }, + sp, + sp.ctxt().outer_expn_data().allow_internal_unstable, + ); + if self.tcx.sess.opts.unstable_opts.flatten_format_args { fmt = flatten_format_args(fmt); fmt = self.inline_literals(fmt); @@ -476,77 +481,52 @@ fn expand_format_args<'hir>( return hir::ExprKind::Call(new, new_args); } - // If the args array contains exactly all the original arguments once, - // in order, we can use a simple array instead of a `match` construction. - // However, if there's a yield point in any argument except the first one, - // we don't do this, because an Argument cannot be kept across yield points. - // - // This is an optimization, speeding up compilation about 1-2% in some cases. - // See https://github.com/rust-lang/rust/pull/106770#issuecomment-1380790609 - let use_simple_array = argmap.len() == arguments.len() - && argmap.iter().enumerate().all(|(i, (&(j, _), _))| i == j) - && arguments.iter().skip(1).all(|arg| !may_contain_yield_point(&arg.expr)); - - let args = if arguments.is_empty() { + let (let_statements, args) = if arguments.is_empty() { // Generate: - // &<core::fmt::Argument>::none() - // - // Note: - // `none()` just returns `[]`. We use `none()` rather than `[]` to limit the lifetime. - // - // This makes sure that this still fails to compile, even when the argument is inlined: + // [] + (vec![], ctx.arena.alloc(ctx.expr(macsp, hir::ExprKind::Array(&[])))) + } else if argmap.len() == 1 && arguments.len() == 1 { + // Only one argument, so we don't need to make the `args` tuple. // - // ``` - // let f = format_args!("{}", "a"); - // println!("{f}"); // error E0716 - // ``` - // - // Cases where keeping the object around is allowed, such as `format_args!("a")`, - // are handled above by the `allow_const` case. 
- let none_fn = ctx.arena.alloc(ctx.expr_lang_item_type_relative( - macsp, - hir::LangItem::FormatArgument, - sym::none, - )); - let none = ctx.expr_call(macsp, none_fn, &[]); - ctx.expr(macsp, hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Not, none)) - } else if use_simple_array { // Generate: - // &[ - // <core::fmt::Argument>::new_display(&arg0), - // <core::fmt::Argument>::new_lower_hex(&arg1), - // <core::fmt::Argument>::new_debug(&arg2), - // … - // ] - let elements = ctx.arena.alloc_from_iter(arguments.iter().zip(argmap).map( - |(arg, ((_, ty), placeholder_span))| { + // super let args = [<core::fmt::Argument>::new_display(&arg)]; + let args = ctx.arena.alloc_from_iter(argmap.iter().map( + |(&(arg_index, ty), &placeholder_span)| { + let arg = &arguments[arg_index]; let placeholder_span = placeholder_span.unwrap_or(arg.expr.span).with_ctxt(macsp.ctxt()); - let arg_span = match arg.kind { - FormatArgumentKind::Captured(_) => placeholder_span, - _ => arg.expr.span.with_ctxt(macsp.ctxt()), - }; let arg = ctx.lower_expr(&arg.expr); - let ref_arg = ctx.arena.alloc(ctx.expr( - arg_span, - hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Not, arg), - )); + let ref_arg = ctx.arena.alloc(ctx.expr_ref(arg.span.with_ctxt(macsp.ctxt()), arg)); make_argument(ctx, placeholder_span, ref_arg, ty) }, )); - ctx.expr_array_ref(macsp, elements) + let args = ctx.arena.alloc(ctx.expr(macsp, hir::ExprKind::Array(args))); + let args_ident = Ident::new(sym::args, macsp); + let (args_pat, args_hir_id) = ctx.pat_ident(macsp, args_ident); + let let_statement = ctx.stmt_super_let_pat(macsp, args_pat, Some(args)); + (vec![let_statement], ctx.arena.alloc(ctx.expr_ident_mut(macsp, args_ident, args_hir_id))) } else { // Generate: - // &match (&arg0, &arg1, &…) { - // args => [ - // <core::fmt::Argument>::new_display(args.0), - // <core::fmt::Argument>::new_lower_hex(args.1), - // <core::fmt::Argument>::new_debug(args.0), - // … - // ] - // } + // super let args = (&arg0, &arg1, &…); let args_ident = Ident::new(sym::args, macsp); let (args_pat, args_hir_id) = ctx.pat_ident(macsp, args_ident); + let elements = ctx.arena.alloc_from_iter(arguments.iter().map(|arg| { + let arg_expr = ctx.lower_expr(&arg.expr); + ctx.expr( + arg.expr.span.with_ctxt(macsp.ctxt()), + hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Not, arg_expr), + ) + })); + let args_tuple = ctx.arena.alloc(ctx.expr(macsp, hir::ExprKind::Tup(elements))); + let let_statement_1 = ctx.stmt_super_let_pat(macsp, args_pat, Some(args_tuple)); + + // Generate: + // super let args = [ + // <core::fmt::Argument>::new_display(args.0), + // <core::fmt::Argument>::new_lower_hex(args.1), + // <core::fmt::Argument>::new_debug(args.0), + // … + // ]; let args = ctx.arena.alloc_from_iter(argmap.iter().map( |(&(arg_index, ty), &placeholder_span)| { let arg = &arguments[arg_index]; @@ -567,58 +547,47 @@ fn expand_format_args<'hir>( make_argument(ctx, placeholder_span, arg, ty) }, )); - let elements = ctx.arena.alloc_from_iter(arguments.iter().map(|arg| { - let arg_expr = ctx.lower_expr(&arg.expr); - ctx.expr( - arg.expr.span.with_ctxt(macsp.ctxt()), - hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Not, arg_expr), - ) - })); - let args_tuple = ctx.arena.alloc(ctx.expr(macsp, hir::ExprKind::Tup(elements))); - let array = ctx.arena.alloc(ctx.expr(macsp, hir::ExprKind::Array(args))); - let match_arms = ctx.arena.alloc_from_iter([ctx.arm(args_pat, array)]); - let match_expr = ctx.arena.alloc(ctx.expr_match( - macsp, - 
args_tuple, - match_arms, - hir::MatchSource::FormatArgs, - )); - ctx.expr( - macsp, - hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Not, match_expr), + let args = ctx.arena.alloc(ctx.expr(macsp, hir::ExprKind::Array(args))); + let (args_pat, args_hir_id) = ctx.pat_ident(macsp, args_ident); + let let_statement_2 = ctx.stmt_super_let_pat(macsp, args_pat, Some(args)); + ( + vec![let_statement_1, let_statement_2], + ctx.arena.alloc(ctx.expr_ident_mut(macsp, args_ident, args_hir_id)), ) }; - if let Some(format_options) = format_options { + // Generate: + // &args + let args = ctx.expr_ref(macsp, args); + + let call = if let Some(format_options) = format_options { // Generate: - // <core::fmt::Arguments>::new_v1_formatted( - // lit_pieces, - // args, - // format_options, - // unsafe { ::core::fmt::UnsafeArg::new() } - // ) + // unsafe { + // <core::fmt::Arguments>::new_v1_formatted( + // lit_pieces, + // args, + // format_options, + // ) + // } let new_v1_formatted = ctx.arena.alloc(ctx.expr_lang_item_type_relative( macsp, hir::LangItem::FormatArguments, sym::new_v1_formatted, )); - let unsafe_arg_new = ctx.arena.alloc(ctx.expr_lang_item_type_relative( - macsp, - hir::LangItem::FormatUnsafeArg, - sym::new, - )); - let unsafe_arg_new_call = ctx.expr_call(macsp, unsafe_arg_new, &[]); + let args = ctx.arena.alloc_from_iter([lit_pieces, args, format_options]); + let call = ctx.expr_call(macsp, new_v1_formatted, args); let hir_id = ctx.next_id(); - let unsafe_arg = ctx.expr_block(ctx.arena.alloc(hir::Block { - stmts: &[], - expr: Some(unsafe_arg_new_call), - hir_id, - rules: hir::BlockCheckMode::UnsafeBlock(hir::UnsafeSource::CompilerGenerated), - span: macsp, - targeted_by_break: false, - })); - let args = ctx.arena.alloc_from_iter([lit_pieces, args, format_options, unsafe_arg]); - hir::ExprKind::Call(new_v1_formatted, args) + hir::ExprKind::Block( + ctx.arena.alloc(hir::Block { + stmts: &[], + expr: Some(call), + hir_id, + rules: hir::BlockCheckMode::UnsafeBlock(hir::UnsafeSource::CompilerGenerated), + span: macsp, + targeted_by_break: false, + }), + None, + ) } else { // Generate: // <core::fmt::Arguments>::new_v1( @@ -632,35 +601,21 @@ fn expand_format_args<'hir>( )); let new_args = ctx.arena.alloc_from_iter([lit_pieces, args]); hir::ExprKind::Call(new_v1, new_args) - } -} - -fn may_contain_yield_point(e: &ast::Expr) -> bool { - struct MayContainYieldPoint; - - impl Visitor<'_> for MayContainYieldPoint { - type Result = ControlFlow<()>; - - fn visit_expr(&mut self, e: &ast::Expr) -> ControlFlow<()> { - if let ast::ExprKind::Await(_, _) | ast::ExprKind::Yield(_) = e.kind { - ControlFlow::Break(()) - } else { - visit::walk_expr(self, e) - } - } - - fn visit_mac_call(&mut self, _: &ast::MacCall) -> ControlFlow<()> { - // Macros should be expanded at this point. - unreachable!("unexpanded macro in ast lowering"); - } + }; - fn visit_item(&mut self, _: &ast::Item) -> ControlFlow<()> { - // Do not recurse into nested items. 
- ControlFlow::Continue(()) - } + if !let_statements.is_empty() { + // Generate: + // { + // super let … + // super let … + // <core::fmt::Arguments>::new_…(…) + // } + let call = ctx.arena.alloc(ctx.expr(macsp, call)); + let block = ctx.block_all(macsp, ctx.arena.alloc_from_iter(let_statements), Some(call)); + hir::ExprKind::Block(block, None) + } else { + call } - - MayContainYieldPoint.visit_expr(e).is_break() } fn for_all_argument_indexes(template: &mut [FormatArgsPiece], mut f: impl FnMut(&mut usize)) { diff --git a/compiler/rustc_ast_lowering/src/lib.rs b/compiler/rustc_ast_lowering/src/lib.rs index 3b99a653417..26d7c0cd6d3 100644 --- a/compiler/rustc_ast_lowering/src/lib.rs +++ b/compiler/rustc_ast_lowering/src/lib.rs @@ -48,7 +48,7 @@ use rustc_data_structures::sorted_map::SortedMap; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_data_structures::sync::spawn; use rustc_data_structures::tagged_ptr::TaggedRef; -use rustc_errors::{DiagArgFromDisplay, DiagCtxtHandle, StashKey}; +use rustc_errors::{DiagArgFromDisplay, DiagCtxtHandle}; use rustc_hir::def::{DefKind, LifetimeRes, Namespace, PartialRes, PerNS, Res}; use rustc_hir::def_id::{CRATE_DEF_ID, LOCAL_CRATE, LocalDefId}; use rustc_hir::lints::DelayedLint; @@ -60,7 +60,7 @@ use rustc_index::{Idx, IndexSlice, IndexVec}; use rustc_macros::extension; use rustc_middle::span_bug; use rustc_middle::ty::{ResolverAstLowering, TyCtxt}; -use rustc_session::parse::{add_feature_diagnostics, feature_err}; +use rustc_session::parse::add_feature_diagnostics; use rustc_span::symbol::{Ident, Symbol, kw, sym}; use rustc_span::{DUMMY_SP, DesugaringKind, Span}; use smallvec::SmallVec; @@ -2109,15 +2109,6 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { // `ExprKind::Paren(ExprKind::Underscore)` and should also be lowered to `GenericArg::Infer` match c.value.peel_parens().kind { ExprKind::Underscore => { - if !self.tcx.features().generic_arg_infer() { - feature_err( - &self.tcx.sess, - sym::generic_arg_infer, - c.value.span, - fluent_generated::ast_lowering_underscore_array_length_unstable, - ) - .stash(c.value.span, StashKey::UnderscoreForArrayLengths); - } let ct_kind = hir::ConstArgKind::Infer(self.lower_span(c.value.span), ()); self.arena.alloc(hir::ConstArg { hir_id: self.lower_node_id(c.id), kind: ct_kind }) } @@ -2301,6 +2292,26 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { self.stmt(span, hir::StmtKind::Let(self.arena.alloc(local))) } + fn stmt_super_let_pat( + &mut self, + span: Span, + pat: &'hir hir::Pat<'hir>, + init: Option<&'hir hir::Expr<'hir>>, + ) -> hir::Stmt<'hir> { + let hir_id = self.next_id(); + let local = hir::LetStmt { + super_: Some(span), + hir_id, + init, + pat, + els: None, + source: hir::LocalSource::Normal, + span: self.lower_span(span), + ty: None, + }; + self.stmt(span, hir::StmtKind::Let(self.arena.alloc(local))) + } + fn block_expr(&mut self, expr: &'hir hir::Expr<'hir>) -> &'hir hir::Block<'hir> { self.block_all(expr.span, &[], Some(expr)) } diff --git a/compiler/rustc_ast_pretty/src/pprust/state/expr.rs b/compiler/rustc_ast_pretty/src/pprust/state/expr.rs index ee49246a4bb..f6b5ff404db 100644 --- a/compiler/rustc_ast_pretty/src/pprust/state/expr.rs +++ b/compiler/rustc_ast_pretty/src/pprust/state/expr.rs @@ -386,18 +386,44 @@ impl<'a> State<'a> { let ib = self.ibox(INDENT_UNIT); - // The Match subexpression in `match x {} - 1` must be parenthesized if - // it is the leftmost subexpression in a statement: - // - // (match x {}) - 1; - // - // But not otherwise: - // - // let _ = match x {} 
- 1; - // - // Same applies to a small set of other expression kinds which eagerly - // terminate a statement which opens with them. - let needs_par = fixup.would_cause_statement_boundary(expr); + let needs_par = { + // The Match subexpression in `match x {} - 1` must be parenthesized + // if it is the leftmost subexpression in a statement: + // + // (match x {}) - 1; + // + // But not otherwise: + // + // let _ = match x {} - 1; + // + // Same applies to a small set of other expression kinds which + // eagerly terminate a statement which opens with them. + fixup.would_cause_statement_boundary(expr) + } || { + // If a binary operation ends up with an attribute, such as + // resulting from the following macro expansion, then parentheses + // are required so that the attribute encompasses the right + // subexpression and not just the left one. + // + // #![feature(stmt_expr_attributes)] + // + // macro_rules! add_attr { + // ($e:expr) => { #[attr] $e }; + // } + // + // let _ = add_attr!(1 + 1); + // + // We must pretty-print `#[attr] (1 + 1)` not `#[attr] 1 + 1`. + !attrs.is_empty() + && matches!( + expr.kind, + ast::ExprKind::Binary(..) + | ast::ExprKind::Cast(..) + | ast::ExprKind::Assign(..) + | ast::ExprKind::AssignOp(..) + | ast::ExprKind::Range(..) + ) + }; if needs_par { self.popen(); fixup = FixupContext::default(); diff --git a/compiler/rustc_attr_data_structures/src/attributes.rs b/compiler/rustc_attr_data_structures/src/attributes.rs index 8f95a017809..c7487847e6f 100644 --- a/compiler/rustc_attr_data_structures/src/attributes.rs +++ b/compiler/rustc_attr_data_structures/src/attributes.rs @@ -8,7 +8,7 @@ use thin_vec::ThinVec; use crate::{DefaultBodyStability, PartialConstStability, PrintAttribute, RustcVersion, Stability}; -#[derive(Copy, Clone, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)] +#[derive(Copy, Clone, PartialEq, Encodable, Decodable, Debug, HashStable_Generic, PrintAttribute)] pub enum InlineAttr { None, Hint, @@ -38,7 +38,8 @@ pub enum InstructionSetAttr { ArmT32, } -#[derive(Clone, Encodable, Decodable, Debug, PartialEq, Eq, HashStable_Generic, Default)] +#[derive(Copy, Clone, Debug, PartialEq, Eq, Default, PrintAttribute)] +#[derive(Encodable, Decodable, HashStable_Generic)] pub enum OptimizeAttr { /// No `#[optimize(..)]` attribute #[default] @@ -182,6 +183,9 @@ impl Deprecation { #[derive(Clone, Debug, HashStable_Generic, Encodable, Decodable, PrintAttribute)] pub enum AttributeKind { // tidy-alphabetical-start + /// Represents `#[align(N)]`. + Align { align: Align, span: Span }, + /// Represents `#[rustc_allow_const_fn_unstable]`. AllowConstFnUnstable(ThinVec<Symbol>), @@ -198,6 +202,9 @@ pub enum AttributeKind { span: Span, }, + /// Represents `#[cold]`. + Cold(Span), + /// Represents `#[rustc_confusables]`. Confusables { symbols: ThinVec<Symbol>, @@ -221,9 +228,31 @@ pub enum AttributeKind { /// Represents [`#[doc]`](https://doc.rust-lang.org/stable/rustdoc/write-documentation/the-doc-attribute.html). DocComment { style: AttrStyle, kind: CommentKind, span: Span, comment: Symbol }, + /// Represents `#[inline]` and `#[rustc_force_inline]`. + Inline(InlineAttr, Span), + /// Represents `#[rustc_macro_transparency]`. MacroTransparency(Transparency), + /// Represents [`#[may_dangle]`](https://std-dev-guide.rust-lang.org/tricky/may-dangle.html). + MayDangle(Span), + + /// Represents `#[must_use]`. 
+ MustUse { + span: Span, + /// must_use can optionally have a reason: `#[must_use = "reason this must be used"]` + reason: Option<Symbol>, + }, + + /// Represents `#[no_mangle]` + NoMangle(Span), + + /// Represents `#[optimize(size|speed)]` + Optimize(OptimizeAttr, Span), + + /// Represents `#[rustc_pub_transparent]` (used by the `repr_transparent_external_private_fields` lint). + PubTransparent(Span), + /// Represents [`#[repr]`](https://doc.rust-lang.org/stable/reference/type-layout.html#representations). Repr(ThinVec<(ReprAttr, Span)>), diff --git a/compiler/rustc_attr_data_structures/src/lints.rs b/compiler/rustc_attr_data_structures/src/lints.rs index 7e3664b2263..e34c54c6d32 100644 --- a/compiler/rustc_attr_data_structures/src/lints.rs +++ b/compiler/rustc_attr_data_structures/src/lints.rs @@ -11,4 +11,5 @@ pub struct AttributeLint<Id> { #[derive(Clone, Debug, HashStable_Generic)] pub enum AttributeLintKind { UnusedDuplicate { this: Span, other: Span, warning: bool }, + IllFormedAttributeInput { suggestions: Vec<String> }, } diff --git a/compiler/rustc_attr_parsing/messages.ftl b/compiler/rustc_attr_parsing/messages.ftl index c9443feb021..0891afc003e 100644 --- a/compiler/rustc_attr_parsing/messages.ftl +++ b/compiler/rustc_attr_parsing/messages.ftl @@ -23,8 +23,10 @@ attr_parsing_expects_feature_list = attr_parsing_expects_features = `{$name}` expects feature names -attr_parsing_incorrect_meta_item = expected a quoted string literal -attr_parsing_incorrect_meta_item_suggestion = consider surrounding this with quotes +attr_parsing_ill_formed_attribute_input = {$num_suggestions -> + [1] attribute must be of the form {$suggestions} + *[other] valid forms for the attribute are {$suggestions} + } attr_parsing_incorrect_repr_format_align_one_arg = incorrect `repr(align)` attribute format: `align` takes exactly one argument in parentheses @@ -42,6 +44,9 @@ attr_parsing_incorrect_repr_format_packed_expect_integer = attr_parsing_incorrect_repr_format_packed_one_or_zero_arg = incorrect `repr(packed)` attribute format: `packed` takes exactly one parenthesized argument, or no parentheses at all +attr_parsing_invalid_alignment_value = + invalid alignment value: {$error_part} + attr_parsing_invalid_issue_string = `issue` must be a non-zero numeric string or "none" .must_not_be_zero = `issue` must not be "0", use "none" instead @@ -81,9 +86,6 @@ attr_parsing_missing_note = attr_parsing_missing_since = missing 'since' -attr_parsing_multiple_item = - multiple '{$item}' items - attr_parsing_multiple_stability_levels = multiple stability levels @@ -122,10 +124,6 @@ attr_parsing_unsupported_literal_cfg_boolean = literal in `cfg` predicate value must be a boolean attr_parsing_unsupported_literal_cfg_string = literal in `cfg` predicate value must be a string -attr_parsing_unsupported_literal_deprecated_kv_pair = - item in `deprecated` must be a key/value pair -attr_parsing_unsupported_literal_deprecated_string = - literal in `deprecated` value must be a string attr_parsing_unsupported_literal_generic = unsupported literal attr_parsing_unsupported_literal_suggestion = @@ -136,6 +134,7 @@ attr_parsing_unused_duplicate = .suggestion = remove this attribute .note = attribute also specified here .warn = {-passes_previously_accepted} + attr_parsing_unused_multiple = multiple `{$name}` attributes .suggestion = remove this attribute diff --git a/compiler/rustc_attr_parsing/src/attributes/allow_unstable.rs b/compiler/rustc_attr_parsing/src/attributes/allow_unstable.rs index 81192f902a2..21b01a8d071 100644 --- 
a/compiler/rustc_attr_parsing/src/attributes/allow_unstable.rs +++ b/compiler/rustc_attr_parsing/src/attributes/allow_unstable.rs @@ -1,6 +1,7 @@ use std::iter; use rustc_attr_data_structures::AttributeKind; +use rustc_feature::{AttributeTemplate, template}; use rustc_span::{Span, Symbol, sym}; use super::{CombineAttributeParser, ConvertFn}; @@ -13,6 +14,7 @@ impl<S: Stage> CombineAttributeParser<S> for AllowInternalUnstableParser { const PATH: &[Symbol] = &[sym::allow_internal_unstable]; type Item = (Symbol, Span); const CONVERT: ConvertFn<Self::Item> = AttributeKind::AllowInternalUnstable; + const TEMPLATE: AttributeTemplate = template!(Word, List: "feat1, feat2, ..."); fn extend<'c>( cx: &'c mut AcceptContext<'_, '_, S>, @@ -29,6 +31,7 @@ impl<S: Stage> CombineAttributeParser<S> for AllowConstFnUnstableParser { const PATH: &[Symbol] = &[sym::rustc_allow_const_fn_unstable]; type Item = Symbol; const CONVERT: ConvertFn<Self::Item> = AttributeKind::AllowConstFnUnstable; + const TEMPLATE: AttributeTemplate = template!(Word, List: "feat1, feat2, ..."); fn extend<'c>( cx: &'c mut AcceptContext<'_, '_, S>, diff --git a/compiler/rustc_attr_parsing/src/attributes/codegen_attrs.rs b/compiler/rustc_attr_parsing/src/attributes/codegen_attrs.rs new file mode 100644 index 00000000000..ba4e2935004 --- /dev/null +++ b/compiler/rustc_attr_parsing/src/attributes/codegen_attrs.rs @@ -0,0 +1,76 @@ +use rustc_attr_data_structures::{AttributeKind, OptimizeAttr}; +use rustc_feature::{AttributeTemplate, template}; +use rustc_span::sym; + +use super::{AttributeOrder, OnDuplicate, SingleAttributeParser}; +use crate::context::{AcceptContext, Stage}; +use crate::parser::ArgParser; + +pub(crate) struct OptimizeParser; + +impl<S: Stage> SingleAttributeParser<S> for OptimizeParser { + const PATH: &[rustc_span::Symbol] = &[sym::optimize]; + const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepLast; + const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::WarnButFutureError; + const TEMPLATE: AttributeTemplate = template!(List: "size|speed|none"); + + fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> { + let Some(list) = args.list() else { + cx.expected_list(cx.attr_span); + return None; + }; + + let Some(single) = list.single() else { + cx.expected_single_argument(list.span); + return None; + }; + + let res = match single.meta_item().and_then(|i| i.path().word().map(|i| i.name)) { + Some(sym::size) => OptimizeAttr::Size, + Some(sym::speed) => OptimizeAttr::Speed, + Some(sym::none) => OptimizeAttr::DoNotOptimize, + _ => { + cx.expected_specific_argument(single.span(), vec!["size", "speed", "none"]); + OptimizeAttr::Default + } + }; + + Some(AttributeKind::Optimize(res, cx.attr_span)) + } +} + +pub(crate) struct ColdParser; + +impl<S: Stage> SingleAttributeParser<S> for ColdParser { + const PATH: &[rustc_span::Symbol] = &[sym::cold]; + const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepLast; + const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn; + const TEMPLATE: AttributeTemplate = template!(Word); + + fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> { + if !args.no_args() { + cx.expected_no_args(args.span().unwrap_or(cx.attr_span)); + return None; + }; + + Some(AttributeKind::Cold(cx.attr_span)) + } +} + +pub(crate) struct NoMangleParser; + +impl<S: Stage> SingleAttributeParser<S> for NoMangleParser { + const PATH: &[rustc_span::Symbol] = &[sym::no_mangle]; + const ATTRIBUTE_ORDER: AttributeOrder = 
AttributeOrder::KeepLast; + const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn; + const TEMPLATE: AttributeTemplate = template!(Word); + + fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> { + if !args.no_args() { + cx.expected_no_args(args.span().unwrap_or(cx.attr_span)); + return None; + }; + + Some(AttributeKind::NoMangle(cx.attr_span)) + } +} diff --git a/compiler/rustc_attr_parsing/src/attributes/confusables.rs b/compiler/rustc_attr_parsing/src/attributes/confusables.rs index afd3c012f05..c911908dfb3 100644 --- a/compiler/rustc_attr_parsing/src/attributes/confusables.rs +++ b/compiler/rustc_attr_parsing/src/attributes/confusables.rs @@ -1,4 +1,5 @@ use rustc_attr_data_structures::AttributeKind; +use rustc_feature::template; use rustc_span::{Span, Symbol, sym}; use thin_vec::ThinVec; @@ -13,37 +14,33 @@ pub(crate) struct ConfusablesParser { } impl<S: Stage> AttributeParser<S> for ConfusablesParser { - const ATTRIBUTES: AcceptMapping<Self, S> = &[(&[sym::rustc_confusables], |this, cx, args| { - let Some(list) = args.list() else { - // FIXME(jdonszelmann): error when not a list? Bring validation code here. - // NOTE: currently subsequent attributes are silently ignored using - // tcx.get_attr(). - return; - }; - - if list.is_empty() { - cx.emit_err(session_diagnostics::EmptyConfusables { span: cx.attr_span }); - } - - for param in list.mixed() { - let span = param.span(); - - let Some(lit) = param.lit() else { - cx.emit_err(session_diagnostics::IncorrectMetaItem { - span, - suggestion: Some(session_diagnostics::IncorrectMetaItemSuggestion { - lo: span.shrink_to_lo(), - hi: span.shrink_to_hi(), - }), - }); - continue; + const ATTRIBUTES: AcceptMapping<Self, S> = &[( + &[sym::rustc_confusables], + template!(List: r#""name1", "name2", ..."#), + |this, cx, args| { + let Some(list) = args.list() else { + cx.expected_list(cx.attr_span); + return; }; - this.confusables.push(lit.symbol); - } + if list.is_empty() { + cx.emit_err(session_diagnostics::EmptyConfusables { span: cx.attr_span }); + } + + for param in list.mixed() { + let span = param.span(); + + let Some(lit) = param.lit().and_then(|i| i.value_str()) else { + cx.expected_string_literal(span, param.lit()); + continue; + }; + + this.confusables.push(lit); + } - this.first_span.get_or_insert(cx.attr_span); - })]; + this.first_span.get_or_insert(cx.attr_span); + }, + )]; fn finalize(self, _cx: &FinalizeContext<'_, '_, S>) -> Option<AttributeKind> { if self.confusables.is_empty() { diff --git a/compiler/rustc_attr_parsing/src/attributes/deprecation.rs b/compiler/rustc_attr_parsing/src/attributes/deprecation.rs index 1faee41c2a9..702ad66f578 100644 --- a/compiler/rustc_attr_parsing/src/attributes/deprecation.rs +++ b/compiler/rustc_attr_parsing/src/attributes/deprecation.rs @@ -1,4 +1,5 @@ use rustc_attr_data_structures::{AttributeKind, DeprecatedSince, Deprecation}; +use rustc_feature::{AttributeTemplate, template}; use rustc_span::{Span, Symbol, sym}; use super::util::parse_version; @@ -6,7 +7,6 @@ use super::{AttributeOrder, OnDuplicate, SingleAttributeParser}; use crate::context::{AcceptContext, Stage}; use crate::parser::ArgParser; use crate::session_diagnostics; -use crate::session_diagnostics::UnsupportedLiteralReason; pub(crate) struct DeprecationParser; @@ -18,25 +18,18 @@ fn get<S: Stage>( item: &Option<Symbol>, ) -> Option<Symbol> { if item.is_some() { - cx.emit_err(session_diagnostics::MultipleItem { span: param_span, item: name.to_string() }); + cx.duplicate_key(param_span, name); 
return None; } if let Some(v) = arg.name_value() { if let Some(value_str) = v.value_as_str() { Some(value_str) } else { - let lit = v.value_as_lit(); - cx.emit_err(session_diagnostics::UnsupportedLiteral { - span: v.value_span, - reason: UnsupportedLiteralReason::DeprecatedString, - is_bytestr: lit.kind.is_bytestr(), - start_point_span: cx.sess().source_map().start_point(lit.span), - }); + cx.expected_string_literal(v.value_span, Some(&v.value_as_lit())); None } } else { - // FIXME(jdonszelmann): suggestion? - cx.emit_err(session_diagnostics::IncorrectMetaItem { span: param_span, suggestion: None }); + cx.expected_name_value(param_span, Some(name)); None } } @@ -45,6 +38,11 @@ impl<S: Stage> SingleAttributeParser<S> for DeprecationParser { const PATH: &[Symbol] = &[sym::deprecated]; const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepFirst; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + const TEMPLATE: AttributeTemplate = template!( + Word, + List: r#"/*opt*/ since = "version", /*opt*/ note = "reason""#, + NameValueStr: "reason" + ); fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> { let features = cx.features(); @@ -55,57 +53,60 @@ impl<S: Stage> SingleAttributeParser<S> for DeprecationParser { let is_rustc = features.staged_api(); - if let Some(value) = args.name_value() - && let Some(value_str) = value.value_as_str() - { - note = Some(value_str) - } else if let Some(list) = args.list() { - for param in list.mixed() { - let param_span = param.span(); - let Some(param) = param.meta_item() else { - cx.emit_err(session_diagnostics::UnsupportedLiteral { - span: param_span, - reason: UnsupportedLiteralReason::DeprecatedKvPair, - is_bytestr: false, - start_point_span: cx.sess().source_map().start_point(param_span), - }); - return None; - }; + match args { + ArgParser::NoArgs => { + // ok + } + ArgParser::List(list) => { + for param in list.mixed() { + let Some(param) = param.meta_item() else { + cx.unexpected_literal(param.span()); + return None; + }; - let ident_name = param.path().word_sym(); + let ident_name = param.path().word_sym(); - match ident_name { - Some(name @ sym::since) => { - since = Some(get(cx, name, param_span, param.args(), &since)?); - } - Some(name @ sym::note) => { - note = Some(get(cx, name, param_span, param.args(), ¬e)?); - } - Some(name @ sym::suggestion) => { - if !features.deprecated_suggestion() { - cx.emit_err(session_diagnostics::DeprecatedItemSuggestion { - span: param_span, - is_nightly: cx.sess().is_nightly_build(), - details: (), - }); + match ident_name { + Some(name @ sym::since) => { + since = Some(get(cx, name, param.span(), param.args(), &since)?); + } + Some(name @ sym::note) => { + note = Some(get(cx, name, param.span(), param.args(), ¬e)?); } + Some(name @ sym::suggestion) => { + if !features.deprecated_suggestion() { + cx.emit_err(session_diagnostics::DeprecatedItemSuggestion { + span: param.span(), + is_nightly: cx.sess().is_nightly_build(), + details: (), + }); + } - suggestion = Some(get(cx, name, param_span, param.args(), &suggestion)?); - } - _ => { - cx.emit_err(session_diagnostics::UnknownMetaItem { - span: param_span, - item: param.path().to_string(), - expected: if features.deprecated_suggestion() { - &["since", "note", "suggestion"] - } else { - &["since", "note"] - }, - }); - return None; + suggestion = + Some(get(cx, name, param.span(), param.args(), &suggestion)?); + } + _ => { + cx.unknown_key( + param.span(), + param.path().to_string(), + if 
features.deprecated_suggestion() { + &["since", "note", "suggestion"] + } else { + &["since", "note"] + }, + ); + return None; + } } } } + ArgParser::NameValue(v) => { + let Some(value) = v.value_as_str() else { + cx.expected_string_literal(v.value_span, Some(v.value_as_lit())); + return None; + }; + note = Some(value); + } } let since = if let Some(since) = since { diff --git a/compiler/rustc_attr_parsing/src/attributes/inline.rs b/compiler/rustc_attr_parsing/src/attributes/inline.rs index c7f82082c2e..25efc3ae49b 100644 --- a/compiler/rustc_attr_parsing/src/attributes/inline.rs +++ b/compiler/rustc_attr_parsing/src/attributes/inline.rs @@ -29,7 +29,7 @@ impl<S: Stage> SingleAttributeParser<S> for InlineParser { return None; }; - match l.meta_item().and_then(|i| i.word_without_args().map(|i| i.name)) { + match l.meta_item().and_then(|i| i.path().word_sym()) { Some(sym::always) => { Some(AttributeKind::Inline(InlineAttr::Always, cx.attr_span)) } @@ -63,7 +63,7 @@ impl<S: Stage> SingleAttributeParser<S> for RustcForceInlineParser { const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::WarnButFutureError; const TEMPLATE: AttributeTemplate = template!(Word, List: "reason", NameValueStr: "reason"); - fn convert(cx: &AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> { + fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> { let reason = match args { ArgParser::NoArgs => None, ArgParser::List(list) => { @@ -73,7 +73,7 @@ impl<S: Stage> SingleAttributeParser<S> for RustcForceInlineParser { }; let Some(reason) = l.lit().and_then(|i| i.kind.str()) else { - cx.expected_string_literal(l.span()); + cx.expected_string_literal(l.span(), l.lit()); return None; }; @@ -81,7 +81,7 @@ impl<S: Stage> SingleAttributeParser<S> for RustcForceInlineParser { } ArgParser::NameValue(v) => { let Some(reason) = v.value_as_str() else { - cx.expected_string_literal(v.value_span); + cx.expected_string_literal(v.value_span, Some(v.value_as_lit())); return None; }; diff --git a/compiler/rustc_attr_parsing/src/attributes/lint_helpers.rs b/compiler/rustc_attr_parsing/src/attributes/lint_helpers.rs index 32a20d4c5b5..4cfd9a82ce8 100644 --- a/compiler/rustc_attr_parsing/src/attributes/lint_helpers.rs +++ b/compiler/rustc_attr_parsing/src/attributes/lint_helpers.rs @@ -1,4 +1,5 @@ use rustc_attr_data_structures::AttributeKind; +use rustc_feature::{AttributeTemplate, template}; use rustc_span::{Symbol, sym}; use crate::attributes::{AttributeOrder, OnDuplicate, SingleAttributeParser}; @@ -9,13 +10,25 @@ pub(crate) struct AsPtrParser; impl<S: Stage> SingleAttributeParser<S> for AsPtrParser { const PATH: &[Symbol] = &[sym::rustc_as_ptr]; - const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepFirst; - const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + const TEMPLATE: AttributeTemplate = template!(Word); fn convert(cx: &mut AcceptContext<'_, '_, S>, _args: &ArgParser<'_>) -> Option<AttributeKind> { // FIXME: check that there's no args (this is currently checked elsewhere) Some(AttributeKind::AsPtr(cx.attr_span)) } } + +pub(crate) struct PubTransparentParser; +impl<S: Stage> SingleAttributeParser<S> for PubTransparentParser { + const PATH: &[Symbol] = &[sym::rustc_pub_transparent]; + const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepFirst; + const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error; + const TEMPLATE: AttributeTemplate = template!(Word); + + fn convert(cx: &mut AcceptContext<'_, '_, S>, _args: &ArgParser<'_>) -> Option<AttributeKind> { + 
// FIXME: check that there's no args (this is currently checked elsewhere) + Some(AttributeKind::PubTransparent(cx.attr_span)) + } +} diff --git a/compiler/rustc_attr_parsing/src/attributes/mod.rs b/compiler/rustc_attr_parsing/src/attributes/mod.rs index df488c89a34..3162c1fc727 100644 --- a/compiler/rustc_attr_parsing/src/attributes/mod.rs +++ b/compiler/rustc_attr_parsing/src/attributes/mod.rs @@ -18,6 +18,7 @@ use std::marker::PhantomData; use rustc_attr_data_structures::AttributeKind; use rustc_attr_data_structures::lints::AttributeLintKind; +use rustc_feature::AttributeTemplate; use rustc_span::{Span, Symbol}; use thin_vec::ThinVec; @@ -27,16 +28,20 @@ use crate::session_diagnostics::UnusedMultiple; pub(crate) mod allow_unstable; pub(crate) mod cfg; +pub(crate) mod codegen_attrs; pub(crate) mod confusables; pub(crate) mod deprecation; +pub(crate) mod inline; pub(crate) mod lint_helpers; +pub(crate) mod must_use; pub(crate) mod repr; +pub(crate) mod semantics; pub(crate) mod stability; pub(crate) mod transparency; pub(crate) mod util; type AcceptFn<T, S> = for<'sess> fn(&mut T, &mut AcceptContext<'_, 'sess, S>, &ArgParser<'_>); -type AcceptMapping<T, S> = &'static [(&'static [Symbol], AcceptFn<T, S>)]; +type AcceptMapping<T, S> = &'static [(&'static [Symbol], AttributeTemplate, AcceptFn<T, S>)]; /// An [`AttributeParser`] is a type which searches for syntactic attributes. /// @@ -84,14 +89,30 @@ pub(crate) trait AttributeParser<S: Stage>: Default + 'static { /// [`SingleAttributeParser`] can only convert attributes one-to-one, and cannot combine multiple /// attributes together like is necessary for `#[stable()]` and `#[unstable()]` for example. pub(crate) trait SingleAttributeParser<S: Stage>: 'static { + /// The single path of the attribute this parser accepts. + /// + /// If you need the parser to accept more than one path, use [`AttributeParser`] instead. const PATH: &[Symbol]; + + /// Configures the precedence of attributes with the same `PATH` on a syntax node. const ATTRIBUTE_ORDER: AttributeOrder; + + /// Configures what to do when the same attribute is + /// applied more than once on the same syntax node. + /// + /// [`ATTRIBUTE_ORDER`](Self::ATTRIBUTE_ORDER) specifies which one is assumed to be correct, + /// and this specifies whether to, for example, warn or error on the other one. const ON_DUPLICATE: OnDuplicate<S>; + /// The template this attribute parser should implement. Used for diagnostics. + const TEMPLATE: AttributeTemplate; + /// Converts a single syntactical attribute to a single semantic attribute, or [`AttributeKind`] fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind>; } +/// Use in combination with [`SingleAttributeParser`]. +/// `Single<T: SingleAttributeParser>` implements [`AttributeParser`]. pub(crate) struct Single<T: SingleAttributeParser<S>, S: Stage>( PhantomData<(S, T)>, Option<(AttributeKind, Span)>, ); @@ -104,8 +125,10 @@ impl<T: SingleAttributeParser<S>, S: Stage> Default for Single<T, S> { } impl<T: SingleAttributeParser<S>, S: Stage> AttributeParser<S> for Single<T, S> { - const ATTRIBUTES: AcceptMapping<Self, S> = - &[(T::PATH, |group: &mut Single<T, S>, cx, args| { + const ATTRIBUTES: AcceptMapping<Self, S> = &[( + T::PATH, + <T as SingleAttributeParser<S>>::TEMPLATE, + |group: &mut Single<T, S>, cx, args| { if let Some(pa) = T::convert(cx, args) { match T::ATTRIBUTE_ORDER { // keep the first and report immediately.
ignore this attribute @@ -126,7 +149,8 @@ impl<T: SingleAttributeParser<S>, S: Stage> AttributeParser<S> for Single<T, S> group.1 = Some((pa, cx.attr_span)); } - })]; + }, + )]; fn finalize(self, _cx: &FinalizeContext<'_, '_, S>) -> Option<AttributeKind> { Some(self.1?.0) @@ -221,8 +245,15 @@ pub(crate) trait CombineAttributeParser<S: Stage>: 'static { const PATH: &[rustc_span::Symbol]; type Item; + /// A function that converts individual items (of type [`Item`](Self::Item)) into the final attribute. + /// + /// For example, it combines the individual representations from `#[repr(...)]` attributes into an `AttributeKind::Repr(x)`, + /// where `x` is a vec of these individual reprs. const CONVERT: ConvertFn<Self::Item>; + /// The template this attribute parser should implement. Used for diagnostics. + const TEMPLATE: AttributeTemplate; + /// Converts a single syntactical attribute to a number of elements of the semantic attribute, or [`AttributeKind`] fn extend<'c>( cx: &'c mut AcceptContext<'_, '_, S>, @@ -230,6 +261,8 @@ pub(crate) trait CombineAttributeParser<S: Stage>: 'static { ) -> impl IntoIterator<Item = Self::Item> + 'c; } +/// Use in combination with [`CombineAttributeParser`]. +/// `Combine<T: CombineAttributeParser>` implements [`AttributeParser`]. pub(crate) struct Combine<T: CombineAttributeParser<S>, S: Stage>( PhantomData<(S, T)>, ThinVec<<T as CombineAttributeParser<S>>::Item>, ); @@ -242,8 +275,11 @@ impl<T: CombineAttributeParser<S>, S: Stage> Default for Combine<T, S> { } impl<T: CombineAttributeParser<S>, S: Stage> AttributeParser<S> for Combine<T, S> { - const ATTRIBUTES: AcceptMapping<Self, S> = - &[(T::PATH, |group: &mut Combine<T, S>, cx, args| group.1.extend(T::extend(cx, args)))]; + const ATTRIBUTES: AcceptMapping<Self, S> = &[( + T::PATH, + <T as CombineAttributeParser<S>>::TEMPLATE, + |group: &mut Combine<T, S>, cx, args| group.1.extend(T::extend(cx, args)), + )]; fn finalize(self, _cx: &FinalizeContext<'_, '_, S>) -> Option<AttributeKind> { if self.1.is_empty() { None } else { Some(T::CONVERT(self.1)) } diff --git a/compiler/rustc_attr_parsing/src/attributes/must_use.rs b/compiler/rustc_attr_parsing/src/attributes/must_use.rs new file mode 100644 index 00000000000..a672d956127 --- /dev/null +++ b/compiler/rustc_attr_parsing/src/attributes/must_use.rs @@ -0,0 +1,40 @@ +use rustc_attr_data_structures::AttributeKind; +use rustc_errors::DiagArgValue; +use rustc_feature::{AttributeTemplate, template}; +use rustc_span::{Symbol, sym}; + +use crate::attributes::{AttributeOrder, OnDuplicate, SingleAttributeParser}; +use crate::context::{AcceptContext, Stage}; +use crate::parser::ArgParser; +use crate::session_diagnostics; + +pub(crate) struct MustUseParser; + +impl<S: Stage> SingleAttributeParser<S> for MustUseParser { + const PATH: &[Symbol] = &[sym::must_use]; + const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepLast; + const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::WarnButFutureError; + const TEMPLATE: AttributeTemplate = template!(Word, NameValueStr: "reason"); + + fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> { + Some(AttributeKind::MustUse { + span: cx.attr_span, + reason: match args { + ArgParser::NoArgs => None, + ArgParser::NameValue(name_value) => name_value.value_as_str(), + ArgParser::List(_) => { + let suggestions = + <Self as SingleAttributeParser<S>>::TEMPLATE.suggestions(false, "must_use"); + cx.emit_err(session_diagnostics::MustUseIllFormedAttributeInput { + num_suggestions: suggestions.len(), + suggestions:
DiagArgValue::StrListSepByAnd( + suggestions.into_iter().map(|s| format!("`{s}`").into()).collect(), + ), + span: cx.attr_span, + }); + return None; + } + }, + }) + } +} diff --git a/compiler/rustc_attr_parsing/src/attributes/repr.rs b/compiler/rustc_attr_parsing/src/attributes/repr.rs index 753b2366b41..4aa27043e98 100644 --- a/compiler/rustc_attr_parsing/src/attributes/repr.rs +++ b/compiler/rustc_attr_parsing/src/attributes/repr.rs @@ -1,9 +1,10 @@ use rustc_abi::Align; use rustc_ast::{IntTy, LitIntType, LitKind, UintTy}; use rustc_attr_data_structures::{AttributeKind, IntType, ReprAttr}; +use rustc_feature::{AttributeTemplate, template}; use rustc_span::{DUMMY_SP, Span, Symbol, sym}; -use super::{CombineAttributeParser, ConvertFn}; +use super::{AcceptMapping, AttributeParser, CombineAttributeParser, ConvertFn, FinalizeContext}; use crate::context::{AcceptContext, Stage}; use crate::parser::{ArgParser, MetaItemListParser, MetaItemParser}; use crate::session_diagnostics; @@ -23,6 +24,9 @@ impl<S: Stage> CombineAttributeParser<S> for ReprParser { type Item = (ReprAttr, Span); const PATH: &[Symbol] = &[sym::repr]; const CONVERT: ConvertFn<Self::Item> = AttributeKind::Repr; + // FIXME(jdonszelmann): never used + const TEMPLATE: AttributeTemplate = + template!(List: "C | Rust | align(...) | packed(...) | <integer type> | transparent"); fn extend<'c>( cx: &'c mut AcceptContext<'_, '_, S>, @@ -31,6 +35,7 @@ impl<S: Stage> CombineAttributeParser<S> for ReprParser { let mut reprs = Vec::new(); let Some(list) = args.list() else { + cx.expected_list(cx.attr_span); return reprs; }; @@ -199,7 +204,7 @@ fn parse_repr_align<S: Stage>( }); } Align => { - cx.dcx().emit_err(session_diagnostics::IncorrectReprFormatAlignOneArg { + cx.emit_err(session_diagnostics::IncorrectReprFormatAlignOneArg { span: param_span, }); } @@ -262,3 +267,57 @@ fn parse_alignment(node: &LitKind) -> Result<Align, &'static str> { Err("not an unsuffixed integer") } } + +/// Parse #[align(N)]. 
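+/// If `#[align(N)]` is specified more than once on the same item, the largest requested alignment is kept (see the `Ord::max` in `parse` below).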
+#[derive(Default)] +pub(crate) struct AlignParser(Option<(Align, Span)>); + +impl AlignParser { + const PATH: &'static [Symbol] = &[sym::align]; + const TEMPLATE: AttributeTemplate = template!(List: "<alignment in bytes>"); + + fn parse<'c, S: Stage>( + &mut self, + cx: &'c mut AcceptContext<'_, '_, S>, + args: &'c ArgParser<'_>, + ) { + match args { + ArgParser::NoArgs | ArgParser::NameValue(_) => { + cx.expected_list(cx.attr_span); + } + ArgParser::List(list) => { + let Some(align) = list.single() else { + cx.expected_single_argument(list.span); + return; + }; + + let Some(lit) = align.lit() else { + cx.emit_err(session_diagnostics::IncorrectReprFormatExpectInteger { + span: align.span(), + }); + + return; + }; + + match parse_alignment(&lit.kind) { + Ok(literal) => self.0 = Ord::max(self.0, Some((literal, cx.attr_span))), + Err(message) => { + cx.emit_err(session_diagnostics::InvalidAlignmentValue { + span: lit.span, + error_part: message, + }); + } + } + } + } + } +} + +impl<S: Stage> AttributeParser<S> for AlignParser { + const ATTRIBUTES: AcceptMapping<Self, S> = &[(Self::PATH, Self::TEMPLATE, Self::parse)]; + + fn finalize(self, _cx: &FinalizeContext<'_, '_, S>) -> Option<AttributeKind> { + let (align, span) = self.0?; + Some(AttributeKind::Align { align, span }) + } +} diff --git a/compiler/rustc_attr_parsing/src/attributes/semantics.rs b/compiler/rustc_attr_parsing/src/attributes/semantics.rs new file mode 100644 index 00000000000..071574a5612 --- /dev/null +++ b/compiler/rustc_attr_parsing/src/attributes/semantics.rs @@ -0,0 +1,19 @@ +use rustc_attr_data_structures::AttributeKind; +use rustc_feature::{AttributeTemplate, template}; +use rustc_span::{Symbol, sym}; + +use crate::attributes::{AttributeOrder, OnDuplicate, SingleAttributeParser}; +use crate::context::{AcceptContext, Stage}; +use crate::parser::ArgParser; + +pub(crate) struct MayDangleParser; +impl<S: Stage> SingleAttributeParser<S> for MayDangleParser { + const PATH: &[Symbol] = &[sym::may_dangle]; + const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepFirst; + const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn; + const TEMPLATE: AttributeTemplate = template!(Word); + + fn convert(cx: &mut AcceptContext<'_, '_, S>, _args: &ArgParser<'_>) -> Option<AttributeKind> { + Some(AttributeKind::MayDangle(cx.attr_span)) + } +} diff --git a/compiler/rustc_attr_parsing/src/attributes/stability.rs b/compiler/rustc_attr_parsing/src/attributes/stability.rs index 6589a51db2b..6871ff4ec9f 100644 --- a/compiler/rustc_attr_parsing/src/attributes/stability.rs +++ b/compiler/rustc_attr_parsing/src/attributes/stability.rs @@ -5,7 +5,8 @@ use rustc_attr_data_structures::{ StableSince, UnstableReason, VERSION_PLACEHOLDER, }; use rustc_errors::ErrorGuaranteed; -use rustc_span::{Span, Symbol, sym}; +use rustc_feature::{AttributeTemplate, template}; +use rustc_span::{Ident, Span, Symbol, sym}; use super::util::parse_version; use super::{AcceptMapping, AttributeOrder, AttributeParser, OnDuplicate, SingleAttributeParser}; @@ -43,26 +44,39 @@ impl StabilityParser { impl<S: Stage> AttributeParser<S> for StabilityParser { const ATTRIBUTES: AcceptMapping<Self, S> = &[ - (&[sym::stable], |this, cx, args| { - reject_outside_std!(cx); - if !this.check_duplicate(cx) - && let Some((feature, level)) = parse_stability(cx, args) - { - this.stability = Some((Stability { level, feature }, cx.attr_span)); - } - }), - (&[sym::unstable], |this, cx, args| { - reject_outside_std!(cx); - if !this.check_duplicate(cx) - && let Some((feature, level)) = 
parse_unstability(cx, args) - { - this.stability = Some((Stability { level, feature }, cx.attr_span)); - } - }), - (&[sym::rustc_allowed_through_unstable_modules], |this, cx, args| { - reject_outside_std!(cx); - this.allowed_through_unstable_modules = args.name_value().and_then(|i| i.value_as_str()) - }), + ( + &[sym::stable], + template!(List: r#"feature = "name", since = "version""#), + |this, cx, args| { + reject_outside_std!(cx); + if !this.check_duplicate(cx) + && let Some((feature, level)) = parse_stability(cx, args) + { + this.stability = Some((Stability { level, feature }, cx.attr_span)); + } + }, + ), + ( + &[sym::unstable], + template!(List: r#"feature = "name", reason = "...", issue = "N""#), + |this, cx, args| { + reject_outside_std!(cx); + if !this.check_duplicate(cx) + && let Some((feature, level)) = parse_unstability(cx, args) + { + this.stability = Some((Stability { level, feature }, cx.attr_span)); + } + }, + ), + ( + &[sym::rustc_allowed_through_unstable_modules], + template!(NameValueStr: "deprecation message"), + |this, cx, args| { + reject_outside_std!(cx); + this.allowed_through_unstable_modules = + args.name_value().and_then(|i| i.value_as_str()) + }, + ), ]; fn finalize(mut self, cx: &FinalizeContext<'_, '_, S>) -> Option<AttributeKind> { @@ -96,8 +110,10 @@ pub(crate) struct BodyStabilityParser { } impl<S: Stage> AttributeParser<S> for BodyStabilityParser { - const ATTRIBUTES: AcceptMapping<Self, S> = - &[(&[sym::rustc_default_body_unstable], |this, cx, args| { + const ATTRIBUTES: AcceptMapping<Self, S> = &[( + &[sym::rustc_default_body_unstable], + template!(List: r#"feature = "name", reason = "...", issue = "N""#), + |this, cx, args| { reject_outside_std!(cx); if this.stability.is_some() { cx.dcx() @@ -105,7 +121,8 @@ impl<S: Stage> AttributeParser<S> for BodyStabilityParser { } else if let Some((feature, level)) = parse_unstability(cx, args) { this.stability = Some((DefaultBodyStability { level, feature }, cx.attr_span)); } - })]; + }, + )]; fn finalize(self, _cx: &FinalizeContext<'_, '_, S>) -> Option<AttributeKind> { let (stability, span) = self.stability?; @@ -120,6 +137,7 @@ impl<S: Stage> SingleAttributeParser<S> for ConstStabilityIndirectParser { const PATH: &[Symbol] = &[sym::rustc_const_stable_indirect]; const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepFirst; const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Ignore; + const TEMPLATE: AttributeTemplate = template!(Word); fn convert(_cx: &mut AcceptContext<'_, '_, S>, _args: &ArgParser<'_>) -> Option<AttributeKind> { Some(AttributeKind::ConstStabilityIndirect) @@ -146,7 +164,7 @@ impl ConstStabilityParser { impl<S: Stage> AttributeParser<S> for ConstStabilityParser { const ATTRIBUTES: AcceptMapping<Self, S> = &[ - (&[sym::rustc_const_stable], |this, cx, args| { + (&[sym::rustc_const_stable], template!(List: r#"feature = "name""#), |this, cx, args| { reject_outside_std!(cx); if !this.check_duplicate(cx) @@ -158,7 +176,7 @@ impl<S: Stage> AttributeParser<S> for ConstStabilityParser { )); } }), - (&[sym::rustc_const_unstable], |this, cx, args| { + (&[sym::rustc_const_unstable], template!(List: r#"feature = "name""#), |this, cx, args| { reject_outside_std!(cx); if !this.check_duplicate(cx) && let Some((feature, level)) = parse_unstability(cx, args) @@ -169,7 +187,7 @@ impl<S: Stage> AttributeParser<S> for ConstStabilityParser { )); } }), - (&[sym::rustc_promotable], |this, cx, _| { + (&[sym::rustc_promotable], template!(Word), |this, cx, _| { reject_outside_std!(cx); this.promotable = true; }), @@ 
-199,12 +217,10 @@ fn insert_value_into_option_or_error<S: Stage>( cx: &AcceptContext<'_, '_, S>, param: &MetaItemParser<'_>, item: &mut Option<Symbol>, + name: Ident, ) -> Option<()> { if item.is_some() { - cx.emit_err(session_diagnostics::MultipleItem { - span: param.span(), - item: param.path().to_string(), - }); + cx.duplicate_key(name.span, name.name); None } else if let Some(v) = param.args().name_value() && let Some(s) = v.value_as_str() @@ -212,10 +228,7 @@ fn insert_value_into_option_or_error<S: Stage>( *item = Some(s); Some(()) } else { - cx.emit_err(session_diagnostics::IncorrectMetaItem { - span: param.span(), - suggestion: None, - }); + cx.expected_name_value(param.span(), Some(name.name)); None } } @@ -241,9 +254,14 @@ pub(crate) fn parse_stability<S: Stage>( return None; }; - match param.path().word_sym() { - Some(sym::feature) => insert_value_into_option_or_error(cx, ¶m, &mut feature)?, - Some(sym::since) => insert_value_into_option_or_error(cx, ¶m, &mut since)?, + let word = param.path().word(); + match word.map(|i| i.name) { + Some(sym::feature) => { + insert_value_into_option_or_error(cx, ¶m, &mut feature, word.unwrap())? + } + Some(sym::since) => { + insert_value_into_option_or_error(cx, ¶m, &mut since, word.unwrap())? + } _ => { cx.emit_err(session_diagnostics::UnknownMetaItem { span: param_span, @@ -310,11 +328,16 @@ pub(crate) fn parse_unstability<S: Stage>( return None; }; - match param.path().word_sym() { - Some(sym::feature) => insert_value_into_option_or_error(cx, ¶m, &mut feature)?, - Some(sym::reason) => insert_value_into_option_or_error(cx, ¶m, &mut reason)?, + let word = param.path().word(); + match word.map(|i| i.name) { + Some(sym::feature) => { + insert_value_into_option_or_error(cx, ¶m, &mut feature, word.unwrap())? + } + Some(sym::reason) => { + insert_value_into_option_or_error(cx, ¶m, &mut reason, word.unwrap())? + } Some(sym::issue) => { - insert_value_into_option_or_error(cx, ¶m, &mut issue)?; + insert_value_into_option_or_error(cx, ¶m, &mut issue, word.unwrap())?; // These unwraps are safe because `insert_value_into_option_or_error` ensures the meta item // is a name/value pair string literal. @@ -344,9 +367,11 @@ pub(crate) fn parse_unstability<S: Stage>( is_soft = true; } Some(sym::implied_by) => { - insert_value_into_option_or_error(cx, ¶m, &mut implied_by)? + insert_value_into_option_or_error(cx, ¶m, &mut implied_by, word.unwrap())? + } + Some(sym::old_name) => { + insert_value_into_option_or_error(cx, ¶m, &mut old_name, word.unwrap())? 
} - Some(sym::old_name) => insert_value_into_option_or_error(cx, ¶m, &mut old_name)?, _ => { cx.emit_err(session_diagnostics::UnknownMetaItem { span: param.span(), diff --git a/compiler/rustc_attr_parsing/src/attributes/transparency.rs b/compiler/rustc_attr_parsing/src/attributes/transparency.rs index 16ad9d03e50..ce5ceb9139a 100644 --- a/compiler/rustc_attr_parsing/src/attributes/transparency.rs +++ b/compiler/rustc_attr_parsing/src/attributes/transparency.rs @@ -1,4 +1,5 @@ use rustc_attr_data_structures::AttributeKind; +use rustc_feature::{AttributeTemplate, template}; use rustc_span::hygiene::Transparency; use rustc_span::{Symbol, sym}; @@ -17,14 +18,23 @@ impl<S: Stage> SingleAttributeParser<S> for TransparencyParser { const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Custom(|cx, used, unused| { cx.dcx().span_err(vec![used, unused], "multiple macro transparency attributes"); }); + const TEMPLATE: AttributeTemplate = + template!(NameValueStr: "transparent|semitransparent|opaque"); fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> { - match args.name_value().and_then(|nv| nv.value_as_str()) { + let Some(nv) = args.name_value() else { + cx.expected_name_value(cx.attr_span, None); + return None; + }; + match nv.value_as_str() { Some(sym::transparent) => Some(Transparency::Transparent), Some(sym::semiopaque | sym::semitransparent) => Some(Transparency::SemiOpaque), Some(sym::opaque) => Some(Transparency::Opaque), - Some(other) => { - cx.dcx().span_err(cx.attr_span, format!("unknown macro transparency: `{other}`")); + Some(_) => { + cx.expected_specific_argument_strings( + nv.value_span, + vec!["transparent", "semitransparent", "opaque"], + ); None } None => None, diff --git a/compiler/rustc_attr_parsing/src/context.rs b/compiler/rustc_attr_parsing/src/context.rs index 3193d8975e9..171995dc9cb 100644 --- a/compiler/rustc_attr_parsing/src/context.rs +++ b/compiler/rustc_attr_parsing/src/context.rs @@ -5,32 +5,36 @@ use std::ops::{Deref, DerefMut}; use std::sync::LazyLock; use private::Sealed; -use rustc_ast as ast; -use rustc_ast::NodeId; +use rustc_ast::{self as ast, MetaItemLit, NodeId}; use rustc_attr_data_structures::AttributeKind; use rustc_attr_data_structures::lints::{AttributeLint, AttributeLintKind}; use rustc_errors::{DiagCtxtHandle, Diagnostic}; -use rustc_feature::Features; +use rustc_feature::{AttributeTemplate, Features}; use rustc_hir::{AttrArgs, AttrItem, AttrPath, Attribute, HashIgnoredAttrId, HirId}; use rustc_session::Session; use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span, Symbol, sym}; use crate::attributes::allow_unstable::{AllowConstFnUnstableParser, AllowInternalUnstableParser}; +use crate::attributes::codegen_attrs::{ColdParser, NoMangleParser, OptimizeParser}; use crate::attributes::confusables::ConfusablesParser; use crate::attributes::deprecation::DeprecationParser; -use crate::attributes::lint_helpers::AsPtrParser; -use crate::attributes::repr::ReprParser; +use crate::attributes::inline::{InlineParser, RustcForceInlineParser}; +use crate::attributes::lint_helpers::{AsPtrParser, PubTransparentParser}; +use crate::attributes::must_use::MustUseParser; +use crate::attributes::repr::{AlignParser, ReprParser}; +use crate::attributes::semantics::MayDangleParser; use crate::attributes::stability::{ BodyStabilityParser, ConstStabilityIndirectParser, ConstStabilityParser, StabilityParser, }; use crate::attributes::transparency::TransparencyParser; use crate::attributes::{AttributeParser as _, Combine, Single}; use 
crate::parser::{ArgParser, MetaItemParser}; +use crate::session_diagnostics::{AttributeParseError, AttributeParseErrorReason, UnknownMetaItem}; macro_rules! group_type { ($stage: ty) => { LazyLock<( - BTreeMap<&'static [Symbol], Box<dyn for<'sess, 'a> Fn(&mut AcceptContext<'_, 'sess, $stage>, &ArgParser<'a>) + Send + Sync>>, + BTreeMap<&'static [Symbol], Vec<(AttributeTemplate, Box<dyn for<'sess, 'a> Fn(&mut AcceptContext<'_, 'sess, $stage>, &ArgParser<'a>) + Send + Sync>)>>, Vec<Box<dyn Send + Sync + Fn(&mut FinalizeContext<'_, '_, $stage>) -> Option<AttributeKind>>> )> }; @@ -59,7 +63,7 @@ macro_rules! attribute_parsers { @[$ty: ty] pub(crate) static $name: ident = [$($names: ty),* $(,)?]; ) => { pub(crate) static $name: group_type!($ty) = LazyLock::new(|| { - let mut accepts = BTreeMap::<_, Box<dyn for<'sess, 'a> Fn(&mut AcceptContext<'_, 'sess, $ty>, &ArgParser<'a>) + Send + Sync>>::new(); + let mut accepts = BTreeMap::<_, Vec<(AttributeTemplate, Box<dyn for<'sess, 'a> Fn(&mut AcceptContext<'_, 'sess, $ty>, &ArgParser<'a>) + Send + Sync>)>>::new(); let mut finalizes = Vec::<Box<dyn Send + Sync + Fn(&mut FinalizeContext<'_, '_, $ty>) -> Option<AttributeKind>>>::new(); $( { @@ -67,13 +71,12 @@ macro_rules! attribute_parsers { static STATE_OBJECT: RefCell<$names> = RefCell::new(<$names>::default()); }; - for (k, v) in <$names>::ATTRIBUTES { - let old = accepts.insert(*k, Box::new(|cx, args| { + for (path, template, accept_fn) in <$names>::ATTRIBUTES { + accepts.entry(*path).or_default().push((*template, Box::new(|cx, args| { STATE_OBJECT.with_borrow_mut(|s| { - v(s, cx, args) + accept_fn(s, cx, args) }) - })); - assert!(old.is_none()); + }))); } finalizes.push(Box::new(|cx| { @@ -90,6 +93,7 @@ macro_rules! attribute_parsers { attribute_parsers!( pub(crate) static ATTRIBUTE_PARSERS = [ // tidy-alphabetical-start + AlignParser, BodyStabilityParser, ConfusablesParser, ConstStabilityParser, @@ -104,8 +108,16 @@ attribute_parsers!( // tidy-alphabetical-start Single<AsPtrParser>, + Single<ColdParser>, Single<ConstStabilityIndirectParser>, Single<DeprecationParser>, + Single<InlineParser>, + Single<MayDangleParser>, + Single<MustUseParser>, + Single<NoMangleParser>, + Single<OptimizeParser>, + Single<PubTransparentParser>, + Single<RustcForceInlineParser>, Single<TransparencyParser>, // tidy-alphabetical-end ]; @@ -165,6 +177,14 @@ pub(crate) struct AcceptContext<'f, 'sess, S: Stage> { pub(crate) finalize_cx: FinalizeContext<'f, 'sess, S>, /// The span of the attribute currently being parsed pub(crate) attr_span: Span, + + /// The expected structure of the attribute. + /// + /// Used in reporting errors to give a hint to users what the attribute *should* look like. + pub(crate) template: &'f AttributeTemplate, + + /// The name of the attribute we're currently accepting. + pub(crate) attr_path: AttrPath, } impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> { @@ -172,10 +192,143 @@ impl<'f, 'sess: 'f, S: Stage> AcceptContext<'f, 'sess, S> { S::emit_err(&self.sess, diag) } + /// Emit a lint. This method is somewhat special, since lints emitted during attribute parsing + /// must be delayed until after HIR is built. This method will take care of the details of + /// that. 
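+ /// The lint is handed to the `emit_lint` callback stored on the context and is only turned into a real diagnostic later (see `emit_attribute_lint` in `lints.rs`).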
pub(crate) fn emit_lint(&mut self, lint: AttributeLintKind, span: Span) { let id = self.target_id; (self.emit_lint)(AttributeLint { id, span, kind: lint }); } + + pub(crate) fn unknown_key( + &self, + span: Span, + found: String, + options: &'static [&'static str], + ) -> ErrorGuaranteed { + self.emit_err(UnknownMetaItem { span, item: found, expected: options }) + } + + /// error that a string literal was expected. + /// You can optionally give the literal you did find (which you found not to be a string literal) + /// which can make better errors. For example, if the literal was a byte string it will suggest + /// removing the `b` prefix. + pub(crate) fn expected_string_literal( + &self, + span: Span, + actual_literal: Option<&MetaItemLit>, + ) -> ErrorGuaranteed { + self.emit_err(AttributeParseError { + span, + attr_span: self.attr_span, + template: self.template.clone(), + attribute: self.attr_path.clone(), + reason: AttributeParseErrorReason::ExpectedStringLiteral { + byte_string: actual_literal.and_then(|i| { + i.kind.is_bytestr().then(|| self.sess().source_map().start_point(i.span)) + }), + }, + }) + } + + pub(crate) fn expected_list(&self, span: Span) -> ErrorGuaranteed { + self.emit_err(AttributeParseError { + span, + attr_span: self.attr_span, + template: self.template.clone(), + attribute: self.attr_path.clone(), + reason: AttributeParseErrorReason::ExpectedList, + }) + } + + pub(crate) fn expected_no_args(&self, args_span: Span) -> ErrorGuaranteed { + self.emit_err(AttributeParseError { + span: args_span, + attr_span: self.attr_span, + template: self.template.clone(), + attribute: self.attr_path.clone(), + reason: AttributeParseErrorReason::ExpectedNoArgs, + }) + } + + /// emit an error that a `name = value` pair was expected at this span. The symbol can be given for + /// a nicer error message talking about the specific name that was found lacking a value. + pub(crate) fn expected_name_value(&self, span: Span, name: Option<Symbol>) -> ErrorGuaranteed { + self.emit_err(AttributeParseError { + span, + attr_span: self.attr_span, + template: self.template.clone(), + attribute: self.attr_path.clone(), + reason: AttributeParseErrorReason::ExpectedNameValue(name), + }) + } + + /// emit an error that a `name = value` pair was found where that name was already seen. + pub(crate) fn duplicate_key(&self, span: Span, key: Symbol) -> ErrorGuaranteed { + self.emit_err(AttributeParseError { + span, + attr_span: self.attr_span, + template: self.template.clone(), + attribute: self.attr_path.clone(), + reason: AttributeParseErrorReason::DuplicateKey(key), + }) + } + + /// an error that should be emitted when a [`MetaItemOrLitParser`](crate::parser::MetaItemOrLitParser) + /// was expected *not* to be a literal, but instead a meta item. 
+ pub(crate) fn unexpected_literal(&self, span: Span) -> ErrorGuaranteed { + self.emit_err(AttributeParseError { + span, + attr_span: self.attr_span, + template: self.template.clone(), + attribute: self.attr_path.clone(), + reason: AttributeParseErrorReason::UnexpectedLiteral, + }) + } + + pub(crate) fn expected_single_argument(&self, span: Span) -> ErrorGuaranteed { + self.emit_err(AttributeParseError { + span, + attr_span: self.attr_span, + template: self.template.clone(), + attribute: self.attr_path.clone(), + reason: AttributeParseErrorReason::ExpectedSingleArgument, + }) + } + + pub(crate) fn expected_specific_argument( + &self, + span: Span, + possibilities: Vec<&'static str>, + ) -> ErrorGuaranteed { + self.emit_err(AttributeParseError { + span, + attr_span: self.attr_span, + template: self.template.clone(), + attribute: self.attr_path.clone(), + reason: AttributeParseErrorReason::ExpectedSpecificArgument { + possibilities, + strings: false, + }, + }) + } + + pub(crate) fn expected_specific_argument_strings( + &self, + span: Span, + possibilities: Vec<&'static str>, + ) -> ErrorGuaranteed { + self.emit_err(AttributeParseError { + span, + attr_span: self.attr_span, + template: self.template.clone(), + attribute: self.attr_path.clone(), + reason: AttributeParseErrorReason::ExpectedSpecificArgument { + possibilities, + strings: true, + }, + }) + } } impl<'f, 'sess, S: Stage> Deref for AcceptContext<'f, 'sess, S> { @@ -286,19 +439,13 @@ impl<'sess> AttributeParser<'sess, Early> { parsed.pop() } - - pub fn new_early(sess: &'sess Session, features: &'sess Features, tools: Vec<Symbol>) -> Self { - Self { features: Some(features), tools, parse_only: None, sess, stage: PhantomData } - } } -impl<'sess> AttributeParser<'sess, Late> { +impl<'sess, S: Stage> AttributeParser<'sess, S> { pub fn new(sess: &'sess Session, features: &'sess Features, tools: Vec<Symbol>) -> Self { Self { features: Some(features), tools, parse_only: None, sess, stage: PhantomData } } -} -impl<'sess, S: Stage> AttributeParser<'sess, S> { pub(crate) fn sess(&self) -> &'sess Session { &self.sess } @@ -374,18 +521,22 @@ impl<'sess, S: Stage> AttributeParser<'sess, S> { let args = parser.args(); let parts = path.segments().map(|i| i.name).collect::<Vec<_>>(); - if let Some(accept) = S::parsers().0.get(parts.as_slice()) { - let mut cx: AcceptContext<'_, 'sess, S> = AcceptContext { - finalize_cx: FinalizeContext { - cx: self, - target_span, - target_id, - emit_lint: &mut emit_lint, - }, - attr_span: lower_span(attr.span), - }; - - accept(&mut cx, args) + if let Some(accepts) = S::parsers().0.get(parts.as_slice()) { + for (template, accept) in accepts { + let mut cx: AcceptContext<'_, 'sess, S> = AcceptContext { + finalize_cx: FinalizeContext { + cx: self, + target_span, + target_id, + emit_lint: &mut emit_lint, + }, + attr_span: lower_span(attr.span), + template, + attr_path: path.get_attribute_path(), + }; + + accept(&mut cx, args) + } } else { // If we're here, we must be compiling a tool attribute... Or someone // forgot to parse their fancy new attribute. Let's warn them in any case. 
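As a rough sketch of how the pieces above fit together (illustrative only, not something contained in this diff): a single-path attribute parser now declares its `TEMPLATE` alongside `PATH`, `ATTRIBUTE_ORDER` and `ON_DUPLICATE`, is registered as `Single<...>` in `attribute_parsers!`, and reports malformed input through the `AcceptContext` helpers, which append the template-derived "must be of the form" suggestion to the error. The names `FrobnicateParser`, `sym::frobnicate` and `AttributeKind::Frobnicate` below are made-up placeholders; everything else mirrors the `ColdParser`/`NoMangleParser` pattern added in this patch.

use rustc_attr_data_structures::AttributeKind;
use rustc_feature::{AttributeTemplate, template};
use rustc_span::{Symbol, sym};

use crate::attributes::{AttributeOrder, OnDuplicate, SingleAttributeParser};
use crate::context::{AcceptContext, Stage};
use crate::parser::ArgParser;

pub(crate) struct FrobnicateParser;

impl<S: Stage> SingleAttributeParser<S> for FrobnicateParser {
    // Placeholder path; a real parser would name an existing symbol here.
    const PATH: &[Symbol] = &[sym::frobnicate];
    // Keep the last occurrence; earlier duplicates only warn.
    const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepLast;
    const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn;
    // The accepted shape of the attribute; also drives the suggestions in E0539/E0565 errors.
    const TEMPLATE: AttributeTemplate = template!(Word);

    fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> {
        if !args.no_args() {
            // "didn't expect any arguments here", plus the template rendered as a suggestion.
            cx.expected_no_args(args.span().unwrap_or(cx.attr_span));
            return None;
        }
        // Placeholder variant standing in for a real `AttributeKind` member.
        Some(AttributeKind::Frobnicate(cx.attr_span))
    }
}

Registration would then be a one-line addition of `Single<FrobnicateParser>` to the `Single<...>` block of `attribute_parsers!`, which is what makes the template available on the `AcceptContext` when the accept function runs.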
diff --git a/compiler/rustc_attr_parsing/src/lints.rs b/compiler/rustc_attr_parsing/src/lints.rs index d0d112446b4..fee22293b47 100644 --- a/compiler/rustc_attr_parsing/src/lints.rs +++ b/compiler/rustc_attr_parsing/src/lints.rs @@ -1,5 +1,5 @@ use rustc_attr_data_structures::lints::{AttributeLint, AttributeLintKind}; -use rustc_errors::LintEmitter; +use rustc_errors::{DiagArgValue, LintEmitter}; use rustc_hir::HirId; use crate::session_diagnostics; @@ -15,5 +15,18 @@ pub fn emit_attribute_lint<L: LintEmitter>(lint: &AttributeLint<HirId>, lint_emi *span, session_diagnostics::UnusedDuplicate { this, other, warning }, ), + AttributeLintKind::IllFormedAttributeInput { suggestions } => { + lint_emitter.emit_node_span_lint( + rustc_session::lint::builtin::ILL_FORMED_ATTRIBUTE_INPUT, + *id, + *span, + session_diagnostics::IllFormedAttributeInput { + num_suggestions: suggestions.len(), + suggestions: DiagArgValue::StrListSepByAnd( + suggestions.into_iter().map(|s| format!("`{s}`").into()).collect(), + ), + }, + ); + } } } diff --git a/compiler/rustc_attr_parsing/src/session_diagnostics.rs b/compiler/rustc_attr_parsing/src/session_diagnostics.rs index 7f847d3dd4c..2a020770e5d 100644 --- a/compiler/rustc_attr_parsing/src/session_diagnostics.rs +++ b/compiler/rustc_attr_parsing/src/session_diagnostics.rs @@ -2,7 +2,11 @@ use std::num::IntErrorKind; use rustc_ast as ast; use rustc_errors::codes::*; -use rustc_errors::{Applicability, Diag, DiagCtxtHandle, Diagnostic, EmissionGuarantee, Level}; +use rustc_errors::{ + Applicability, Diag, DiagArgValue, DiagCtxtHandle, Diagnostic, EmissionGuarantee, Level, +}; +use rustc_feature::AttributeTemplate; +use rustc_hir::AttrPath; use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic}; use rustc_span::{Span, Symbol}; @@ -12,8 +16,6 @@ pub(crate) enum UnsupportedLiteralReason { Generic, CfgString, CfgBoolean, - DeprecatedString, - DeprecatedKvPair, } #[derive(Diagnostic)] @@ -32,37 +34,6 @@ pub(crate) struct InvalidPredicate { pub predicate: String, } -#[derive(Diagnostic)] -#[diag(attr_parsing_multiple_item, code = E0538)] -pub(crate) struct MultipleItem { - #[primary_span] - pub span: Span, - - pub item: String, -} - -#[derive(Diagnostic)] -#[diag(attr_parsing_incorrect_meta_item, code = E0539)] -pub(crate) struct IncorrectMetaItem { - #[primary_span] - pub span: Span, - - #[subdiagnostic] - pub suggestion: Option<IncorrectMetaItemSuggestion>, -} - -#[derive(Subdiagnostic)] -#[multipart_suggestion( - attr_parsing_incorrect_meta_item_suggestion, - applicability = "maybe-incorrect" -)] -pub(crate) struct IncorrectMetaItemSuggestion { - #[suggestion_part(code = "\"")] - pub lo: Span, - #[suggestion_part(code = "\"")] - pub hi: Span, -} - /// Error code: E0541 pub(crate) struct UnknownMetaItem<'a> { pub span: Span, @@ -217,6 +188,7 @@ pub(crate) struct InvalidReprHintNoValue { } /// Error code: E0565 +// FIXME(jdonszelmann): slowly phased out pub(crate) struct UnsupportedLiteral { pub span: Span, pub reason: UnsupportedLiteralReason, @@ -239,12 +211,6 @@ impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for UnsupportedLiteral { UnsupportedLiteralReason::CfgBoolean => { fluent::attr_parsing_unsupported_literal_cfg_boolean } - UnsupportedLiteralReason::DeprecatedString => { - fluent::attr_parsing_unsupported_literal_deprecated_string - } - UnsupportedLiteralReason::DeprecatedKvPair => { - fluent::attr_parsing_unsupported_literal_deprecated_kv_pair - } }, ); diag.span(self.span); @@ -462,6 +428,23 @@ pub(crate) struct UnusedDuplicate { pub warning: bool, } +// 
FIXME(jdonszelmann): duplicated in rustc_lints, should be moved here completely. +#[derive(LintDiagnostic)] +#[diag(attr_parsing_ill_formed_attribute_input)] +pub(crate) struct IllFormedAttributeInput { + pub num_suggestions: usize, + pub suggestions: DiagArgValue, +} + +#[derive(Diagnostic)] +#[diag(attr_parsing_ill_formed_attribute_input)] +pub(crate) struct MustUseIllFormedAttributeInput { + #[primary_span] + pub span: Span, + pub num_suggestions: usize, + pub suggestions: DiagArgValue, +} + #[derive(Diagnostic)] #[diag(attr_parsing_stability_outside_std, code = E0734)] pub(crate) struct StabilityOutsideStd { @@ -477,6 +460,14 @@ pub(crate) struct EmptyConfusables { } #[derive(Diagnostic)] +#[diag(attr_parsing_invalid_alignment_value, code = E0589)] +pub(crate) struct InvalidAlignmentValue { + #[primary_span] + pub span: Span, + pub error_part: &'static str, +} + +#[derive(Diagnostic)] #[diag(attr_parsing_repr_ident, code = E0565)] pub(crate) struct ReprIdent { #[primary_span] @@ -490,3 +481,120 @@ pub(crate) struct UnrecognizedReprHint { #[primary_span] pub span: Span, } + +pub(crate) enum AttributeParseErrorReason { + ExpectedNoArgs, + ExpectedStringLiteral { byte_string: Option<Span> }, + ExpectedSingleArgument, + ExpectedList, + UnexpectedLiteral, + ExpectedNameValue(Option<Symbol>), + DuplicateKey(Symbol), + ExpectedSpecificArgument { possibilities: Vec<&'static str>, strings: bool }, +} + +pub(crate) struct AttributeParseError { + pub(crate) span: Span, + pub(crate) attr_span: Span, + pub(crate) template: AttributeTemplate, + pub(crate) attribute: AttrPath, + pub(crate) reason: AttributeParseErrorReason, +} + +impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for AttributeParseError { + fn into_diag(self, dcx: DiagCtxtHandle<'a>, level: Level) -> Diag<'a, G> { + let name = self.attribute.to_string(); + + let mut diag = Diag::new(dcx, level, format!("malformed `{name}` attribute input")); + diag.span(self.attr_span); + diag.code(E0539); + match self.reason { + AttributeParseErrorReason::ExpectedStringLiteral { byte_string } => { + if let Some(start_point_span) = byte_string { + diag.span_suggestion( + start_point_span, + fluent::attr_parsing_unsupported_literal_suggestion, + "", + Applicability::MaybeIncorrect, + ); + diag.note("expected a normal string literal, not a byte string literal"); + + return diag; + } else { + diag.span_label(self.span, "expected a string literal here"); + } + } + AttributeParseErrorReason::ExpectedSingleArgument => { + diag.span_label(self.span, "expected a single argument here"); + diag.code(E0805); + } + AttributeParseErrorReason::ExpectedList => { + diag.span_label(self.span, "expected this to be a list"); + } + AttributeParseErrorReason::DuplicateKey(key) => { + diag.span_label(self.span, format!("found `{key}` used as a key more than once")); + diag.code(E0538); + } + AttributeParseErrorReason::UnexpectedLiteral => { + diag.span_label(self.span, format!("didn't expect a literal here")); + diag.code(E0565); + } + AttributeParseErrorReason::ExpectedNoArgs => { + diag.span_label(self.span, format!("didn't expect any arguments here")); + diag.code(E0565); + } + AttributeParseErrorReason::ExpectedNameValue(None) => { + diag.span_label( + self.span, + format!("expected this to be of the form `{name} = \"...\"`"), + ); + } + AttributeParseErrorReason::ExpectedNameValue(Some(name)) => { + diag.span_label( + self.span, + format!("expected this to be of the form `{name} = \"...\"`"), + ); + } + AttributeParseErrorReason::ExpectedSpecificArgument { 
possibilities, strings } => { + let quote = if strings { '"' } else { '`' }; + match possibilities.as_slice() { + &[] => {} + &[x] => { + diag.span_label( + self.span, + format!("the only valid argument here is {quote}{x}{quote}"), + ); + } + [first, second] => { + diag.span_label(self.span, format!("valid arguments are {quote}{first}{quote} or {quote}{second}{quote}")); + } + [first @ .., second_to_last, last] => { + let mut res = String::new(); + for i in first { + res.push_str(&format!("{quote}{i}{quote}, ")); + } + res.push_str(&format!( + "{quote}{second_to_last}{quote} or {quote}{last}{quote}" + )); + + diag.span_label(self.span, format!("valid arguments are {res}")); + } + } + } + } + + let suggestions = self.template.suggestions(false, &name); + diag.span_suggestions( + self.attr_span, + if suggestions.len() == 1 { + "must be of the form" + } else { + "try changing it to one of the following valid forms of the attribute" + }, + suggestions, + Applicability::HasPlaceholders, + ); + + diag + } +} diff --git a/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs b/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs index 34d36849939..98dc898db23 100644 --- a/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs @@ -3201,14 +3201,6 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { let expr_ty: Option<Ty<'_>> = visitor.prop_expr.map(|expr| typeck_results.expr_ty(expr).peel_refs()); - let is_format_arguments_item = if let Some(expr_ty) = expr_ty - && let ty::Adt(adt, _) = expr_ty.kind() - { - self.infcx.tcx.is_lang_item(adt.did(), LangItem::FormatArguments) - } else { - false - }; - if visitor.found == 0 && stmt.span.contains(proper_span) && let Some(p) = sm.span_to_margin(stmt.span) @@ -3236,25 +3228,17 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { "" }; - if !is_format_arguments_item { - let addition = format!( - "let {}binding = {};\n{}", - mutability, - s, - " ".repeat(p) - ); - err.multipart_suggestion_verbose( - msg, - vec![ - (stmt.span.shrink_to_lo(), addition), - (proper_span, "binding".to_string()), - ], - Applicability::MaybeIncorrect, - ); - } else { - err.note("the result of `format_args!` can only be assigned directly if no placeholders in its arguments are used"); - err.note("to learn more, visit <https://doc.rust-lang.org/std/macro.format_args.html>"); - } + let addition = + format!("let {}binding = {};\n{}", mutability, s, " ".repeat(p)); + err.multipart_suggestion_verbose( + msg, + vec![ + (stmt.span.shrink_to_lo(), addition), + (proper_span, "binding".to_string()), + ], + Applicability::MaybeIncorrect, + ); + suggested = true; break; } diff --git a/compiler/rustc_builtin_macros/messages.ftl b/compiler/rustc_builtin_macros/messages.ftl index d32e6f1558e..c5d1f2ad2de 100644 --- a/compiler/rustc_builtin_macros/messages.ftl +++ b/compiler/rustc_builtin_macros/messages.ftl @@ -104,6 +104,8 @@ builtin_macros_concat_bytes_bad_repeat = repeat count is not a positive number builtin_macros_concat_bytes_invalid = cannot concatenate {$lit_kind} literals .byte_char = try using a byte character .byte_str = try using a byte string + .c_str = try using a null-terminated byte string + .c_str_note = concatenating C strings is ambiguous about including the '\0' .number_array = try wrapping the number in an array builtin_macros_concat_bytes_missing_literal = expected a byte literal diff --git a/compiler/rustc_builtin_macros/src/alloc_error_handler.rs 
b/compiler/rustc_builtin_macros/src/alloc_error_handler.rs index ea406e70666..e75bc944d7e 100644 --- a/compiler/rustc_builtin_macros/src/alloc_error_handler.rs +++ b/compiler/rustc_builtin_macros/src/alloc_error_handler.rs @@ -62,8 +62,8 @@ pub(crate) fn expand( fn generate_handler(cx: &ExtCtxt<'_>, handler: Ident, span: Span, sig_span: Span) -> Stmt { let usize = cx.path_ident(span, Ident::new(sym::usize, span)); let ty_usize = cx.ty_path(usize); - let size = Ident::from_str_and_span("size", span); - let align = Ident::from_str_and_span("align", span); + let size = Ident::new(sym::size, span); + let align = Ident::new(sym::align, span); let layout_new = cx.std_path(&[sym::alloc, sym::Layout, sym::from_size_align_unchecked]); let layout_new = cx.expr_path(cx.path(span, layout_new)); diff --git a/compiler/rustc_builtin_macros/src/autodiff.rs b/compiler/rustc_builtin_macros/src/autodiff.rs index dc3bb8ab52a..df1b1eb60e1 100644 --- a/compiler/rustc_builtin_macros/src/autodiff.rs +++ b/compiler/rustc_builtin_macros/src/autodiff.rs @@ -652,8 +652,10 @@ mod llvm_enzyme { exprs = ecx.expr_call(new_decl_span, bb_call_expr, thin_vec![exprs]); } else { let q = QSelf { ty: d_ret_ty, path_span: span, position: 0 }; - let y = - ExprKind::Path(Some(P(q)), ecx.path_ident(span, Ident::from_str("default"))); + let y = ExprKind::Path( + Some(P(q)), + ecx.path_ident(span, Ident::with_dummy_span(kw::Default)), + ); let default_call_expr = ecx.expr(span, y); let default_call_expr = ecx.expr_call(new_decl_span, default_call_expr, thin_vec![]); diff --git a/compiler/rustc_builtin_macros/src/cfg_eval.rs b/compiler/rustc_builtin_macros/src/cfg_eval.rs index fe44350863c..ec3b87467a9 100644 --- a/compiler/rustc_builtin_macros/src/cfg_eval.rs +++ b/compiler/rustc_builtin_macros/src/cfg_eval.rs @@ -161,7 +161,7 @@ impl MutVisitor for CfgEval<'_> { } #[instrument(level = "trace", skip(self))] - fn visit_method_receiver_expr(&mut self, expr: &mut P<ast::Expr>) { + fn visit_method_receiver_expr(&mut self, expr: &mut ast::Expr) { self.0.configure_expr(expr, true); mut_visit::walk_expr(self, expr); } diff --git a/compiler/rustc_builtin_macros/src/concat_bytes.rs b/compiler/rustc_builtin_macros/src/concat_bytes.rs index 456f2b9ab31..92d011fb9d1 100644 --- a/compiler/rustc_builtin_macros/src/concat_bytes.rs +++ b/compiler/rustc_builtin_macros/src/concat_bytes.rs @@ -1,6 +1,6 @@ use rustc_ast::ptr::P; use rustc_ast::tokenstream::TokenStream; -use rustc_ast::{ExprKind, LitIntType, LitKind, UintTy, token}; +use rustc_ast::{ExprKind, LitIntType, LitKind, StrStyle, UintTy, token}; use rustc_expand::base::{DummyResult, ExpandResult, ExtCtxt, MacEager, MacroExpanderResult}; use rustc_session::errors::report_lit_error; use rustc_span::{ErrorGuaranteed, Span}; @@ -21,15 +21,32 @@ fn invalid_type_err( let snippet = cx.sess.source_map().span_to_snippet(span).ok(); let dcx = cx.dcx(); match LitKind::from_token_lit(token_lit) { - Ok(LitKind::CStr(_, _)) => { + Ok(LitKind::CStr(_, style)) => { // Avoid ambiguity in handling of terminal `NUL` by refusing to // concatenate C string literals as bytes. 
- dcx.emit_err(errors::ConcatCStrLit { span }) + let sugg = if let Some(mut as_bstr) = snippet + && style == StrStyle::Cooked + && as_bstr.starts_with('c') + && as_bstr.ends_with('"') + { + // Suggest`c"foo"` -> `b"foo\0"` if we can + as_bstr.replace_range(0..1, "b"); + as_bstr.pop(); + as_bstr.push_str(r#"\0""#); + Some(ConcatBytesInvalidSuggestion::CStrLit { span, as_bstr }) + } else { + // No suggestion for a missing snippet, raw strings, or if for some reason we have + // a span that doesn't match `c"foo"` (possible if a proc macro assigns a span + // that doesn't actually point to a C string). + None + }; + // We can only provide a suggestion if we have a snip and it is not a raw string + dcx.emit_err(ConcatBytesInvalid { span, lit_kind: "C string", sugg, cs_note: Some(()) }) } Ok(LitKind::Char(_)) => { let sugg = snippet.map(|snippet| ConcatBytesInvalidSuggestion::CharLit { span, snippet }); - dcx.emit_err(ConcatBytesInvalid { span, lit_kind: "character", sugg }) + dcx.emit_err(ConcatBytesInvalid { span, lit_kind: "character", sugg, cs_note: None }) } Ok(LitKind::Str(_, _)) => { // suggestion would be invalid if we are nested @@ -38,18 +55,21 @@ fn invalid_type_err( } else { None }; - dcx.emit_err(ConcatBytesInvalid { span, lit_kind: "string", sugg }) + dcx.emit_err(ConcatBytesInvalid { span, lit_kind: "string", sugg, cs_note: None }) } Ok(LitKind::Float(_, _)) => { - dcx.emit_err(ConcatBytesInvalid { span, lit_kind: "float", sugg: None }) - } - Ok(LitKind::Bool(_)) => { - dcx.emit_err(ConcatBytesInvalid { span, lit_kind: "boolean", sugg: None }) + dcx.emit_err(ConcatBytesInvalid { span, lit_kind: "float", sugg: None, cs_note: None }) } + Ok(LitKind::Bool(_)) => dcx.emit_err(ConcatBytesInvalid { + span, + lit_kind: "boolean", + sugg: None, + cs_note: None, + }), Ok(LitKind::Int(_, _)) if !is_nested => { let sugg = snippet.map(|snippet| ConcatBytesInvalidSuggestion::IntLit { span, snippet }); - dcx.emit_err(ConcatBytesInvalid { span, lit_kind: "numeric", sugg }) + dcx.emit_err(ConcatBytesInvalid { span, lit_kind: "numeric", sugg, cs_note: None }) } Ok(LitKind::Int(val, LitIntType::Unsuffixed | LitIntType::Unsigned(UintTy::U8))) => { assert!(val.get() > u8::MAX.into()); // must be an error diff --git a/compiler/rustc_builtin_macros/src/errors.rs b/compiler/rustc_builtin_macros/src/errors.rs index 3a2e96a5e5a..b7ecfd2285c 100644 --- a/compiler/rustc_builtin_macros/src/errors.rs +++ b/compiler/rustc_builtin_macros/src/errors.rs @@ -215,6 +215,8 @@ pub(crate) struct ConcatBytesInvalid { pub(crate) lit_kind: &'static str, #[subdiagnostic] pub(crate) sugg: Option<ConcatBytesInvalidSuggestion>, + #[note(builtin_macros_c_str_note)] + pub(crate) cs_note: Option<()>, } #[derive(Subdiagnostic)] @@ -239,6 +241,13 @@ pub(crate) enum ConcatBytesInvalidSuggestion { span: Span, snippet: String, }, + #[note(builtin_macros_c_str_note)] + #[suggestion(builtin_macros_c_str, code = "{as_bstr}", applicability = "machine-applicable")] + CStrLit { + #[primary_span] + span: Span, + as_bstr: String, + }, #[suggestion( builtin_macros_number_array, code = "[{snippet}]", diff --git a/compiler/rustc_builtin_macros/src/format.rs b/compiler/rustc_builtin_macros/src/format.rs index 39f9d5f9005..6785cb6aef5 100644 --- a/compiler/rustc_builtin_macros/src/format.rs +++ b/compiler/rustc_builtin_macros/src/format.rs @@ -606,6 +606,7 @@ fn make_format_args( template, arguments: args, uncooked_fmt_str, + is_source_literal, })) } diff --git a/compiler/rustc_builtin_macros/src/proc_macro_harness.rs 
b/compiler/rustc_builtin_macros/src/proc_macro_harness.rs index daf480a9ce4..42b7e0e06d1 100644 --- a/compiler/rustc_builtin_macros/src/proc_macro_harness.rs +++ b/compiler/rustc_builtin_macros/src/proc_macro_harness.rs @@ -56,7 +56,7 @@ pub fn inject( is_test_crate: bool, dcx: DiagCtxtHandle<'_>, ) { - let ecfg = ExpansionConfig::default("proc_macro".to_string(), features); + let ecfg = ExpansionConfig::default(sym::proc_macro, features); let mut cx = ExtCtxt::new(sess, ecfg, resolver, None); let mut collect = CollectProcMacros { diff --git a/compiler/rustc_builtin_macros/src/standard_library_imports.rs b/compiler/rustc_builtin_macros/src/standard_library_imports.rs index a1ee53b7ca2..682e7c9b17a 100644 --- a/compiler/rustc_builtin_macros/src/standard_library_imports.rs +++ b/compiler/rustc_builtin_macros/src/standard_library_imports.rs @@ -36,7 +36,7 @@ pub fn inject( let span = DUMMY_SP.with_def_site_ctxt(expn_id.to_expn_id()); let call_site = DUMMY_SP.with_call_site_ctxt(expn_id.to_expn_id()); - let ecfg = ExpansionConfig::default("std_lib_injection".to_string(), features); + let ecfg = ExpansionConfig::default(sym::std_lib_injection, features); let cx = ExtCtxt::new(sess, ecfg, resolver, None); let ident_span = if edition >= Edition2018 { span } else { call_site }; diff --git a/compiler/rustc_builtin_macros/src/test_harness.rs b/compiler/rustc_builtin_macros/src/test_harness.rs index 0bc313cbdac..111c85d49eb 100644 --- a/compiler/rustc_builtin_macros/src/test_harness.rs +++ b/compiler/rustc_builtin_macros/src/test_harness.rs @@ -6,7 +6,7 @@ use rustc_ast as ast; use rustc_ast::entry::EntryPointType; use rustc_ast::mut_visit::*; use rustc_ast::ptr::P; -use rustc_ast::visit::{Visitor, walk_item}; +use rustc_ast::visit::Visitor; use rustc_ast::{ModKind, attr}; use rustc_errors::DiagCtxtHandle; use rustc_expand::base::{ExtCtxt, ResolverExpand}; @@ -146,11 +146,11 @@ impl<'a> MutVisitor for TestHarnessGenerator<'a> { ) = item.kind { let prev_tests = mem::take(&mut self.tests); - walk_item_kind(&mut item.kind, item.span, item.id, &mut item.vis, (), self); + ast::mut_visit::walk_item(self, item); self.add_test_cases(item.id, span, prev_tests); } else { // But in those cases, we emit a lint to warn the user of these missing tests. - walk_item(&mut InnerItemLinter { sess: self.cx.ext_cx.sess }, &item); + ast::visit::walk_item(&mut InnerItemLinter { sess: self.cx.ext_cx.sess }, &item); } } } @@ -227,7 +227,7 @@ fn generate_test_harness( panic_strategy: PanicStrategy, test_runner: Option<ast::Path>, ) { - let econfig = ExpansionConfig::default("test".to_string(), features); + let econfig = ExpansionConfig::default(sym::test, features); let ext_cx = ExtCtxt::new(sess, econfig, resolver, None); let expn_id = ext_cx.resolver.expansion_for_ast_pass( diff --git a/compiler/rustc_codegen_cranelift/src/allocator.rs b/compiler/rustc_codegen_cranelift/src/allocator.rs index 9cff8a84db3..ffb932a3c38 100644 --- a/compiler/rustc_codegen_cranelift/src/allocator.rs +++ b/compiler/rustc_codegen_cranelift/src/allocator.rs @@ -1,6 +1,7 @@ //! 
Allocator shim // Adapted from rustc +use cranelift_frontend::{FunctionBuilder, FunctionBuilderContext}; use rustc_ast::expand::allocator::{ ALLOCATOR_METHODS, AllocatorKind, AllocatorTy, NO_ALLOC_SHIM_IS_UNSTABLE, alloc_error_handler_name, default_fn_name, global_fn_name, @@ -97,16 +98,31 @@ fn codegen_inner( data.define(Box::new([val])); module.define_data(data_id, &data).unwrap(); - let data_id = module - .declare_data( - &mangle_internal_symbol(tcx, NO_ALLOC_SHIM_IS_UNSTABLE), - Linkage::Export, - false, - false, - ) - .unwrap(); - let mut data = DataDescription::new(); - data.set_align(1); - data.define(Box::new([0])); - module.define_data(data_id, &data).unwrap(); + { + let sig = Signature { + call_conv: module.target_config().default_call_conv, + params: vec![], + returns: vec![], + }; + let func_id = module + .declare_function( + &mangle_internal_symbol(tcx, NO_ALLOC_SHIM_IS_UNSTABLE), + Linkage::Export, + &sig, + ) + .unwrap(); + + let mut ctx = Context::new(); + ctx.func.signature = sig; + let mut func_ctx = FunctionBuilderContext::new(); + let mut bcx = FunctionBuilder::new(&mut ctx.func, &mut func_ctx); + + let block = bcx.create_block(); + bcx.switch_to_block(block); + bcx.ins().return_(&[]); + bcx.seal_all_blocks(); + bcx.finalize(); + + module.define_function(func_id, &mut ctx).unwrap(); + } } diff --git a/compiler/rustc_codegen_cranelift/src/lib.rs b/compiler/rustc_codegen_cranelift/src/lib.rs index 07ea29f3024..8e34436fb5e 100644 --- a/compiler/rustc_codegen_cranelift/src/lib.rs +++ b/compiler/rustc_codegen_cranelift/src/lib.rs @@ -184,7 +184,7 @@ impl CodegenBackend for CraneliftCodegenBackend { // FIXME return the actually used target features. this is necessary for #[cfg(target_feature)] let target_features = if sess.target.arch == "x86_64" && sess.target.os != "none" { // x86_64 mandates SSE2 support and rustc requires the x87 feature to be enabled - vec![sym::fsxr, sym::sse, sym::sse2, Symbol::intern("x87")] + vec![sym::fxsr, sym::sse, sym::sse2, Symbol::intern("x87")] } else if sess.target.arch == "aarch64" { match &*sess.target.os { "none" => vec![], diff --git a/compiler/rustc_codegen_gcc/.cspell.json b/compiler/rustc_codegen_gcc/.cspell.json new file mode 100644 index 00000000000..388ccce2b09 --- /dev/null +++ b/compiler/rustc_codegen_gcc/.cspell.json @@ -0,0 +1,27 @@ +{ + "allowCompoundWords": true, + "dictionaries": ["cpp", "rust-extra", "rustc_codegen_gcc"], + "dictionaryDefinitions": [ + { + "name": "rust-extra", + "path": "tools/cspell_dicts/rust.txt", + "addWords": true + }, + { + "name": "rustc_codegen_gcc", + "path": "tools/cspell_dicts/rustc_codegen_gcc.txt", + "addWords": true + } + ], + "files": [ + "src/**/*.rs" + ], + "ignorePaths": [ + "src/intrinsic/archs.rs", + "src/intrinsic/llvm.rs" + ], + "ignoreRegExpList": [ + "/(FIXME|NOTE|TODO)\\([^)]+\\)/", + "__builtin_\\w*" + ] +} diff --git a/compiler/rustc_codegen_gcc/.github/workflows/ci.yml b/compiler/rustc_codegen_gcc/.github/workflows/ci.yml index ef024258ffc..5c8e7d62816 100644 --- a/compiler/rustc_codegen_gcc/.github/workflows/ci.yml +++ b/compiler/rustc_codegen_gcc/.github/workflows/ci.yml @@ -12,6 +12,8 @@ permissions: env: # Enable backtraces for easier debugging RUST_BACKTRACE: 1 + # For the run-make tests. + LLVM_BIN_DIR: /usr/bin jobs: build: @@ -48,7 +50,7 @@ jobs: - name: Install packages # `llvm-14-tools` is needed to install the `FileCheck` binary which is used for asm tests. 
- run: sudo apt-get install ninja-build ripgrep llvm-14-tools + run: sudo apt-get install ninja-build ripgrep llvm-14-tools llvm - name: Install rustfmt & clippy run: rustup component add rustfmt clippy @@ -61,11 +63,15 @@ jobs: sudo dpkg --force-overwrite -i ${{ matrix.libgccjit_version.gcc }} echo 'gcc-path = "/usr/lib/"' > config.toml + # Some run-make tests fail if we use our forked GCC because it doesn't + # bundle libstdc++, so we switch to gcc-14 to have a GCC that has + # libstdc++. + - name: Set default GCC to gcc-14 + run: sudo update-alternatives --install /usr/bin/cc cc /usr/bin/gcc-14 30 + - name: Set env run: | echo "workspace="$GITHUB_WORKSPACE >> $GITHUB_ENV - echo "LIBRARY_PATH=/usr/lib" >> $GITHUB_ENV - echo "LD_LIBRARY_PATH=/usr/lib" >> $GITHUB_ENV #- name: Cache rust repository ## We only clone the rust repository for rustc tests @@ -76,12 +82,22 @@ jobs: #path: rust #key: ${{ runner.os }}-packages-${{ hashFiles('rust/.git/HEAD') }} + - name: Prepare + run: ./y.sh prepare --only-libcore + + - name: Check formatting + run: ./y.sh fmt --check + + - name: clippy + run: | + cargo clippy --all-targets -- -D warnings + cargo clippy --all-targets --no-default-features -- -D warnings + cargo clippy --manifest-path build_system/Cargo.toml --all-targets -- -D warnings + - name: Build run: | - ./y.sh prepare --only-libcore ./y.sh build --sysroot - ./y.sh test --mini-tests - cargo test + ./y.sh test --cargo-tests - name: Run y.sh cargo build run: | @@ -101,20 +117,19 @@ jobs: run: | ./y.sh test --release --clean --build-sysroot ${{ matrix.commands }} - - name: Check formatting - run: ./y.sh fmt --check - - - name: clippy - run: | - cargo clippy --all-targets -- -D warnings - cargo clippy --all-targets --features master -- -D warnings - duplicates: runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v4 - run: python tools/check_intrinsics_duplicates.py + spell_check: + runs-on: ubuntu-24.04 + steps: + - uses: actions/checkout@v4 + - uses: crate-ci/typos@v1.32.0 + - uses: streetsidesoftware/cspell-action@v7 + build_system: runs-on: ubuntu-24.04 steps: diff --git a/compiler/rustc_codegen_gcc/.github/workflows/failures.yml b/compiler/rustc_codegen_gcc/.github/workflows/failures.yml index bc42eb1468e..67b7fbe4478 100644 --- a/compiler/rustc_codegen_gcc/.github/workflows/failures.yml +++ b/compiler/rustc_codegen_gcc/.github/workflows/failures.yml @@ -66,8 +66,8 @@ jobs: run: | sudo dpkg --force-overwrite -i gcc-15.deb echo 'gcc-path = "/usr/lib"' > config.toml - echo "LIBRARY_PATH=/usr/lib" >> $GITHUB_ENV - echo "LD_LIBRARY_PATH=/usr/lib" >> $GITHUB_ENV + + - name: Set env run: | diff --git a/compiler/rustc_codegen_gcc/.github/workflows/m68k.yml b/compiler/rustc_codegen_gcc/.github/workflows/m68k.yml index 21731f7087e..245bee7f2a3 100644 --- a/compiler/rustc_codegen_gcc/.github/workflows/m68k.yml +++ b/compiler/rustc_codegen_gcc/.github/workflows/m68k.yml @@ -65,8 +65,8 @@ jobs: - name: Set env run: | echo "workspace="$GITHUB_WORKSPACE >> $GITHUB_ENV - echo "LIBRARY_PATH=/usr/lib" >> $GITHUB_ENV - echo "LD_LIBRARY_PATH=/usr/lib" >> $GITHUB_ENV + + #- name: Cache rust repository ## We only clone the rust repository for rustc tests @@ -95,7 +95,7 @@ jobs: ./y.sh prepare --only-libcore --cross ./y.sh build --sysroot --features compiler_builtins/no-f16-f128 --target-triple m68k-unknown-linux-gnu ./y.sh test --mini-tests - CG_GCC_TEST_TARGET=m68k-unknown-linux-gnu cargo test + CG_GCC_TEST_TARGET=m68k-unknown-linux-gnu ./y.sh test --cargo-tests ./y.sh clean all - name: Prepare dependencies 
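As a brief aside on the `rustc_codegen_cranelift/src/allocator.rs` hunk above: the shim generator stops emitting the symbol named by `NO_ALLOC_SHIM_IS_UNSTABLE` as an exported one-byte data object and instead emits it as an exported function whose body is a single block that immediately returns. Below is a minimal sketch of the shape of that symbol in plain Rust, using the unmangled `__rust_no_alloc_shim_is_unstable_v2` name for readability (the real build passes it through `mangle_internal_symbol`) and assuming callers now call the function rather than read a static byte; it is an illustration only, not the actual codegen or standard-library source.

// Sketch only. The old symbol was roughly equivalent to an exported byte of data:
//   #[no_mangle]
//   static __rust_no_alloc_shim_is_unstable: u8 = 0;
// The new symbol is roughly equivalent to an empty exported function:
#[no_mangle]
pub extern "C" fn __rust_no_alloc_shim_is_unstable_v2() {}

fn main() {
    // A caller references the shim by calling it, which matches the
    // single-block, immediately-returning function defined in the hunk above.
    __rust_no_alloc_shim_is_unstable_v2();
}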
diff --git a/compiler/rustc_codegen_gcc/.github/workflows/release.yml b/compiler/rustc_codegen_gcc/.github/workflows/release.yml index 47a40286554..1d8eaf9a141 100644 --- a/compiler/rustc_codegen_gcc/.github/workflows/release.yml +++ b/compiler/rustc_codegen_gcc/.github/workflows/release.yml @@ -12,6 +12,8 @@ permissions: env: # Enable backtraces for easier debugging RUST_BACKTRACE: 1 + # For the run-make tests. + LLVM_BIN_DIR: /usr/bin jobs: build: @@ -36,7 +38,8 @@ jobs: uses: Swatinem/rust-cache@v2 - name: Install packages - run: sudo apt-get install ninja-build ripgrep + # `llvm-14-tools` is needed to install the `FileCheck` binary which is used for run-make tests. + run: sudo apt-get install ninja-build ripgrep llvm-14-tools llvm - name: Download artifact run: curl -LO https://github.com/rust-lang/gcc/releases/latest/download/gcc-15.deb @@ -46,18 +49,21 @@ jobs: sudo dpkg --force-overwrite -i gcc-15.deb echo 'gcc-path = "/usr/lib/"' > config.toml + # Some run-make tests fail if we use our forked GCC because it doesn't + # bundle libstdc++, so we switch to gcc-14 to have a GCC that has + # libstdc++. + - name: Set default GCC to gcc-14 + run: sudo update-alternatives --install /usr/bin/cc cc /usr/bin/gcc-14 30 + - name: Set env run: | echo "workspace="$GITHUB_WORKSPACE >> $GITHUB_ENV - echo "LIBRARY_PATH=/usr/lib" >> $GITHUB_ENV - echo "LD_LIBRARY_PATH=/usr/lib" >> $GITHUB_ENV - name: Build run: | ./y.sh prepare --only-libcore EMBED_LTO_BITCODE=1 ./y.sh build --sysroot --release --release-sysroot - ./y.sh test --mini-tests - cargo test + ./y.sh test --cargo-tests ./y.sh clean all - name: Prepare dependencies diff --git a/compiler/rustc_codegen_gcc/.github/workflows/stdarch.yml b/compiler/rustc_codegen_gcc/.github/workflows/stdarch.yml index f26ac3b755f..20d009f08a7 100644 --- a/compiler/rustc_codegen_gcc/.github/workflows/stdarch.yml +++ b/compiler/rustc_codegen_gcc/.github/workflows/stdarch.yml @@ -90,7 +90,7 @@ jobs: if: ${{ !matrix.cargo_runner }} run: | ./y.sh test --release --clean --release-sysroot --build-sysroot --mini-tests --std-tests --test-libcore - cargo test + ./y.sh test --cargo-tests - name: Run stdarch tests if: ${{ !matrix.cargo_runner }} diff --git a/compiler/rustc_codegen_gcc/.gitignore b/compiler/rustc_codegen_gcc/.gitignore index c1e6631a281..8f73d3eb972 100644 --- a/compiler/rustc_codegen_gcc/.gitignore +++ b/compiler/rustc_codegen_gcc/.gitignore @@ -19,4 +19,5 @@ tools/llvmint-2 llvm build_system/target config.toml -build \ No newline at end of file +build +rustlantis \ No newline at end of file diff --git a/compiler/rustc_codegen_gcc/CONTRIBUTING.md b/compiler/rustc_codegen_gcc/CONTRIBUTING.md index 8e313ab08b5..54cba0e6de3 100644 --- a/compiler/rustc_codegen_gcc/CONTRIBUTING.md +++ b/compiler/rustc_codegen_gcc/CONTRIBUTING.md @@ -33,7 +33,7 @@ To run specific tests, use appropriate flags such as: - `./y.sh test --test-libcore` - `./y.sh test --std-tests` -- `cargo test -- <name of test>` +- `./y.sh test --cargo-tests -- <name of test>` Additionally, you can run the tests of `libgccjit`: diff --git a/compiler/rustc_codegen_gcc/Cargo.lock b/compiler/rustc_codegen_gcc/Cargo.lock index 967a51a1cc6..b20c181a8cb 100644 --- a/compiler/rustc_codegen_gcc/Cargo.lock +++ b/compiler/rustc_codegen_gcc/Cargo.lock @@ -82,6 +82,18 @@ dependencies = [ ] [[package]] +name = "getrandom" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" +dependencies = [ + "cfg-if", + 
"libc", + "r-efi", + "wasi", +] + +[[package]] name = "hermit-abi" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -111,9 +123,9 @@ checksum = "5aaeb2981e0606ca11d79718f8bb01164f1d6ed75080182d3abf017e6d244b6d" [[package]] name = "linux-raw-sys" -version = "0.4.14" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" +checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" [[package]] name = "memchr" @@ -138,6 +150,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" [[package]] +name = "r-efi" +version = "5.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" + +[[package]] name = "regex" version = "1.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -166,9 +184,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.42" +version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f93dc38ecbab2eb790ff964bb77fa94faf256fd3e73285fd7ba0903b76bedb85" +checksum = "c71e83d6afe7ff64890ec6b71d6a69bb8a610ab78ce364b3352876bb4c801266" dependencies = [ "bitflags", "errno", @@ -188,12 +206,12 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.14.0" +version = "3.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28cce251fcbc87fac86a866eeb0d6c2d536fc16d06f184bb61aeae11aa4cee0c" +checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1" dependencies = [ - "cfg-if", "fastrand", + "getrandom", "once_cell", "rustix", "windows-sys", @@ -243,6 +261,15 @@ dependencies = [ ] [[package]] +name = "wasi" +version = "0.14.2+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" +dependencies = [ + "wit-bindgen-rt", +] + +[[package]] name = "winapi" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -345,3 +372,12 @@ name = "windows_x86_64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "wit-bindgen-rt" +version = "0.39.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" +dependencies = [ + "bitflags", +] diff --git a/compiler/rustc_codegen_gcc/Cargo.toml b/compiler/rustc_codegen_gcc/Cargo.toml index c692a90f0a4..c284e3f060b 100644 --- a/compiler/rustc_codegen_gcc/Cargo.toml +++ b/compiler/rustc_codegen_gcc/Cargo.toml @@ -31,7 +31,7 @@ gccjit = "2.7" [dev-dependencies] boml = "0.3.1" lang_tester = "0.8.0" -tempfile = "3.7.1" +tempfile = "3.20" [profile.dev] # By compiling dependencies with optimizations, performing tests gets much faster. 
diff --git a/compiler/rustc_codegen_gcc/_typos.toml b/compiler/rustc_codegen_gcc/_typos.toml new file mode 100644 index 00000000000..4a6a506a981 --- /dev/null +++ b/compiler/rustc_codegen_gcc/_typos.toml @@ -0,0 +1,9 @@ +[default.extend-words] +ba = "ba" +hsa = "hsa" +olt = "olt" +seh = "seh" +typ = "typ" + +[files] +extend-exclude = ["src/intrinsic/archs.rs"] diff --git a/compiler/rustc_codegen_gcc/build_system/build_sysroot/Cargo.lock b/compiler/rustc_codegen_gcc/build_system/build_sysroot/Cargo.lock index 51bec5aa9e3..0c75977ee79 100644 --- a/compiler/rustc_codegen_gcc/build_system/build_sysroot/Cargo.lock +++ b/compiler/rustc_codegen_gcc/build_system/build_sysroot/Cargo.lock @@ -1,24 +1,24 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. -version = 3 +version = 4 [[package]] name = "addr2line" -version = "0.22.0" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678" +checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" dependencies = [ "compiler_builtins", - "gimli 0.29.0", + "gimli", "rustc-std-workspace-alloc", "rustc-std-workspace-core", ] [[package]] -name = "adler" -version = "1.0.2" +name = "adler2" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" +checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" dependencies = [ "compiler_builtins", "rustc-std-workspace-core", @@ -33,10 +33,21 @@ dependencies = [ ] [[package]] -name = "allocator-api2" -version = "0.2.18" +name = "alloctests" +version = "0.0.0" +dependencies = [ + "rand", + "rand_xorshift", +] + +[[package]] +name = "cc" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" +checksum = "1aeb932158bd710538c73702db6945cb68a8fb08c519e6e12706b94263b36db8" +dependencies = [ + "shlex", +] [[package]] name = "cfg-if" @@ -50,10 +61,11 @@ dependencies = [ [[package]] name = "compiler_builtins" -version = "0.1.118" +version = "0.1.160" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92afe7344b64cccf3662ca26d5d1c0828ab826f04206b97d856e3625e390e4b5" +checksum = "6376049cfa92c0aa8b9ac95fae22184b981c658208d4ed8a1dc553cd83612895" dependencies = [ + "cc", "rustc-std-workspace-core", ] @@ -62,10 +74,18 @@ name = "core" version = "0.0.0" [[package]] +name = "coretests" +version = "0.0.0" +dependencies = [ + "rand", + "rand_xorshift", +] + +[[package]] name = "dlmalloc" -version = "0.2.6" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3264b043b8e977326c1ee9e723da2c1f8d09a99df52cacf00b4dbce5ac54414d" +checksum = "8cff88b751e7a276c4ab0e222c3f355190adc6dde9ce39c851db39da34990df7" dependencies = [ "cfg-if", "compiler_builtins", @@ -97,20 +117,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.29.0" +version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" -dependencies = [ - "compiler_builtins", - "rustc-std-workspace-alloc", - "rustc-std-workspace-core", -] - -[[package]] -name = "gimli" -version = "0.30.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e2e1d97fbe9722ba9bbd0c97051c2956e726562b61f86a25a4360398a40edfc9" +checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" dependencies = [ "compiler_builtins", "rustc-std-workspace-alloc", @@ -119,11 +128,10 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.14.5" +version = "0.15.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" +checksum = "84b26c544d002229e640969970a2e74021aadf6e2f96372b9c58eff97de08eb3" dependencies = [ - "allocator-api2", "compiler_builtins", "rustc-std-workspace-alloc", "rustc-std-workspace-core", @@ -131,9 +139,9 @@ dependencies = [ [[package]] name = "hermit-abi" -version = "0.4.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" +checksum = "f154ce46856750ed433c8649605bf7ed2de3bc35fd9d2a9f30cddd873c80cb08" dependencies = [ "compiler_builtins", "rustc-std-workspace-alloc", @@ -142,9 +150,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.155" +version = "0.2.172" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" +checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa" dependencies = [ "rustc-std-workspace-core", ] @@ -161,11 +169,11 @@ dependencies = [ [[package]] name = "miniz_oxide" -version = "0.7.4" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08" +checksum = "3be647b768db090acb35d5ec5db2b0e1f1de11133ca123b9eacf5137868f892a" dependencies = [ - "adler", + "adler2", "compiler_builtins", "rustc-std-workspace-alloc", "rustc-std-workspace-core", @@ -173,9 +181,9 @@ dependencies = [ [[package]] name = "object" -version = "0.36.3" +version = "0.36.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "27b64972346851a39438c60b341ebc01bba47464ae329e55cf343eb93964efd9" +checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" dependencies = [ "compiler_builtins", "memchr", @@ -188,7 +196,6 @@ name = "panic_abort" version = "0.0.0" dependencies = [ "alloc", - "cfg-if", "compiler_builtins", "core", "libc", @@ -211,14 +218,22 @@ name = "proc_macro" version = "0.0.0" dependencies = [ "core", + "rustc-literal-escaper", "std", ] [[package]] +name = "profiler_builtins" +version = "0.0.0" +dependencies = [ + "cc", +] + +[[package]] name = "r-efi" -version = "4.5.0" +version = "5.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9e935efc5854715dfc0a4c9ef18dc69dee0ec3bf9cc3ab740db831c0fdd86a3" +checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" dependencies = [ "compiler_builtins", "rustc-std-workspace-core", @@ -226,9 +241,9 @@ dependencies = [ [[package]] name = "r-efi-alloc" -version = "1.0.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31d6f09fe2b6ad044bc3d2c34ce4979796581afd2f1ebc185837e02421e02fd7" +checksum = "e43c53ff1a01d423d1cb762fd991de07d32965ff0ca2e4f80444ac7804198203" dependencies = [ "compiler_builtins", "r-efi", @@ -236,6 +251,30 @@ dependencies = [ ] [[package]] +name = "rand" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9fbfd9d094a40bf3ae768db9361049ace4c0e04a4fd6b359518bd7b73a73dd97" +dependencies = [ + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" + +[[package]] +name = "rand_xorshift" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "513962919efc330f829edb2535844d1b912b0fbe2ca165d613e4e8788bb05a5a" +dependencies = [ + "rand_core", +] + +[[package]] name = "rustc-demangle" version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -246,6 +285,15 @@ dependencies = [ ] [[package]] +name = "rustc-literal-escaper" +version = "0.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0041b6238913c41fe704213a4a9329e2f685a156d1781998128b4149c230ad04" +dependencies = [ + "rustc-std-workspace-std", +] + +[[package]] name = "rustc-std-workspace-alloc" version = "1.99.0" dependencies = [ @@ -267,6 +315,12 @@ dependencies = [ ] [[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] name = "std" version = "0.0.0" dependencies = [ @@ -286,10 +340,13 @@ dependencies = [ "panic_unwind", "r-efi", "r-efi-alloc", + "rand", + "rand_xorshift", "rustc-demangle", "std_detect", "unwind", "wasi", + "windows-targets 0.0.0", ] [[package]] @@ -298,6 +355,7 @@ version = "0.1.5" dependencies = [ "cfg-if", "compiler_builtins", + "libc", "rustc-std-workspace-alloc", "rustc-std-workspace-core", ] @@ -306,10 +364,8 @@ dependencies = [ name = "sysroot" version = "0.0.0" dependencies = [ - "alloc", - "compiler_builtins", - "core", "proc_macro", + "profiler_builtins", "std", "test", ] @@ -326,9 +382,9 @@ dependencies = [ [[package]] name = "unicode-width" -version = "0.1.13" +version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d" +checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" dependencies = [ "compiler_builtins", "rustc-std-workspace-core", @@ -348,12 +404,12 @@ dependencies = [ [[package]] name = "unwinding" -version = "0.2.2" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc55842d0db6329a669d55a623c674b02d677b16bfb2d24857d4089d41eba882" +checksum = "8393f2782b6060a807337ff353780c1ca15206f9ba2424df18cb6e733bd7b345" dependencies = [ "compiler_builtins", - "gimli 0.30.0", + "gimli", "rustc-std-workspace-core", ] @@ -370,15 +426,19 @@ dependencies = [ [[package]] name = "windows-sys" -version = "0.52.0" +version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" dependencies = [ - "windows-targets", + "windows-targets 0.52.6", ] [[package]] name = "windows-targets" +version = "0.0.0" + +[[package]] +name = "windows-targets" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" diff --git a/compiler/rustc_codegen_gcc/build_system/src/build.rs b/compiler/rustc_codegen_gcc/build_system/src/build.rs index e98377f15a9..ecc4c1b2fe2 100644 --- 
a/compiler/rustc_codegen_gcc/build_system/src/build.rs +++ b/compiler/rustc_codegen_gcc/build_system/src/build.rs @@ -33,7 +33,7 @@ impl BuildArg { } arg => { if !build_arg.config_info.parse_argument(arg, &mut args)? { - return Err(format!("Unknown argument `{}`", arg)); + return Err(format!("Unknown argument `{arg}`")); } } } @@ -105,14 +105,14 @@ pub fn create_build_sysroot_content(start_dir: &Path) -> Result<(), String> { if !start_dir.is_dir() { create_dir(start_dir)?; } - copy_file("build_system/build_sysroot/Cargo.toml", &start_dir.join("Cargo.toml"))?; - copy_file("build_system/build_sysroot/Cargo.lock", &start_dir.join("Cargo.lock"))?; + copy_file("build_system/build_sysroot/Cargo.toml", start_dir.join("Cargo.toml"))?; + copy_file("build_system/build_sysroot/Cargo.lock", start_dir.join("Cargo.lock"))?; let src_dir = start_dir.join("src"); if !src_dir.is_dir() { create_dir(&src_dir)?; } - copy_file("build_system/build_sysroot/lib.rs", &start_dir.join("src/lib.rs")) + copy_file("build_system/build_sysroot/lib.rs", start_dir.join("src/lib.rs")) } pub fn build_sysroot(env: &HashMap<String, String>, config: &ConfigInfo) -> Result<(), String> { @@ -169,7 +169,7 @@ pub fn build_sysroot(env: &HashMap<String, String>, config: &ConfigInfo) -> Resu run_command(&[&"cp", &"-r", &dir_to_copy, &sysroot_path], None).map(|_| ()) }; walk_dir( - start_dir.join(&format!("target/{}/{}/deps", config.target_triple, channel)), + start_dir.join(format!("target/{}/{}/deps", config.target_triple, channel)), &mut copier.clone(), &mut copier, false, diff --git a/compiler/rustc_codegen_gcc/build_system/src/clean.rs b/compiler/rustc_codegen_gcc/build_system/src/clean.rs index 768a78e789e..a441ed613f9 100644 --- a/compiler/rustc_codegen_gcc/build_system/src/clean.rs +++ b/compiler/rustc_codegen_gcc/build_system/src/clean.rs @@ -17,12 +17,12 @@ enum CleanArg { impl CleanArg { fn new() -> Result<Self, String> { // We skip the binary and the "clean" option. - for arg in std::env::args().skip(2) { + if let Some(arg) = std::env::args().nth(2) { return match arg.as_str() { "all" => Ok(Self::All), "ui-tests" => Ok(Self::UiTests), "--help" => Ok(Self::Help), - a => Err(format!("Unknown argument `{}`", a)), + a => Err(format!("Unknown argument `{a}`")), }; } Ok(Self::default()) diff --git a/compiler/rustc_codegen_gcc/build_system/src/clone_gcc.rs b/compiler/rustc_codegen_gcc/build_system/src/clone_gcc.rs index b49dd47f352..ee683df419c 100644 --- a/compiler/rustc_codegen_gcc/build_system/src/clone_gcc.rs +++ b/compiler/rustc_codegen_gcc/build_system/src/clone_gcc.rs @@ -43,7 +43,7 @@ impl Args { } arg => { if !command_args.config_info.parse_argument(arg, &mut args)? 
{ - return Err(format!("Unknown option {}", arg)); + return Err(format!("Unknown option {arg}")); } } } @@ -52,7 +52,7 @@ impl Args { Some(p) => p.into(), None => PathBuf::from("./gcc"), }; - return Ok(Some(command_args)); + Ok(Some(command_args)) } } @@ -64,7 +64,7 @@ pub fn run() -> Result<(), String> { let result = git_clone("https://github.com/rust-lang/gcc", Some(&args.out_path), false)?; if result.ran_clone { let gcc_commit = args.config_info.get_gcc_commit()?; - println!("Checking out GCC commit `{}`...", gcc_commit); + println!("Checking out GCC commit `{gcc_commit}`..."); run_command_with_output( &[&"git", &"checkout", &gcc_commit], Some(Path::new(&result.repo_dir)), diff --git a/compiler/rustc_codegen_gcc/build_system/src/config.rs b/compiler/rustc_codegen_gcc/build_system/src/config.rs index 4f9fcc97151..650c030ca53 100644 --- a/compiler/rustc_codegen_gcc/build_system/src/config.rs +++ b/compiler/rustc_codegen_gcc/build_system/src/config.rs @@ -66,7 +66,7 @@ impl ConfigFile { "Expected a boolean for `download-gccjit`", ); } - _ => return failed_config_parsing(config_file, &format!("Unknown key `{}`", key)), + _ => return failed_config_parsing(config_file, &format!("Unknown key `{key}`")), } } match (config.gcc_path.as_mut(), config.download_gccjit) { @@ -86,9 +86,7 @@ impl ConfigFile { let path = Path::new(gcc_path); *gcc_path = path .canonicalize() - .map_err(|err| { - format!("Failed to get absolute path of `{}`: {:?}", gcc_path, err) - })? + .map_err(|err| format!("Failed to get absolute path of `{gcc_path}`: {err:?}"))? .display() .to_string(); } @@ -175,7 +173,7 @@ impl ConfigInfo { "--sysroot-panic-abort" => self.sysroot_panic_abort = true, "--gcc-path" => match args.next() { Some(arg) if !arg.is_empty() => { - self.gcc_path = Some(arg.into()); + self.gcc_path = Some(arg); } _ => { return Err("Expected a value after `--gcc-path`, found nothing".to_string()); @@ -244,7 +242,7 @@ impl ConfigInfo { let libgccjit_so = output_dir.join(libgccjit_so_name); if !libgccjit_so.is_file() && !self.no_download { // Download time! - let tempfile_name = format!("{}.download", libgccjit_so_name); + let tempfile_name = format!("{libgccjit_so_name}.download"); let tempfile = output_dir.join(&tempfile_name); let is_in_ci = std::env::var("GITHUB_ACTIONS").is_ok(); @@ -262,14 +260,14 @@ impl ConfigInfo { ) })?; - println!("Downloaded libgccjit.so version {} successfully!", commit); + println!("Downloaded libgccjit.so version {commit} successfully!"); // We need to create a link named `libgccjit.so.0` because that's what the linker is // looking for. - create_symlink(&libgccjit_so, output_dir.join(&format!("{}.0", libgccjit_so_name)))?; + create_symlink(&libgccjit_so, output_dir.join(format!("{libgccjit_so_name}.0")))?; } let gcc_path = output_dir.display().to_string(); - println!("Using `{}` as path for libgccjit", gcc_path); + println!("Using `{gcc_path}` as path for libgccjit"); self.gcc_path = Some(gcc_path); Ok(()) } @@ -286,8 +284,7 @@ impl ConfigInfo { // since we already have everything we need. if let Some(gcc_path) = &self.gcc_path { println!( - "`--gcc-path` was provided, ignoring config file. Using `{}` as path for libgccjit", - gcc_path + "`--gcc-path` was provided, ignoring config file. 
Using `{gcc_path}` as path for libgccjit" ); return Ok(()); } @@ -343,7 +340,7 @@ impl ConfigInfo { self.dylib_ext = match os_name.as_str() { "Linux" => "so", "Darwin" => "dylib", - os => return Err(format!("unsupported OS `{}`", os)), + os => return Err(format!("unsupported OS `{os}`")), } .to_string(); let rustc = match env.get("RUSTC") { @@ -355,10 +352,10 @@ impl ConfigInfo { None => return Err("no host found".to_string()), }; - if self.target_triple.is_empty() { - if let Some(overwrite) = env.get("OVERWRITE_TARGET_TRIPLE") { - self.target_triple = overwrite.clone(); - } + if self.target_triple.is_empty() + && let Some(overwrite) = env.get("OVERWRITE_TARGET_TRIPLE") + { + self.target_triple = overwrite.clone(); } if self.target_triple.is_empty() { self.target_triple = self.host_triple.clone(); @@ -378,7 +375,7 @@ impl ConfigInfo { } let current_dir = - std_env::current_dir().map_err(|error| format!("`current_dir` failed: {:?}", error))?; + std_env::current_dir().map_err(|error| format!("`current_dir` failed: {error:?}"))?; let channel = if self.channel == Channel::Release { "release" } else if let Some(channel) = env.get("CHANNEL") { @@ -391,15 +388,15 @@ impl ConfigInfo { self.cg_backend_path = current_dir .join("target") .join(channel) - .join(&format!("librustc_codegen_gcc.{}", self.dylib_ext)) + .join(format!("librustc_codegen_gcc.{}", self.dylib_ext)) .display() .to_string(); self.sysroot_path = - current_dir.join(&get_sysroot_dir()).join("sysroot").display().to_string(); + current_dir.join(get_sysroot_dir()).join("sysroot").display().to_string(); if let Some(backend) = &self.backend { // This option is only used in the rust compiler testsuite. The sysroot is handled // by its build system directly so no need to set it ourselves. - rustflags.push(format!("-Zcodegen-backend={}", backend)); + rustflags.push(format!("-Zcodegen-backend={backend}")); } else { rustflags.extend_from_slice(&[ "--sysroot".to_string(), @@ -412,10 +409,10 @@ impl ConfigInfo { // We have a different environment variable than RUSTFLAGS to make sure those flags are // only sent to rustc_codegen_gcc and not the LLVM backend. if let Some(cg_rustflags) = env.get("CG_RUSTFLAGS") { - rustflags.extend_from_slice(&split_args(&cg_rustflags)?); + rustflags.extend_from_slice(&split_args(cg_rustflags)?); } if let Some(test_flags) = env.get("TEST_FLAGS") { - rustflags.extend_from_slice(&split_args(&test_flags)?); + rustflags.extend_from_slice(&split_args(test_flags)?); } if let Some(linker) = linker { @@ -438,8 +435,8 @@ impl ConfigInfo { env.insert("RUSTC_LOG".to_string(), "warn".to_string()); let sysroot = current_dir - .join(&get_sysroot_dir()) - .join(&format!("sysroot/lib/rustlib/{}/lib", self.target_triple)); + .join(get_sysroot_dir()) + .join(format!("sysroot/lib/rustlib/{}/lib", self.target_triple)); let ld_library_path = format!( "{target}:{sysroot}:{gcc_path}", target = self.cargo_target_dir, @@ -505,7 +502,7 @@ fn download_gccjit( with_progress_bar: bool, ) -> Result<(), String> { let url = if std::env::consts::OS == "linux" && std::env::consts::ARCH == "x86_64" { - format!("https://github.com/rust-lang/gcc/releases/download/master-{}/libgccjit.so", commit) + format!("https://github.com/rust-lang/gcc/releases/download/master-{commit}/libgccjit.so") } else { eprintln!( "\ @@ -518,7 +515,7 @@ to `download-gccjit = false` and set `gcc-path` to the appropriate directory." )); }; - println!("Downloading `{}`...", url); + println!("Downloading `{url}`..."); // Try curl. 
If that fails and we are on windows, fallback to PowerShell. let mut ret = run_command_with_output( @@ -538,7 +535,7 @@ to `download-gccjit = false` and set `gcc-path` to the appropriate directory." if with_progress_bar { &"--progress-bar" } else { &"-s" }, &url.as_str(), ], - Some(&output_dir), + Some(output_dir), ); if ret.is_err() && cfg!(windows) { eprintln!("Fallback to PowerShell"); @@ -549,12 +546,11 @@ to `download-gccjit = false` and set `gcc-path` to the appropriate directory." &"-Command", &"[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;", &format!( - "(New-Object System.Net.WebClient).DownloadFile('{}', '{}')", - url, tempfile_name, + "(New-Object System.Net.WebClient).DownloadFile('{url}', '{tempfile_name}')", ) .as_str(), ], - Some(&output_dir), + Some(output_dir), ); } ret diff --git a/compiler/rustc_codegen_gcc/build_system/src/fmt.rs b/compiler/rustc_codegen_gcc/build_system/src/fmt.rs index de310a6a30f..7e6594f50f9 100644 --- a/compiler/rustc_codegen_gcc/build_system/src/fmt.rs +++ b/compiler/rustc_codegen_gcc/build_system/src/fmt.rs @@ -16,21 +16,21 @@ fn show_usage() { pub fn run() -> Result<(), String> { let mut check = false; // We skip binary name and the `info` command. - let mut args = std::env::args().skip(2); - while let Some(arg) = args.next() { + let args = std::env::args().skip(2); + for arg in args { match arg.as_str() { "--help" => { show_usage(); return Ok(()); } "--check" => check = true, - _ => return Err(format!("Unknown option {}", arg)), + _ => return Err(format!("Unknown option {arg}")), } } let cmd: &[&dyn AsRef<OsStr>] = if check { &[&"cargo", &"fmt", &"--check"] } else { &[&"cargo", &"fmt"] }; - run_command_with_output(cmd, Some(&Path::new(".")))?; - run_command_with_output(cmd, Some(&Path::new("build_system"))) + run_command_with_output(cmd, Some(Path::new(".")))?; + run_command_with_output(cmd, Some(Path::new("build_system"))) } diff --git a/compiler/rustc_codegen_gcc/build_system/src/fuzz.rs b/compiler/rustc_codegen_gcc/build_system/src/fuzz.rs new file mode 100644 index 00000000000..453211366b3 --- /dev/null +++ b/compiler/rustc_codegen_gcc/build_system/src/fuzz.rs @@ -0,0 +1,289 @@ +use std::ffi::OsStr; +use std::path::Path; + +mod reduce; + +use crate::utils::run_command_with_output; + +fn show_usage() { + println!( + r#" +`fuzz` command help: + --reduce : Reduces a file generated by rustlantis + --help : Show this help + --start : Start of the fuzzed range + --count : The number of cases to fuzz + -j --jobs : The number of threads to use during fuzzing"# + ); +} + +pub fn run() -> Result<(), String> { + // We skip binary name and the `fuzz` command. + let mut args = std::env::args().skip(2); + let mut start = 0; + let mut count = 100; + let mut threads = + std::thread::available_parallelism().map(|threads| threads.get()).unwrap_or(1); + while let Some(arg) = args.next() { + match arg.as_str() { + "--reduce" => { + let Some(path) = args.next() else { + return Err("--reduce must be provided with a path".into()); + }; + if !std::fs::exists(&path).unwrap_or(false) { + return Err("--reduce must be provided with a valid path".into()); + } + reduce::reduce(&path); + return Ok(()); + } + "--help" => { + show_usage(); + return Ok(()); + } + "--start" => { + start = + str::parse(&args.next().ok_or_else(|| "Fuzz start not provided!".to_string())?) 
+ .map_err(|err| (format!("Fuzz start not a number {err:?}!")))?; + } + "--count" => { + count = + str::parse(&args.next().ok_or_else(|| "Fuzz count not provided!".to_string())?) + .map_err(|err| (format!("Fuzz count not a number {err:?}!")))?; + } + "-j" | "--jobs" => { + threads = str::parse( + &args.next().ok_or_else(|| "Fuzz thread count not provided!".to_string())?, + ) + .map_err(|err| (format!("Fuzz thread count not a number {err:?}!")))?; + } + _ => return Err(format!("Unknown option {arg}")), + } + } + + // Ensure that we have a cloned version of rustlantis on hand. + crate::utils::git_clone( + "https://github.com/cbeuw/rustlantis.git", + Some("clones/rustlantis".as_ref()), + true, + ) + .map_err(|err| (format!("Git clone failed with message: {err:?}!")))?; + + // Ensure that we are on the newest rustlantis commit. + let cmd: &[&dyn AsRef<OsStr>] = &[&"git", &"pull", &"origin"]; + run_command_with_output(cmd, Some(Path::new("clones/rustlantis")))?; + + // Build the release version of rustlantis + let cmd: &[&dyn AsRef<OsStr>] = &[&"cargo", &"build", &"--release"]; + run_command_with_output(cmd, Some(Path::new("clones/rustlantis")))?; + // Fuzz a given range + fuzz_range(start, start + count, threads); + Ok(()) +} + +/// Fuzzes a range `start..end` with `threads`. +fn fuzz_range(start: u64, end: u64, threads: usize) { + use std::sync::Arc; + use std::sync::atomic::{AtomicU64, Ordering}; + use std::time::{Duration, Instant}; + // Total amount of files to fuzz + let total = end - start; + // Currently fuzzed element + let start = Arc::new(AtomicU64::new(start)); + // Count time during fuzzing + let start_time = Instant::now(); + let mut workers = Vec::with_capacity(threads); + // Spawn `threads`.. + for _ in 0..threads { + let start = start.clone(); + // .. which each will .. + workers.push(std::thread::spawn(move || { + // ... grab the next fuzz seed ... + while start.load(Ordering::Relaxed) < end { + let next = start.fetch_add(1, Ordering::Relaxed); + // .. test that seed . + match test(next, false) { + Err(err) => { + // If the test failed at compile-time... + println!("test({next}) failed because {err:?}"); + // ... copy that file to the directory `target/fuzz/compiletime_error`... + let mut out_path: std::path::PathBuf = + "target/fuzz/compiletime_error".into(); + std::fs::create_dir_all(&out_path).unwrap(); + // .. into a file named `fuzz{seed}.rs`. + out_path.push(format!("fuzz{next}.rs")); + std::fs::copy(err, out_path).unwrap(); + } + Ok(Err(err)) => { + // If the test failed at run-time... + println!("The LLVM and GCC results don't match for {err:?}"); + // ... generate a new file, which prints temporaries(instead of hashing them)... + let mut out_path: std::path::PathBuf = "target/fuzz/runtime_error".into(); + std::fs::create_dir_all(&out_path).unwrap(); + let Ok(Err(tmp_print_err)) = test(next, true) else { + // ... if that file does not reproduce the issue... + // ... save the original sample in a file named `fuzz{seed}.rs`... + out_path.push(format!("fuzz{next}.rs")); + std::fs::copy(err, &out_path).unwrap(); + continue; + }; + // ... if that new file still produces the issue, copy it to `fuzz{seed}.rs`.. + out_path.push(format!("fuzz{next}.rs")); + std::fs::copy(tmp_print_err, &out_path).unwrap(); + // ... and start reducing it, using some properties of `rustlantis` to speed up the process. + reduce::reduce(&out_path); + } + // If the test passed, do nothing + Ok(Ok(())) => (), + } + } + })); + } + // The "manager" thread loop. 
+ while start.load(Ordering::Relaxed) < end || !workers.iter().all(|t| t.is_finished()) { + // Every 500 ms... + let five_hundred_millis = Duration::from_millis(500); + std::thread::sleep(five_hundred_millis); + // ... calculate the remaining fuzz iters ... + let remaining = end - start.load(Ordering::Relaxed); + // ... fix the count (the start counter counts the cases that + // began fuzzing, and not only the ones that are done)... + let fuzzed = (total - remaining).saturating_sub(threads as u64); + // ... and the fuzz speed ... + let iter_per_sec = fuzzed as f64 / start_time.elapsed().as_secs_f64(); + // .. and use them to display fuzzing stats. + println!( + "fuzzed {fuzzed} cases({}%), at rate {iter_per_sec} iter/s, remaining ~{}s", + (100 * fuzzed) as f64 / total as f64, + (remaining as f64) / iter_per_sec + ) + } + drop(workers); +} + +/// Builds & runs a file with LLVM. +fn debug_llvm(path: &std::path::Path) -> Result<Vec<u8>, String> { + // Build a file named `llvm_elf`... + let exe_path = path.with_extension("llvm_elf"); + // ... using the LLVM backend ... + let output = std::process::Command::new("rustc") + .arg(path) + .arg("-o") + .arg(&exe_path) + .output() + .map_err(|err| format!("{err:?}"))?; + // ... check that the compilation succeeded ... + if !output.status.success() { + return Err(format!("LLVM compilation failed:{output:?}")); + } + // ... run the resulting executable ... + let output = + std::process::Command::new(&exe_path).output().map_err(|err| format!("{err:?}"))?; + // ... check it ran normally ... + if !output.status.success() { + return Err(format!( + "The program at {path:?}, compiled with LLVM, exited unsuccessfully:{output:?}" + )); + } + // ... cleanup that executable ... + std::fs::remove_file(exe_path).map_err(|err| format!("{err:?}"))?; + // ... and return the output (stdout + stderr - this allows UB checks to fire). + let mut res = output.stdout; + res.extend(output.stderr); + Ok(res) +} + +/// Builds & runs a file with GCC. +fn release_gcc(path: &std::path::Path) -> Result<Vec<u8>, String> { + // Build a file named `gcc_elf`... + let exe_path = path.with_extension("gcc_elf"); + // ... using the GCC backend ... + let output = std::process::Command::new("./y.sh") + .arg("rustc") + .arg(path) + .arg("-O") + .arg("-o") + .arg(&exe_path) + .output() + .map_err(|err| format!("{err:?}"))?; + // ... check that the compilation succeeded ... + if !output.status.success() { + return Err(format!("GCC compilation failed:{output:?}")); + } + // ... run the resulting executable .. + let output = + std::process::Command::new(&exe_path).output().map_err(|err| format!("{err:?}"))?; + // ... check it ran normally ... + if !output.status.success() { + return Err(format!( + "The program at {path:?}, compiled with GCC, exited unsuccessfully:{output:?}" + )); + } + // ... cleanup that executable ... + std::fs::remove_file(exe_path).map_err(|err| format!("{err:?}"))?; + // ... and return the output (stdout + stderr - this allows UB checks to fire). + let mut res = output.stdout; + res.extend(output.stderr); + Ok(res) +} +type ResultCache = Option<(Vec<u8>, Vec<u8>)>; +/// Generates a new rustlantis file, & compares the result of running it with GCC and LLVM. +fn test(seed: u64, print_tmp_vars: bool) -> Result<Result<(), std::path::PathBuf>, String> { + // Generate a Rust source... + let source_file = generate(seed, print_tmp_vars)?; + test_file(&source_file, true) +} +/// Tests a file with a cached LLVM result. 
Used for reduction, when it is known +/// that a given transformation should not change the execution result. +fn test_cached( + source_file: &Path, + remove_tmps: bool, + cache: &mut ResultCache, +) -> Result<Result<(), std::path::PathBuf>, String> { + // Test `source_file` with release GCC ... + let gcc_res = release_gcc(source_file)?; + if cache.is_none() { + // ...test `source_file` with debug LLVM ... + *cache = Some((debug_llvm(source_file)?, gcc_res.clone())); + } + let (llvm_res, old_gcc) = cache.as_ref().unwrap(); + // ... compare the results ... + if *llvm_res != gcc_res && gcc_res == *old_gcc { + // .. if they don't match, report an error. + Ok(Err(source_file.to_path_buf())) + } else { + if remove_tmps { + std::fs::remove_file(source_file).map_err(|err| format!("{err:?}"))?; + } + Ok(Ok(())) + } +} +fn test_file( + source_file: &Path, + remove_tmps: bool, +) -> Result<Result<(), std::path::PathBuf>, String> { + let mut uncached = None; + test_cached(source_file, remove_tmps, &mut uncached) +} + +/// Generates a new rustlantis file for us to run tests on. +fn generate(seed: u64, print_tmp_vars: bool) -> Result<std::path::PathBuf, String> { + use std::io::Write; + let mut out_path = std::env::temp_dir(); + out_path.push(format!("fuzz{seed}.rs")); + // We need to get the command output here. + let mut generate = std::process::Command::new("cargo"); + generate + .args(["run", "--release", "--bin", "generate"]) + .arg(format!("{seed}")) + .current_dir("clones/rustlantis"); + if print_tmp_vars { + generate.arg("--debug"); + } + let out = generate.output().map_err(|err| format!("{err:?}"))?; + // Stuff the rustlantis output in a source file. + std::fs::File::create(&out_path) + .map_err(|err| format!("{err:?}"))? + .write_all(&out.stdout) + .map_err(|err| format!("{err:?}"))?; + Ok(out_path) +} diff --git a/compiler/rustc_codegen_gcc/build_system/src/fuzz/reduce.rs b/compiler/rustc_codegen_gcc/build_system/src/fuzz/reduce.rs new file mode 100644 index 00000000000..20715ab0e7c --- /dev/null +++ b/compiler/rustc_codegen_gcc/build_system/src/fuzz/reduce.rs @@ -0,0 +1,432 @@ +use std::io::Write; +use std::path::{Path, PathBuf}; + +use super::ResultCache; + +/// Saves a reduced file for a given `stage` +fn save_reduction(lines: &[String], path: &Path, stage: &str) { + let mut path = path.to_path_buf(); + path.set_extension(format!("rs.{stage}")); + let mut file = std::fs::File::create(&path).expect("Could not create the reduced example file"); + for line in lines { + file.write_all(line.as_bytes()).expect("Could not save the reduced example"); + } +} + +/// Checks if a given reduction is valid. +fn test_reduction(lines: &[String], path: &Path, cache: &mut ResultCache) -> bool { + let mut path = path.to_path_buf(); + path.set_extension("rs_reduced"); + let mut file = std::fs::File::create(&path).expect("Could not create the reduced example file"); + for line in lines { + file.write_all(line.as_bytes()).expect("Could not save the reduced example"); + } + let res = super::test_cached(&path, false, cache); + let Ok(Err(_)) = res else { + return false; + }; + true +} + +/// Removes duplicate assignments in bulk. +/// If a line A = B is followed directly by A = C, +/// then removing the first line ought to be fully sound, +/// and not change the behaviour of the program at all. Detect & remove such lines. 
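+/// For example, `a = 5_u32;` immediately followed by `a = 7_u32;` makes the first
+/// assignment dead, so it can be dropped - provided the second assignment does not
+/// itself read from `a`, which the pass below checks before removing anything.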
+fn remove_dup_assign(
+ file: &mut Vec<String>,
+ path: &PathBuf,
+ starts: usize,
+ ends: usize,
+ cache: &mut ResultCache,
+) {
+ let mut file_copy = file.clone();
+ let mut reduction_count = 0;
+ // Not worth it.
+ if ends - starts < 8 {
+ return;
+ }
+ for index in starts..ends {
+ let Some((prefix, _)) = file_copy[index].split_once('=') else {
+ continue;
+ };
+ let Some((prefix2, postfix2)) = file_copy[index + 1].split_once('=') else {
+ continue;
+ };
+ let prefix = prefix.trim();
+ let prefix2 = prefix2.trim();
+ // FIXME: Right now, remove_dup_assign cares about assignments to the exact same place.
+ // However, given an assignment like this:
+ // ```
+ // A.0 = 1_u32;
+ // A = (2_u32, 3.0);
+ // ```
+ // The first assignment could be safely omitted.
+ // Additionally, we try to check if the second assignment could depend on the first one.
+ // In such cases, the result is likely to change, so we bail.
+ if prefix == prefix2 && !postfix2.contains(prefix) {
+ file_copy[index] = "".into();
+ reduction_count += 1;
+ }
+ }
+ // We have removed no lines - no point in testing.
+ if reduction_count == 0 {
+ return;
+ }
+ // Check if the removed lines affected the execution result in any way, shape or form.
+ if test_reduction(&file_copy, path, cache) {
+ println!("Reduced {path:?} by {reduction_count} lines `remove_dup_assign`");
+ *file = file_copy;
+ } else {
+ // The execution result changed.
+ // This can occur if the second assignment depended on the first one.
+ // E.g.
+ // ```
+ // a = b + c;
+ // a = a + d;
+ // ```
+ remove_dup_assign(file, path, starts, (starts + ends) / 2, cache);
+ remove_dup_assign(file, path, (starts + ends) / 2, ends, cache);
+ }
+ save_reduction(file, path, "remove_dup_assign");
+}
+
+/// Removes all the unneeded calls to `dump_var`. This is not something tools like `cvise` can do,
+/// but it greatly speeds up MIR interpretation + native execution.
+fn remove_dump_var(file: &mut Vec<String>, path: &PathBuf) {
+ let mut curr = 0;
+ // ... try disabling `dump_vars` one by one, until only the necessary ones are left.
+ while curr < file.len() {
+ let Some(line) = file[curr..].iter().position(|line| line.contains("dump_var")) else {
+ // No more `dump_var`s to remove - exit early.
+ break;
+ };
+ // Make the line absolute again.
+ let line = line + curr;
+ let mut file_copy = file.clone();
+ // Try removing 3 consecutive lines (the call, block end and block beginning). This effectively removes a `dump_var`.
+ file_copy.remove(line);
+ file_copy.remove(line);
+ file_copy.remove(line);
+ // Not cached - the execution result can change.
+ let mut uncached = None;
+ // Check if this reduction is valid.
+ if test_reduction(&file_copy, path, &mut uncached) {
+ println!("Reduced {path:?} by 3 lines `remove_dump_var`");
+ *file = file_copy;
+ curr = line;
+ } else {
+ curr = line + 1;
+ }
+ }
+ save_reduction(file, path, "remove_dump_var");
+}
+
+/// Replaces matches with gotos where possible.
+/// This exploits some properties of rustlantis (match arm order),
+/// and is only soundly applicable to MIR generated by it.
+/// Still, it is not something `cvise` can do, but it simplifies the code a ton.
+fn match_to_goto(file: &mut Vec<String>, path: &PathBuf, cache: &mut ResultCache) {
+ let mut curr = 0;
+
+ while curr < file.len() {
+ let Some(match_starts) = file[curr..].iter().position(|line| line.contains("match")) else {
+ // No more `match`es to remove - exit early.
+ break;
+ };
+ let match_starts = match_starts + curr;
+ // Find the end of the match
+ let Some(match_ends) = file[match_starts..].iter().position(|line| line.contains('}'))
+ else {
+ // Can't find match end - exit early.
+ break;
+ };
+ let match_ends = match_ends + match_starts;
+ let match_body = &file[match_starts..match_ends];
+
+ // Find where this match should normally jump to.
+ // This *should* be the second-last arm of the match, as per the paper (the remaining blocks are decoys).
+ // If this ever changes, this reduction may not always be sound.
+ // This is not a problem, however: we NEED to use MIRI for reduction anyway,
+ // and it will catch this issue.
+ let jumps_to = &match_body[match_body.len() - 2].trim();
+ let Some((_, bb_ident)) = jumps_to.split_once("bb") else {
+ break;
+ };
+ // We now have the number of the block we jump to at runtime.
+ let bb_ident = bb_ident.trim_matches(',');
+ // Try replacing this match with an unconditional jump.
+ let mut file_copy = file.clone();
+ for _ in match_starts..(match_ends + 1) {
+ file_copy.remove(match_starts);
+ }
+ file_copy.insert(match_starts, format!("Goto(bb{bb_ident})\n"));
+ if test_reduction(&file_copy, path, cache) {
+ println!("Reduced {path:?} by {} lines `match_to_goto`", match_ends - match_starts);
+ *file = file_copy;
+ curr = match_starts;
+ } else {
+ curr = match_ends;
+ }
+ }
+ save_reduction(file, path, "match_to_goto");
+}
+
+/// At this point, we can try "killing" blocks, by replacing their bodies with calls to `abort`.
+/// This is always sound (the program aborts, so no UB can occur after the block),
+/// and allows us to safely remove *a lot* of unneeded blocks.
+fn block_abort(file: &mut Vec<String>, path: &PathBuf, cache: &mut ResultCache) {
+ let mut curr = 0;
+ while curr < file.len() {
+ let Some(block_starts) = file[curr..]
+ .iter()
+ .position(|line| line.starts_with("bb") && line.trim_end().ends_with(" = {"))
+ else {
+ // No more `block`s to kill - exit early.
+ break;
+ };
+ let block_starts = block_starts + curr;
+ // Find the beginning of the next block to find the end of this block.
+ let Some(block_ends) = file[(block_starts + 1)..]
+ .iter()
+ .position(|line| line.starts_with("bb") && line.trim_end().ends_with(" = {"))
+ else {
+ // No more `block`s to kill - exit early.
+ break;
+ };
+ let block_ends = block_starts + block_ends;
+ let block_starts = block_starts + 1;
+ let mut file_copy = file.clone();
+ // Remove the block body...
+ for _ in block_starts..(block_ends) {
+ file_copy.remove(block_starts);
+ }
+ // ..and insert an unconditional call to abort.
+ file_copy.insert(
+ block_starts,
+ "Call(tmp = core::intrinsics::abort(), ReturnTo(bb1), UnwindUnreachable())\n"
+ .to_string(),
+ );
+ file_copy.insert(block_starts, "let tmp = ();\n".to_string());
+
+ if test_reduction(&file_copy, path, cache) {
+ println!("Reduced {path:?} by {} lines `block_abort`", block_ends - block_starts - 2);
+ *file = file_copy;
+ curr = block_starts;
+ } else {
+ curr = block_ends;
+ }
+ }
+ save_reduction(file, path, "block_abort");
+}
+
+/// Removes unreachable basic blocks.
+fn remove_block(file: &mut Vec<String>, path: &PathBuf, cache: &mut ResultCache) {
+ let mut curr = 0;
+
+ // Next, we try to outright remove blocks.
+ while curr < file.len() {
+ let Some(block_starts) = file[curr..]
+ .iter()
+ .position(|line| line.starts_with("bb") && line.trim_end().ends_with(" = {"))
+ else {
+ // No more `block`s to remove - exit early.
+ break;
+ };
+ let block_starts = block_starts + curr;
+ // Find the beginning of the next block to find the end of this block.
+ let Some(block_ends) = file[(block_starts + 1)..]
+ .iter()
+ .position(|line| line.starts_with("bb") && line.trim_end().ends_with(" = {"))
+ else {
+ // No more `block`s to remove - exit early.
+ break;
+ };
+ let block_ends = block_starts + block_ends + 1;
+ // Large blocks are likely to be necessary.
+ if block_ends - block_starts > 6 {
+ curr = block_starts + 1;
+ continue;
+ }
+ let mut file_copy = file.clone();
+ file_copy.drain(block_starts..block_ends);
+ if test_reduction(&file_copy, path, cache) {
+ println!("Reduced {path:?} by {} lines `remove_blocks`", block_ends - block_starts);
+ *file = file_copy;
+ curr = block_starts;
+ } else {
+ curr = block_starts + 1;
+ }
+ }
+ save_reduction(file, path, "remove_block");
+}
+
+/// Merges blocks ending with unconditional jumps.
+fn linearize_cf(file: &mut Vec<String>, path: &PathBuf, cache: &mut ResultCache) {
+ let mut curr = 0;
+
+ // Next, we try to linearize the control flow. What does that mean?
+ // Given a sequence like this:
+ // Goto(bb22)
+ // }
+ // bb22 = {
+ // We remove those 3 lines, merging the blocks together. This is not something `cvise` can do,
+ // and it makes other transformations easier.
+ while curr < file.len() {
+ let Some(block_starts) = file[curr..]
+ .iter()
+ .position(|line| line.starts_with("bb") && line.trim_end().ends_with(" = {"))
+ else {
+ // No more `block`s to remove - exit early.
+ break;
+ };
+ let block_starts = block_starts + curr;
+ // Extract the block id.
+ let Some((block, _)) = file[block_starts].split_once('=') else {
+ curr = block_starts + 1;
+ continue;
+ };
+ let block = block.trim();
+ if file[block_starts - 2].trim() != format!("Goto({block})") {
+ curr = block_starts + 1;
+ continue;
+ }
+ let mut file_copy = file.clone();
+ // Try removing 3 consecutive lines (the goto, block end and block beginning). This effectively removes a `Goto(next)`.
+ file_copy.remove(block_starts - 2);
+ file_copy.remove(block_starts - 2);
+ file_copy.remove(block_starts - 2);
+ // Check if this reduction is valid.
+ if test_reduction(&file_copy, path, cache) {
+ println!("Reduced {path:?} by 3 lines `linearize_cf`");
+ *file = file_copy;
+ curr = block_starts;
+ } else {
+ curr = block_starts + 1;
+ }
+ }
+ save_reduction(file, path, "linearize_cf");
+}
+
+/// Replaces a call to a given function with a 0 assignment to the destination place, and a Goto.
+/// This is always sound, because:
+/// 1. All the function's arguments are always initialized,
+/// 2. and point to initialized memory (the operand of &raw must be an initialized place in rustlantis).
+fn remove_fn_calls(file: &mut Vec<String>, path: &PathBuf, cache: &mut ResultCache) {
+ let mut curr = 0;
+
+ while curr < file.len() {
+ let Some(fn_call) =
+ file[curr..].iter().position(|line| line.contains("Call(") && line.contains(" = fn"))
+ else {
+ // No more calls to remove - exit early.
+ break;
+ };
+ let fn_call = fn_call + curr;
+ let line = file[fn_call].trim();
+ // Skip the Call(
+ let line = &line["Call(".len()..];
+ // Extract the destination place
+ let Some((place, line)) = line.split_once('=') else {
+ curr = fn_call + 1;
+ continue;
+ };
+ // Skip till the return block id.
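+ // A call line is expected to look like `Call(<place> = fnN(...), ReturnTo(bbM), ...)`,
+ // so after this split the remainder starts with the target block id `bbM`.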
+ let Some((_, line)) = line.split_once("ReturnTo(") else {
+ curr = fn_call + 1;
+ continue;
+ };
+ // Extract the full return block
+ let Some((block, _)) = line.split_once(')') else {
+ curr = fn_call + 1;
+ continue;
+ };
+ let mut file_copy = file.clone();
+ // Remove the call.
+ file_copy.remove(fn_call);
+ file_copy.insert(fn_call, format!("Goto({block})\n"));
+ file_copy.insert(fn_call, format!("{place} = 0;\n"));
+ // Check if this reduction is valid.
+ if test_reduction(&file_copy, path, cache) {
+ println!("Reduced {path:?} using `remove_fn_calls` {cache:?}");
+ *file = file_copy;
+ curr = fn_call;
+ } else {
+ curr = fn_call + 1;
+ }
+ }
+ save_reduction(file, path, "remove_fn_calls");
+}
+
+/// Fully removes unreachable functions.
+fn remove_fns(file: &mut Vec<String>, path: &PathBuf, cache: &mut ResultCache) {
+ let mut curr = 0;
+
+ while curr < file.len() {
+ // Find a function start
+ let Some(fn_start) = file[curr..].iter().position(|line| {
+ line.contains("#[custom_mir(dialect = \"runtime\", phase = \"initial\")]")
+ }) else {
+ // No more functions to remove - exit early.
+ break;
+ };
+ // Find the next function (and use that to find the end of this one).
+ // FIXME: this check is flawed: it will never remove the very last function (the one before main).
+ // The other checks will turn that function into a single call to abort, but it is still annoying that it is kept.
+ let fn_start = fn_start + curr;
+ let Some(fn_end) = file[(fn_start + 3)..].iter().position(|line| line.contains("fn fn"))
+ else {
+ // No more functions to remove - exit early.
+ break;
+ };
+ let fn_end = fn_start + 2 + fn_end;
+ let mut file_copy = file.clone();
+ // Remove the function.
+ file_copy.drain(fn_start..fn_end);
+ // Check if this reduction is valid.
+ if test_reduction(&file_copy, path, cache) {
+ println!("Reduced {path:?} by {} lines `remove_fns`", fn_end - fn_start);
+ *file = file_copy;
+ } else {
+ curr = fn_start + 1;
+ }
+ }
+ save_reduction(file, path, "remove_fns");
+}
+
+pub(super) fn reduce(path: impl AsRef<Path>) {
+ let path = path.as_ref().to_owned();
+ // ... read the file to a buffer ..
+ let file = std::fs::read_to_string(&path).expect("Could not open the file to reduce");
+ let mut file: Vec<_> = file.split_inclusive('\n').map(|s| s.to_string()).collect();
+
+ // ... and run reduction passes.
+ println!("running `remove_dump_var` on {path:?}.");
+ remove_dump_var(&mut file, &path);
+ // After `dump_var`, the execution results ought not to change. Cache them.
+ let mut cache = None;
+ // Fill the cache
+ assert!(
+ test_reduction(&file, &path, &mut cache),
+ "Reduction error: check that the input file is a valid reproducer."
+ ); + println!("cache:{cache:?}"); + println!("running `remove_fn_calls` on {path:?}."); + remove_fn_calls(&mut file, &path, &mut cache); + println!("running `remove_fns` on {path:?}."); + remove_fns(&mut file, &path, &mut cache); + let len = file.len(); + println!("running `remove_dup_assign` on {path:?}."); + remove_dup_assign(&mut file, &path, 0, len, &mut cache); + file.retain(|line| !line.is_empty()); + println!("running `match_to_goto` on {path:?}."); + match_to_goto(&mut file, &path, &mut cache); + println!("running `block_abort` on {path:?}."); + block_abort(&mut file, &path, &mut cache); + println!("running `remove_block` on {path:?}."); + remove_block(&mut file, &path, &mut cache); + println!("running `linearize_cf` on {path:?}."); + linearize_cf(&mut file, &path, &mut cache); + let mut out = std::fs::File::create(&path).expect("Could not save the reduction result."); + let file = file.into_iter().collect::<String>(); + out.write_all(file.as_bytes()).expect("failed to write into file"); +} diff --git a/compiler/rustc_codegen_gcc/build_system/src/info.rs b/compiler/rustc_codegen_gcc/build_system/src/info.rs index bd891de2eb4..66fdcf88cbb 100644 --- a/compiler/rustc_codegen_gcc/build_system/src/info.rs +++ b/compiler/rustc_codegen_gcc/build_system/src/info.rs @@ -15,7 +15,7 @@ pub fn run() -> Result<(), String> { config.no_download = true; config.setup_gcc_path()?; if let Some(gcc_path) = config.gcc_path { - println!("{}", gcc_path); + println!("{gcc_path}"); } Ok(()) } diff --git a/compiler/rustc_codegen_gcc/build_system/src/main.rs b/compiler/rustc_codegen_gcc/build_system/src/main.rs index c70b00e09ae..078a4726ba8 100644 --- a/compiler/rustc_codegen_gcc/build_system/src/main.rs +++ b/compiler/rustc_codegen_gcc/build_system/src/main.rs @@ -5,6 +5,7 @@ mod clean; mod clone_gcc; mod config; mod fmt; +mod fuzz; mod info; mod prepare; mod rust_tools; @@ -42,7 +43,8 @@ Commands: test : Runs tests for the project. info : Displays information about the build environment and project configuration. clone-gcc : Clones the GCC compiler from a specified source. 
- fmt : Runs rustfmt" + fmt : Runs rustfmt + fuzz : Fuzzes `cg_gcc` using rustlantis" ); } @@ -56,6 +58,7 @@ pub enum Command { Test, Info, Fmt, + Fuzz, } fn main() { @@ -75,6 +78,7 @@ fn main() { Some("info") => Command::Info, Some("clone-gcc") => Command::CloneGcc, Some("fmt") => Command::Fmt, + Some("fuzz") => Command::Fuzz, Some("--help") => { usage(); process::exit(0); @@ -97,6 +101,7 @@ fn main() { Command::Info => info::run(), Command::CloneGcc => clone_gcc::run(), Command::Fmt => fmt::run(), + Command::Fuzz => fuzz::run(), } { eprintln!("Command failed to run: {e}"); process::exit(1); diff --git a/compiler/rustc_codegen_gcc/build_system/src/prepare.rs b/compiler/rustc_codegen_gcc/build_system/src/prepare.rs index d14639afee5..35a6e20fb86 100644 --- a/compiler/rustc_codegen_gcc/build_system/src/prepare.rs +++ b/compiler/rustc_codegen_gcc/build_system/src/prepare.rs @@ -18,9 +18,9 @@ fn prepare_libcore( if let Some(path) = sysroot_source { rustlib_dir = Path::new(&path) .canonicalize() - .map_err(|error| format!("Failed to canonicalize path: {:?}", error))?; + .map_err(|error| format!("Failed to canonicalize path: {error:?}"))?; if !rustlib_dir.is_dir() { - return Err(format!("Custom sysroot path {:?} not found", rustlib_dir)); + return Err(format!("Custom sysroot path {rustlib_dir:?} not found")); } } else { let rustc_path = match get_rustc_path() { @@ -36,17 +36,17 @@ fn prepare_libcore( rustlib_dir = parent .join("../lib/rustlib/src/rust") .canonicalize() - .map_err(|error| format!("Failed to canonicalize path: {:?}", error))?; + .map_err(|error| format!("Failed to canonicalize path: {error:?}"))?; if !rustlib_dir.is_dir() { return Err("Please install `rust-src` component".to_string()); } } let sysroot_dir = sysroot_path.join("sysroot_src"); - if sysroot_dir.is_dir() { - if let Err(error) = fs::remove_dir_all(&sysroot_dir) { - return Err(format!("Failed to remove `{}`: {:?}", sysroot_dir.display(), error,)); - } + if sysroot_dir.is_dir() + && let Err(error) = fs::remove_dir_all(&sysroot_dir) + { + return Err(format!("Failed to remove `{}`: {:?}", sysroot_dir.display(), error,)); } let sysroot_library_dir = sysroot_dir.join("library"); @@ -122,7 +122,7 @@ fn prepare_rand() -> Result<(), String> { // Apply patch for the rand crate. 
let file_path = "patches/crates/0001-Remove-deny-warnings.patch"; let rand_dir = Path::new("build/rand"); - println!("[GIT] apply `{}`", file_path); + println!("[GIT] apply `{file_path}`"); let path = Path::new("../..").join(file_path); run_command_with_output(&[&"git", &"apply", &path], Some(rand_dir))?; run_command_with_output(&[&"git", &"add", &"-A"], Some(rand_dir))?; @@ -149,7 +149,7 @@ fn clone_and_setup<F>(repo_url: &str, checkout_commit: &str, extra: Option<F>) - where F: Fn(&Path) -> Result<(), String>, { - let clone_result = git_clone_root_dir(repo_url, &Path::new(crate::BUILD_DIR), false)?; + let clone_result = git_clone_root_dir(repo_url, Path::new(crate::BUILD_DIR), false)?; if !clone_result.ran_clone { println!("`{}` has already been cloned", clone_result.repo_name); } diff --git a/compiler/rustc_codegen_gcc/build_system/src/rust_tools.rs b/compiler/rustc_codegen_gcc/build_system/src/rust_tools.rs index 105f5eebe24..b1faa27acc4 100644 --- a/compiler/rustc_codegen_gcc/build_system/src/rust_tools.rs +++ b/compiler/rustc_codegen_gcc/build_system/src/rust_tools.rs @@ -1,24 +1,22 @@ use std::collections::HashMap; use std::ffi::OsStr; +#[cfg(unix)] +use std::os::unix::process::CommandExt; use std::path::PathBuf; use crate::config::ConfigInfo; -use crate::utils::{ - get_toolchain, run_command_with_output_and_env_no_err, rustc_toolchain_version_info, - rustc_version_info, -}; +use crate::utils::{get_toolchain, rustc_toolchain_version_info, rustc_version_info}; fn args(command: &str) -> Result<Option<Vec<String>>, String> { // We skip the binary and the "cargo"/"rustc" option. - if let Some("--help") = std::env::args().skip(2).next().as_deref() { + if let Some("--help") = std::env::args().nth(2).as_deref() { usage(command); return Ok(None); } let args = std::env::args().skip(2).collect::<Vec<_>>(); if args.is_empty() { return Err(format!( - "Expected at least one argument for `{}` subcommand, found none", - command + "Expected at least one argument for `{command}` subcommand, found none" )); } Ok(Some(args)) @@ -27,12 +25,11 @@ fn args(command: &str) -> Result<Option<Vec<String>>, String> { fn usage(command: &str) { println!( r#" -`{}` command help: +`{command}` command help: [args] : Arguments to be passed to the cargo command --help : Show this help "#, - command, ) } @@ -51,10 +48,10 @@ impl RustcTools { // expected. let current_dir = std::env::current_dir() .and_then(|path| path.canonicalize()) - .map_err(|error| format!("Failed to get current directory path: {:?}", error))?; + .map_err(|error| format!("Failed to get current directory path: {error:?}"))?; let current_exe = std::env::current_exe() .and_then(|path| path.canonicalize()) - .map_err(|error| format!("Failed to get current exe path: {:?}", error))?; + .map_err(|error| format!("Failed to get current exe path: {error:?}"))?; let mut parent_dir = current_exe.components().map(|comp| comp.as_os_str()).collect::<Vec<_>>(); // We run this script from "build_system/target/release/y", so we need to remove these elements. 
@@ -68,7 +65,7 @@ impl RustcTools { )); } } - let parent_dir = PathBuf::from(parent_dir.join(&OsStr::new("/"))); + let parent_dir = PathBuf::from(parent_dir.join(OsStr::new("/"))); std::env::set_current_dir(&parent_dir).map_err(|error| { format!("Failed to go to `{}` folder: {:?}", parent_dir.display(), error) })?; @@ -92,11 +89,31 @@ impl RustcTools { std::env::set_current_dir(¤t_dir).map_err(|error| { format!("Failed to go back to `{}` folder: {:?}", current_dir.display(), error) })?; - let toolchain = format!("+{}", toolchain); + let toolchain = format!("+{toolchain}"); Ok(Some(Self { toolchain, args, env, config })) } } +fn exec(input: &[&dyn AsRef<OsStr>], env: &HashMap<String, String>) -> Result<(), String> { + #[cfg(unix)] + { + // We use `exec` to call the `execvp` syscall instead of creating a new process where the + // command will be executed because very few signals can actually kill a current process, + // so if segmentation fault (SIGSEGV signal) happens and we raise to the current process, + // it will simply do nothing and we won't have the nice error message for the shell. + let error = crate::utils::get_command_inner(input, None, Some(env)).exec(); + eprintln!("execvp syscall failed: {error:?}"); + std::process::exit(1); + } + #[cfg(not(unix))] + { + if crate::utils::run_command_with_output_and_env_no_err(input, None, Some(env)).is_err() { + std::process::exit(1); + } + Ok(()) + } +} + pub fn run_cargo() -> Result<(), String> { let Some(mut tools) = RustcTools::new("cargo")? else { return Ok(()) }; let rustflags = tools.env.get("RUSTFLAGS").cloned().unwrap_or_default(); @@ -105,11 +122,7 @@ pub fn run_cargo() -> Result<(), String> { for arg in &tools.args { command.push(arg); } - if run_command_with_output_and_env_no_err(&command, None, Some(&tools.env)).is_err() { - std::process::exit(1); - } - - Ok(()) + exec(&command, &tools.env) } pub fn run_rustc() -> Result<(), String> { @@ -118,8 +131,5 @@ pub fn run_rustc() -> Result<(), String> { for arg in &tools.args { command.push(arg); } - if run_command_with_output_and_env_no_err(&command, None, Some(&tools.env)).is_err() { - std::process::exit(1); - } - Ok(()) + exec(&command, &tools.env) } diff --git a/compiler/rustc_codegen_gcc/build_system/src/test.rs b/compiler/rustc_codegen_gcc/build_system/src/test.rs index df4ac85233b..bcaab0fb526 100644 --- a/compiler/rustc_codegen_gcc/build_system/src/test.rs +++ b/compiler/rustc_codegen_gcc/build_system/src/test.rs @@ -9,8 +9,8 @@ use crate::build; use crate::config::{Channel, ConfigInfo}; use crate::utils::{ create_dir, get_sysroot_dir, get_toolchain, git_clone, git_clone_root_dir, remove_file, - run_command, run_command_with_env, run_command_with_output_and_env, rustc_version_info, - split_args, walk_dir, + run_command, run_command_with_env, run_command_with_output, run_command_with_output_and_env, + rustc_version_info, split_args, walk_dir, }; type Env = HashMap<String, String>; @@ -42,7 +42,7 @@ fn get_runners() -> Runners { ); runners.insert("--extended-regex-tests", ("Run extended regex tests", extended_regex_tests)); runners.insert("--mini-tests", ("Run mini tests", mini_tests)); - + runners.insert("--cargo-tests", ("Run cargo tests", cargo_tests)); runners } @@ -53,9 +53,9 @@ fn get_number_after_arg( match args.next() { Some(nb) if !nb.is_empty() => match usize::from_str(&nb) { Ok(nb) => Ok(nb), - Err(_) => Err(format!("Expected a number after `{}`, found `{}`", option, nb)), + Err(_) => Err(format!("Expected a number after `{option}`, found `{nb}`")), }, - _ => 
Err(format!("Expected a number after `{}`, found nothing", option)), + _ => Err(format!("Expected a number after `{option}`, found nothing")), } } @@ -76,8 +76,8 @@ fn show_usage() { for (option, (doc, _)) in get_runners() { // FIXME: Instead of using the hard-coded `23` value, better to compute it instead. let needed_spaces = 23_usize.saturating_sub(option.len()); - let spaces: String = std::iter::repeat(' ').take(needed_spaces).collect(); - println!(" {}{}: {}", option, spaces, doc); + let spaces: String = std::iter::repeat_n(' ', needed_spaces).collect(); + println!(" {option}{spaces}: {doc}"); } println!(" --help : Show this help"); } @@ -88,6 +88,8 @@ struct TestArg { use_system_gcc: bool, runners: Vec<String>, flags: Vec<String>, + /// Additional arguments, to be passed to commands like `cargo test`. + test_args: Vec<String>, nb_parts: Option<usize>, current_part: Option<usize>, sysroot_panic_abort: bool, @@ -137,13 +139,14 @@ impl TestArg { test_arg.sysroot_features.push(feature); } _ => { - return Err(format!("Expected an argument after `{}`, found nothing", arg)); + return Err(format!("Expected an argument after `{arg}`, found nothing")); } }, "--help" => { show_usage(); return Ok(None); } + "--" => test_arg.test_args.extend(&mut args), x if runners.contains_key(x) && !test_arg.runners.iter().any(|runner| runner == x) => { @@ -151,7 +154,7 @@ impl TestArg { } arg => { if !test_arg.config_info.parse_argument(arg, &mut args)? { - return Err(format!("Unknown option {}", arg)); + return Err(format!("Unknown option {arg}")); } } } @@ -189,7 +192,7 @@ fn build_if_no_backend(env: &Env, args: &TestArg) -> Result<(), String> { command.push(&"--release"); &tmp_env } else { - &env + env }; for flag in args.flags.iter() { command.push(flag); @@ -203,6 +206,33 @@ fn clean(_env: &Env, args: &TestArg) -> Result<(), String> { create_dir(&path) } +fn cargo_tests(test_env: &Env, test_args: &TestArg) -> Result<(), String> { + // First, we call `mini_tests` to build minicore for us. This ensures we are testing with a working `minicore`, + // and that any changes we have made affect `minicore`(since it would get rebuilt). + mini_tests(test_env, test_args)?; + // Then, we copy some of the env vars from `test_env` + // We don't want to pass things like `RUSTFLAGS`, since they contain the -Zcodegen-backend flag. + // That would force `cg_gcc` to *rebuild itself* and only then run tests, which is undesirable. + let mut env = HashMap::new(); + env.insert( + "LD_LIBRARY_PATH".into(), + test_env.get("LD_LIBRARY_PATH").expect("LD_LIBRARY_PATH missing!").to_string(), + ); + env.insert( + "LIBRARY_PATH".into(), + test_env.get("LIBRARY_PATH").expect("LIBRARY_PATH missing!").to_string(), + ); + env.insert( + "CG_RUSTFLAGS".into(), + test_env.get("CG_RUSTFLAGS").map(|s| s.as_str()).unwrap_or("").to_string(), + ); + // Pass all the default args + the user-specified ones. + let mut args: Vec<&dyn AsRef<OsStr>> = vec![&"cargo", &"test"]; + args.extend(test_args.test_args.iter().map(|s| s as &dyn AsRef<OsStr>)); + run_command_with_output_and_env(&args, None, Some(&env))?; + Ok(()) +} + fn mini_tests(env: &Env, args: &TestArg) -> Result<(), String> { // FIXME: create a function "display_if_not_quiet" or something along the line. 
println!("[BUILD] mini_core"); @@ -222,7 +252,7 @@ fn mini_tests(env: &Env, args: &TestArg) -> Result<(), String> { &"--target", &args.config_info.target_triple, ]); - run_command_with_output_and_env(&command, None, Some(&env))?; + run_command_with_output_and_env(&command, None, Some(env))?; // FIXME: create a function "display_if_not_quiet" or something along the line. println!("[BUILD] example"); @@ -234,7 +264,7 @@ fn mini_tests(env: &Env, args: &TestArg) -> Result<(), String> { &"--target", &args.config_info.target_triple, ]); - run_command_with_output_and_env(&command, None, Some(&env))?; + run_command_with_output_and_env(&command, None, Some(env))?; // FIXME: create a function "display_if_not_quiet" or something along the line. println!("[AOT] mini_core_hello_world"); @@ -249,14 +279,14 @@ fn mini_tests(env: &Env, args: &TestArg) -> Result<(), String> { &"--target", &args.config_info.target_triple, ]); - run_command_with_output_and_env(&command, None, Some(&env))?; + run_command_with_output_and_env(&command, None, Some(env))?; let command: &[&dyn AsRef<OsStr>] = &[ &Path::new(&args.config_info.cargo_target_dir).join("mini_core_hello_world"), &"abc", &"bcd", ]; - maybe_run_command_in_vm(&command, env, args)?; + maybe_run_command_in_vm(command, env, args)?; Ok(()) } @@ -454,22 +484,47 @@ fn setup_rustc(env: &mut Env, args: &TestArg) -> Result<PathBuf, String> { } else { run_command_with_output_and_env(&[&"git", &"checkout"], rust_dir, Some(env))?; } + + let mut patches = Vec::new(); + walk_dir( + "patches/tests", + &mut |_| Ok(()), + &mut |file_path: &Path| { + patches.push(file_path.to_path_buf()); + Ok(()) + }, + false, + )?; + patches.sort(); + // TODO: remove duplication with prepare.rs by creating a apply_patch function in the utils + // module. + for file_path in patches { + println!("[GIT] apply `{}`", file_path.display()); + let path = Path::new("../..").join(file_path); + run_command_with_output(&[&"git", &"apply", &path], rust_dir)?; + run_command_with_output(&[&"git", &"add", &"-A"], rust_dir)?; + run_command_with_output( + &[&"git", &"commit", &"--no-gpg-sign", &"-m", &format!("Patch {}", path.display())], + rust_dir, + )?; + } + let cargo = String::from_utf8( run_command_with_env(&[&"rustup", &"which", &"cargo"], rust_dir, Some(env))?.stdout, ) - .map_err(|error| format!("Failed to retrieve cargo path: {:?}", error)) + .map_err(|error| format!("Failed to retrieve cargo path: {error:?}")) .and_then(|cargo| { let cargo = cargo.trim().to_owned(); - if cargo.is_empty() { Err(format!("`cargo` path is empty")) } else { Ok(cargo) } + if cargo.is_empty() { Err("`cargo` path is empty".to_string()) } else { Ok(cargo) } })?; let rustc = String::from_utf8( run_command_with_env(&[&"rustup", &toolchain, &"which", &"rustc"], rust_dir, Some(env))? 
.stdout, ) - .map_err(|error| format!("Failed to retrieve rustc path: {:?}", error)) + .map_err(|error| format!("Failed to retrieve rustc path: {error:?}")) .and_then(|rustc| { let rustc = rustc.trim().to_owned(); - if rustc.is_empty() { Err(format!("`rustc` path is empty")) } else { Ok(rustc) } + if rustc.is_empty() { Err("`rustc` path is empty".to_string()) } else { Ok(rustc) } })?; let llvm_filecheck = match run_command_with_env( &[ @@ -479,7 +534,8 @@ fn setup_rustc(env: &mut Env, args: &TestArg) -> Result<PathBuf, String> { which FileCheck-11 || \ which FileCheck-12 || \ which FileCheck-13 || \ - which FileCheck-14", + which FileCheck-14 || \ + which FileCheck", ], rust_dir, Some(env), @@ -487,13 +543,15 @@ fn setup_rustc(env: &mut Env, args: &TestArg) -> Result<PathBuf, String> { Ok(cmd) => String::from_utf8_lossy(&cmd.stdout).to_string(), Err(_) => { eprintln!("Failed to retrieve LLVM FileCheck, ignoring..."); + // FIXME: the test tests/run-make/no-builtins-attribute will fail if we cannot find + // FileCheck. String::new() } }; let file_path = rust_dir_path.join("config.toml"); std::fs::write( &file_path, - &format!( + format!( r#"change-id = 115898 [rust] @@ -532,7 +590,7 @@ fn asm_tests(env: &Env, args: &TestArg) -> Result<(), String> { let codegen_backend_path = format!( "{pwd}/target/{channel}/librustc_codegen_gcc.{dylib_ext}", pwd = std::env::current_dir() - .map_err(|error| format!("`current_dir` failed: {:?}", error))? + .map_err(|error| format!("`current_dir` failed: {error:?}"))? .display(), channel = args.config_info.channel.as_str(), dylib_ext = args.config_info.dylib_ext, @@ -587,11 +645,11 @@ where F: Fn(&[&dyn AsRef<OsStr>], Option<&Path>, &Env) -> Result<(), String>, { let toolchain = get_toolchain()?; - let toolchain_arg = format!("+{}", toolchain); + let toolchain_arg = format!("+{toolchain}"); let rustc_version = String::from_utf8( run_command_with_env(&[&args.config_info.rustc_command[0], &"-V"], cwd, Some(env))?.stdout, ) - .map_err(|error| format!("Failed to retrieve rustc version: {:?}", error))?; + .map_err(|error| format!("Failed to retrieve rustc version: {error:?}"))?; let rustc_toolchain_version = String::from_utf8( run_command_with_env( &[&args.config_info.rustc_command[0], &toolchain_arg, &"-V"], @@ -600,20 +658,19 @@ where )? 
.stdout, ) - .map_err(|error| format!("Failed to retrieve rustc +toolchain version: {:?}", error))?; + .map_err(|error| format!("Failed to retrieve rustc +toolchain version: {error:?}"))?; if rustc_version != rustc_toolchain_version { eprintln!( - "rustc_codegen_gcc is built for `{}` but the default rustc version is `{}`.", - rustc_toolchain_version, rustc_version, + "rustc_codegen_gcc is built for `{rustc_toolchain_version}` but the default rustc version is `{rustc_version}`.", ); - eprintln!("Using `{}`.", rustc_toolchain_version); + eprintln!("Using `{rustc_toolchain_version}`."); } let mut env = env.clone(); let rustflags = env.get("RUSTFLAGS").cloned().unwrap_or_default(); env.insert("RUSTDOCFLAGS".to_string(), rustflags); let mut cargo_command: Vec<&dyn AsRef<OsStr>> = vec![&"cargo", &toolchain_arg]; - cargo_command.extend_from_slice(&command); + cargo_command.extend_from_slice(command); callback(&cargo_command, cwd, &env) } @@ -680,7 +737,15 @@ fn test_libcore(env: &Env, args: &TestArg) -> Result<(), String> { println!("[TEST] libcore"); let path = get_sysroot_dir().join("sysroot_src/library/coretests"); let _ = remove_dir_all(path.join("target")); - run_cargo_command(&[&"test"], Some(&path), env, args)?; + // TODO(antoyo): run in release mode when we fix the failures. + // TODO(antoyo): remove the --skip f16::test_total_cmp when this issue is fixed: + // https://github.com/rust-lang/rust/issues/141503 + run_cargo_command( + &[&"test", &"--", &"--skip", &"f16::test_total_cmp"], + Some(&path), + env, + args, + )?; Ok(()) } @@ -818,7 +883,7 @@ fn contains_ui_error_patterns(file_path: &Path, keep_lto_tests: bool) -> Result< // Tests generating errors. let file = File::open(file_path) .map_err(|error| format!("Failed to read `{}`: {:?}", file_path.display(), error))?; - for line in BufReader::new(file).lines().filter_map(|line| line.ok()) { + for line in BufReader::new(file).lines().map_while(Result::ok) { let line = line.trim(); if line.is_empty() { continue; @@ -887,7 +952,7 @@ where if !prepare_files_callback(&rust_path)? { // FIXME: create a function "display_if_not_quiet" or something along the line. - println!("Keeping all {} tests", test_type); + println!("Keeping all {test_type} tests"); } if test_type == "ui" { @@ -919,8 +984,7 @@ where "borrowck", "test-attrs", ] - .iter() - .any(|name| *name == dir_name) + .contains(&dir_name) { remove_dir_all(dir).map_err(|error| { format!("Failed to remove folder `{}`: {:?}", dir.display(), error) @@ -975,10 +1039,7 @@ where if nb_parts > 0 { let current_part = args.current_part.unwrap(); // FIXME: create a function "display_if_not_quiet" or something along the line. - println!( - "Splitting ui_test into {} parts (and running part {})", - nb_parts, current_part - ); + println!("Splitting ui_test into {nb_parts} parts (and running part {current_part})"); let out = String::from_utf8( run_command( &[ @@ -996,7 +1057,7 @@ where )? .stdout, ) - .map_err(|error| format!("Failed to retrieve output of find command: {:?}", error))?; + .map_err(|error| format!("Failed to retrieve output of find command: {error:?}"))?; let mut files = out .split('\n') .map(|line| line.trim()) @@ -1016,7 +1077,7 @@ where } // FIXME: create a function "display_if_not_quiet" or something along the line. 
- println!("[TEST] rustc {} test suite", test_type); + println!("[TEST] rustc {test_type} test suite"); env.insert("COMPILETEST_FORCE_STAGE0".to_string(), "1".to_string()); let extra = @@ -1040,7 +1101,7 @@ where &"always", &"--stage", &"0", - &format!("tests/{}", test_type), + &format!("tests/{test_type}"), &"--compiletest-rustc-args", &rustc_args, ], @@ -1051,19 +1112,18 @@ where } fn test_rustc(env: &Env, args: &TestArg) -> Result<(), String> { - //test_rustc_inner(env, args, |_| Ok(false), false, "run-make")?; + test_rustc_inner(env, args, |_| Ok(false), false, "run-make")?; test_rustc_inner(env, args, |_| Ok(false), false, "ui") } fn test_failing_rustc(env: &Env, args: &TestArg) -> Result<(), String> { - let result1 = Ok(()); - /*test_rustc_inner( + let result1 = test_rustc_inner( env, args, retain_files_callback("tests/failing-run-make-tests.txt", "run-make"), false, "run-make", - )*/ + ); let result2 = test_rustc_inner( env, @@ -1084,14 +1144,13 @@ fn test_successful_rustc(env: &Env, args: &TestArg) -> Result<(), String> { false, "ui", )?; - Ok(()) - /*test_rustc_inner( + test_rustc_inner( env, args, remove_files_callback("tests/failing-run-make-tests.txt", "run-make"), false, "run-make", - )*/ + ) } fn test_failing_ui_pattern_tests(env: &Env, args: &TestArg) -> Result<(), String> { @@ -1118,7 +1177,7 @@ fn retain_files_callback<'a>( run_command( &[ &"find", - &format!("tests/{}", test_type), + &format!("tests/{test_type}"), &"-mindepth", &"1", &"-type", @@ -1137,7 +1196,7 @@ fn retain_files_callback<'a>( run_command( &[ &"find", - &format!("tests/{}", test_type), + &format!("tests/{test_type}"), &"-type", &"f", &"-name", @@ -1152,15 +1211,12 @@ fn retain_files_callback<'a>( } // Putting back only the failing ones. - if let Ok(files) = std::fs::read_to_string(&file_path) { + if let Ok(files) = std::fs::read_to_string(file_path) { for file in files.split('\n').map(|line| line.trim()).filter(|line| !line.is_empty()) { - run_command(&[&"git", &"checkout", &"--", &file], Some(&rust_path))?; + run_command(&[&"git", &"checkout", &"--", &file], Some(rust_path))?; } } else { - println!( - "Failed to read `{}`, not putting back failing {} tests", - file_path, test_type - ); + println!("Failed to read `{file_path}`, not putting back failing {test_type} tests"); } Ok(true) @@ -1188,8 +1244,7 @@ fn remove_files_callback<'a>( } } else { println!( - "Failed to read `{}`, not putting back failing {} tests", - file_path, test_type + "Failed to read `{file_path}`, not putting back failing {test_type} tests" ); } } else { @@ -1202,7 +1257,7 @@ fn remove_files_callback<'a>( remove_file(&path)?; } } else { - println!("Failed to read `{}`, not putting back failing ui tests", file_path); + println!("Failed to read `{file_path}`, not putting back failing ui tests"); } } Ok(true) @@ -1217,7 +1272,9 @@ fn run_all(env: &Env, args: &TestArg) -> Result<(), String> { // asm_tests(env, args)?; test_libcore(env, args)?; extended_sysroot_tests(env, args)?; + cargo_tests(env, args)?; test_rustc(env, args)?; + Ok(()) } diff --git a/compiler/rustc_codegen_gcc/build_system/src/utils.rs b/compiler/rustc_codegen_gcc/build_system/src/utils.rs index ca177a5feb8..d77707d5f17 100644 --- a/compiler/rustc_codegen_gcc/build_system/src/utils.rs +++ b/compiler/rustc_codegen_gcc/build_system/src/utils.rs @@ -1,7 +1,5 @@ use std::collections::HashMap; use std::ffi::OsStr; -#[cfg(unix)] -use std::ffi::c_int; use std::fmt::Debug; use std::fs; #[cfg(unix)] @@ -9,11 +7,6 @@ use std::os::unix::process::ExitStatusExt; use 
std::path::{Path, PathBuf}; use std::process::{Command, ExitStatus, Output}; -#[cfg(unix)] -unsafe extern "C" { - fn raise(signal: c_int) -> c_int; -} - fn exec_command( input: &[&dyn AsRef<OsStr>], cwd: Option<&Path>, @@ -27,17 +20,14 @@ fn exec_command( #[cfg(unix)] { if let Some(signal) = status.signal() { - unsafe { - raise(signal as _); - } // In case the signal didn't kill the current process. - return Err(command_error(input, &cwd, format!("Process received signal {}", signal))); + return Err(command_error(input, &cwd, format!("Process received signal {signal}"))); } } Ok(status) } -fn get_command_inner( +pub(crate) fn get_command_inner( input: &[&dyn AsRef<OsStr>], cwd: Option<&Path>, env: Option<&HashMap<String, String>>, @@ -75,18 +65,18 @@ fn check_exit_status( ); let input = input.iter().map(|i| i.as_ref()).collect::<Vec<&OsStr>>(); if show_err { - eprintln!("Command `{:?}` failed", input); + eprintln!("Command `{input:?}` failed"); } if let Some(output) = output { let stdout = String::from_utf8_lossy(&output.stdout); if !stdout.is_empty() { error.push_str("\n==== STDOUT ====\n"); - error.push_str(&*stdout); + error.push_str(&stdout); } let stderr = String::from_utf8_lossy(&output.stderr); if !stderr.is_empty() { error.push_str("\n==== STDERR ====\n"); - error.push_str(&*stderr); + error.push_str(&stderr); } } Err(error) @@ -136,6 +126,7 @@ pub fn run_command_with_output_and_env( Ok(()) } +#[cfg(not(unix))] pub fn run_command_with_output_and_env_no_err( input: &[&dyn AsRef<OsStr>], cwd: Option<&Path>, @@ -242,7 +233,7 @@ pub fn get_toolchain() -> Result<String, String> { if !line.starts_with("channel") { return None; } - line.split('"').skip(1).next() + line.split('"').nth(1) }) .next() { @@ -281,7 +272,7 @@ fn git_clone_inner( } fn get_repo_name(url: &str) -> String { - let repo_name = url.split('/').last().unwrap(); + let repo_name = url.split('/').next_back().unwrap(); match repo_name.strip_suffix(".git") { Some(n) => n.to_string(), None => repo_name.to_string(), diff --git a/compiler/rustc_codegen_gcc/example/std_example.rs b/compiler/rustc_codegen_gcc/example/std_example.rs index 5fa1e0afb06..7587b4827ca 100644 --- a/compiler/rustc_codegen_gcc/example/std_example.rs +++ b/compiler/rustc_codegen_gcc/example/std_example.rs @@ -77,18 +77,18 @@ fn main() { assert_eq!(tmp as i128, -0x1234_5678_9ABC_DEF0i128); // Check that all u/i128 <-> float casts work correctly. 
- let houndred_u128 = 100u128; - let houndred_i128 = 100i128; - let houndred_f32 = 100.0f32; - let houndred_f64 = 100.0f64; - assert_eq!(houndred_u128 as f32, 100.0); - assert_eq!(houndred_u128 as f64, 100.0); - assert_eq!(houndred_f32 as u128, 100); - assert_eq!(houndred_f64 as u128, 100); - assert_eq!(houndred_i128 as f32, 100.0); - assert_eq!(houndred_i128 as f64, 100.0); - assert_eq!(houndred_f32 as i128, 100); - assert_eq!(houndred_f64 as i128, 100); + let hundred_u128 = 100u128; + let hundred_i128 = 100i128; + let hundred_f32 = 100.0f32; + let hundred_f64 = 100.0f64; + assert_eq!(hundred_u128 as f32, 100.0); + assert_eq!(hundred_u128 as f64, 100.0); + assert_eq!(hundred_f32 as u128, 100); + assert_eq!(hundred_f64 as u128, 100); + assert_eq!(hundred_i128 as f32, 100.0); + assert_eq!(hundred_i128 as f64, 100.0); + assert_eq!(hundred_f32 as i128, 100); + assert_eq!(hundred_f64 as i128, 100); let _a = 1u32 << 2u8; diff --git a/compiler/rustc_codegen_gcc/messages.ftl b/compiler/rustc_codegen_gcc/messages.ftl index 18a8a5a1e04..55a28bc9493 100644 --- a/compiler/rustc_codegen_gcc/messages.ftl +++ b/compiler/rustc_codegen_gcc/messages.ftl @@ -1,7 +1,3 @@ -codegen_gcc_unknown_ctarget_feature_prefix = - unknown feature specified for `-Ctarget-feature`: `{$feature}` - .note = features must begin with a `+` to enable or `-` to disable it - codegen_gcc_unwinding_inline_asm = GCC backend does not support unwinding from inline asm @@ -16,15 +12,3 @@ codegen_gcc_lto_disallowed = lto can only be run for executables, cdylibs and st codegen_gcc_lto_dylib = lto cannot be used for `dylib` crate type without `-Zdylib-lto` codegen_gcc_lto_bitcode_from_rlib = failed to get bitcode from object file for LTO ({$gcc_err}) - -codegen_gcc_unknown_ctarget_feature = - unknown and unstable feature specified for `-Ctarget-feature`: `{$feature}` - .note = it is still passed through to the codegen backend, but use of this feature might be unsound and the behavior of this feature can change in the future - .possible_feature = you might have meant: `{$rust_feature}` - .consider_filing_feature_request = consider filing a feature request - -codegen_gcc_missing_features = - add the missing features in a `target_feature` attribute - -codegen_gcc_target_feature_disable_or_enable = - the target features {$features} must all be either enabled or disabled together diff --git a/compiler/rustc_codegen_gcc/patches/0001-Pin-compiler_builtins-to-0.1.160.patch b/compiler/rustc_codegen_gcc/patches/0001-Pin-compiler_builtins-to-0.1.160.patch new file mode 100644 index 00000000000..39266e081ed --- /dev/null +++ b/compiler/rustc_codegen_gcc/patches/0001-Pin-compiler_builtins-to-0.1.160.patch @@ -0,0 +1,39 @@ +From cdb3d407740e4f15c3746051f8ba89b8e74e99d3 Mon Sep 17 00:00:00 2001 +From: None <none@example.com> +Date: Fri, 30 May 2025 13:46:22 -0400 +Subject: [PATCH] Pin compiler_builtins to 0.1.160 + +--- + library/alloc/Cargo.toml | 2 +- + library/std/Cargo.toml | 2 +- + 2 files changed, 2 insertions(+), 2 deletions(-) + +diff --git a/library/alloc/Cargo.toml b/library/alloc/Cargo.toml +index 9d0d957..365c9dc 100644 +--- a/library/alloc/Cargo.toml ++++ b/library/alloc/Cargo.toml +@@ -16,7 +16,7 @@ bench = false + + [dependencies] + core = { path = "../core", public = true } +-compiler_builtins = { version = "=0.1.159", features = ['rustc-dep-of-std'] } ++compiler_builtins = { version = "=0.1.160", features = ['rustc-dep-of-std'] } + + [features] + compiler-builtins-mem = ['compiler_builtins/mem'] +diff --git a/library/std/Cargo.toml 
b/library/std/Cargo.toml +index 4ff4895..31371f0 100644 +--- a/library/std/Cargo.toml ++++ b/library/std/Cargo.toml +@@ -18,7 +18,7 @@ cfg-if = { version = "1.0", features = ['rustc-dep-of-std'] } + panic_unwind = { path = "../panic_unwind", optional = true } + panic_abort = { path = "../panic_abort" } + core = { path = "../core", public = true } +-compiler_builtins = { version = "=0.1.159" } ++compiler_builtins = { version = "=0.1.160" } + unwind = { path = "../unwind" } + hashbrown = { version = "0.15", default-features = false, features = [ + 'rustc-dep-of-std', +-- +2.49.0 + diff --git a/compiler/rustc_codegen_gcc/patches/tests/0001-Workaround-to-make-a-run-make-test-pass.patch b/compiler/rustc_codegen_gcc/patches/tests/0001-Workaround-to-make-a-run-make-test-pass.patch new file mode 100644 index 00000000000..a329d09a95e --- /dev/null +++ b/compiler/rustc_codegen_gcc/patches/tests/0001-Workaround-to-make-a-run-make-test-pass.patch @@ -0,0 +1,25 @@ +From a131c69e54b5c02fe3b517e8f3ad23d4f784ffc8 Mon Sep 17 00:00:00 2001 +From: Antoni Boucher <bouanto@zoho.com> +Date: Fri, 13 Jun 2025 20:25:33 -0400 +Subject: [PATCH] Workaround to make a run-make test pass + +--- + tests/run-make/linker-warning/rmake.rs | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/tests/run-make/linker-warning/rmake.rs b/tests/run-make/linker-warning/rmake.rs +index bc21739fefc..0946a7e2a48 100644 +--- a/tests/run-make/linker-warning/rmake.rs ++++ b/tests/run-make/linker-warning/rmake.rs +@@ -55,7 +55,7 @@ fn main() { + diff() + .expected_file("short-error.txt") + .actual_text("(linker error)", out.stderr()) +- .normalize(r#"/rustc[^/]*/"#, "/rustc/") ++ .normalize(r#"/tmp/rustc[^/]*/"#, "/tmp/rustc/") + .normalize( + regex::escape(run_make_support::build_root().to_str().unwrap()), + "/build-root", +-- +2.49.0 + diff --git a/compiler/rustc_codegen_gcc/rust-toolchain b/compiler/rustc_codegen_gcc/rust-toolchain index a8cda28688c..bafe497a2a2 100644 --- a/compiler/rustc_codegen_gcc/rust-toolchain +++ b/compiler/rustc_codegen_gcc/rust-toolchain @@ -1,3 +1,3 @@ [toolchain] -channel = "nightly-2025-05-12" +channel = "nightly-2025-05-21" components = ["rust-src", "rustc-dev", "llvm-tools-preview"] diff --git a/compiler/rustc_codegen_gcc/src/allocator.rs b/compiler/rustc_codegen_gcc/src/allocator.rs index f4ebd42ee2d..cf8aa500c77 100644 --- a/compiler/rustc_codegen_gcc/src/allocator.rs +++ b/compiler/rustc_codegen_gcc/src/allocator.rs @@ -57,7 +57,7 @@ pub(crate) unsafe fn codegen( let from_name = mangle_internal_symbol(tcx, &global_fn_name(method.name)); let to_name = mangle_internal_symbol(tcx, &default_fn_name(method.name)); - create_wrapper_function(tcx, context, &from_name, &to_name, &types, output); + create_wrapper_function(tcx, context, &from_name, Some(&to_name), &types, output); } } @@ -66,7 +66,7 @@ pub(crate) unsafe fn codegen( tcx, context, &mangle_internal_symbol(tcx, "__rust_alloc_error_handler"), - &mangle_internal_symbol(tcx, alloc_error_handler_name(alloc_error_handler_kind)), + Some(&mangle_internal_symbol(tcx, alloc_error_handler_name(alloc_error_handler_kind))), &[usize, usize], None, ); @@ -81,21 +81,21 @@ pub(crate) unsafe fn codegen( let value = context.new_rvalue_from_int(i8, value as i32); global.global_set_initializer_rvalue(value); - let name = mangle_internal_symbol(tcx, NO_ALLOC_SHIM_IS_UNSTABLE); - let global = context.new_global(None, GlobalKind::Exported, i8, name); - #[cfg(feature = "master")] - global.add_attribute(VarAttribute::Visibility(symbol_visibility_to_gcc( - 
tcx.sess.default_visibility(), - ))); - let value = context.new_rvalue_from_int(i8, 0); - global.global_set_initializer_rvalue(value); + create_wrapper_function( + tcx, + context, + &mangle_internal_symbol(tcx, NO_ALLOC_SHIM_IS_UNSTABLE), + None, + &[], + None, + ); } fn create_wrapper_function( tcx: TyCtxt<'_>, context: &Context<'_>, from_name: &str, - to_name: &str, + to_name: Option<&str>, types: &[Type<'_>], output: Option<Type<'_>>, ) { @@ -124,34 +124,40 @@ fn create_wrapper_function( // TODO(antoyo): emit unwind tables. } - let args: Vec<_> = types - .iter() - .enumerate() - .map(|(index, typ)| context.new_parameter(None, *typ, format!("param{}", index))) - .collect(); - let callee = context.new_function( - None, - FunctionType::Extern, - output.unwrap_or(void), - &args, - to_name, - false, - ); - #[cfg(feature = "master")] - callee.add_attribute(FnAttribute::Visibility(gccjit::Visibility::Hidden)); - let block = func.new_block("entry"); - let args = args - .iter() - .enumerate() - .map(|(i, _)| func.get_param(i as i32).to_rvalue()) - .collect::<Vec<_>>(); - let ret = context.new_call(None, callee, &args); - //llvm::LLVMSetTailCall(ret, True); - if output.is_some() { - block.end_with_return(None, ret); + if let Some(to_name) = to_name { + let args: Vec<_> = types + .iter() + .enumerate() + .map(|(index, typ)| context.new_parameter(None, *typ, format!("param{}", index))) + .collect(); + let callee = context.new_function( + None, + FunctionType::Extern, + output.unwrap_or(void), + &args, + to_name, + false, + ); + #[cfg(feature = "master")] + callee.add_attribute(FnAttribute::Visibility(gccjit::Visibility::Hidden)); + + let args = args + .iter() + .enumerate() + .map(|(i, _)| func.get_param(i as i32).to_rvalue()) + .collect::<Vec<_>>(); + let ret = context.new_call(None, callee, &args); + //llvm::LLVMSetTailCall(ret, True); + if output.is_some() { + block.end_with_return(None, ret); + } else { + block.add_eval(None, ret); + block.end_with_void_return(None); + } } else { + assert!(output.is_none()); block.end_with_void_return(None); } diff --git a/compiler/rustc_codegen_gcc/src/asm.rs b/compiler/rustc_codegen_gcc/src/asm.rs index c35337ae7ce..17e2e028b16 100644 --- a/compiler/rustc_codegen_gcc/src/asm.rs +++ b/compiler/rustc_codegen_gcc/src/asm.rs @@ -1,3 +1,5 @@ +// cSpell:ignoreRegExp [afkspqvwy]reg + use std::borrow::Cow; use gccjit::{LValue, RValue, ToRValue, Type}; @@ -138,7 +140,7 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> { // `outputs.len() + inputs.len()`. let mut labels = vec![]; - // Clobbers collected from `out("explicit register") _` and `inout("expl_reg") var => _` + // Clobbers collected from `out("explicit register") _` and `inout("explicit_reg") var => _` let mut clobbers = vec![]; // We're trying to preallocate space for the template @@ -203,7 +205,7 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> { // is also used as an in register, do not add it to the clobbers list. 
// it will be treated as a lateout register with `out_place: None` if !late { - bug!("input registers can only be used as lateout regisers"); + bug!("input registers can only be used as lateout registers"); } ("r", dummy_output_type(self.cx, reg.reg_class())) } else { @@ -641,7 +643,8 @@ fn explicit_reg_to_gcc(reg: InlineAsmReg) -> &'static str { }, } } - + InlineAsmReg::Arm(reg) => reg.name(), + InlineAsmReg::AArch64(reg) => reg.name(), _ => unimplemented!(), } } diff --git a/compiler/rustc_codegen_gcc/src/attributes.rs b/compiler/rustc_codegen_gcc/src/attributes.rs index c853c88a6ea..bf0927dc590 100644 --- a/compiler/rustc_codegen_gcc/src/attributes.rs +++ b/compiler/rustc_codegen_gcc/src/attributes.rs @@ -16,7 +16,7 @@ use crate::gcc_util::to_gcc_features; /// Checks if the function `instance` is recursively inline. /// Returns `false` if a functions is guaranteed to be non-recursive, and `true` if it *might* be recursive. #[cfg(feature = "master")] -fn resursively_inline<'gcc, 'tcx>( +fn recursively_inline<'gcc, 'tcx>( cx: &CodegenCx<'gcc, 'tcx>, instance: ty::Instance<'tcx>, ) -> bool { @@ -61,7 +61,7 @@ fn inline_attr<'gcc, 'tcx>( // // That prevents issues steming from recursive `#[inline(always)]` at a *relatively* small cost. // We *only* need to check all the terminators of a function marked with this attribute. - if resursively_inline(cx, instance) { + if recursively_inline(cx, instance) { Some(FnAttribute::Inline) } else { Some(FnAttribute::AlwaysInline) diff --git a/compiler/rustc_codegen_gcc/src/back/lto.rs b/compiler/rustc_codegen_gcc/src/back/lto.rs index e9c87f35779..10fce860b77 100644 --- a/compiler/rustc_codegen_gcc/src/back/lto.rs +++ b/compiler/rustc_codegen_gcc/src/back/lto.rs @@ -11,11 +11,12 @@ // does not remove it? // // TODO(antoyo): for performance, check which optimizations the C++ frontend enables. -// +// cSpell:disable // Fix these warnings: // /usr/bin/ld: warning: type of symbol `_RNvNvNvNtCs5JWOrf9uCus_5rayon11thread_pool19WORKER_THREAD_STATE7___getit5___KEY' changed from 1 to 6 in /tmp/ccKeUSiR.ltrans0.ltrans.o // /usr/bin/ld: warning: type of symbol `_RNvNvNvNvNtNtNtCsAj5i4SGTR7_3std4sync4mpmc5waker17current_thread_id5DUMMY7___getit5___KEY' changed from 1 to 6 in /tmp/ccKeUSiR.ltrans0.ltrans.o // /usr/bin/ld: warning: incremental linking of LTO and non-LTO objects; using -flinker-output=nolto-rel which will bypass whole program optimization +// cSpell:enable use std::ffi::{CStr, CString}; use std::fs::{self, File}; use std::path::{Path, PathBuf}; diff --git a/compiler/rustc_codegen_gcc/src/back/write.rs b/compiler/rustc_codegen_gcc/src/back/write.rs index 09e955acf39..d03d063bdac 100644 --- a/compiler/rustc_codegen_gcc/src/back/write.rs +++ b/compiler/rustc_codegen_gcc/src/back/write.rs @@ -186,6 +186,7 @@ pub(crate) fn codegen( if fat_lto { let lto_path = format!("{}.lto", path); + // cSpell:disable // FIXME(antoyo): The LTO frontend generates the following warning: // ../build_sysroot/sysroot_src/library/core/src/num/dec2flt/lemire.rs:150:15: warning: type of ‘_ZN4core3num7dec2flt5table17POWER_OF_FIVE_12817ha449a68fb31379e4E’ does not match original declaration [-Wlto-type-mismatch] // 150 | let (lo5, hi5) = POWER_OF_FIVE_128[index]; @@ -193,6 +194,7 @@ pub(crate) fn codegen( // lto1: note: ‘_ZN4core3num7dec2flt5table17POWER_OF_FIVE_12817ha449a68fb31379e4E’ was previously declared here // // This option is to mute it to make the UI tests pass with LTO enabled. 
+ // cSpell:enable context.add_driver_option("-Wno-lto-type-mismatch"); // NOTE: this doesn't actually generate an executable. With the above // flags, it combines the .o files together in another .o. diff --git a/compiler/rustc_codegen_gcc/src/builder.rs b/compiler/rustc_codegen_gcc/src/builder.rs index 68c6156fa4b..a2e34d1f8fb 100644 --- a/compiler/rustc_codegen_gcc/src/builder.rs +++ b/compiler/rustc_codegen_gcc/src/builder.rs @@ -765,7 +765,15 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> { #[cfg(feature = "master")] match self.cx.type_kind(a_type) { - TypeKind::Half | TypeKind::Float => { + TypeKind::Half => { + let fmodf = self.context.get_builtin_function("fmodf"); + let f32_type = self.type_f32(); + let a = self.context.new_cast(self.location, a, f32_type); + let b = self.context.new_cast(self.location, b, f32_type); + let result = self.context.new_call(self.location, fmodf, &[a, b]); + return self.context.new_cast(self.location, result, a_type); + } + TypeKind::Float => { let fmodf = self.context.get_builtin_function("fmodf"); return self.context.new_call(self.location, fmodf, &[a, b]); } @@ -774,8 +782,19 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> { return self.context.new_call(self.location, fmod, &[a, b]); } TypeKind::FP128 => { - let fmodl = self.context.get_builtin_function("fmodl"); - return self.context.new_call(self.location, fmodl, &[a, b]); + let f128_type = self.type_f128(); + let fmodf128 = self.context.new_function( + None, + gccjit::FunctionType::Extern, + f128_type, + &[ + self.context.new_parameter(None, f128_type, "a"), + self.context.new_parameter(None, f128_type, "b"), + ], + "fmodf128", + false, + ); + return self.context.new_call(self.location, fmodf128, &[a, b]); } _ => (), } @@ -924,7 +943,12 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> { // dereference after a drop, for instance. // FIXME(antoyo): this check that we don't call get_aligned() a second time on a type. // Ideally, we shouldn't need to do this check. - let aligned_type = if pointee_ty == self.cx.u128_type || pointee_ty == self.cx.i128_type { + // FractalFir: the `align == self.int128_align` check ensures we *do* call `get_aligned` if + // the alignment of a `u128`/`i128` is not the one mandated by the ABI. This ensures we handle + // under-aligned loads correctly. 
+ let aligned_type = if (pointee_ty == self.cx.u128_type || pointee_ty == self.cx.i128_type) + && align == self.int128_align + { pointee_ty } else { pointee_ty.get_aligned(align.bytes()) @@ -1010,13 +1034,13 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> { let b_offset = a.size(self).align_to(b.align(self).abi); let mut load = |i, scalar: &abi::Scalar, align| { - let llptr = if i == 0 { + let ptr = if i == 0 { place.val.llval } else { self.inbounds_ptradd(place.val.llval, self.const_usize(b_offset.bytes())) }; let llty = place.layout.scalar_pair_element_gcc_type(self, i); - let load = self.load(llty, llptr, align); + let load = self.load(llty, ptr, align); scalar_load_metadata(self, load, scalar); if scalar.is_bool() { self.trunc(load, self.type_i1()) } else { load } }; diff --git a/compiler/rustc_codegen_gcc/src/callee.rs b/compiler/rustc_codegen_gcc/src/callee.rs index c8130b7c010..189ac7cd779 100644 --- a/compiler/rustc_codegen_gcc/src/callee.rs +++ b/compiler/rustc_codegen_gcc/src/callee.rs @@ -34,7 +34,7 @@ pub fn get_fn<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, instance: Instance<'tcx>) unreachable!(); /* // Create a fn pointer with the new signature. - let ptrty = fn_abi.ptr_to_gcc_type(cx); + let ptrtype = fn_abi.ptr_to_gcc_type(cx); // This is subtle and surprising, but sometimes we have to bitcast // the resulting fn pointer. The reason has to do with external @@ -59,7 +59,7 @@ pub fn get_fn<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, instance: Instance<'tcx>) // This can occur on either a crate-local or crate-external // reference. It also occurs when testing libcore and in some // other weird situations. Annoying. - if cx.val_ty(func) != ptrty { + if cx.val_ty(func) != ptrtype { // TODO(antoyo): cast the pointer. func } diff --git a/compiler/rustc_codegen_gcc/src/common.rs b/compiler/rustc_codegen_gcc/src/common.rs index 918195364ff..58ff2f1f8f0 100644 --- a/compiler/rustc_codegen_gcc/src/common.rs +++ b/compiler/rustc_codegen_gcc/src/common.rs @@ -9,7 +9,6 @@ use rustc_middle::mir::Mutability; use rustc_middle::mir::interpret::{ConstAllocation, GlobalAlloc, Scalar}; use rustc_middle::ty::layout::LayoutOf; -use crate::consts::const_alloc_to_gcc; use crate::context::CodegenCx; use crate::type_of::LayoutGccExt; @@ -46,12 +45,65 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> { } pub fn bytes_in_context<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, bytes: &[u8]) -> RValue<'gcc> { - let context = &cx.context; - let byte_type = context.new_type::<u8>(); - let typ = context.new_array_type(None, byte_type, bytes.len() as u64); - let elements: Vec<_> = - bytes.iter().map(|&byte| context.new_rvalue_from_int(byte_type, byte as i32)).collect(); - context.new_array_constructor(None, typ, &elements) + // Instead of always using an array of bytes, use an array of larger integers of target endianness + // if possible. This reduces the amount of `rvalues` we use, which reduces memory usage significantly. + // + // FIXME(FractalFir): Consider using `global_set_initializer` instead. Before this is done, we need to confirm that + // `global_set_initializer` is more memory efficient than the current solution. + // `global_set_initializer` calls `global_set_initializer_rvalue` under the hood - does it generate an array of rvalues, + // or is it using a more efficient representation? 
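// --- Editor's illustrative sketch (not part of this patch): the word-packing scheme
// --- implemented just below. Bytes are grouped into native-width integers following the
// --- target's endianness, so far fewer rvalues are needed for the same constant data.
// --- `pack_words_le` is a hypothetical standalone helper, shown for the little-endian case;
// --- a big-endian target would use `u64::from_be_bytes` instead.
fn pack_words_le(bytes: &[u8]) -> Vec<u64> {
    assert!(bytes.len() % 8 == 0, "caller falls back to a byte array otherwise");
    bytes
        .chunks_exact(8)
        .map(|chunk| u64::from_le_bytes(chunk.try_into().unwrap()))
        .collect()
}

fn main() {
    let bytes = [1u8, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16];
    let words = pack_words_le(&bytes);
    // Round-trip: the packed words expand back to the original byte run, so the
    // denser representation loses no information.
    let unpacked: Vec<u8> = words.iter().flat_map(|w| w.to_le_bytes()).collect();
    assert_eq!(unpacked, bytes);
}
// --- End of editor's sketch.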
+ match bytes.len() % 8 { + 0 => { + let context = &cx.context; + let byte_type = context.new_type::<u64>(); + let typ = context.new_array_type(None, byte_type, bytes.len() as u64 / 8); + let elements: Vec<_> = bytes + .chunks_exact(8) + .map(|arr| { + let arr: [u8; 8] = arr.try_into().unwrap(); + context.new_rvalue_from_long( + byte_type, + // Since we are representing arbitrary byte runs as integers, we need to follow the target + // endianness. + match cx.sess().target.options.endian { + rustc_abi::Endian::Little => u64::from_le_bytes(arr) as i64, + rustc_abi::Endian::Big => u64::from_be_bytes(arr) as i64, + }, + ) + }) + .collect(); + context.new_array_constructor(None, typ, &elements) + } + 4 => { + let context = &cx.context; + let byte_type = context.new_type::<u32>(); + let typ = context.new_array_type(None, byte_type, bytes.len() as u64 / 4); + let elements: Vec<_> = bytes + .chunks_exact(4) + .map(|arr| { + let arr: [u8; 4] = arr.try_into().unwrap(); + context.new_rvalue_from_int( + byte_type, + match cx.sess().target.options.endian { + rustc_abi::Endian::Little => u32::from_le_bytes(arr) as i32, + rustc_abi::Endian::Big => u32::from_be_bytes(arr) as i32, + }, + ) + }) + .collect(); + context.new_array_constructor(None, typ, &elements) + } + _ => { + let context = cx.context; + let byte_type = context.new_type::<u8>(); + let typ = context.new_array_type(None, byte_type, bytes.len() as u64); + let elements: Vec<_> = bytes + .iter() + .map(|&byte| context.new_rvalue_from_int(byte_type, byte as i32)) + .collect(); + context.new_array_constructor(None, typ, &elements) + } + } } pub fn type_is_pointer(typ: Type<'_>) -> bool { @@ -185,14 +237,15 @@ impl<'gcc, 'tcx> ConstCodegenMethods for CodegenCx<'gcc, 'tcx> { // FIXME(antoyo): there's some issues with using the u128 code that follows, so hard-code // the paths for floating-point values. - if ty == self.float_type { + // TODO: Remove this code? + /*if ty == self.float_type { return self .context .new_rvalue_from_double(ty, f32::from_bits(data as u32) as f64); } if ty == self.double_type { return self.context.new_rvalue_from_double(ty, f64::from_bits(data as u64)); - } + }*/ let value = self.const_uint_big(self.type_ix(bitsize), data); let bytesize = layout.size(self).bytes(); @@ -212,7 +265,20 @@ impl<'gcc, 'tcx> ConstCodegenMethods for CodegenCx<'gcc, 'tcx> { let alloc_id = prov.alloc_id(); let base_addr = match self.tcx.global_alloc(alloc_id) { GlobalAlloc::Memory(alloc) => { - let init = const_alloc_to_gcc(self, alloc); + // For ZSTs directly codegen an aligned pointer. + // This avoids generating a zero-sized constant value and actually needing a + // real address at runtime. 
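// --- Editor's illustrative sketch (not part of this patch) of the trick described above:
// --- a zero-sized allocation needs no backing storage, so a non-null, well-aligned integer
// --- address (the alignment itself) can stand in for its address at compile time.
// --- `zst_placeholder_addr` is a hypothetical helper; the pointer must never be dereferenced.
fn zst_placeholder_addr(align_bytes: usize) -> *const u8 {
    // `align_bytes` is non-zero and a power of two, so the result is non-null and aligned.
    align_bytes as *const u8
}

fn main() {
    let p = zst_placeholder_addr(std::mem::align_of::<u128>());
    assert!(!p.is_null());
    assert_eq!(p as usize % std::mem::align_of::<u128>(), 0);
}
// --- End of editor's sketch.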
+ if alloc.inner().len() == 0 { + assert_eq!(offset.bytes(), 0); + let val = self.const_usize(alloc.inner().align.bytes()); + return if matches!(layout.primitive(), Pointer(_)) { + self.context.new_cast(None, val, ty) + } else { + self.const_bitcast(val, ty) + }; + } + + let init = self.const_data_from_alloc(alloc); let alloc = alloc.inner(); let value = match alloc.mutability { Mutability::Mut => self.static_addr_of_mut(init, alloc.align, None), @@ -234,7 +300,7 @@ impl<'gcc, 'tcx> ConstCodegenMethods for CodegenCx<'gcc, 'tcx> { }), ))) .unwrap_memory(); - let init = const_alloc_to_gcc(self, alloc); + let init = self.const_data_from_alloc(alloc); self.static_addr_of(init, alloc.inner().align, None) } GlobalAlloc::Static(def_id) => { @@ -257,7 +323,19 @@ impl<'gcc, 'tcx> ConstCodegenMethods for CodegenCx<'gcc, 'tcx> { } fn const_data_from_alloc(&self, alloc: ConstAllocation<'_>) -> Self::Value { - const_alloc_to_gcc(self, alloc) + // We ignore the alignment for the purpose of deduping RValues + // The alignment is not handled / used in any way by `const_alloc_to_gcc`, + // so it is OK to overwrite it here. + let mut mock_alloc = alloc.inner().clone(); + mock_alloc.align = rustc_abi::Align::MAX; + // Check if the rvalue is already in the cache - if so, just return it directly. + if let Some(res) = self.const_cache.borrow().get(&mock_alloc) { + return *res; + } + // Rvalue not in the cache - convert and add it. + let res = crate::consts::const_alloc_to_gcc_uncached(self, alloc); + self.const_cache.borrow_mut().insert(mock_alloc, res); + res } fn const_ptr_byte_offset(&self, base_addr: Self::Value, offset: abi::Size) -> Self::Value { diff --git a/compiler/rustc_codegen_gcc/src/consts.rs b/compiler/rustc_codegen_gcc/src/consts.rs index 1690641a5bc..b43f9b24c6a 100644 --- a/compiler/rustc_codegen_gcc/src/consts.rs +++ b/compiler/rustc_codegen_gcc/src/consts.rs @@ -36,18 +36,14 @@ fn set_global_alignment<'gcc, 'tcx>( impl<'gcc, 'tcx> StaticCodegenMethods for CodegenCx<'gcc, 'tcx> { fn static_addr_of(&self, cv: RValue<'gcc>, align: Align, kind: Option<&str>) -> RValue<'gcc> { - // TODO(antoyo): implement a proper rvalue comparison in libgccjit instead of doing the - // following: - for (value, variable) in &*self.const_globals.borrow() { - if format!("{:?}", value) == format!("{:?}", cv) { - if let Some(global_variable) = self.global_lvalues.borrow().get(variable) { - let alignment = align.bits() as i32; - if alignment > global_variable.get_alignment() { - global_variable.set_alignment(alignment); - } + if let Some(variable) = self.const_globals.borrow().get(&cv) { + if let Some(global_variable) = self.global_lvalues.borrow().get(variable) { + let alignment = align.bits() as i32; + if alignment > global_variable.get_alignment() { + global_variable.set_alignment(alignment); } - return *variable; } + return *variable; } let global_value = self.static_addr_of_mut(cv, align, kind); #[cfg(feature = "master")] @@ -288,8 +284,10 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> { global } } - -pub fn const_alloc_to_gcc<'gcc>( +/// Converts a given const alloc to a gcc Rvalue, without any caching or deduplication. +/// YOU SHOULD NOT call this function directly - that may break the semantics of Rust. +/// Use `const_data_from_alloc` instead. +pub(crate) fn const_alloc_to_gcc_uncached<'gcc>( cx: &CodegenCx<'gcc, '_>, alloc: ConstAllocation<'_>, ) -> RValue<'gcc> { @@ -321,7 +319,7 @@ pub fn const_alloc_to_gcc<'gcc>( // and we properly interpret the provenance as a relocation pointer offset. 
alloc.inspect_with_uninit_and_ptr_outside_interpreter(offset..(offset + pointer_size)), ) - .expect("const_alloc_to_llvm: could not read relocation pointer") + .expect("const_alloc_to_gcc_uncached: could not read relocation pointer") as u64; let address_space = cx.tcx.global_alloc(alloc_id).address_space(cx); @@ -360,7 +358,7 @@ fn codegen_static_initializer<'gcc, 'tcx>( def_id: DefId, ) -> Result<(RValue<'gcc>, ConstAllocation<'tcx>), ErrorHandled> { let alloc = cx.tcx.eval_static_initializer(def_id)?; - Ok((const_alloc_to_gcc(cx, alloc), alloc)) + Ok((cx.const_data_from_alloc(alloc), alloc)) } fn check_and_apply_linkage<'gcc, 'tcx>( diff --git a/compiler/rustc_codegen_gcc/src/context.rs b/compiler/rustc_codegen_gcc/src/context.rs index 4955e039e7b..ff141ad365b 100644 --- a/compiler/rustc_codegen_gcc/src/context.rs +++ b/compiler/rustc_codegen_gcc/src/context.rs @@ -1,14 +1,16 @@ use std::cell::{Cell, RefCell}; +use std::collections::HashMap; use gccjit::{ Block, CType, Context, Function, FunctionPtrType, FunctionType, LValue, Location, RValue, Type, }; -use rustc_abi::{HasDataLayout, PointeeInfo, Size, TargetDataLayout, VariantIdx}; +use rustc_abi::{Align, HasDataLayout, PointeeInfo, Size, TargetDataLayout, VariantIdx}; use rustc_codegen_ssa::base::wants_msvc_seh; use rustc_codegen_ssa::errors as ssa_errors; use rustc_codegen_ssa::traits::{BackendTypes, BaseTypeCodegenMethods, MiscCodegenMethods}; use rustc_data_structures::base_n::{ALPHANUMERIC_ONLY, ToBaseN}; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; +use rustc_middle::mir::interpret::Allocation; use rustc_middle::mir::mono::CodegenUnit; use rustc_middle::span_bug; use rustc_middle::ty::layout::{ @@ -28,6 +30,8 @@ use crate::common::SignType; #[cfg_attr(not(feature = "master"), allow(dead_code))] pub struct CodegenCx<'gcc, 'tcx> { + /// A cache of converted ConstAllocs + pub const_cache: RefCell<HashMap<Allocation, RValue<'gcc>>>, pub codegen_unit: &'tcx CodegenUnit<'tcx>, pub context: &'gcc Context<'gcc>, @@ -129,6 +133,9 @@ pub struct CodegenCx<'gcc, 'tcx> { #[cfg(feature = "master")] pub cleanup_blocks: RefCell<FxHashSet<Block<'gcc>>>, + /// The alignment of a u128/i128 type. + // We cache this, since it is needed for alignment checks during loads. + pub int128_align: Align, } impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> { @@ -220,6 +227,12 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> { } let mut cx = Self { + int128_align: tcx + .layout_of(ty::TypingEnv::fully_monomorphized().as_query_input(tcx.types.i128)) + .expect("Can't get the layout of `i128`") + .align + .abi, + const_cache: Default::default(), codegen_unit, context, current_func: RefCell::new(None), @@ -428,8 +441,8 @@ impl<'gcc, 'tcx> MiscCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> { // `rust_eh_personality` function, but rather we wired it up to the // CRT's custom personality function, which forces LLVM to consider // landing pads as "landing pads for SEH". 
- if let Some(llpersonality) = self.eh_personality.get() { - return llpersonality; + if let Some(personality_func) = self.eh_personality.get() { + return personality_func; } let tcx = self.tcx; let func = match tcx.lang_items().eh_personality() { diff --git a/compiler/rustc_codegen_gcc/src/errors.rs b/compiler/rustc_codegen_gcc/src/errors.rs index 7786be9ae5d..b7e7343460f 100644 --- a/compiler/rustc_codegen_gcc/src/errors.rs +++ b/compiler/rustc_codegen_gcc/src/errors.rs @@ -1,31 +1,7 @@ -use rustc_macros::{Diagnostic, Subdiagnostic}; +use rustc_macros::Diagnostic; use rustc_span::Span; #[derive(Diagnostic)] -#[diag(codegen_gcc_unknown_ctarget_feature_prefix)] -#[note] -pub(crate) struct UnknownCTargetFeaturePrefix<'a> { - pub feature: &'a str, -} - -#[derive(Diagnostic)] -#[diag(codegen_gcc_unknown_ctarget_feature)] -#[note] -pub(crate) struct UnknownCTargetFeature<'a> { - pub feature: &'a str, - #[subdiagnostic] - pub rust_feature: PossibleFeature<'a>, -} - -#[derive(Subdiagnostic)] -pub(crate) enum PossibleFeature<'a> { - #[help(codegen_gcc_possible_feature)] - Some { rust_feature: &'a str }, - #[help(codegen_gcc_consider_filing_feature_request)] - None, -} - -#[derive(Diagnostic)] #[diag(codegen_gcc_unwinding_inline_asm)] pub(crate) struct UnwindingInlineAsm { #[primary_span] diff --git a/compiler/rustc_codegen_gcc/src/gcc_util.rs b/compiler/rustc_codegen_gcc/src/gcc_util.rs index d90e66aea31..42ba40692b7 100644 --- a/compiler/rustc_codegen_gcc/src/gcc_util.rs +++ b/compiler/rustc_codegen_gcc/src/gcc_util.rs @@ -1,20 +1,12 @@ #[cfg(feature = "master")] use gccjit::Context; -use rustc_codegen_ssa::codegen_attrs::check_tied_features; -use rustc_codegen_ssa::errors::TargetFeatureDisableOrEnable; -use rustc_data_structures::fx::FxHashMap; -use rustc_data_structures::unord::UnordSet; +use rustc_codegen_ssa::target_features; use rustc_session::Session; -use rustc_session::features::{StabilityExt, retpoline_features_by_flags}; -use rustc_target::target_features::RUSTC_SPECIFIC_FEATURES; use smallvec::{SmallVec, smallvec}; -use crate::errors::{PossibleFeature, UnknownCTargetFeature, UnknownCTargetFeaturePrefix}; - -fn gcc_features_by_flags(sess: &Session) -> Vec<&str> { - let mut features: Vec<&str> = Vec::new(); - retpoline_features_by_flags(sess, &mut features); - features +fn gcc_features_by_flags(sess: &Session, features: &mut Vec<String>) { + target_features::retpoline_features_by_flags(sess, features); + // FIXME: LLVM also sets +reserve-x18 here under some conditions. } /// The list of GCC features computed from CLI flags (`-Ctarget-cpu`, `-Ctarget-feature`, @@ -44,98 +36,29 @@ pub(crate) fn global_gcc_features(sess: &Session, diagnostics: bool) -> Vec<Stri features.extend(sess.target.features.split(',').filter(|v| !v.is_empty()).map(String::from)); // -Ctarget-features - let known_features = sess.target.rust_target_features(); - let mut featsmap = FxHashMap::default(); - - // Compute implied features - let mut all_rust_features = vec![]; - for feature in sess.opts.cg.target_feature.split(',').chain(gcc_features_by_flags(sess)) { - if let Some(feature) = feature.strip_prefix('+') { - all_rust_features.extend( - UnordSet::from(sess.target.implied_target_features(feature)) - .to_sorted_stable_ord() - .iter() - .map(|&&s| (true, s)), - ) - } else if let Some(feature) = feature.strip_prefix('-') { - // FIXME: Why do we not remove implied features on "-" here? - // We do the equivalent above in `target_config`. - // See <https://github.com/rust-lang/rust/issues/134792>. 
- all_rust_features.push((false, feature)); - } else if !feature.is_empty() && diagnostics { - sess.dcx().emit_warn(UnknownCTargetFeaturePrefix { feature }); - } - } - // Remove features that are meant for rustc, not codegen. - all_rust_features.retain(|&(_, feature)| { - // Retain if it is not a rustc feature - !RUSTC_SPECIFIC_FEATURES.contains(&feature) - }); - - // Check feature validity. - if diagnostics { - for &(enable, feature) in &all_rust_features { - let feature_state = known_features.iter().find(|&&(v, _, _)| v == feature); - match feature_state { - None => { - let rust_feature = known_features.iter().find_map(|&(rust_feature, _, _)| { - let gcc_features = to_gcc_features(sess, rust_feature); - if gcc_features.contains(&feature) && !gcc_features.contains(&rust_feature) - { - Some(rust_feature) - } else { - None - } - }); - let unknown_feature = if let Some(rust_feature) = rust_feature { - UnknownCTargetFeature { - feature, - rust_feature: PossibleFeature::Some { rust_feature }, - } - } else { - UnknownCTargetFeature { feature, rust_feature: PossibleFeature::None } - }; - sess.dcx().emit_warn(unknown_feature); - } - Some(&(_, stability, _)) => { - stability.verify_feature_enabled_by_flag(sess, enable, feature); - } - } - - // FIXME(nagisa): figure out how to not allocate a full hashset here. - featsmap.insert(feature, enable); - } - } - - // Translate this into GCC features. - let feats = - all_rust_features.iter().flat_map(|&(enable, feature)| { - let enable_disable = if enable { '+' } else { '-' }; + target_features::flag_to_backend_features( + sess, + diagnostics, + |feature| to_gcc_features(sess, feature), + |feature, enable| { // We run through `to_gcc_features` when // passing requests down to GCC. This means that all in-language // features also work on the command line instead of having two // different names when the GCC name and the Rust name differ. - to_gcc_features(sess, feature) - .iter() - .flat_map(|feat| to_gcc_features(sess, feat).into_iter()) - .map(|feature| { - if enable_disable == '-' { - format!("-{}", feature) - } else { - feature.to_string() - } - }) - .collect::<Vec<_>>() - }); - features.extend(feats); - - if diagnostics && let Some(f) = check_tied_features(sess, &featsmap) { - sess.dcx().emit_err(TargetFeatureDisableOrEnable { - features: f, - span: None, - missing_features: None, - }); - } + features.extend( + to_gcc_features(sess, feature) + .iter() + .flat_map(|feat| to_gcc_features(sess, feat).into_iter()) + .map( + |feature| { + if !enable { format!("-{}", feature) } else { feature.to_string() } + }, + ), + ); + }, + ); + + gcc_features_by_flags(sess, &mut features); features } @@ -143,6 +66,7 @@ pub(crate) fn global_gcc_features(sess: &Session, diagnostics: bool) -> Vec<Stri // To find a list of GCC's names, check https://gcc.gnu.org/onlinedocs/gcc/Function-Attributes.html pub fn to_gcc_features<'a>(sess: &Session, s: &'a str) -> SmallVec<[&'a str; 2]> { let arch = if sess.target.arch == "x86_64" { "x86" } else { &*sess.target.arch }; + // cSpell:disable match (arch, s) { // FIXME: seems like x87 does not exist? 
("x86", "x87") => smallvec![], @@ -181,6 +105,7 @@ pub fn to_gcc_features<'a>(sess: &Session, s: &'a str) -> SmallVec<[&'a str; 2]> ("aarch64", "sve2-bitperm") => smallvec!["sve2-bitperm", "neon"], (_, s) => smallvec![s], } + // cSpell:enable } fn arch_to_gcc(name: &str) -> &str { diff --git a/compiler/rustc_codegen_gcc/src/int.rs b/compiler/rustc_codegen_gcc/src/int.rs index eb4acd8ade9..6f21ce9352b 100644 --- a/compiler/rustc_codegen_gcc/src/int.rs +++ b/compiler/rustc_codegen_gcc/src/int.rs @@ -2,6 +2,8 @@ //! This module exists because some integer types are not supported on some gcc platforms, e.g. //! 128-bit integers on 32-bit platforms and thus require to be handled manually. +// cSpell:words cmpti divti modti mulodi muloti udivti umodti + use gccjit::{BinaryOp, ComparisonOp, FunctionType, Location, RValue, ToRValue, Type, UnaryOp}; use rustc_abi::{CanonAbi, Endian, ExternAbi}; use rustc_codegen_ssa::common::{IntPredicate, TypeKind}; @@ -913,9 +915,11 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> { debug_assert!(value_type.dyncast_array().is_some()); let name_suffix = match self.type_kind(dest_typ) { + // cSpell:disable TypeKind::Float => "tisf", TypeKind::Double => "tidf", - TypeKind::FP128 => "tixf", + TypeKind::FP128 => "titf", + // cSpell:enable kind => panic!("cannot cast a non-native integer to type {:?}", kind), }; let sign = if signed { "" } else { "un" }; @@ -957,8 +961,10 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> { debug_assert!(dest_typ.dyncast_array().is_some()); let name_suffix = match self.type_kind(value_type) { + // cSpell:disable TypeKind::Float => "sfti", TypeKind::Double => "dfti", + // cSpell:enable kind => panic!("cannot cast a {:?} to non-native integer", kind), }; let sign = if signed { "" } else { "uns" }; diff --git a/compiler/rustc_codegen_gcc/src/intrinsic/archs.rs b/compiler/rustc_codegen_gcc/src/intrinsic/archs.rs index 5ada535aa41..f0352c5e6e5 100644 --- a/compiler/rustc_codegen_gcc/src/intrinsic/archs.rs +++ b/compiler/rustc_codegen_gcc/src/intrinsic/archs.rs @@ -1,9978 +1,10178 @@ // File generated by `rustc_codegen_gcc/tools/generate_intrinsics.py` // DO NOT EDIT IT! 
-match name { - // AMDGPU - "llvm.AMDGPU.div.fixup.f32" => "__builtin_amdgpu_div_fixup", - "llvm.AMDGPU.div.fixup.f64" => "__builtin_amdgpu_div_fixup", - "llvm.AMDGPU.div.fixup.v2f64" => "__builtin_amdgpu_div_fixup", - "llvm.AMDGPU.div.fixup.v4f32" => "__builtin_amdgpu_div_fixup", - "llvm.AMDGPU.div.fmas.f32" => "__builtin_amdgpu_div_fmas", - "llvm.AMDGPU.div.fmas.f64" => "__builtin_amdgpu_div_fmas", - "llvm.AMDGPU.div.fmas.v2f64" => "__builtin_amdgpu_div_fmas", - "llvm.AMDGPU.div.fmas.v4f32" => "__builtin_amdgpu_div_fmas", - "llvm.AMDGPU.ldexp.f32" => "__builtin_amdgpu_ldexp", - "llvm.AMDGPU.ldexp.f64" => "__builtin_amdgpu_ldexp", - "llvm.AMDGPU.ldexp.v2f64" => "__builtin_amdgpu_ldexp", - "llvm.AMDGPU.ldexp.v4f32" => "__builtin_amdgpu_ldexp", - "llvm.AMDGPU.rcp.f32" => "__builtin_amdgpu_rcp", - "llvm.AMDGPU.rcp.f64" => "__builtin_amdgpu_rcp", - "llvm.AMDGPU.rcp.v2f64" => "__builtin_amdgpu_rcp", - "llvm.AMDGPU.rcp.v4f32" => "__builtin_amdgpu_rcp", - "llvm.AMDGPU.rsq.clamped.f32" => "__builtin_amdgpu_rsq_clamped", - "llvm.AMDGPU.rsq.clamped.f64" => "__builtin_amdgpu_rsq_clamped", - "llvm.AMDGPU.rsq.clamped.v2f64" => "__builtin_amdgpu_rsq_clamped", - "llvm.AMDGPU.rsq.clamped.v4f32" => "__builtin_amdgpu_rsq_clamped", - "llvm.AMDGPU.rsq.f32" => "__builtin_amdgpu_rsq", - "llvm.AMDGPU.rsq.f64" => "__builtin_amdgpu_rsq", - "llvm.AMDGPU.rsq.v2f64" => "__builtin_amdgpu_rsq", - "llvm.AMDGPU.rsq.v4f32" => "__builtin_amdgpu_rsq", - "llvm.AMDGPU.trig.preop.f32" => "__builtin_amdgpu_trig_preop", - "llvm.AMDGPU.trig.preop.f64" => "__builtin_amdgpu_trig_preop", - "llvm.AMDGPU.trig.preop.v2f64" => "__builtin_amdgpu_trig_preop", - "llvm.AMDGPU.trig.preop.v4f32" => "__builtin_amdgpu_trig_preop", - // aarch64 - "llvm.aarch64.chkfeat" => "__builtin_arm_chkfeat", - "llvm.aarch64.dmb" => "__builtin_arm_dmb", - "llvm.aarch64.dsb" => "__builtin_arm_dsb", - "llvm.aarch64.gcspopm" => "__builtin_arm_gcspopm", - "llvm.aarch64.gcsss" => "__builtin_arm_gcsss", - "llvm.aarch64.isb" => "__builtin_arm_isb", - "llvm.aarch64.prefetch" => "__builtin_arm_prefetch", - "llvm.aarch64.sme.in.streaming.mode" => "__builtin_arm_in_streaming_mode", - "llvm.aarch64.sve.aesd" => "__builtin_sve_svaesd_u8", - "llvm.aarch64.sve.aese" => "__builtin_sve_svaese_u8", - "llvm.aarch64.sve.aesimc" => "__builtin_sve_svaesimc_u8", - "llvm.aarch64.sve.aesmc" => "__builtin_sve_svaesmc_u8", - "llvm.aarch64.sve.rax1" => "__builtin_sve_svrax1_u64", - "llvm.aarch64.sve.rdffr" => "__builtin_sve_svrdffr", - "llvm.aarch64.sve.rdffr.z" => "__builtin_sve_svrdffr_z", - "llvm.aarch64.sve.setffr" => "__builtin_sve_svsetffr", - "llvm.aarch64.sve.sm4e" => "__builtin_sve_svsm4e_u32", - "llvm.aarch64.sve.sm4ekey" => "__builtin_sve_svsm4ekey_u32", - "llvm.aarch64.sve.wrffr" => "__builtin_sve_svwrffr", - "llvm.aarch64.tcancel" => "__builtin_arm_tcancel", - "llvm.aarch64.tcommit" => "__builtin_arm_tcommit", - "llvm.aarch64.tstart" => "__builtin_arm_tstart", - "llvm.aarch64.ttest" => "__builtin_arm_ttest", - // amdgcn - "llvm.amdgcn.alignbyte" => "__builtin_amdgcn_alignbyte", - "llvm.amdgcn.ashr.pk.i8.i32" => "__builtin_amdgcn_ashr_pk_i8_i32", - "llvm.amdgcn.ashr.pk.u8.i32" => "__builtin_amdgcn_ashr_pk_u8_i32", - "llvm.amdgcn.buffer.wbinvl1" => "__builtin_amdgcn_buffer_wbinvl1", - "llvm.amdgcn.buffer.wbinvl1.sc" => "__builtin_amdgcn_buffer_wbinvl1_sc", - "llvm.amdgcn.buffer.wbinvl1.vol" => "__builtin_amdgcn_buffer_wbinvl1_vol", - "llvm.amdgcn.cubeid" => "__builtin_amdgcn_cubeid", - "llvm.amdgcn.cubema" => "__builtin_amdgcn_cubema", - "llvm.amdgcn.cubesc" => 
"__builtin_amdgcn_cubesc", - "llvm.amdgcn.cubetc" => "__builtin_amdgcn_cubetc", - "llvm.amdgcn.cvt.f32.bf8" => "__builtin_amdgcn_cvt_f32_bf8", - "llvm.amdgcn.cvt.f32.fp8" => "__builtin_amdgcn_cvt_f32_fp8", - "llvm.amdgcn.cvt.off.f32.i4" => "__builtin_amdgcn_cvt_off_f32_i4", - "llvm.amdgcn.cvt.pk.bf8.f32" => "__builtin_amdgcn_cvt_pk_bf8_f32", - "llvm.amdgcn.cvt.pk.f32.bf8" => "__builtin_amdgcn_cvt_pk_f32_bf8", - "llvm.amdgcn.cvt.pk.f32.fp8" => "__builtin_amdgcn_cvt_pk_f32_fp8", - "llvm.amdgcn.cvt.pk.fp8.f32" => "__builtin_amdgcn_cvt_pk_fp8_f32", - "llvm.amdgcn.cvt.pk.i16" => "__builtin_amdgcn_cvt_pk_i16", - "llvm.amdgcn.cvt.pk.u16" => "__builtin_amdgcn_cvt_pk_u16", - "llvm.amdgcn.cvt.pk.u8.f32" => "__builtin_amdgcn_cvt_pk_u8_f32", - "llvm.amdgcn.cvt.pknorm.i16" => "__builtin_amdgcn_cvt_pknorm_i16", - "llvm.amdgcn.cvt.pknorm.u16" => "__builtin_amdgcn_cvt_pknorm_u16", - "llvm.amdgcn.cvt.pkrtz" => "__builtin_amdgcn_cvt_pkrtz", - "llvm.amdgcn.cvt.scalef32.2xpk16.bf6.f32" => "__builtin_amdgcn_cvt_scalef32_2xpk16_bf6_f32", - "llvm.amdgcn.cvt.scalef32.2xpk16.fp6.f32" => "__builtin_amdgcn_cvt_scalef32_2xpk16_fp6_f32", - "llvm.amdgcn.cvt.scalef32.f16.bf8" => "__builtin_amdgcn_cvt_scalef32_f16_bf8", - "llvm.amdgcn.cvt.scalef32.f16.fp8" => "__builtin_amdgcn_cvt_scalef32_f16_fp8", - "llvm.amdgcn.cvt.scalef32.f32.bf8" => "__builtin_amdgcn_cvt_scalef32_f32_bf8", - "llvm.amdgcn.cvt.scalef32.f32.fp8" => "__builtin_amdgcn_cvt_scalef32_f32_fp8", - "llvm.amdgcn.cvt.scalef32.pk.bf16.bf8" => "__builtin_amdgcn_cvt_scalef32_pk_bf16_bf8", - "llvm.amdgcn.cvt.scalef32.pk.bf16.fp4" => "__builtin_amdgcn_cvt_scalef32_pk_bf16_fp4", - "llvm.amdgcn.cvt.scalef32.pk.bf16.fp8" => "__builtin_amdgcn_cvt_scalef32_pk_bf16_fp8", - "llvm.amdgcn.cvt.scalef32.pk.bf8.bf16" => "__builtin_amdgcn_cvt_scalef32_pk_bf8_bf16", - "llvm.amdgcn.cvt.scalef32.pk.bf8.f16" => "__builtin_amdgcn_cvt_scalef32_pk_bf8_f16", - "llvm.amdgcn.cvt.scalef32.pk.bf8.f32" => "__builtin_amdgcn_cvt_scalef32_pk_bf8_f32", - "llvm.amdgcn.cvt.scalef32.pk.f16.bf8" => "__builtin_amdgcn_cvt_scalef32_pk_f16_bf8", - "llvm.amdgcn.cvt.scalef32.pk.f16.fp4" => "__builtin_amdgcn_cvt_scalef32_pk_f16_fp4", - "llvm.amdgcn.cvt.scalef32.pk.f16.fp8" => "__builtin_amdgcn_cvt_scalef32_pk_f16_fp8", - "llvm.amdgcn.cvt.scalef32.pk.f32.bf8" => "__builtin_amdgcn_cvt_scalef32_pk_f32_bf8", - "llvm.amdgcn.cvt.scalef32.pk.f32.fp4" => "__builtin_amdgcn_cvt_scalef32_pk_f32_fp4", - "llvm.amdgcn.cvt.scalef32.pk.f32.fp8" => "__builtin_amdgcn_cvt_scalef32_pk_f32_fp8", - "llvm.amdgcn.cvt.scalef32.pk.fp4.bf16" => "__builtin_amdgcn_cvt_scalef32_pk_fp4_bf16", - "llvm.amdgcn.cvt.scalef32.pk.fp4.f16" => "__builtin_amdgcn_cvt_scalef32_pk_fp4_f16", - "llvm.amdgcn.cvt.scalef32.pk.fp4.f32" => "__builtin_amdgcn_cvt_scalef32_pk_fp4_f32", - "llvm.amdgcn.cvt.scalef32.pk.fp8.bf16" => "__builtin_amdgcn_cvt_scalef32_pk_fp8_bf16", - "llvm.amdgcn.cvt.scalef32.pk.fp8.f16" => "__builtin_amdgcn_cvt_scalef32_pk_fp8_f16", - "llvm.amdgcn.cvt.scalef32.pk.fp8.f32" => "__builtin_amdgcn_cvt_scalef32_pk_fp8_f32", - "llvm.amdgcn.cvt.scalef32.pk32.bf16.bf6" => "__builtin_amdgcn_cvt_scalef32_pk32_bf16_bf6", - "llvm.amdgcn.cvt.scalef32.pk32.bf16.fp6" => "__builtin_amdgcn_cvt_scalef32_pk32_bf16_fp6", - "llvm.amdgcn.cvt.scalef32.pk32.bf6.bf16" => "__builtin_amdgcn_cvt_scalef32_pk32_bf6_bf16", - "llvm.amdgcn.cvt.scalef32.pk32.bf6.f16" => "__builtin_amdgcn_cvt_scalef32_pk32_bf6_f16", - "llvm.amdgcn.cvt.scalef32.pk32.f16.bf6" => "__builtin_amdgcn_cvt_scalef32_pk32_f16_bf6", - "llvm.amdgcn.cvt.scalef32.pk32.f16.fp6" => 
"__builtin_amdgcn_cvt_scalef32_pk32_f16_fp6", - "llvm.amdgcn.cvt.scalef32.pk32.f32.bf6" => "__builtin_amdgcn_cvt_scalef32_pk32_f32_bf6", - "llvm.amdgcn.cvt.scalef32.pk32.f32.fp6" => "__builtin_amdgcn_cvt_scalef32_pk32_f32_fp6", - "llvm.amdgcn.cvt.scalef32.pk32.fp6.bf16" => "__builtin_amdgcn_cvt_scalef32_pk32_fp6_bf16", - "llvm.amdgcn.cvt.scalef32.pk32.fp6.f16" => "__builtin_amdgcn_cvt_scalef32_pk32_fp6_f16", - "llvm.amdgcn.cvt.scalef32.sr.bf8.bf16" => "__builtin_amdgcn_cvt_scalef32_sr_bf8_bf16", - "llvm.amdgcn.cvt.scalef32.sr.bf8.f16" => "__builtin_amdgcn_cvt_scalef32_sr_bf8_f16", - "llvm.amdgcn.cvt.scalef32.sr.bf8.f32" => "__builtin_amdgcn_cvt_scalef32_sr_bf8_f32", - "llvm.amdgcn.cvt.scalef32.sr.fp8.bf16" => "__builtin_amdgcn_cvt_scalef32_sr_fp8_bf16", - "llvm.amdgcn.cvt.scalef32.sr.fp8.f16" => "__builtin_amdgcn_cvt_scalef32_sr_fp8_f16", - "llvm.amdgcn.cvt.scalef32.sr.fp8.f32" => "__builtin_amdgcn_cvt_scalef32_sr_fp8_f32", - "llvm.amdgcn.cvt.scalef32.sr.pk.fp4.bf16" => "__builtin_amdgcn_cvt_scalef32_sr_pk_fp4_bf16", - "llvm.amdgcn.cvt.scalef32.sr.pk.fp4.f16" => "__builtin_amdgcn_cvt_scalef32_sr_pk_fp4_f16", - "llvm.amdgcn.cvt.scalef32.sr.pk.fp4.f32" => "__builtin_amdgcn_cvt_scalef32_sr_pk_fp4_f32", - "llvm.amdgcn.cvt.scalef32.sr.pk32.bf6.bf16" => "__builtin_amdgcn_cvt_scalef32_sr_pk32_bf6_bf16", - "llvm.amdgcn.cvt.scalef32.sr.pk32.bf6.f16" => "__builtin_amdgcn_cvt_scalef32_sr_pk32_bf6_f16", - "llvm.amdgcn.cvt.scalef32.sr.pk32.bf6.f32" => "__builtin_amdgcn_cvt_scalef32_sr_pk32_bf6_f32", - "llvm.amdgcn.cvt.scalef32.sr.pk32.fp6.bf16" => "__builtin_amdgcn_cvt_scalef32_sr_pk32_fp6_bf16", - "llvm.amdgcn.cvt.scalef32.sr.pk32.fp6.f16" => "__builtin_amdgcn_cvt_scalef32_sr_pk32_fp6_f16", - "llvm.amdgcn.cvt.scalef32.sr.pk32.fp6.f32" => "__builtin_amdgcn_cvt_scalef32_sr_pk32_fp6_f32", - "llvm.amdgcn.cvt.sr.bf16.f32" => "__builtin_amdgcn_cvt_sr_bf16_f32", - "llvm.amdgcn.cvt.sr.bf8.f32" => "__builtin_amdgcn_cvt_sr_bf8_f32", - "llvm.amdgcn.cvt.sr.f16.f32" => "__builtin_amdgcn_cvt_sr_f16_f32", - "llvm.amdgcn.cvt.sr.fp8.f32" => "__builtin_amdgcn_cvt_sr_fp8_f32", - "llvm.amdgcn.dispatch.id" => "__builtin_amdgcn_dispatch_id", - "llvm.amdgcn.dot4.f32.bf8.bf8" => "__builtin_amdgcn_dot4_f32_bf8_bf8", - "llvm.amdgcn.dot4.f32.bf8.fp8" => "__builtin_amdgcn_dot4_f32_bf8_fp8", - "llvm.amdgcn.dot4.f32.fp8.bf8" => "__builtin_amdgcn_dot4_f32_fp8_bf8", - "llvm.amdgcn.dot4.f32.fp8.fp8" => "__builtin_amdgcn_dot4_f32_fp8_fp8", - "llvm.amdgcn.ds.add.gs.reg.rtn" => "__builtin_amdgcn_ds_add_gs_reg_rtn", - "llvm.amdgcn.ds.bpermute" => "__builtin_amdgcn_ds_bpermute", - "llvm.amdgcn.ds.bpermute.fi.b32" => "__builtin_amdgcn_ds_bpermute_fi_b32", - "llvm.amdgcn.ds.gws.barrier" => "__builtin_amdgcn_ds_gws_barrier", - "llvm.amdgcn.ds.gws.init" => "__builtin_amdgcn_ds_gws_init", - "llvm.amdgcn.ds.gws.sema.br" => "__builtin_amdgcn_ds_gws_sema_br", - "llvm.amdgcn.ds.gws.sema.p" => "__builtin_amdgcn_ds_gws_sema_p", - "llvm.amdgcn.ds.gws.sema.release.all" => "__builtin_amdgcn_ds_gws_sema_release_all", - "llvm.amdgcn.ds.gws.sema.v" => "__builtin_amdgcn_ds_gws_sema_v", - "llvm.amdgcn.ds.permute" => "__builtin_amdgcn_ds_permute", - "llvm.amdgcn.ds.sub.gs.reg.rtn" => "__builtin_amdgcn_ds_sub_gs_reg_rtn", - "llvm.amdgcn.ds.swizzle" => "__builtin_amdgcn_ds_swizzle", - "llvm.amdgcn.endpgm" => "__builtin_amdgcn_endpgm", - "llvm.amdgcn.fdot2" => "__builtin_amdgcn_fdot2", - "llvm.amdgcn.fdot2.bf16.bf16" => "__builtin_amdgcn_fdot2_bf16_bf16", - "llvm.amdgcn.fdot2.f16.f16" => "__builtin_amdgcn_fdot2_f16_f16", - "llvm.amdgcn.fdot2.f32.bf16" => 
"__builtin_amdgcn_fdot2_f32_bf16", - "llvm.amdgcn.fdot2c.f32.bf16" => "__builtin_amdgcn_fdot2c_f32_bf16", - "llvm.amdgcn.fmul.legacy" => "__builtin_amdgcn_fmul_legacy", - "llvm.amdgcn.global.load.lds" => "__builtin_amdgcn_global_load_lds", - "llvm.amdgcn.groupstaticsize" => "__builtin_amdgcn_groupstaticsize", - "llvm.amdgcn.iglp.opt" => "__builtin_amdgcn_iglp_opt", - "llvm.amdgcn.implicit.buffer.ptr" => "__builtin_amdgcn_implicit_buffer_ptr", - "llvm.amdgcn.implicitarg.ptr" => "__builtin_amdgcn_implicitarg_ptr", - "llvm.amdgcn.interp.mov" => "__builtin_amdgcn_interp_mov", - "llvm.amdgcn.interp.p1" => "__builtin_amdgcn_interp_p1", - "llvm.amdgcn.interp.p1.f16" => "__builtin_amdgcn_interp_p1_f16", - "llvm.amdgcn.interp.p2" => "__builtin_amdgcn_interp_p2", - "llvm.amdgcn.interp.p2.f16" => "__builtin_amdgcn_interp_p2_f16", - "llvm.amdgcn.is.private" => "__builtin_amdgcn_is_private", - "llvm.amdgcn.is.shared" => "__builtin_amdgcn_is_shared", - "llvm.amdgcn.kernarg.segment.ptr" => "__builtin_amdgcn_kernarg_segment_ptr", - "llvm.amdgcn.lerp" => "__builtin_amdgcn_lerp", - "llvm.amdgcn.mbcnt.hi" => "__builtin_amdgcn_mbcnt_hi", - "llvm.amdgcn.mbcnt.lo" => "__builtin_amdgcn_mbcnt_lo", - "llvm.amdgcn.mfma.f32.16x16x16bf16.1k" => "__builtin_amdgcn_mfma_f32_16x16x16bf16_1k", - "llvm.amdgcn.mfma.f32.16x16x16f16" => "__builtin_amdgcn_mfma_f32_16x16x16f16", - "llvm.amdgcn.mfma.f32.16x16x1f32" => "__builtin_amdgcn_mfma_f32_16x16x1f32", - "llvm.amdgcn.mfma.f32.16x16x2bf16" => "__builtin_amdgcn_mfma_f32_16x16x2bf16", - "llvm.amdgcn.mfma.f32.16x16x32.bf16" => "__builtin_amdgcn_mfma_f32_16x16x32_bf16", - "llvm.amdgcn.mfma.f32.16x16x32.bf8.bf8" => "__builtin_amdgcn_mfma_f32_16x16x32_bf8_bf8", - "llvm.amdgcn.mfma.f32.16x16x32.bf8.fp8" => "__builtin_amdgcn_mfma_f32_16x16x32_bf8_fp8", - "llvm.amdgcn.mfma.f32.16x16x32.f16" => "__builtin_amdgcn_mfma_f32_16x16x32_f16", - "llvm.amdgcn.mfma.f32.16x16x32.fp8.bf8" => "__builtin_amdgcn_mfma_f32_16x16x32_fp8_bf8", - "llvm.amdgcn.mfma.f32.16x16x32.fp8.fp8" => "__builtin_amdgcn_mfma_f32_16x16x32_fp8_fp8", - "llvm.amdgcn.mfma.f32.16x16x4bf16.1k" => "__builtin_amdgcn_mfma_f32_16x16x4bf16_1k", - "llvm.amdgcn.mfma.f32.16x16x4f16" => "__builtin_amdgcn_mfma_f32_16x16x4f16", - "llvm.amdgcn.mfma.f32.16x16x4f32" => "__builtin_amdgcn_mfma_f32_16x16x4f32", - "llvm.amdgcn.mfma.f32.16x16x8.xf32" => "__builtin_amdgcn_mfma_f32_16x16x8_xf32", - "llvm.amdgcn.mfma.f32.16x16x8bf16" => "__builtin_amdgcn_mfma_f32_16x16x8bf16", - "llvm.amdgcn.mfma.f32.32x32x16.bf16" => "__builtin_amdgcn_mfma_f32_32x32x16_bf16", - "llvm.amdgcn.mfma.f32.32x32x16.bf8.bf8" => "__builtin_amdgcn_mfma_f32_32x32x16_bf8_bf8", - "llvm.amdgcn.mfma.f32.32x32x16.bf8.fp8" => "__builtin_amdgcn_mfma_f32_32x32x16_bf8_fp8", - "llvm.amdgcn.mfma.f32.32x32x16.f16" => "__builtin_amdgcn_mfma_f32_32x32x16_f16", - "llvm.amdgcn.mfma.f32.32x32x16.fp8.bf8" => "__builtin_amdgcn_mfma_f32_32x32x16_fp8_bf8", - "llvm.amdgcn.mfma.f32.32x32x16.fp8.fp8" => "__builtin_amdgcn_mfma_f32_32x32x16_fp8_fp8", - "llvm.amdgcn.mfma.f32.32x32x1f32" => "__builtin_amdgcn_mfma_f32_32x32x1f32", - "llvm.amdgcn.mfma.f32.32x32x2bf16" => "__builtin_amdgcn_mfma_f32_32x32x2bf16", - "llvm.amdgcn.mfma.f32.32x32x2f32" => "__builtin_amdgcn_mfma_f32_32x32x2f32", - "llvm.amdgcn.mfma.f32.32x32x4.xf32" => "__builtin_amdgcn_mfma_f32_32x32x4_xf32", - "llvm.amdgcn.mfma.f32.32x32x4bf16" => "__builtin_amdgcn_mfma_f32_32x32x4bf16", - "llvm.amdgcn.mfma.f32.32x32x4bf16.1k" => "__builtin_amdgcn_mfma_f32_32x32x4bf16_1k", - "llvm.amdgcn.mfma.f32.32x32x4f16" => 
"__builtin_amdgcn_mfma_f32_32x32x4f16", - "llvm.amdgcn.mfma.f32.32x32x8bf16.1k" => "__builtin_amdgcn_mfma_f32_32x32x8bf16_1k", - "llvm.amdgcn.mfma.f32.32x32x8f16" => "__builtin_amdgcn_mfma_f32_32x32x8f16", - "llvm.amdgcn.mfma.f32.4x4x1f32" => "__builtin_amdgcn_mfma_f32_4x4x1f32", - "llvm.amdgcn.mfma.f32.4x4x2bf16" => "__builtin_amdgcn_mfma_f32_4x4x2bf16", - "llvm.amdgcn.mfma.f32.4x4x4bf16.1k" => "__builtin_amdgcn_mfma_f32_4x4x4bf16_1k", - "llvm.amdgcn.mfma.f32.4x4x4f16" => "__builtin_amdgcn_mfma_f32_4x4x4f16", - "llvm.amdgcn.mfma.f64.16x16x4f64" => "__builtin_amdgcn_mfma_f64_16x16x4f64", - "llvm.amdgcn.mfma.f64.4x4x4f64" => "__builtin_amdgcn_mfma_f64_4x4x4f64", - "llvm.amdgcn.mfma.i32.16x16x16i8" => "__builtin_amdgcn_mfma_i32_16x16x16i8", - "llvm.amdgcn.mfma.i32.16x16x32.i8" => "__builtin_amdgcn_mfma_i32_16x16x32_i8", - "llvm.amdgcn.mfma.i32.16x16x4i8" => "__builtin_amdgcn_mfma_i32_16x16x4i8", - "llvm.amdgcn.mfma.i32.16x16x64.i8" => "__builtin_amdgcn_mfma_i32_16x16x64_i8", - "llvm.amdgcn.mfma.i32.32x32x16.i8" => "__builtin_amdgcn_mfma_i32_32x32x16_i8", - "llvm.amdgcn.mfma.i32.32x32x32.i8" => "__builtin_amdgcn_mfma_i32_32x32x32_i8", - "llvm.amdgcn.mfma.i32.32x32x4i8" => "__builtin_amdgcn_mfma_i32_32x32x4i8", - "llvm.amdgcn.mfma.i32.32x32x8i8" => "__builtin_amdgcn_mfma_i32_32x32x8i8", - "llvm.amdgcn.mfma.i32.4x4x4i8" => "__builtin_amdgcn_mfma_i32_4x4x4i8", - "llvm.amdgcn.mqsad.pk.u16.u8" => "__builtin_amdgcn_mqsad_pk_u16_u8", - "llvm.amdgcn.mqsad.u32.u8" => "__builtin_amdgcn_mqsad_u32_u8", - "llvm.amdgcn.msad.u8" => "__builtin_amdgcn_msad_u8", - "llvm.amdgcn.perm" => "__builtin_amdgcn_perm", - "llvm.amdgcn.permlane16.var" => "__builtin_amdgcn_permlane16_var", - "llvm.amdgcn.permlanex16.var" => "__builtin_amdgcn_permlanex16_var", - "llvm.amdgcn.prng.b32" => "__builtin_amdgcn_prng_b32", - "llvm.amdgcn.qsad.pk.u16.u8" => "__builtin_amdgcn_qsad_pk_u16_u8", - "llvm.amdgcn.queue.ptr" => "__builtin_amdgcn_queue_ptr", - "llvm.amdgcn.raw.ptr.buffer.load.lds" => "__builtin_amdgcn_raw_ptr_buffer_load_lds", - "llvm.amdgcn.rcp.legacy" => "__builtin_amdgcn_rcp_legacy", - "llvm.amdgcn.rsq.legacy" => "__builtin_amdgcn_rsq_legacy", - "llvm.amdgcn.s.barrier" => "__builtin_amdgcn_s_barrier", - "llvm.amdgcn.s.barrier.signal" => "__builtin_amdgcn_s_barrier_signal", - "llvm.amdgcn.s.barrier.signal.isfirst" => "__builtin_amdgcn_s_barrier_signal_isfirst", - "llvm.amdgcn.s.barrier.signal.var" => "__builtin_amdgcn_s_barrier_signal_var", - "llvm.amdgcn.s.barrier.wait" => "__builtin_amdgcn_s_barrier_wait", - "llvm.amdgcn.s.buffer.prefetch.data" => "__builtin_amdgcn_s_buffer_prefetch_data", - "llvm.amdgcn.s.dcache.inv" => "__builtin_amdgcn_s_dcache_inv", - "llvm.amdgcn.s.dcache.inv.vol" => "__builtin_amdgcn_s_dcache_inv_vol", - "llvm.amdgcn.s.dcache.wb" => "__builtin_amdgcn_s_dcache_wb", - "llvm.amdgcn.s.dcache.wb.vol" => "__builtin_amdgcn_s_dcache_wb_vol", - "llvm.amdgcn.s.decperflevel" => "__builtin_amdgcn_s_decperflevel", - "llvm.amdgcn.s.get.barrier.state" => "__builtin_amdgcn_s_get_barrier_state", - "llvm.amdgcn.s.get.named.barrier.state" => "__builtin_amdgcn_s_get_named_barrier_state", - "llvm.amdgcn.s.get.waveid.in.workgroup" => "__builtin_amdgcn_s_get_waveid_in_workgroup", - "llvm.amdgcn.s.getpc" => "__builtin_amdgcn_s_getpc", - "llvm.amdgcn.s.getreg" => "__builtin_amdgcn_s_getreg", - "llvm.amdgcn.s.incperflevel" => "__builtin_amdgcn_s_incperflevel", - "llvm.amdgcn.s.memrealtime" => "__builtin_amdgcn_s_memrealtime", - "llvm.amdgcn.s.memtime" => "__builtin_amdgcn_s_memtime", - "llvm.amdgcn.s.sendmsg" => 
"__builtin_amdgcn_s_sendmsg", - "llvm.amdgcn.s.sendmsghalt" => "__builtin_amdgcn_s_sendmsghalt", - "llvm.amdgcn.s.setprio" => "__builtin_amdgcn_s_setprio", - "llvm.amdgcn.s.setreg" => "__builtin_amdgcn_s_setreg", - "llvm.amdgcn.s.sleep" => "__builtin_amdgcn_s_sleep", - "llvm.amdgcn.s.sleep.var" => "__builtin_amdgcn_s_sleep_var", - "llvm.amdgcn.s.ttracedata" => "__builtin_amdgcn_s_ttracedata", - "llvm.amdgcn.s.ttracedata.imm" => "__builtin_amdgcn_s_ttracedata_imm", - "llvm.amdgcn.s.wait.event.export.ready" => "__builtin_amdgcn_s_wait_event_export_ready", - "llvm.amdgcn.s.waitcnt" => "__builtin_amdgcn_s_waitcnt", - "llvm.amdgcn.sad.hi.u8" => "__builtin_amdgcn_sad_hi_u8", - "llvm.amdgcn.sad.u16" => "__builtin_amdgcn_sad_u16", - "llvm.amdgcn.sad.u8" => "__builtin_amdgcn_sad_u8", - "llvm.amdgcn.sched.barrier" => "__builtin_amdgcn_sched_barrier", - "llvm.amdgcn.sched.group.barrier" => "__builtin_amdgcn_sched_group_barrier", - "llvm.amdgcn.sdot2" => "__builtin_amdgcn_sdot2", - "llvm.amdgcn.sdot4" => "__builtin_amdgcn_sdot4", - "llvm.amdgcn.sdot8" => "__builtin_amdgcn_sdot8", - "llvm.amdgcn.smfmac.f32.16x16x128.bf8.bf8" => "__builtin_amdgcn_smfmac_f32_16x16x128_bf8_bf8", - "llvm.amdgcn.smfmac.f32.16x16x128.bf8.fp8" => "__builtin_amdgcn_smfmac_f32_16x16x128_bf8_fp8", - "llvm.amdgcn.smfmac.f32.16x16x128.fp8.bf8" => "__builtin_amdgcn_smfmac_f32_16x16x128_fp8_bf8", - "llvm.amdgcn.smfmac.f32.16x16x128.fp8.fp8" => "__builtin_amdgcn_smfmac_f32_16x16x128_fp8_fp8", - "llvm.amdgcn.smfmac.f32.16x16x32.bf16" => "__builtin_amdgcn_smfmac_f32_16x16x32_bf16", - "llvm.amdgcn.smfmac.f32.16x16x32.f16" => "__builtin_amdgcn_smfmac_f32_16x16x32_f16", - "llvm.amdgcn.smfmac.f32.16x16x64.bf16" => "__builtin_amdgcn_smfmac_f32_16x16x64_bf16", - "llvm.amdgcn.smfmac.f32.16x16x64.bf8.bf8" => "__builtin_amdgcn_smfmac_f32_16x16x64_bf8_bf8", - "llvm.amdgcn.smfmac.f32.16x16x64.bf8.fp8" => "__builtin_amdgcn_smfmac_f32_16x16x64_bf8_fp8", - "llvm.amdgcn.smfmac.f32.16x16x64.f16" => "__builtin_amdgcn_smfmac_f32_16x16x64_f16", - "llvm.amdgcn.smfmac.f32.16x16x64.fp8.bf8" => "__builtin_amdgcn_smfmac_f32_16x16x64_fp8_bf8", - "llvm.amdgcn.smfmac.f32.16x16x64.fp8.fp8" => "__builtin_amdgcn_smfmac_f32_16x16x64_fp8_fp8", - "llvm.amdgcn.smfmac.f32.32x32x16.bf16" => "__builtin_amdgcn_smfmac_f32_32x32x16_bf16", - "llvm.amdgcn.smfmac.f32.32x32x16.f16" => "__builtin_amdgcn_smfmac_f32_32x32x16_f16", - "llvm.amdgcn.smfmac.f32.32x32x32.bf16" => "__builtin_amdgcn_smfmac_f32_32x32x32_bf16", - "llvm.amdgcn.smfmac.f32.32x32x32.bf8.bf8" => "__builtin_amdgcn_smfmac_f32_32x32x32_bf8_bf8", - "llvm.amdgcn.smfmac.f32.32x32x32.bf8.fp8" => "__builtin_amdgcn_smfmac_f32_32x32x32_bf8_fp8", - "llvm.amdgcn.smfmac.f32.32x32x32.f16" => "__builtin_amdgcn_smfmac_f32_32x32x32_f16", - "llvm.amdgcn.smfmac.f32.32x32x32.fp8.bf8" => "__builtin_amdgcn_smfmac_f32_32x32x32_fp8_bf8", - "llvm.amdgcn.smfmac.f32.32x32x32.fp8.fp8" => "__builtin_amdgcn_smfmac_f32_32x32x32_fp8_fp8", - "llvm.amdgcn.smfmac.f32.32x32x64.bf8.bf8" => "__builtin_amdgcn_smfmac_f32_32x32x64_bf8_bf8", - "llvm.amdgcn.smfmac.f32.32x32x64.bf8.fp8" => "__builtin_amdgcn_smfmac_f32_32x32x64_bf8_fp8", - "llvm.amdgcn.smfmac.f32.32x32x64.fp8.bf8" => "__builtin_amdgcn_smfmac_f32_32x32x64_fp8_bf8", - "llvm.amdgcn.smfmac.f32.32x32x64.fp8.fp8" => "__builtin_amdgcn_smfmac_f32_32x32x64_fp8_fp8", - "llvm.amdgcn.smfmac.i32.16x16x128.i8" => "__builtin_amdgcn_smfmac_i32_16x16x128_i8", - "llvm.amdgcn.smfmac.i32.16x16x64.i8" => "__builtin_amdgcn_smfmac_i32_16x16x64_i8", - "llvm.amdgcn.smfmac.i32.32x32x32.i8" => 
"__builtin_amdgcn_smfmac_i32_32x32x32_i8", - "llvm.amdgcn.smfmac.i32.32x32x64.i8" => "__builtin_amdgcn_smfmac_i32_32x32x64_i8", - "llvm.amdgcn.sudot4" => "__builtin_amdgcn_sudot4", - "llvm.amdgcn.sudot8" => "__builtin_amdgcn_sudot8", - "llvm.amdgcn.udot2" => "__builtin_amdgcn_udot2", - "llvm.amdgcn.udot4" => "__builtin_amdgcn_udot4", - "llvm.amdgcn.udot8" => "__builtin_amdgcn_udot8", - "llvm.amdgcn.wave.barrier" => "__builtin_amdgcn_wave_barrier", - "llvm.amdgcn.wavefrontsize" => "__builtin_amdgcn_wavefrontsize", - "llvm.amdgcn.workgroup.id.x" => "__builtin_amdgcn_workgroup_id_x", - "llvm.amdgcn.workgroup.id.y" => "__builtin_amdgcn_workgroup_id_y", - "llvm.amdgcn.workgroup.id.z" => "__builtin_amdgcn_workgroup_id_z", - "llvm.amdgcn.workitem.id.x" => "__builtin_amdgcn_workitem_id_x", - "llvm.amdgcn.workitem.id.y" => "__builtin_amdgcn_workitem_id_y", - "llvm.amdgcn.workitem.id.z" => "__builtin_amdgcn_workitem_id_z", - // arm - "llvm.arm.cdp" => "__builtin_arm_cdp", - "llvm.arm.cdp2" => "__builtin_arm_cdp2", - "llvm.arm.cmse.tt" => "__builtin_arm_cmse_TT", - "llvm.arm.cmse.tta" => "__builtin_arm_cmse_TTA", - "llvm.arm.cmse.ttat" => "__builtin_arm_cmse_TTAT", - "llvm.arm.cmse.ttt" => "__builtin_arm_cmse_TTT", - "llvm.arm.dmb" => "__builtin_arm_dmb", - "llvm.arm.dsb" => "__builtin_arm_dsb", - "llvm.arm.get.fpscr" => "__builtin_arm_get_fpscr", - "llvm.arm.isb" => "__builtin_arm_isb", - "llvm.arm.ldc" => "__builtin_arm_ldc", - "llvm.arm.ldc2" => "__builtin_arm_ldc2", - "llvm.arm.ldc2l" => "__builtin_arm_ldc2l", - "llvm.arm.ldcl" => "__builtin_arm_ldcl", - "llvm.arm.mcr" => "__builtin_arm_mcr", - "llvm.arm.mcr2" => "__builtin_arm_mcr2", - "llvm.arm.mcrr" => "__builtin_arm_mcrr", - "llvm.arm.mcrr2" => "__builtin_arm_mcrr2", - "llvm.arm.mrc" => "__builtin_arm_mrc", - "llvm.arm.mrc2" => "__builtin_arm_mrc2", - "llvm.arm.qadd" => "__builtin_arm_qadd", - "llvm.arm.qadd16" => "__builtin_arm_qadd16", - "llvm.arm.qadd8" => "__builtin_arm_qadd8", - "llvm.arm.qasx" => "__builtin_arm_qasx", - "llvm.arm.qsax" => "__builtin_arm_qsax", - "llvm.arm.qsub" => "__builtin_arm_qsub", - "llvm.arm.qsub16" => "__builtin_arm_qsub16", - "llvm.arm.qsub8" => "__builtin_arm_qsub8", - "llvm.arm.sadd16" => "__builtin_arm_sadd16", - "llvm.arm.sadd8" => "__builtin_arm_sadd8", - "llvm.arm.sasx" => "__builtin_arm_sasx", - "llvm.arm.sel" => "__builtin_arm_sel", - "llvm.arm.set.fpscr" => "__builtin_arm_set_fpscr", - "llvm.arm.shadd16" => "__builtin_arm_shadd16", - "llvm.arm.shadd8" => "__builtin_arm_shadd8", - "llvm.arm.shasx" => "__builtin_arm_shasx", - "llvm.arm.shsax" => "__builtin_arm_shsax", - "llvm.arm.shsub16" => "__builtin_arm_shsub16", - "llvm.arm.shsub8" => "__builtin_arm_shsub8", - "llvm.arm.smlabb" => "__builtin_arm_smlabb", - "llvm.arm.smlabt" => "__builtin_arm_smlabt", - "llvm.arm.smlad" => "__builtin_arm_smlad", - "llvm.arm.smladx" => "__builtin_arm_smladx", - "llvm.arm.smlald" => "__builtin_arm_smlald", - "llvm.arm.smlaldx" => "__builtin_arm_smlaldx", - "llvm.arm.smlatb" => "__builtin_arm_smlatb", - "llvm.arm.smlatt" => "__builtin_arm_smlatt", - "llvm.arm.smlawb" => "__builtin_arm_smlawb", - "llvm.arm.smlawt" => "__builtin_arm_smlawt", - "llvm.arm.smlsd" => "__builtin_arm_smlsd", - "llvm.arm.smlsdx" => "__builtin_arm_smlsdx", - "llvm.arm.smlsld" => "__builtin_arm_smlsld", - "llvm.arm.smlsldx" => "__builtin_arm_smlsldx", - "llvm.arm.smuad" => "__builtin_arm_smuad", - "llvm.arm.smuadx" => "__builtin_arm_smuadx", - "llvm.arm.smulbb" => "__builtin_arm_smulbb", - "llvm.arm.smulbt" => "__builtin_arm_smulbt", - 
"llvm.arm.smultb" => "__builtin_arm_smultb", - "llvm.arm.smultt" => "__builtin_arm_smultt", - "llvm.arm.smulwb" => "__builtin_arm_smulwb", - "llvm.arm.smulwt" => "__builtin_arm_smulwt", - "llvm.arm.smusd" => "__builtin_arm_smusd", - "llvm.arm.smusdx" => "__builtin_arm_smusdx", - "llvm.arm.ssat" => "__builtin_arm_ssat", - "llvm.arm.ssat16" => "__builtin_arm_ssat16", - "llvm.arm.ssax" => "__builtin_arm_ssax", - "llvm.arm.ssub16" => "__builtin_arm_ssub16", - "llvm.arm.ssub8" => "__builtin_arm_ssub8", - "llvm.arm.stc" => "__builtin_arm_stc", - "llvm.arm.stc2" => "__builtin_arm_stc2", - "llvm.arm.stc2l" => "__builtin_arm_stc2l", - "llvm.arm.stcl" => "__builtin_arm_stcl", - "llvm.arm.sxtab16" => "__builtin_arm_sxtab16", - "llvm.arm.sxtb16" => "__builtin_arm_sxtb16", - "llvm.arm.thread.pointer" => "__builtin_thread_pointer", - "llvm.arm.uadd16" => "__builtin_arm_uadd16", - "llvm.arm.uadd8" => "__builtin_arm_uadd8", - "llvm.arm.uasx" => "__builtin_arm_uasx", - "llvm.arm.uhadd16" => "__builtin_arm_uhadd16", - "llvm.arm.uhadd8" => "__builtin_arm_uhadd8", - "llvm.arm.uhasx" => "__builtin_arm_uhasx", - "llvm.arm.uhsax" => "__builtin_arm_uhsax", - "llvm.arm.uhsub16" => "__builtin_arm_uhsub16", - "llvm.arm.uhsub8" => "__builtin_arm_uhsub8", - "llvm.arm.uqadd16" => "__builtin_arm_uqadd16", - "llvm.arm.uqadd8" => "__builtin_arm_uqadd8", - "llvm.arm.uqasx" => "__builtin_arm_uqasx", - "llvm.arm.uqsax" => "__builtin_arm_uqsax", - "llvm.arm.uqsub16" => "__builtin_arm_uqsub16", - "llvm.arm.uqsub8" => "__builtin_arm_uqsub8", - "llvm.arm.usad8" => "__builtin_arm_usad8", - "llvm.arm.usada8" => "__builtin_arm_usada8", - "llvm.arm.usat" => "__builtin_arm_usat", - "llvm.arm.usat16" => "__builtin_arm_usat16", - "llvm.arm.usax" => "__builtin_arm_usax", - "llvm.arm.usub16" => "__builtin_arm_usub16", - "llvm.arm.usub8" => "__builtin_arm_usub8", - "llvm.arm.uxtab16" => "__builtin_arm_uxtab16", - "llvm.arm.uxtb16" => "__builtin_arm_uxtb16", - // bpf - "llvm.bpf.btf.type.id" => "__builtin_bpf_btf_type_id", - "llvm.bpf.compare" => "__builtin_bpf_compare", - "llvm.bpf.getelementptr.and.load" => "__builtin_bpf_getelementptr_and_load", - "llvm.bpf.getelementptr.and.store" => "__builtin_bpf_getelementptr_and_store", - "llvm.bpf.load.byte" => "__builtin_bpf_load_byte", - "llvm.bpf.load.half" => "__builtin_bpf_load_half", - "llvm.bpf.load.word" => "__builtin_bpf_load_word", - "llvm.bpf.passthrough" => "__builtin_bpf_passthrough", - "llvm.bpf.preserve.enum.value" => "__builtin_bpf_preserve_enum_value", - "llvm.bpf.preserve.field.info" => "__builtin_bpf_preserve_field_info", - "llvm.bpf.preserve.type.info" => "__builtin_bpf_preserve_type_info", - "llvm.bpf.pseudo" => "__builtin_bpf_pseudo", - // cuda - "llvm.cuda.syncthreads" => "__syncthreads", - // hexagon - "llvm.hexagon.A2.abs" => "__builtin_HEXAGON_A2_abs", - "llvm.hexagon.A2.absp" => "__builtin_HEXAGON_A2_absp", - "llvm.hexagon.A2.abssat" => "__builtin_HEXAGON_A2_abssat", - "llvm.hexagon.A2.add" => "__builtin_HEXAGON_A2_add", - "llvm.hexagon.A2.addh.h16.hh" => "__builtin_HEXAGON_A2_addh_h16_hh", - "llvm.hexagon.A2.addh.h16.hl" => "__builtin_HEXAGON_A2_addh_h16_hl", - "llvm.hexagon.A2.addh.h16.lh" => "__builtin_HEXAGON_A2_addh_h16_lh", - "llvm.hexagon.A2.addh.h16.ll" => "__builtin_HEXAGON_A2_addh_h16_ll", - "llvm.hexagon.A2.addh.h16.sat.hh" => "__builtin_HEXAGON_A2_addh_h16_sat_hh", - "llvm.hexagon.A2.addh.h16.sat.hl" => "__builtin_HEXAGON_A2_addh_h16_sat_hl", - "llvm.hexagon.A2.addh.h16.sat.lh" => "__builtin_HEXAGON_A2_addh_h16_sat_lh", - "llvm.hexagon.A2.addh.h16.sat.ll" => 
"__builtin_HEXAGON_A2_addh_h16_sat_ll", - "llvm.hexagon.A2.addh.l16.hl" => "__builtin_HEXAGON_A2_addh_l16_hl", - "llvm.hexagon.A2.addh.l16.ll" => "__builtin_HEXAGON_A2_addh_l16_ll", - "llvm.hexagon.A2.addh.l16.sat.hl" => "__builtin_HEXAGON_A2_addh_l16_sat_hl", - "llvm.hexagon.A2.addh.l16.sat.ll" => "__builtin_HEXAGON_A2_addh_l16_sat_ll", - "llvm.hexagon.A2.addi" => "__builtin_HEXAGON_A2_addi", - "llvm.hexagon.A2.addp" => "__builtin_HEXAGON_A2_addp", - "llvm.hexagon.A2.addpsat" => "__builtin_HEXAGON_A2_addpsat", - "llvm.hexagon.A2.addsat" => "__builtin_HEXAGON_A2_addsat", - "llvm.hexagon.A2.addsp" => "__builtin_HEXAGON_A2_addsp", - "llvm.hexagon.A2.and" => "__builtin_HEXAGON_A2_and", - "llvm.hexagon.A2.andir" => "__builtin_HEXAGON_A2_andir", - "llvm.hexagon.A2.andp" => "__builtin_HEXAGON_A2_andp", - "llvm.hexagon.A2.aslh" => "__builtin_HEXAGON_A2_aslh", - "llvm.hexagon.A2.asrh" => "__builtin_HEXAGON_A2_asrh", - "llvm.hexagon.A2.combine.hh" => "__builtin_HEXAGON_A2_combine_hh", - "llvm.hexagon.A2.combine.hl" => "__builtin_HEXAGON_A2_combine_hl", - "llvm.hexagon.A2.combine.lh" => "__builtin_HEXAGON_A2_combine_lh", - "llvm.hexagon.A2.combine.ll" => "__builtin_HEXAGON_A2_combine_ll", - "llvm.hexagon.A2.combineii" => "__builtin_HEXAGON_A2_combineii", - "llvm.hexagon.A2.combinew" => "__builtin_HEXAGON_A2_combinew", - "llvm.hexagon.A2.max" => "__builtin_HEXAGON_A2_max", - "llvm.hexagon.A2.maxp" => "__builtin_HEXAGON_A2_maxp", - "llvm.hexagon.A2.maxu" => "__builtin_HEXAGON_A2_maxu", - "llvm.hexagon.A2.maxup" => "__builtin_HEXAGON_A2_maxup", - "llvm.hexagon.A2.min" => "__builtin_HEXAGON_A2_min", - "llvm.hexagon.A2.minp" => "__builtin_HEXAGON_A2_minp", - "llvm.hexagon.A2.minu" => "__builtin_HEXAGON_A2_minu", - "llvm.hexagon.A2.minup" => "__builtin_HEXAGON_A2_minup", - "llvm.hexagon.A2.neg" => "__builtin_HEXAGON_A2_neg", - "llvm.hexagon.A2.negp" => "__builtin_HEXAGON_A2_negp", - "llvm.hexagon.A2.negsat" => "__builtin_HEXAGON_A2_negsat", - "llvm.hexagon.A2.not" => "__builtin_HEXAGON_A2_not", - "llvm.hexagon.A2.notp" => "__builtin_HEXAGON_A2_notp", - "llvm.hexagon.A2.or" => "__builtin_HEXAGON_A2_or", - "llvm.hexagon.A2.orir" => "__builtin_HEXAGON_A2_orir", - "llvm.hexagon.A2.orp" => "__builtin_HEXAGON_A2_orp", - "llvm.hexagon.A2.roundsat" => "__builtin_HEXAGON_A2_roundsat", - "llvm.hexagon.A2.sat" => "__builtin_HEXAGON_A2_sat", - "llvm.hexagon.A2.satb" => "__builtin_HEXAGON_A2_satb", - "llvm.hexagon.A2.sath" => "__builtin_HEXAGON_A2_sath", - "llvm.hexagon.A2.satub" => "__builtin_HEXAGON_A2_satub", - "llvm.hexagon.A2.satuh" => "__builtin_HEXAGON_A2_satuh", - "llvm.hexagon.A2.sub" => "__builtin_HEXAGON_A2_sub", - "llvm.hexagon.A2.subh.h16.hh" => "__builtin_HEXAGON_A2_subh_h16_hh", - "llvm.hexagon.A2.subh.h16.hl" => "__builtin_HEXAGON_A2_subh_h16_hl", - "llvm.hexagon.A2.subh.h16.lh" => "__builtin_HEXAGON_A2_subh_h16_lh", - "llvm.hexagon.A2.subh.h16.ll" => "__builtin_HEXAGON_A2_subh_h16_ll", - "llvm.hexagon.A2.subh.h16.sat.hh" => "__builtin_HEXAGON_A2_subh_h16_sat_hh", - "llvm.hexagon.A2.subh.h16.sat.hl" => "__builtin_HEXAGON_A2_subh_h16_sat_hl", - "llvm.hexagon.A2.subh.h16.sat.lh" => "__builtin_HEXAGON_A2_subh_h16_sat_lh", - "llvm.hexagon.A2.subh.h16.sat.ll" => "__builtin_HEXAGON_A2_subh_h16_sat_ll", - "llvm.hexagon.A2.subh.l16.hl" => "__builtin_HEXAGON_A2_subh_l16_hl", - "llvm.hexagon.A2.subh.l16.ll" => "__builtin_HEXAGON_A2_subh_l16_ll", - "llvm.hexagon.A2.subh.l16.sat.hl" => "__builtin_HEXAGON_A2_subh_l16_sat_hl", - "llvm.hexagon.A2.subh.l16.sat.ll" => "__builtin_HEXAGON_A2_subh_l16_sat_ll", - 
"llvm.hexagon.A2.subp" => "__builtin_HEXAGON_A2_subp", - "llvm.hexagon.A2.subri" => "__builtin_HEXAGON_A2_subri", - "llvm.hexagon.A2.subsat" => "__builtin_HEXAGON_A2_subsat", - "llvm.hexagon.A2.svaddh" => "__builtin_HEXAGON_A2_svaddh", - "llvm.hexagon.A2.svaddhs" => "__builtin_HEXAGON_A2_svaddhs", - "llvm.hexagon.A2.svadduhs" => "__builtin_HEXAGON_A2_svadduhs", - "llvm.hexagon.A2.svavgh" => "__builtin_HEXAGON_A2_svavgh", - "llvm.hexagon.A2.svavghs" => "__builtin_HEXAGON_A2_svavghs", - "llvm.hexagon.A2.svnavgh" => "__builtin_HEXAGON_A2_svnavgh", - "llvm.hexagon.A2.svsubh" => "__builtin_HEXAGON_A2_svsubh", - "llvm.hexagon.A2.svsubhs" => "__builtin_HEXAGON_A2_svsubhs", - "llvm.hexagon.A2.svsubuhs" => "__builtin_HEXAGON_A2_svsubuhs", - "llvm.hexagon.A2.swiz" => "__builtin_HEXAGON_A2_swiz", - "llvm.hexagon.A2.sxtb" => "__builtin_HEXAGON_A2_sxtb", - "llvm.hexagon.A2.sxth" => "__builtin_HEXAGON_A2_sxth", - "llvm.hexagon.A2.sxtw" => "__builtin_HEXAGON_A2_sxtw", - "llvm.hexagon.A2.tfr" => "__builtin_HEXAGON_A2_tfr", - "llvm.hexagon.A2.tfrih" => "__builtin_HEXAGON_A2_tfrih", - "llvm.hexagon.A2.tfril" => "__builtin_HEXAGON_A2_tfril", - "llvm.hexagon.A2.tfrp" => "__builtin_HEXAGON_A2_tfrp", - "llvm.hexagon.A2.tfrpi" => "__builtin_HEXAGON_A2_tfrpi", - "llvm.hexagon.A2.tfrsi" => "__builtin_HEXAGON_A2_tfrsi", - "llvm.hexagon.A2.vabsh" => "__builtin_HEXAGON_A2_vabsh", - "llvm.hexagon.A2.vabshsat" => "__builtin_HEXAGON_A2_vabshsat", - "llvm.hexagon.A2.vabsw" => "__builtin_HEXAGON_A2_vabsw", - "llvm.hexagon.A2.vabswsat" => "__builtin_HEXAGON_A2_vabswsat", - "llvm.hexagon.A2.vaddb.map" => "__builtin_HEXAGON_A2_vaddb_map", - "llvm.hexagon.A2.vaddh" => "__builtin_HEXAGON_A2_vaddh", - "llvm.hexagon.A2.vaddhs" => "__builtin_HEXAGON_A2_vaddhs", - "llvm.hexagon.A2.vaddub" => "__builtin_HEXAGON_A2_vaddub", - "llvm.hexagon.A2.vaddubs" => "__builtin_HEXAGON_A2_vaddubs", - "llvm.hexagon.A2.vadduhs" => "__builtin_HEXAGON_A2_vadduhs", - "llvm.hexagon.A2.vaddw" => "__builtin_HEXAGON_A2_vaddw", - "llvm.hexagon.A2.vaddws" => "__builtin_HEXAGON_A2_vaddws", - "llvm.hexagon.A2.vavgh" => "__builtin_HEXAGON_A2_vavgh", - "llvm.hexagon.A2.vavghcr" => "__builtin_HEXAGON_A2_vavghcr", - "llvm.hexagon.A2.vavghr" => "__builtin_HEXAGON_A2_vavghr", - "llvm.hexagon.A2.vavgub" => "__builtin_HEXAGON_A2_vavgub", - "llvm.hexagon.A2.vavgubr" => "__builtin_HEXAGON_A2_vavgubr", - "llvm.hexagon.A2.vavguh" => "__builtin_HEXAGON_A2_vavguh", - "llvm.hexagon.A2.vavguhr" => "__builtin_HEXAGON_A2_vavguhr", - "llvm.hexagon.A2.vavguw" => "__builtin_HEXAGON_A2_vavguw", - "llvm.hexagon.A2.vavguwr" => "__builtin_HEXAGON_A2_vavguwr", - "llvm.hexagon.A2.vavgw" => "__builtin_HEXAGON_A2_vavgw", - "llvm.hexagon.A2.vavgwcr" => "__builtin_HEXAGON_A2_vavgwcr", - "llvm.hexagon.A2.vavgwr" => "__builtin_HEXAGON_A2_vavgwr", - "llvm.hexagon.A2.vcmpbeq" => "__builtin_HEXAGON_A2_vcmpbeq", - "llvm.hexagon.A2.vcmpbgtu" => "__builtin_HEXAGON_A2_vcmpbgtu", - "llvm.hexagon.A2.vcmpheq" => "__builtin_HEXAGON_A2_vcmpheq", - "llvm.hexagon.A2.vcmphgt" => "__builtin_HEXAGON_A2_vcmphgt", - "llvm.hexagon.A2.vcmphgtu" => "__builtin_HEXAGON_A2_vcmphgtu", - "llvm.hexagon.A2.vcmpweq" => "__builtin_HEXAGON_A2_vcmpweq", - "llvm.hexagon.A2.vcmpwgt" => "__builtin_HEXAGON_A2_vcmpwgt", - "llvm.hexagon.A2.vcmpwgtu" => "__builtin_HEXAGON_A2_vcmpwgtu", - "llvm.hexagon.A2.vconj" => "__builtin_HEXAGON_A2_vconj", - "llvm.hexagon.A2.vmaxb" => "__builtin_HEXAGON_A2_vmaxb", - "llvm.hexagon.A2.vmaxh" => "__builtin_HEXAGON_A2_vmaxh", - "llvm.hexagon.A2.vmaxub" => "__builtin_HEXAGON_A2_vmaxub", - 
"llvm.hexagon.A2.vmaxuh" => "__builtin_HEXAGON_A2_vmaxuh", - "llvm.hexagon.A2.vmaxuw" => "__builtin_HEXAGON_A2_vmaxuw", - "llvm.hexagon.A2.vmaxw" => "__builtin_HEXAGON_A2_vmaxw", - "llvm.hexagon.A2.vminb" => "__builtin_HEXAGON_A2_vminb", - "llvm.hexagon.A2.vminh" => "__builtin_HEXAGON_A2_vminh", - "llvm.hexagon.A2.vminub" => "__builtin_HEXAGON_A2_vminub", - "llvm.hexagon.A2.vminuh" => "__builtin_HEXAGON_A2_vminuh", - "llvm.hexagon.A2.vminuw" => "__builtin_HEXAGON_A2_vminuw", - "llvm.hexagon.A2.vminw" => "__builtin_HEXAGON_A2_vminw", - "llvm.hexagon.A2.vnavgh" => "__builtin_HEXAGON_A2_vnavgh", - "llvm.hexagon.A2.vnavghcr" => "__builtin_HEXAGON_A2_vnavghcr", - "llvm.hexagon.A2.vnavghr" => "__builtin_HEXAGON_A2_vnavghr", - "llvm.hexagon.A2.vnavgw" => "__builtin_HEXAGON_A2_vnavgw", - "llvm.hexagon.A2.vnavgwcr" => "__builtin_HEXAGON_A2_vnavgwcr", - "llvm.hexagon.A2.vnavgwr" => "__builtin_HEXAGON_A2_vnavgwr", - "llvm.hexagon.A2.vraddub" => "__builtin_HEXAGON_A2_vraddub", - "llvm.hexagon.A2.vraddub.acc" => "__builtin_HEXAGON_A2_vraddub_acc", - "llvm.hexagon.A2.vrsadub" => "__builtin_HEXAGON_A2_vrsadub", - "llvm.hexagon.A2.vrsadub.acc" => "__builtin_HEXAGON_A2_vrsadub_acc", - "llvm.hexagon.A2.vsubb.map" => "__builtin_HEXAGON_A2_vsubb_map", - "llvm.hexagon.A2.vsubh" => "__builtin_HEXAGON_A2_vsubh", - "llvm.hexagon.A2.vsubhs" => "__builtin_HEXAGON_A2_vsubhs", - "llvm.hexagon.A2.vsubub" => "__builtin_HEXAGON_A2_vsubub", - "llvm.hexagon.A2.vsububs" => "__builtin_HEXAGON_A2_vsububs", - "llvm.hexagon.A2.vsubuhs" => "__builtin_HEXAGON_A2_vsubuhs", - "llvm.hexagon.A2.vsubw" => "__builtin_HEXAGON_A2_vsubw", - "llvm.hexagon.A2.vsubws" => "__builtin_HEXAGON_A2_vsubws", - "llvm.hexagon.A2.xor" => "__builtin_HEXAGON_A2_xor", - "llvm.hexagon.A2.xorp" => "__builtin_HEXAGON_A2_xorp", - "llvm.hexagon.A2.zxtb" => "__builtin_HEXAGON_A2_zxtb", - "llvm.hexagon.A2.zxth" => "__builtin_HEXAGON_A2_zxth", - "llvm.hexagon.A4.andn" => "__builtin_HEXAGON_A4_andn", - "llvm.hexagon.A4.andnp" => "__builtin_HEXAGON_A4_andnp", - "llvm.hexagon.A4.bitsplit" => "__builtin_HEXAGON_A4_bitsplit", - "llvm.hexagon.A4.bitspliti" => "__builtin_HEXAGON_A4_bitspliti", - "llvm.hexagon.A4.boundscheck" => "__builtin_HEXAGON_A4_boundscheck", - "llvm.hexagon.A4.cmpbeq" => "__builtin_HEXAGON_A4_cmpbeq", - "llvm.hexagon.A4.cmpbeqi" => "__builtin_HEXAGON_A4_cmpbeqi", - "llvm.hexagon.A4.cmpbgt" => "__builtin_HEXAGON_A4_cmpbgt", - "llvm.hexagon.A4.cmpbgti" => "__builtin_HEXAGON_A4_cmpbgti", - "llvm.hexagon.A4.cmpbgtu" => "__builtin_HEXAGON_A4_cmpbgtu", - "llvm.hexagon.A4.cmpbgtui" => "__builtin_HEXAGON_A4_cmpbgtui", - "llvm.hexagon.A4.cmpheq" => "__builtin_HEXAGON_A4_cmpheq", - "llvm.hexagon.A4.cmpheqi" => "__builtin_HEXAGON_A4_cmpheqi", - "llvm.hexagon.A4.cmphgt" => "__builtin_HEXAGON_A4_cmphgt", - "llvm.hexagon.A4.cmphgti" => "__builtin_HEXAGON_A4_cmphgti", - "llvm.hexagon.A4.cmphgtu" => "__builtin_HEXAGON_A4_cmphgtu", - "llvm.hexagon.A4.cmphgtui" => "__builtin_HEXAGON_A4_cmphgtui", - "llvm.hexagon.A4.combineir" => "__builtin_HEXAGON_A4_combineir", - "llvm.hexagon.A4.combineri" => "__builtin_HEXAGON_A4_combineri", - "llvm.hexagon.A4.cround.ri" => "__builtin_HEXAGON_A4_cround_ri", - "llvm.hexagon.A4.cround.rr" => "__builtin_HEXAGON_A4_cround_rr", - "llvm.hexagon.A4.modwrapu" => "__builtin_HEXAGON_A4_modwrapu", - "llvm.hexagon.A4.orn" => "__builtin_HEXAGON_A4_orn", - "llvm.hexagon.A4.ornp" => "__builtin_HEXAGON_A4_ornp", - "llvm.hexagon.A4.rcmpeq" => "__builtin_HEXAGON_A4_rcmpeq", - "llvm.hexagon.A4.rcmpeqi" => "__builtin_HEXAGON_A4_rcmpeqi", - 
"llvm.hexagon.A4.rcmpneq" => "__builtin_HEXAGON_A4_rcmpneq", - "llvm.hexagon.A4.rcmpneqi" => "__builtin_HEXAGON_A4_rcmpneqi", - "llvm.hexagon.A4.round.ri" => "__builtin_HEXAGON_A4_round_ri", - "llvm.hexagon.A4.round.ri.sat" => "__builtin_HEXAGON_A4_round_ri_sat", - "llvm.hexagon.A4.round.rr" => "__builtin_HEXAGON_A4_round_rr", - "llvm.hexagon.A4.round.rr.sat" => "__builtin_HEXAGON_A4_round_rr_sat", - "llvm.hexagon.A4.tlbmatch" => "__builtin_HEXAGON_A4_tlbmatch", - "llvm.hexagon.A4.vcmpbeq.any" => "__builtin_HEXAGON_A4_vcmpbeq_any", - "llvm.hexagon.A4.vcmpbeqi" => "__builtin_HEXAGON_A4_vcmpbeqi", - "llvm.hexagon.A4.vcmpbgt" => "__builtin_HEXAGON_A4_vcmpbgt", - "llvm.hexagon.A4.vcmpbgti" => "__builtin_HEXAGON_A4_vcmpbgti", - "llvm.hexagon.A4.vcmpbgtui" => "__builtin_HEXAGON_A4_vcmpbgtui", - "llvm.hexagon.A4.vcmpheqi" => "__builtin_HEXAGON_A4_vcmpheqi", - "llvm.hexagon.A4.vcmphgti" => "__builtin_HEXAGON_A4_vcmphgti", - "llvm.hexagon.A4.vcmphgtui" => "__builtin_HEXAGON_A4_vcmphgtui", - "llvm.hexagon.A4.vcmpweqi" => "__builtin_HEXAGON_A4_vcmpweqi", - "llvm.hexagon.A4.vcmpwgti" => "__builtin_HEXAGON_A4_vcmpwgti", - "llvm.hexagon.A4.vcmpwgtui" => "__builtin_HEXAGON_A4_vcmpwgtui", - "llvm.hexagon.A4.vrmaxh" => "__builtin_HEXAGON_A4_vrmaxh", - "llvm.hexagon.A4.vrmaxuh" => "__builtin_HEXAGON_A4_vrmaxuh", - "llvm.hexagon.A4.vrmaxuw" => "__builtin_HEXAGON_A4_vrmaxuw", - "llvm.hexagon.A4.vrmaxw" => "__builtin_HEXAGON_A4_vrmaxw", - "llvm.hexagon.A4.vrminh" => "__builtin_HEXAGON_A4_vrminh", - "llvm.hexagon.A4.vrminuh" => "__builtin_HEXAGON_A4_vrminuh", - "llvm.hexagon.A4.vrminuw" => "__builtin_HEXAGON_A4_vrminuw", - "llvm.hexagon.A4.vrminw" => "__builtin_HEXAGON_A4_vrminw", - "llvm.hexagon.A5.vaddhubs" => "__builtin_HEXAGON_A5_vaddhubs", - "llvm.hexagon.A6.vcmpbeq.notany" => "__builtin_HEXAGON_A6_vcmpbeq_notany", - "llvm.hexagon.A7.clip" => "__builtin_HEXAGON_A7_clip", - "llvm.hexagon.A7.croundd.ri" => "__builtin_HEXAGON_A7_croundd_ri", - "llvm.hexagon.A7.croundd.rr" => "__builtin_HEXAGON_A7_croundd_rr", - "llvm.hexagon.A7.vclip" => "__builtin_HEXAGON_A7_vclip", - "llvm.hexagon.C2.all8" => "__builtin_HEXAGON_C2_all8", - "llvm.hexagon.C2.and" => "__builtin_HEXAGON_C2_and", - "llvm.hexagon.C2.andn" => "__builtin_HEXAGON_C2_andn", - "llvm.hexagon.C2.any8" => "__builtin_HEXAGON_C2_any8", - "llvm.hexagon.C2.bitsclr" => "__builtin_HEXAGON_C2_bitsclr", - "llvm.hexagon.C2.bitsclri" => "__builtin_HEXAGON_C2_bitsclri", - "llvm.hexagon.C2.bitsset" => "__builtin_HEXAGON_C2_bitsset", - "llvm.hexagon.C2.cmpeq" => "__builtin_HEXAGON_C2_cmpeq", - "llvm.hexagon.C2.cmpeqi" => "__builtin_HEXAGON_C2_cmpeqi", - "llvm.hexagon.C2.cmpeqp" => "__builtin_HEXAGON_C2_cmpeqp", - "llvm.hexagon.C2.cmpgei" => "__builtin_HEXAGON_C2_cmpgei", - "llvm.hexagon.C2.cmpgeui" => "__builtin_HEXAGON_C2_cmpgeui", - "llvm.hexagon.C2.cmpgt" => "__builtin_HEXAGON_C2_cmpgt", - "llvm.hexagon.C2.cmpgti" => "__builtin_HEXAGON_C2_cmpgti", - "llvm.hexagon.C2.cmpgtp" => "__builtin_HEXAGON_C2_cmpgtp", - "llvm.hexagon.C2.cmpgtu" => "__builtin_HEXAGON_C2_cmpgtu", - "llvm.hexagon.C2.cmpgtui" => "__builtin_HEXAGON_C2_cmpgtui", - "llvm.hexagon.C2.cmpgtup" => "__builtin_HEXAGON_C2_cmpgtup", - "llvm.hexagon.C2.cmplt" => "__builtin_HEXAGON_C2_cmplt", - "llvm.hexagon.C2.cmpltu" => "__builtin_HEXAGON_C2_cmpltu", - "llvm.hexagon.C2.mask" => "__builtin_HEXAGON_C2_mask", - "llvm.hexagon.C2.mux" => "__builtin_HEXAGON_C2_mux", - "llvm.hexagon.C2.muxii" => "__builtin_HEXAGON_C2_muxii", - "llvm.hexagon.C2.muxir" => "__builtin_HEXAGON_C2_muxir", - "llvm.hexagon.C2.muxri" => 
"__builtin_HEXAGON_C2_muxri", - "llvm.hexagon.C2.not" => "__builtin_HEXAGON_C2_not", - "llvm.hexagon.C2.or" => "__builtin_HEXAGON_C2_or", - "llvm.hexagon.C2.orn" => "__builtin_HEXAGON_C2_orn", - "llvm.hexagon.C2.pxfer.map" => "__builtin_HEXAGON_C2_pxfer_map", - "llvm.hexagon.C2.tfrpr" => "__builtin_HEXAGON_C2_tfrpr", - "llvm.hexagon.C2.tfrrp" => "__builtin_HEXAGON_C2_tfrrp", - "llvm.hexagon.C2.vitpack" => "__builtin_HEXAGON_C2_vitpack", - "llvm.hexagon.C2.vmux" => "__builtin_HEXAGON_C2_vmux", - "llvm.hexagon.C2.xor" => "__builtin_HEXAGON_C2_xor", - "llvm.hexagon.C4.and.and" => "__builtin_HEXAGON_C4_and_and", - "llvm.hexagon.C4.and.andn" => "__builtin_HEXAGON_C4_and_andn", - "llvm.hexagon.C4.and.or" => "__builtin_HEXAGON_C4_and_or", - "llvm.hexagon.C4.and.orn" => "__builtin_HEXAGON_C4_and_orn", - "llvm.hexagon.C4.cmplte" => "__builtin_HEXAGON_C4_cmplte", - "llvm.hexagon.C4.cmpltei" => "__builtin_HEXAGON_C4_cmpltei", - "llvm.hexagon.C4.cmplteu" => "__builtin_HEXAGON_C4_cmplteu", - "llvm.hexagon.C4.cmplteui" => "__builtin_HEXAGON_C4_cmplteui", - "llvm.hexagon.C4.cmpneq" => "__builtin_HEXAGON_C4_cmpneq", - "llvm.hexagon.C4.cmpneqi" => "__builtin_HEXAGON_C4_cmpneqi", - "llvm.hexagon.C4.fastcorner9" => "__builtin_HEXAGON_C4_fastcorner9", - "llvm.hexagon.C4.fastcorner9.not" => "__builtin_HEXAGON_C4_fastcorner9_not", - "llvm.hexagon.C4.nbitsclr" => "__builtin_HEXAGON_C4_nbitsclr", - "llvm.hexagon.C4.nbitsclri" => "__builtin_HEXAGON_C4_nbitsclri", - "llvm.hexagon.C4.nbitsset" => "__builtin_HEXAGON_C4_nbitsset", - "llvm.hexagon.C4.or.and" => "__builtin_HEXAGON_C4_or_and", - "llvm.hexagon.C4.or.andn" => "__builtin_HEXAGON_C4_or_andn", - "llvm.hexagon.C4.or.or" => "__builtin_HEXAGON_C4_or_or", - "llvm.hexagon.C4.or.orn" => "__builtin_HEXAGON_C4_or_orn", - "llvm.hexagon.F2.conv.d2df" => "__builtin_HEXAGON_F2_conv_d2df", - "llvm.hexagon.F2.conv.d2sf" => "__builtin_HEXAGON_F2_conv_d2sf", - "llvm.hexagon.F2.conv.df2d" => "__builtin_HEXAGON_F2_conv_df2d", - "llvm.hexagon.F2.conv.df2d.chop" => "__builtin_HEXAGON_F2_conv_df2d_chop", - "llvm.hexagon.F2.conv.df2sf" => "__builtin_HEXAGON_F2_conv_df2sf", - "llvm.hexagon.F2.conv.df2ud" => "__builtin_HEXAGON_F2_conv_df2ud", - "llvm.hexagon.F2.conv.df2ud.chop" => "__builtin_HEXAGON_F2_conv_df2ud_chop", - "llvm.hexagon.F2.conv.df2uw" => "__builtin_HEXAGON_F2_conv_df2uw", - "llvm.hexagon.F2.conv.df2uw.chop" => "__builtin_HEXAGON_F2_conv_df2uw_chop", - "llvm.hexagon.F2.conv.df2w" => "__builtin_HEXAGON_F2_conv_df2w", - "llvm.hexagon.F2.conv.df2w.chop" => "__builtin_HEXAGON_F2_conv_df2w_chop", - "llvm.hexagon.F2.conv.sf2d" => "__builtin_HEXAGON_F2_conv_sf2d", - "llvm.hexagon.F2.conv.sf2d.chop" => "__builtin_HEXAGON_F2_conv_sf2d_chop", - "llvm.hexagon.F2.conv.sf2df" => "__builtin_HEXAGON_F2_conv_sf2df", - "llvm.hexagon.F2.conv.sf2ud" => "__builtin_HEXAGON_F2_conv_sf2ud", - "llvm.hexagon.F2.conv.sf2ud.chop" => "__builtin_HEXAGON_F2_conv_sf2ud_chop", - "llvm.hexagon.F2.conv.sf2uw" => "__builtin_HEXAGON_F2_conv_sf2uw", - "llvm.hexagon.F2.conv.sf2uw.chop" => "__builtin_HEXAGON_F2_conv_sf2uw_chop", - "llvm.hexagon.F2.conv.sf2w" => "__builtin_HEXAGON_F2_conv_sf2w", - "llvm.hexagon.F2.conv.sf2w.chop" => "__builtin_HEXAGON_F2_conv_sf2w_chop", - "llvm.hexagon.F2.conv.ud2df" => "__builtin_HEXAGON_F2_conv_ud2df", - "llvm.hexagon.F2.conv.ud2sf" => "__builtin_HEXAGON_F2_conv_ud2sf", - "llvm.hexagon.F2.conv.uw2df" => "__builtin_HEXAGON_F2_conv_uw2df", - "llvm.hexagon.F2.conv.uw2sf" => "__builtin_HEXAGON_F2_conv_uw2sf", - "llvm.hexagon.F2.conv.w2df" => "__builtin_HEXAGON_F2_conv_w2df", 
- "llvm.hexagon.F2.conv.w2sf" => "__builtin_HEXAGON_F2_conv_w2sf", - "llvm.hexagon.F2.dfadd" => "__builtin_HEXAGON_F2_dfadd", - "llvm.hexagon.F2.dfclass" => "__builtin_HEXAGON_F2_dfclass", - "llvm.hexagon.F2.dfcmpeq" => "__builtin_HEXAGON_F2_dfcmpeq", - "llvm.hexagon.F2.dfcmpge" => "__builtin_HEXAGON_F2_dfcmpge", - "llvm.hexagon.F2.dfcmpgt" => "__builtin_HEXAGON_F2_dfcmpgt", - "llvm.hexagon.F2.dfcmpuo" => "__builtin_HEXAGON_F2_dfcmpuo", - "llvm.hexagon.F2.dffixupd" => "__builtin_HEXAGON_F2_dffixupd", - "llvm.hexagon.F2.dffixupn" => "__builtin_HEXAGON_F2_dffixupn", - "llvm.hexagon.F2.dffixupr" => "__builtin_HEXAGON_F2_dffixupr", - "llvm.hexagon.F2.dffma" => "__builtin_HEXAGON_F2_dffma", - "llvm.hexagon.F2.dffma.lib" => "__builtin_HEXAGON_F2_dffma_lib", - "llvm.hexagon.F2.dffma.sc" => "__builtin_HEXAGON_F2_dffma_sc", - "llvm.hexagon.F2.dffms" => "__builtin_HEXAGON_F2_dffms", - "llvm.hexagon.F2.dffms.lib" => "__builtin_HEXAGON_F2_dffms_lib", - "llvm.hexagon.F2.dfimm.n" => "__builtin_HEXAGON_F2_dfimm_n", - "llvm.hexagon.F2.dfimm.p" => "__builtin_HEXAGON_F2_dfimm_p", - "llvm.hexagon.F2.dfmax" => "__builtin_HEXAGON_F2_dfmax", - "llvm.hexagon.F2.dfmin" => "__builtin_HEXAGON_F2_dfmin", - "llvm.hexagon.F2.dfmpy" => "__builtin_HEXAGON_F2_dfmpy", - "llvm.hexagon.F2.dfmpyfix" => "__builtin_HEXAGON_F2_dfmpyfix", - "llvm.hexagon.F2.dfmpyhh" => "__builtin_HEXAGON_F2_dfmpyhh", - "llvm.hexagon.F2.dfmpylh" => "__builtin_HEXAGON_F2_dfmpylh", - "llvm.hexagon.F2.dfmpyll" => "__builtin_HEXAGON_F2_dfmpyll", - "llvm.hexagon.F2.dfsub" => "__builtin_HEXAGON_F2_dfsub", - "llvm.hexagon.F2.sfadd" => "__builtin_HEXAGON_F2_sfadd", - "llvm.hexagon.F2.sfclass" => "__builtin_HEXAGON_F2_sfclass", - "llvm.hexagon.F2.sfcmpeq" => "__builtin_HEXAGON_F2_sfcmpeq", - "llvm.hexagon.F2.sfcmpge" => "__builtin_HEXAGON_F2_sfcmpge", - "llvm.hexagon.F2.sfcmpgt" => "__builtin_HEXAGON_F2_sfcmpgt", - "llvm.hexagon.F2.sfcmpuo" => "__builtin_HEXAGON_F2_sfcmpuo", - "llvm.hexagon.F2.sffixupd" => "__builtin_HEXAGON_F2_sffixupd", - "llvm.hexagon.F2.sffixupn" => "__builtin_HEXAGON_F2_sffixupn", - "llvm.hexagon.F2.sffixupr" => "__builtin_HEXAGON_F2_sffixupr", - "llvm.hexagon.F2.sffma" => "__builtin_HEXAGON_F2_sffma", - "llvm.hexagon.F2.sffma.lib" => "__builtin_HEXAGON_F2_sffma_lib", - "llvm.hexagon.F2.sffma.sc" => "__builtin_HEXAGON_F2_sffma_sc", - "llvm.hexagon.F2.sffms" => "__builtin_HEXAGON_F2_sffms", - "llvm.hexagon.F2.sffms.lib" => "__builtin_HEXAGON_F2_sffms_lib", - "llvm.hexagon.F2.sfimm.n" => "__builtin_HEXAGON_F2_sfimm_n", - "llvm.hexagon.F2.sfimm.p" => "__builtin_HEXAGON_F2_sfimm_p", - "llvm.hexagon.F2.sfmax" => "__builtin_HEXAGON_F2_sfmax", - "llvm.hexagon.F2.sfmin" => "__builtin_HEXAGON_F2_sfmin", - "llvm.hexagon.F2.sfmpy" => "__builtin_HEXAGON_F2_sfmpy", - "llvm.hexagon.F2.sfsub" => "__builtin_HEXAGON_F2_sfsub", - "llvm.hexagon.L2.loadw.locked" => "__builtin_HEXAGON_L2_loadw_locked", - "llvm.hexagon.L4.loadd.locked" => "__builtin__HEXAGON_L4_loadd_locked", - "llvm.hexagon.M2.acci" => "__builtin_HEXAGON_M2_acci", - "llvm.hexagon.M2.accii" => "__builtin_HEXAGON_M2_accii", - "llvm.hexagon.M2.cmaci.s0" => "__builtin_HEXAGON_M2_cmaci_s0", - "llvm.hexagon.M2.cmacr.s0" => "__builtin_HEXAGON_M2_cmacr_s0", - "llvm.hexagon.M2.cmacs.s0" => "__builtin_HEXAGON_M2_cmacs_s0", - "llvm.hexagon.M2.cmacs.s1" => "__builtin_HEXAGON_M2_cmacs_s1", - "llvm.hexagon.M2.cmacsc.s0" => "__builtin_HEXAGON_M2_cmacsc_s0", - "llvm.hexagon.M2.cmacsc.s1" => "__builtin_HEXAGON_M2_cmacsc_s1", - "llvm.hexagon.M2.cmpyi.s0" => "__builtin_HEXAGON_M2_cmpyi_s0", - 
"llvm.hexagon.M2.cmpyr.s0" => "__builtin_HEXAGON_M2_cmpyr_s0", - "llvm.hexagon.M2.cmpyrs.s0" => "__builtin_HEXAGON_M2_cmpyrs_s0", - "llvm.hexagon.M2.cmpyrs.s1" => "__builtin_HEXAGON_M2_cmpyrs_s1", - "llvm.hexagon.M2.cmpyrsc.s0" => "__builtin_HEXAGON_M2_cmpyrsc_s0", - "llvm.hexagon.M2.cmpyrsc.s1" => "__builtin_HEXAGON_M2_cmpyrsc_s1", - "llvm.hexagon.M2.cmpys.s0" => "__builtin_HEXAGON_M2_cmpys_s0", - "llvm.hexagon.M2.cmpys.s1" => "__builtin_HEXAGON_M2_cmpys_s1", - "llvm.hexagon.M2.cmpysc.s0" => "__builtin_HEXAGON_M2_cmpysc_s0", - "llvm.hexagon.M2.cmpysc.s1" => "__builtin_HEXAGON_M2_cmpysc_s1", - "llvm.hexagon.M2.cnacs.s0" => "__builtin_HEXAGON_M2_cnacs_s0", - "llvm.hexagon.M2.cnacs.s1" => "__builtin_HEXAGON_M2_cnacs_s1", - "llvm.hexagon.M2.cnacsc.s0" => "__builtin_HEXAGON_M2_cnacsc_s0", - "llvm.hexagon.M2.cnacsc.s1" => "__builtin_HEXAGON_M2_cnacsc_s1", - "llvm.hexagon.M2.dpmpyss.acc.s0" => "__builtin_HEXAGON_M2_dpmpyss_acc_s0", - "llvm.hexagon.M2.dpmpyss.nac.s0" => "__builtin_HEXAGON_M2_dpmpyss_nac_s0", - "llvm.hexagon.M2.dpmpyss.rnd.s0" => "__builtin_HEXAGON_M2_dpmpyss_rnd_s0", - "llvm.hexagon.M2.dpmpyss.s0" => "__builtin_HEXAGON_M2_dpmpyss_s0", - "llvm.hexagon.M2.dpmpyuu.acc.s0" => "__builtin_HEXAGON_M2_dpmpyuu_acc_s0", - "llvm.hexagon.M2.dpmpyuu.nac.s0" => "__builtin_HEXAGON_M2_dpmpyuu_nac_s0", - "llvm.hexagon.M2.dpmpyuu.s0" => "__builtin_HEXAGON_M2_dpmpyuu_s0", - "llvm.hexagon.M2.hmmpyh.rs1" => "__builtin_HEXAGON_M2_hmmpyh_rs1", - "llvm.hexagon.M2.hmmpyh.s1" => "__builtin_HEXAGON_M2_hmmpyh_s1", - "llvm.hexagon.M2.hmmpyl.rs1" => "__builtin_HEXAGON_M2_hmmpyl_rs1", - "llvm.hexagon.M2.hmmpyl.s1" => "__builtin_HEXAGON_M2_hmmpyl_s1", - "llvm.hexagon.M2.maci" => "__builtin_HEXAGON_M2_maci", - "llvm.hexagon.M2.macsin" => "__builtin_HEXAGON_M2_macsin", - "llvm.hexagon.M2.macsip" => "__builtin_HEXAGON_M2_macsip", - "llvm.hexagon.M2.mmachs.rs0" => "__builtin_HEXAGON_M2_mmachs_rs0", - "llvm.hexagon.M2.mmachs.rs1" => "__builtin_HEXAGON_M2_mmachs_rs1", - "llvm.hexagon.M2.mmachs.s0" => "__builtin_HEXAGON_M2_mmachs_s0", - "llvm.hexagon.M2.mmachs.s1" => "__builtin_HEXAGON_M2_mmachs_s1", - "llvm.hexagon.M2.mmacls.rs0" => "__builtin_HEXAGON_M2_mmacls_rs0", - "llvm.hexagon.M2.mmacls.rs1" => "__builtin_HEXAGON_M2_mmacls_rs1", - "llvm.hexagon.M2.mmacls.s0" => "__builtin_HEXAGON_M2_mmacls_s0", - "llvm.hexagon.M2.mmacls.s1" => "__builtin_HEXAGON_M2_mmacls_s1", - "llvm.hexagon.M2.mmacuhs.rs0" => "__builtin_HEXAGON_M2_mmacuhs_rs0", - "llvm.hexagon.M2.mmacuhs.rs1" => "__builtin_HEXAGON_M2_mmacuhs_rs1", - "llvm.hexagon.M2.mmacuhs.s0" => "__builtin_HEXAGON_M2_mmacuhs_s0", - "llvm.hexagon.M2.mmacuhs.s1" => "__builtin_HEXAGON_M2_mmacuhs_s1", - "llvm.hexagon.M2.mmaculs.rs0" => "__builtin_HEXAGON_M2_mmaculs_rs0", - "llvm.hexagon.M2.mmaculs.rs1" => "__builtin_HEXAGON_M2_mmaculs_rs1", - "llvm.hexagon.M2.mmaculs.s0" => "__builtin_HEXAGON_M2_mmaculs_s0", - "llvm.hexagon.M2.mmaculs.s1" => "__builtin_HEXAGON_M2_mmaculs_s1", - "llvm.hexagon.M2.mmpyh.rs0" => "__builtin_HEXAGON_M2_mmpyh_rs0", - "llvm.hexagon.M2.mmpyh.rs1" => "__builtin_HEXAGON_M2_mmpyh_rs1", - "llvm.hexagon.M2.mmpyh.s0" => "__builtin_HEXAGON_M2_mmpyh_s0", - "llvm.hexagon.M2.mmpyh.s1" => "__builtin_HEXAGON_M2_mmpyh_s1", - "llvm.hexagon.M2.mmpyl.rs0" => "__builtin_HEXAGON_M2_mmpyl_rs0", - "llvm.hexagon.M2.mmpyl.rs1" => "__builtin_HEXAGON_M2_mmpyl_rs1", - "llvm.hexagon.M2.mmpyl.s0" => "__builtin_HEXAGON_M2_mmpyl_s0", - "llvm.hexagon.M2.mmpyl.s1" => "__builtin_HEXAGON_M2_mmpyl_s1", - "llvm.hexagon.M2.mmpyuh.rs0" => "__builtin_HEXAGON_M2_mmpyuh_rs0", - 
"llvm.hexagon.M2.mmpyuh.rs1" => "__builtin_HEXAGON_M2_mmpyuh_rs1", - "llvm.hexagon.M2.mmpyuh.s0" => "__builtin_HEXAGON_M2_mmpyuh_s0", - "llvm.hexagon.M2.mmpyuh.s1" => "__builtin_HEXAGON_M2_mmpyuh_s1", - "llvm.hexagon.M2.mmpyul.rs0" => "__builtin_HEXAGON_M2_mmpyul_rs0", - "llvm.hexagon.M2.mmpyul.rs1" => "__builtin_HEXAGON_M2_mmpyul_rs1", - "llvm.hexagon.M2.mmpyul.s0" => "__builtin_HEXAGON_M2_mmpyul_s0", - "llvm.hexagon.M2.mmpyul.s1" => "__builtin_HEXAGON_M2_mmpyul_s1", - "llvm.hexagon.M2.mnaci" => "__builtin_HEXAGON_M2_mnaci", - "llvm.hexagon.M2.mpy.acc.hh.s0" => "__builtin_HEXAGON_M2_mpy_acc_hh_s0", - "llvm.hexagon.M2.mpy.acc.hh.s1" => "__builtin_HEXAGON_M2_mpy_acc_hh_s1", - "llvm.hexagon.M2.mpy.acc.hl.s0" => "__builtin_HEXAGON_M2_mpy_acc_hl_s0", - "llvm.hexagon.M2.mpy.acc.hl.s1" => "__builtin_HEXAGON_M2_mpy_acc_hl_s1", - "llvm.hexagon.M2.mpy.acc.lh.s0" => "__builtin_HEXAGON_M2_mpy_acc_lh_s0", - "llvm.hexagon.M2.mpy.acc.lh.s1" => "__builtin_HEXAGON_M2_mpy_acc_lh_s1", - "llvm.hexagon.M2.mpy.acc.ll.s0" => "__builtin_HEXAGON_M2_mpy_acc_ll_s0", - "llvm.hexagon.M2.mpy.acc.ll.s1" => "__builtin_HEXAGON_M2_mpy_acc_ll_s1", - "llvm.hexagon.M2.mpy.acc.sat.hh.s0" => "__builtin_HEXAGON_M2_mpy_acc_sat_hh_s0", - "llvm.hexagon.M2.mpy.acc.sat.hh.s1" => "__builtin_HEXAGON_M2_mpy_acc_sat_hh_s1", - "llvm.hexagon.M2.mpy.acc.sat.hl.s0" => "__builtin_HEXAGON_M2_mpy_acc_sat_hl_s0", - "llvm.hexagon.M2.mpy.acc.sat.hl.s1" => "__builtin_HEXAGON_M2_mpy_acc_sat_hl_s1", - "llvm.hexagon.M2.mpy.acc.sat.lh.s0" => "__builtin_HEXAGON_M2_mpy_acc_sat_lh_s0", - "llvm.hexagon.M2.mpy.acc.sat.lh.s1" => "__builtin_HEXAGON_M2_mpy_acc_sat_lh_s1", - "llvm.hexagon.M2.mpy.acc.sat.ll.s0" => "__builtin_HEXAGON_M2_mpy_acc_sat_ll_s0", - "llvm.hexagon.M2.mpy.acc.sat.ll.s1" => "__builtin_HEXAGON_M2_mpy_acc_sat_ll_s1", - "llvm.hexagon.M2.mpy.hh.s0" => "__builtin_HEXAGON_M2_mpy_hh_s0", - "llvm.hexagon.M2.mpy.hh.s1" => "__builtin_HEXAGON_M2_mpy_hh_s1", - "llvm.hexagon.M2.mpy.hl.s0" => "__builtin_HEXAGON_M2_mpy_hl_s0", - "llvm.hexagon.M2.mpy.hl.s1" => "__builtin_HEXAGON_M2_mpy_hl_s1", - "llvm.hexagon.M2.mpy.lh.s0" => "__builtin_HEXAGON_M2_mpy_lh_s0", - "llvm.hexagon.M2.mpy.lh.s1" => "__builtin_HEXAGON_M2_mpy_lh_s1", - "llvm.hexagon.M2.mpy.ll.s0" => "__builtin_HEXAGON_M2_mpy_ll_s0", - "llvm.hexagon.M2.mpy.ll.s1" => "__builtin_HEXAGON_M2_mpy_ll_s1", - "llvm.hexagon.M2.mpy.nac.hh.s0" => "__builtin_HEXAGON_M2_mpy_nac_hh_s0", - "llvm.hexagon.M2.mpy.nac.hh.s1" => "__builtin_HEXAGON_M2_mpy_nac_hh_s1", - "llvm.hexagon.M2.mpy.nac.hl.s0" => "__builtin_HEXAGON_M2_mpy_nac_hl_s0", - "llvm.hexagon.M2.mpy.nac.hl.s1" => "__builtin_HEXAGON_M2_mpy_nac_hl_s1", - "llvm.hexagon.M2.mpy.nac.lh.s0" => "__builtin_HEXAGON_M2_mpy_nac_lh_s0", - "llvm.hexagon.M2.mpy.nac.lh.s1" => "__builtin_HEXAGON_M2_mpy_nac_lh_s1", - "llvm.hexagon.M2.mpy.nac.ll.s0" => "__builtin_HEXAGON_M2_mpy_nac_ll_s0", - "llvm.hexagon.M2.mpy.nac.ll.s1" => "__builtin_HEXAGON_M2_mpy_nac_ll_s1", - "llvm.hexagon.M2.mpy.nac.sat.hh.s0" => "__builtin_HEXAGON_M2_mpy_nac_sat_hh_s0", - "llvm.hexagon.M2.mpy.nac.sat.hh.s1" => "__builtin_HEXAGON_M2_mpy_nac_sat_hh_s1", - "llvm.hexagon.M2.mpy.nac.sat.hl.s0" => "__builtin_HEXAGON_M2_mpy_nac_sat_hl_s0", - "llvm.hexagon.M2.mpy.nac.sat.hl.s1" => "__builtin_HEXAGON_M2_mpy_nac_sat_hl_s1", - "llvm.hexagon.M2.mpy.nac.sat.lh.s0" => "__builtin_HEXAGON_M2_mpy_nac_sat_lh_s0", - "llvm.hexagon.M2.mpy.nac.sat.lh.s1" => "__builtin_HEXAGON_M2_mpy_nac_sat_lh_s1", - "llvm.hexagon.M2.mpy.nac.sat.ll.s0" => "__builtin_HEXAGON_M2_mpy_nac_sat_ll_s0", - "llvm.hexagon.M2.mpy.nac.sat.ll.s1" => 
"__builtin_HEXAGON_M2_mpy_nac_sat_ll_s1", - "llvm.hexagon.M2.mpy.rnd.hh.s0" => "__builtin_HEXAGON_M2_mpy_rnd_hh_s0", - "llvm.hexagon.M2.mpy.rnd.hh.s1" => "__builtin_HEXAGON_M2_mpy_rnd_hh_s1", - "llvm.hexagon.M2.mpy.rnd.hl.s0" => "__builtin_HEXAGON_M2_mpy_rnd_hl_s0", - "llvm.hexagon.M2.mpy.rnd.hl.s1" => "__builtin_HEXAGON_M2_mpy_rnd_hl_s1", - "llvm.hexagon.M2.mpy.rnd.lh.s0" => "__builtin_HEXAGON_M2_mpy_rnd_lh_s0", - "llvm.hexagon.M2.mpy.rnd.lh.s1" => "__builtin_HEXAGON_M2_mpy_rnd_lh_s1", - "llvm.hexagon.M2.mpy.rnd.ll.s0" => "__builtin_HEXAGON_M2_mpy_rnd_ll_s0", - "llvm.hexagon.M2.mpy.rnd.ll.s1" => "__builtin_HEXAGON_M2_mpy_rnd_ll_s1", - "llvm.hexagon.M2.mpy.sat.hh.s0" => "__builtin_HEXAGON_M2_mpy_sat_hh_s0", - "llvm.hexagon.M2.mpy.sat.hh.s1" => "__builtin_HEXAGON_M2_mpy_sat_hh_s1", - "llvm.hexagon.M2.mpy.sat.hl.s0" => "__builtin_HEXAGON_M2_mpy_sat_hl_s0", - "llvm.hexagon.M2.mpy.sat.hl.s1" => "__builtin_HEXAGON_M2_mpy_sat_hl_s1", - "llvm.hexagon.M2.mpy.sat.lh.s0" => "__builtin_HEXAGON_M2_mpy_sat_lh_s0", - "llvm.hexagon.M2.mpy.sat.lh.s1" => "__builtin_HEXAGON_M2_mpy_sat_lh_s1", - "llvm.hexagon.M2.mpy.sat.ll.s0" => "__builtin_HEXAGON_M2_mpy_sat_ll_s0", - "llvm.hexagon.M2.mpy.sat.ll.s1" => "__builtin_HEXAGON_M2_mpy_sat_ll_s1", - "llvm.hexagon.M2.mpy.sat.rnd.hh.s0" => "__builtin_HEXAGON_M2_mpy_sat_rnd_hh_s0", - "llvm.hexagon.M2.mpy.sat.rnd.hh.s1" => "__builtin_HEXAGON_M2_mpy_sat_rnd_hh_s1", - "llvm.hexagon.M2.mpy.sat.rnd.hl.s0" => "__builtin_HEXAGON_M2_mpy_sat_rnd_hl_s0", - "llvm.hexagon.M2.mpy.sat.rnd.hl.s1" => "__builtin_HEXAGON_M2_mpy_sat_rnd_hl_s1", - "llvm.hexagon.M2.mpy.sat.rnd.lh.s0" => "__builtin_HEXAGON_M2_mpy_sat_rnd_lh_s0", - "llvm.hexagon.M2.mpy.sat.rnd.lh.s1" => "__builtin_HEXAGON_M2_mpy_sat_rnd_lh_s1", - "llvm.hexagon.M2.mpy.sat.rnd.ll.s0" => "__builtin_HEXAGON_M2_mpy_sat_rnd_ll_s0", - "llvm.hexagon.M2.mpy.sat.rnd.ll.s1" => "__builtin_HEXAGON_M2_mpy_sat_rnd_ll_s1", - "llvm.hexagon.M2.mpy.up" => "__builtin_HEXAGON_M2_mpy_up", - "llvm.hexagon.M2.mpy.up.s1" => "__builtin_HEXAGON_M2_mpy_up_s1", - "llvm.hexagon.M2.mpy.up.s1.sat" => "__builtin_HEXAGON_M2_mpy_up_s1_sat", - "llvm.hexagon.M2.mpyd.acc.hh.s0" => "__builtin_HEXAGON_M2_mpyd_acc_hh_s0", - "llvm.hexagon.M2.mpyd.acc.hh.s1" => "__builtin_HEXAGON_M2_mpyd_acc_hh_s1", - "llvm.hexagon.M2.mpyd.acc.hl.s0" => "__builtin_HEXAGON_M2_mpyd_acc_hl_s0", - "llvm.hexagon.M2.mpyd.acc.hl.s1" => "__builtin_HEXAGON_M2_mpyd_acc_hl_s1", - "llvm.hexagon.M2.mpyd.acc.lh.s0" => "__builtin_HEXAGON_M2_mpyd_acc_lh_s0", - "llvm.hexagon.M2.mpyd.acc.lh.s1" => "__builtin_HEXAGON_M2_mpyd_acc_lh_s1", - "llvm.hexagon.M2.mpyd.acc.ll.s0" => "__builtin_HEXAGON_M2_mpyd_acc_ll_s0", - "llvm.hexagon.M2.mpyd.acc.ll.s1" => "__builtin_HEXAGON_M2_mpyd_acc_ll_s1", - "llvm.hexagon.M2.mpyd.hh.s0" => "__builtin_HEXAGON_M2_mpyd_hh_s0", - "llvm.hexagon.M2.mpyd.hh.s1" => "__builtin_HEXAGON_M2_mpyd_hh_s1", - "llvm.hexagon.M2.mpyd.hl.s0" => "__builtin_HEXAGON_M2_mpyd_hl_s0", - "llvm.hexagon.M2.mpyd.hl.s1" => "__builtin_HEXAGON_M2_mpyd_hl_s1", - "llvm.hexagon.M2.mpyd.lh.s0" => "__builtin_HEXAGON_M2_mpyd_lh_s0", - "llvm.hexagon.M2.mpyd.lh.s1" => "__builtin_HEXAGON_M2_mpyd_lh_s1", - "llvm.hexagon.M2.mpyd.ll.s0" => "__builtin_HEXAGON_M2_mpyd_ll_s0", - "llvm.hexagon.M2.mpyd.ll.s1" => "__builtin_HEXAGON_M2_mpyd_ll_s1", - "llvm.hexagon.M2.mpyd.nac.hh.s0" => "__builtin_HEXAGON_M2_mpyd_nac_hh_s0", - "llvm.hexagon.M2.mpyd.nac.hh.s1" => "__builtin_HEXAGON_M2_mpyd_nac_hh_s1", - "llvm.hexagon.M2.mpyd.nac.hl.s0" => "__builtin_HEXAGON_M2_mpyd_nac_hl_s0", - "llvm.hexagon.M2.mpyd.nac.hl.s1" => 
"__builtin_HEXAGON_M2_mpyd_nac_hl_s1", - "llvm.hexagon.M2.mpyd.nac.lh.s0" => "__builtin_HEXAGON_M2_mpyd_nac_lh_s0", - "llvm.hexagon.M2.mpyd.nac.lh.s1" => "__builtin_HEXAGON_M2_mpyd_nac_lh_s1", - "llvm.hexagon.M2.mpyd.nac.ll.s0" => "__builtin_HEXAGON_M2_mpyd_nac_ll_s0", - "llvm.hexagon.M2.mpyd.nac.ll.s1" => "__builtin_HEXAGON_M2_mpyd_nac_ll_s1", - "llvm.hexagon.M2.mpyd.rnd.hh.s0" => "__builtin_HEXAGON_M2_mpyd_rnd_hh_s0", - "llvm.hexagon.M2.mpyd.rnd.hh.s1" => "__builtin_HEXAGON_M2_mpyd_rnd_hh_s1", - "llvm.hexagon.M2.mpyd.rnd.hl.s0" => "__builtin_HEXAGON_M2_mpyd_rnd_hl_s0", - "llvm.hexagon.M2.mpyd.rnd.hl.s1" => "__builtin_HEXAGON_M2_mpyd_rnd_hl_s1", - "llvm.hexagon.M2.mpyd.rnd.lh.s0" => "__builtin_HEXAGON_M2_mpyd_rnd_lh_s0", - "llvm.hexagon.M2.mpyd.rnd.lh.s1" => "__builtin_HEXAGON_M2_mpyd_rnd_lh_s1", - "llvm.hexagon.M2.mpyd.rnd.ll.s0" => "__builtin_HEXAGON_M2_mpyd_rnd_ll_s0", - "llvm.hexagon.M2.mpyd.rnd.ll.s1" => "__builtin_HEXAGON_M2_mpyd_rnd_ll_s1", - "llvm.hexagon.M2.mpyi" => "__builtin_HEXAGON_M2_mpyi", - "llvm.hexagon.M2.mpysmi" => "__builtin_HEXAGON_M2_mpysmi", - "llvm.hexagon.M2.mpysu.up" => "__builtin_HEXAGON_M2_mpysu_up", - "llvm.hexagon.M2.mpyu.acc.hh.s0" => "__builtin_HEXAGON_M2_mpyu_acc_hh_s0", - "llvm.hexagon.M2.mpyu.acc.hh.s1" => "__builtin_HEXAGON_M2_mpyu_acc_hh_s1", - "llvm.hexagon.M2.mpyu.acc.hl.s0" => "__builtin_HEXAGON_M2_mpyu_acc_hl_s0", - "llvm.hexagon.M2.mpyu.acc.hl.s1" => "__builtin_HEXAGON_M2_mpyu_acc_hl_s1", - "llvm.hexagon.M2.mpyu.acc.lh.s0" => "__builtin_HEXAGON_M2_mpyu_acc_lh_s0", - "llvm.hexagon.M2.mpyu.acc.lh.s1" => "__builtin_HEXAGON_M2_mpyu_acc_lh_s1", - "llvm.hexagon.M2.mpyu.acc.ll.s0" => "__builtin_HEXAGON_M2_mpyu_acc_ll_s0", - "llvm.hexagon.M2.mpyu.acc.ll.s1" => "__builtin_HEXAGON_M2_mpyu_acc_ll_s1", - "llvm.hexagon.M2.mpyu.hh.s0" => "__builtin_HEXAGON_M2_mpyu_hh_s0", - "llvm.hexagon.M2.mpyu.hh.s1" => "__builtin_HEXAGON_M2_mpyu_hh_s1", - "llvm.hexagon.M2.mpyu.hl.s0" => "__builtin_HEXAGON_M2_mpyu_hl_s0", - "llvm.hexagon.M2.mpyu.hl.s1" => "__builtin_HEXAGON_M2_mpyu_hl_s1", - "llvm.hexagon.M2.mpyu.lh.s0" => "__builtin_HEXAGON_M2_mpyu_lh_s0", - "llvm.hexagon.M2.mpyu.lh.s1" => "__builtin_HEXAGON_M2_mpyu_lh_s1", - "llvm.hexagon.M2.mpyu.ll.s0" => "__builtin_HEXAGON_M2_mpyu_ll_s0", - "llvm.hexagon.M2.mpyu.ll.s1" => "__builtin_HEXAGON_M2_mpyu_ll_s1", - "llvm.hexagon.M2.mpyu.nac.hh.s0" => "__builtin_HEXAGON_M2_mpyu_nac_hh_s0", - "llvm.hexagon.M2.mpyu.nac.hh.s1" => "__builtin_HEXAGON_M2_mpyu_nac_hh_s1", - "llvm.hexagon.M2.mpyu.nac.hl.s0" => "__builtin_HEXAGON_M2_mpyu_nac_hl_s0", - "llvm.hexagon.M2.mpyu.nac.hl.s1" => "__builtin_HEXAGON_M2_mpyu_nac_hl_s1", - "llvm.hexagon.M2.mpyu.nac.lh.s0" => "__builtin_HEXAGON_M2_mpyu_nac_lh_s0", - "llvm.hexagon.M2.mpyu.nac.lh.s1" => "__builtin_HEXAGON_M2_mpyu_nac_lh_s1", - "llvm.hexagon.M2.mpyu.nac.ll.s0" => "__builtin_HEXAGON_M2_mpyu_nac_ll_s0", - "llvm.hexagon.M2.mpyu.nac.ll.s1" => "__builtin_HEXAGON_M2_mpyu_nac_ll_s1", - "llvm.hexagon.M2.mpyu.up" => "__builtin_HEXAGON_M2_mpyu_up", - "llvm.hexagon.M2.mpyud.acc.hh.s0" => "__builtin_HEXAGON_M2_mpyud_acc_hh_s0", - "llvm.hexagon.M2.mpyud.acc.hh.s1" => "__builtin_HEXAGON_M2_mpyud_acc_hh_s1", - "llvm.hexagon.M2.mpyud.acc.hl.s0" => "__builtin_HEXAGON_M2_mpyud_acc_hl_s0", - "llvm.hexagon.M2.mpyud.acc.hl.s1" => "__builtin_HEXAGON_M2_mpyud_acc_hl_s1", - "llvm.hexagon.M2.mpyud.acc.lh.s0" => "__builtin_HEXAGON_M2_mpyud_acc_lh_s0", - "llvm.hexagon.M2.mpyud.acc.lh.s1" => "__builtin_HEXAGON_M2_mpyud_acc_lh_s1", - "llvm.hexagon.M2.mpyud.acc.ll.s0" => "__builtin_HEXAGON_M2_mpyud_acc_ll_s0", - 
"llvm.hexagon.M2.mpyud.acc.ll.s1" => "__builtin_HEXAGON_M2_mpyud_acc_ll_s1", - "llvm.hexagon.M2.mpyud.hh.s0" => "__builtin_HEXAGON_M2_mpyud_hh_s0", - "llvm.hexagon.M2.mpyud.hh.s1" => "__builtin_HEXAGON_M2_mpyud_hh_s1", - "llvm.hexagon.M2.mpyud.hl.s0" => "__builtin_HEXAGON_M2_mpyud_hl_s0", - "llvm.hexagon.M2.mpyud.hl.s1" => "__builtin_HEXAGON_M2_mpyud_hl_s1", - "llvm.hexagon.M2.mpyud.lh.s0" => "__builtin_HEXAGON_M2_mpyud_lh_s0", - "llvm.hexagon.M2.mpyud.lh.s1" => "__builtin_HEXAGON_M2_mpyud_lh_s1", - "llvm.hexagon.M2.mpyud.ll.s0" => "__builtin_HEXAGON_M2_mpyud_ll_s0", - "llvm.hexagon.M2.mpyud.ll.s1" => "__builtin_HEXAGON_M2_mpyud_ll_s1", - "llvm.hexagon.M2.mpyud.nac.hh.s0" => "__builtin_HEXAGON_M2_mpyud_nac_hh_s0", - "llvm.hexagon.M2.mpyud.nac.hh.s1" => "__builtin_HEXAGON_M2_mpyud_nac_hh_s1", - "llvm.hexagon.M2.mpyud.nac.hl.s0" => "__builtin_HEXAGON_M2_mpyud_nac_hl_s0", - "llvm.hexagon.M2.mpyud.nac.hl.s1" => "__builtin_HEXAGON_M2_mpyud_nac_hl_s1", - "llvm.hexagon.M2.mpyud.nac.lh.s0" => "__builtin_HEXAGON_M2_mpyud_nac_lh_s0", - "llvm.hexagon.M2.mpyud.nac.lh.s1" => "__builtin_HEXAGON_M2_mpyud_nac_lh_s1", - "llvm.hexagon.M2.mpyud.nac.ll.s0" => "__builtin_HEXAGON_M2_mpyud_nac_ll_s0", - "llvm.hexagon.M2.mpyud.nac.ll.s1" => "__builtin_HEXAGON_M2_mpyud_nac_ll_s1", - "llvm.hexagon.M2.mpyui" => "__builtin_HEXAGON_M2_mpyui", - "llvm.hexagon.M2.nacci" => "__builtin_HEXAGON_M2_nacci", - "llvm.hexagon.M2.naccii" => "__builtin_HEXAGON_M2_naccii", - "llvm.hexagon.M2.subacc" => "__builtin_HEXAGON_M2_subacc", - "llvm.hexagon.M2.vabsdiffh" => "__builtin_HEXAGON_M2_vabsdiffh", - "llvm.hexagon.M2.vabsdiffw" => "__builtin_HEXAGON_M2_vabsdiffw", - "llvm.hexagon.M2.vcmac.s0.sat.i" => "__builtin_HEXAGON_M2_vcmac_s0_sat_i", - "llvm.hexagon.M2.vcmac.s0.sat.r" => "__builtin_HEXAGON_M2_vcmac_s0_sat_r", - "llvm.hexagon.M2.vcmpy.s0.sat.i" => "__builtin_HEXAGON_M2_vcmpy_s0_sat_i", - "llvm.hexagon.M2.vcmpy.s0.sat.r" => "__builtin_HEXAGON_M2_vcmpy_s0_sat_r", - "llvm.hexagon.M2.vcmpy.s1.sat.i" => "__builtin_HEXAGON_M2_vcmpy_s1_sat_i", - "llvm.hexagon.M2.vcmpy.s1.sat.r" => "__builtin_HEXAGON_M2_vcmpy_s1_sat_r", - "llvm.hexagon.M2.vdmacs.s0" => "__builtin_HEXAGON_M2_vdmacs_s0", - "llvm.hexagon.M2.vdmacs.s1" => "__builtin_HEXAGON_M2_vdmacs_s1", - "llvm.hexagon.M2.vdmpyrs.s0" => "__builtin_HEXAGON_M2_vdmpyrs_s0", - "llvm.hexagon.M2.vdmpyrs.s1" => "__builtin_HEXAGON_M2_vdmpyrs_s1", - "llvm.hexagon.M2.vdmpys.s0" => "__builtin_HEXAGON_M2_vdmpys_s0", - "llvm.hexagon.M2.vdmpys.s1" => "__builtin_HEXAGON_M2_vdmpys_s1", - "llvm.hexagon.M2.vmac2" => "__builtin_HEXAGON_M2_vmac2", - "llvm.hexagon.M2.vmac2es" => "__builtin_HEXAGON_M2_vmac2es", - "llvm.hexagon.M2.vmac2es.s0" => "__builtin_HEXAGON_M2_vmac2es_s0", - "llvm.hexagon.M2.vmac2es.s1" => "__builtin_HEXAGON_M2_vmac2es_s1", - "llvm.hexagon.M2.vmac2s.s0" => "__builtin_HEXAGON_M2_vmac2s_s0", - "llvm.hexagon.M2.vmac2s.s1" => "__builtin_HEXAGON_M2_vmac2s_s1", - "llvm.hexagon.M2.vmac2su.s0" => "__builtin_HEXAGON_M2_vmac2su_s0", - "llvm.hexagon.M2.vmac2su.s1" => "__builtin_HEXAGON_M2_vmac2su_s1", - "llvm.hexagon.M2.vmpy2es.s0" => "__builtin_HEXAGON_M2_vmpy2es_s0", - "llvm.hexagon.M2.vmpy2es.s1" => "__builtin_HEXAGON_M2_vmpy2es_s1", - "llvm.hexagon.M2.vmpy2s.s0" => "__builtin_HEXAGON_M2_vmpy2s_s0", - "llvm.hexagon.M2.vmpy2s.s0pack" => "__builtin_HEXAGON_M2_vmpy2s_s0pack", - "llvm.hexagon.M2.vmpy2s.s1" => "__builtin_HEXAGON_M2_vmpy2s_s1", - "llvm.hexagon.M2.vmpy2s.s1pack" => "__builtin_HEXAGON_M2_vmpy2s_s1pack", - "llvm.hexagon.M2.vmpy2su.s0" => "__builtin_HEXAGON_M2_vmpy2su_s0", - 
"llvm.hexagon.M2.vmpy2su.s1" => "__builtin_HEXAGON_M2_vmpy2su_s1", - "llvm.hexagon.M2.vraddh" => "__builtin_HEXAGON_M2_vraddh", - "llvm.hexagon.M2.vradduh" => "__builtin_HEXAGON_M2_vradduh", - "llvm.hexagon.M2.vrcmaci.s0" => "__builtin_HEXAGON_M2_vrcmaci_s0", - "llvm.hexagon.M2.vrcmaci.s0c" => "__builtin_HEXAGON_M2_vrcmaci_s0c", - "llvm.hexagon.M2.vrcmacr.s0" => "__builtin_HEXAGON_M2_vrcmacr_s0", - "llvm.hexagon.M2.vrcmacr.s0c" => "__builtin_HEXAGON_M2_vrcmacr_s0c", - "llvm.hexagon.M2.vrcmpyi.s0" => "__builtin_HEXAGON_M2_vrcmpyi_s0", - "llvm.hexagon.M2.vrcmpyi.s0c" => "__builtin_HEXAGON_M2_vrcmpyi_s0c", - "llvm.hexagon.M2.vrcmpyr.s0" => "__builtin_HEXAGON_M2_vrcmpyr_s0", - "llvm.hexagon.M2.vrcmpyr.s0c" => "__builtin_HEXAGON_M2_vrcmpyr_s0c", - "llvm.hexagon.M2.vrcmpys.acc.s1" => "__builtin_HEXAGON_M2_vrcmpys_acc_s1", - "llvm.hexagon.M2.vrcmpys.s1" => "__builtin_HEXAGON_M2_vrcmpys_s1", - "llvm.hexagon.M2.vrcmpys.s1rp" => "__builtin_HEXAGON_M2_vrcmpys_s1rp", - "llvm.hexagon.M2.vrmac.s0" => "__builtin_HEXAGON_M2_vrmac_s0", - "llvm.hexagon.M2.vrmpy.s0" => "__builtin_HEXAGON_M2_vrmpy_s0", - "llvm.hexagon.M2.xor.xacc" => "__builtin_HEXAGON_M2_xor_xacc", - "llvm.hexagon.M4.and.and" => "__builtin_HEXAGON_M4_and_and", - "llvm.hexagon.M4.and.andn" => "__builtin_HEXAGON_M4_and_andn", - "llvm.hexagon.M4.and.or" => "__builtin_HEXAGON_M4_and_or", - "llvm.hexagon.M4.and.xor" => "__builtin_HEXAGON_M4_and_xor", - "llvm.hexagon.M4.cmpyi.wh" => "__builtin_HEXAGON_M4_cmpyi_wh", - "llvm.hexagon.M4.cmpyi.whc" => "__builtin_HEXAGON_M4_cmpyi_whc", - "llvm.hexagon.M4.cmpyr.wh" => "__builtin_HEXAGON_M4_cmpyr_wh", - "llvm.hexagon.M4.cmpyr.whc" => "__builtin_HEXAGON_M4_cmpyr_whc", - "llvm.hexagon.M4.mac.up.s1.sat" => "__builtin_HEXAGON_M4_mac_up_s1_sat", - "llvm.hexagon.M4.mpyri.addi" => "__builtin_HEXAGON_M4_mpyri_addi", - "llvm.hexagon.M4.mpyri.addr" => "__builtin_HEXAGON_M4_mpyri_addr", - "llvm.hexagon.M4.mpyri.addr.u2" => "__builtin_HEXAGON_M4_mpyri_addr_u2", - "llvm.hexagon.M4.mpyrr.addi" => "__builtin_HEXAGON_M4_mpyrr_addi", - "llvm.hexagon.M4.mpyrr.addr" => "__builtin_HEXAGON_M4_mpyrr_addr", - "llvm.hexagon.M4.nac.up.s1.sat" => "__builtin_HEXAGON_M4_nac_up_s1_sat", - "llvm.hexagon.M4.or.and" => "__builtin_HEXAGON_M4_or_and", - "llvm.hexagon.M4.or.andn" => "__builtin_HEXAGON_M4_or_andn", - "llvm.hexagon.M4.or.or" => "__builtin_HEXAGON_M4_or_or", - "llvm.hexagon.M4.or.xor" => "__builtin_HEXAGON_M4_or_xor", - "llvm.hexagon.M4.pmpyw" => "__builtin_HEXAGON_M4_pmpyw", - "llvm.hexagon.M4.pmpyw.acc" => "__builtin_HEXAGON_M4_pmpyw_acc", - "llvm.hexagon.M4.vpmpyh" => "__builtin_HEXAGON_M4_vpmpyh", - "llvm.hexagon.M4.vpmpyh.acc" => "__builtin_HEXAGON_M4_vpmpyh_acc", - "llvm.hexagon.M4.vrmpyeh.acc.s0" => "__builtin_HEXAGON_M4_vrmpyeh_acc_s0", - "llvm.hexagon.M4.vrmpyeh.acc.s1" => "__builtin_HEXAGON_M4_vrmpyeh_acc_s1", - "llvm.hexagon.M4.vrmpyeh.s0" => "__builtin_HEXAGON_M4_vrmpyeh_s0", - "llvm.hexagon.M4.vrmpyeh.s1" => "__builtin_HEXAGON_M4_vrmpyeh_s1", - "llvm.hexagon.M4.vrmpyoh.acc.s0" => "__builtin_HEXAGON_M4_vrmpyoh_acc_s0", - "llvm.hexagon.M4.vrmpyoh.acc.s1" => "__builtin_HEXAGON_M4_vrmpyoh_acc_s1", - "llvm.hexagon.M4.vrmpyoh.s0" => "__builtin_HEXAGON_M4_vrmpyoh_s0", - "llvm.hexagon.M4.vrmpyoh.s1" => "__builtin_HEXAGON_M4_vrmpyoh_s1", - "llvm.hexagon.M4.xor.and" => "__builtin_HEXAGON_M4_xor_and", - "llvm.hexagon.M4.xor.andn" => "__builtin_HEXAGON_M4_xor_andn", - "llvm.hexagon.M4.xor.or" => "__builtin_HEXAGON_M4_xor_or", - "llvm.hexagon.M4.xor.xacc" => "__builtin_HEXAGON_M4_xor_xacc", - "llvm.hexagon.M5.vdmacbsu" => 
"__builtin_HEXAGON_M5_vdmacbsu", - "llvm.hexagon.M5.vdmpybsu" => "__builtin_HEXAGON_M5_vdmpybsu", - "llvm.hexagon.M5.vmacbsu" => "__builtin_HEXAGON_M5_vmacbsu", - "llvm.hexagon.M5.vmacbuu" => "__builtin_HEXAGON_M5_vmacbuu", - "llvm.hexagon.M5.vmpybsu" => "__builtin_HEXAGON_M5_vmpybsu", - "llvm.hexagon.M5.vmpybuu" => "__builtin_HEXAGON_M5_vmpybuu", - "llvm.hexagon.M5.vrmacbsu" => "__builtin_HEXAGON_M5_vrmacbsu", - "llvm.hexagon.M5.vrmacbuu" => "__builtin_HEXAGON_M5_vrmacbuu", - "llvm.hexagon.M5.vrmpybsu" => "__builtin_HEXAGON_M5_vrmpybsu", - "llvm.hexagon.M5.vrmpybuu" => "__builtin_HEXAGON_M5_vrmpybuu", - "llvm.hexagon.M6.vabsdiffb" => "__builtin_HEXAGON_M6_vabsdiffb", - "llvm.hexagon.M6.vabsdiffub" => "__builtin_HEXAGON_M6_vabsdiffub", - "llvm.hexagon.M7.dcmpyiw" => "__builtin_HEXAGON_M7_dcmpyiw", - "llvm.hexagon.M7.dcmpyiw.acc" => "__builtin_HEXAGON_M7_dcmpyiw_acc", - "llvm.hexagon.M7.dcmpyiwc" => "__builtin_HEXAGON_M7_dcmpyiwc", - "llvm.hexagon.M7.dcmpyiwc.acc" => "__builtin_HEXAGON_M7_dcmpyiwc_acc", - "llvm.hexagon.M7.dcmpyrw" => "__builtin_HEXAGON_M7_dcmpyrw", - "llvm.hexagon.M7.dcmpyrw.acc" => "__builtin_HEXAGON_M7_dcmpyrw_acc", - "llvm.hexagon.M7.dcmpyrwc" => "__builtin_HEXAGON_M7_dcmpyrwc", - "llvm.hexagon.M7.dcmpyrwc.acc" => "__builtin_HEXAGON_M7_dcmpyrwc_acc", - "llvm.hexagon.M7.vdmpy" => "__builtin_HEXAGON_M7_vdmpy", - "llvm.hexagon.M7.vdmpy.acc" => "__builtin_HEXAGON_M7_vdmpy_acc", - "llvm.hexagon.M7.wcmpyiw" => "__builtin_HEXAGON_M7_wcmpyiw", - "llvm.hexagon.M7.wcmpyiw.rnd" => "__builtin_HEXAGON_M7_wcmpyiw_rnd", - "llvm.hexagon.M7.wcmpyiwc" => "__builtin_HEXAGON_M7_wcmpyiwc", - "llvm.hexagon.M7.wcmpyiwc.rnd" => "__builtin_HEXAGON_M7_wcmpyiwc_rnd", - "llvm.hexagon.M7.wcmpyrw" => "__builtin_HEXAGON_M7_wcmpyrw", - "llvm.hexagon.M7.wcmpyrw.rnd" => "__builtin_HEXAGON_M7_wcmpyrw_rnd", - "llvm.hexagon.M7.wcmpyrwc" => "__builtin_HEXAGON_M7_wcmpyrwc", - "llvm.hexagon.M7.wcmpyrwc.rnd" => "__builtin_HEXAGON_M7_wcmpyrwc_rnd", - "llvm.hexagon.S2.addasl.rrri" => "__builtin_HEXAGON_S2_addasl_rrri", - "llvm.hexagon.S2.asl.i.p" => "__builtin_HEXAGON_S2_asl_i_p", - "llvm.hexagon.S2.asl.i.p.acc" => "__builtin_HEXAGON_S2_asl_i_p_acc", - "llvm.hexagon.S2.asl.i.p.and" => "__builtin_HEXAGON_S2_asl_i_p_and", - "llvm.hexagon.S2.asl.i.p.nac" => "__builtin_HEXAGON_S2_asl_i_p_nac", - "llvm.hexagon.S2.asl.i.p.or" => "__builtin_HEXAGON_S2_asl_i_p_or", - "llvm.hexagon.S2.asl.i.p.xacc" => "__builtin_HEXAGON_S2_asl_i_p_xacc", - "llvm.hexagon.S2.asl.i.r" => "__builtin_HEXAGON_S2_asl_i_r", - "llvm.hexagon.S2.asl.i.r.acc" => "__builtin_HEXAGON_S2_asl_i_r_acc", - "llvm.hexagon.S2.asl.i.r.and" => "__builtin_HEXAGON_S2_asl_i_r_and", - "llvm.hexagon.S2.asl.i.r.nac" => "__builtin_HEXAGON_S2_asl_i_r_nac", - "llvm.hexagon.S2.asl.i.r.or" => "__builtin_HEXAGON_S2_asl_i_r_or", - "llvm.hexagon.S2.asl.i.r.sat" => "__builtin_HEXAGON_S2_asl_i_r_sat", - "llvm.hexagon.S2.asl.i.r.xacc" => "__builtin_HEXAGON_S2_asl_i_r_xacc", - "llvm.hexagon.S2.asl.i.vh" => "__builtin_HEXAGON_S2_asl_i_vh", - "llvm.hexagon.S2.asl.i.vw" => "__builtin_HEXAGON_S2_asl_i_vw", - "llvm.hexagon.S2.asl.r.p" => "__builtin_HEXAGON_S2_asl_r_p", - "llvm.hexagon.S2.asl.r.p.acc" => "__builtin_HEXAGON_S2_asl_r_p_acc", - "llvm.hexagon.S2.asl.r.p.and" => "__builtin_HEXAGON_S2_asl_r_p_and", - "llvm.hexagon.S2.asl.r.p.nac" => "__builtin_HEXAGON_S2_asl_r_p_nac", - "llvm.hexagon.S2.asl.r.p.or" => "__builtin_HEXAGON_S2_asl_r_p_or", - "llvm.hexagon.S2.asl.r.p.xor" => "__builtin_HEXAGON_S2_asl_r_p_xor", - "llvm.hexagon.S2.asl.r.r" => "__builtin_HEXAGON_S2_asl_r_r", - 
"llvm.hexagon.S2.asl.r.r.acc" => "__builtin_HEXAGON_S2_asl_r_r_acc", - "llvm.hexagon.S2.asl.r.r.and" => "__builtin_HEXAGON_S2_asl_r_r_and", - "llvm.hexagon.S2.asl.r.r.nac" => "__builtin_HEXAGON_S2_asl_r_r_nac", - "llvm.hexagon.S2.asl.r.r.or" => "__builtin_HEXAGON_S2_asl_r_r_or", - "llvm.hexagon.S2.asl.r.r.sat" => "__builtin_HEXAGON_S2_asl_r_r_sat", - "llvm.hexagon.S2.asl.r.vh" => "__builtin_HEXAGON_S2_asl_r_vh", - "llvm.hexagon.S2.asl.r.vw" => "__builtin_HEXAGON_S2_asl_r_vw", - "llvm.hexagon.S2.asr.i.p" => "__builtin_HEXAGON_S2_asr_i_p", - "llvm.hexagon.S2.asr.i.p.acc" => "__builtin_HEXAGON_S2_asr_i_p_acc", - "llvm.hexagon.S2.asr.i.p.and" => "__builtin_HEXAGON_S2_asr_i_p_and", - "llvm.hexagon.S2.asr.i.p.nac" => "__builtin_HEXAGON_S2_asr_i_p_nac", - "llvm.hexagon.S2.asr.i.p.or" => "__builtin_HEXAGON_S2_asr_i_p_or", - "llvm.hexagon.S2.asr.i.p.rnd" => "__builtin_HEXAGON_S2_asr_i_p_rnd", - "llvm.hexagon.S2.asr.i.p.rnd.goodsyntax" => "__builtin_HEXAGON_S2_asr_i_p_rnd_goodsyntax", - "llvm.hexagon.S2.asr.i.r" => "__builtin_HEXAGON_S2_asr_i_r", - "llvm.hexagon.S2.asr.i.r.acc" => "__builtin_HEXAGON_S2_asr_i_r_acc", - "llvm.hexagon.S2.asr.i.r.and" => "__builtin_HEXAGON_S2_asr_i_r_and", - "llvm.hexagon.S2.asr.i.r.nac" => "__builtin_HEXAGON_S2_asr_i_r_nac", - "llvm.hexagon.S2.asr.i.r.or" => "__builtin_HEXAGON_S2_asr_i_r_or", - "llvm.hexagon.S2.asr.i.r.rnd" => "__builtin_HEXAGON_S2_asr_i_r_rnd", - "llvm.hexagon.S2.asr.i.r.rnd.goodsyntax" => "__builtin_HEXAGON_S2_asr_i_r_rnd_goodsyntax", - "llvm.hexagon.S2.asr.i.svw.trun" => "__builtin_HEXAGON_S2_asr_i_svw_trun", - "llvm.hexagon.S2.asr.i.vh" => "__builtin_HEXAGON_S2_asr_i_vh", - "llvm.hexagon.S2.asr.i.vw" => "__builtin_HEXAGON_S2_asr_i_vw", - "llvm.hexagon.S2.asr.r.p" => "__builtin_HEXAGON_S2_asr_r_p", - "llvm.hexagon.S2.asr.r.p.acc" => "__builtin_HEXAGON_S2_asr_r_p_acc", - "llvm.hexagon.S2.asr.r.p.and" => "__builtin_HEXAGON_S2_asr_r_p_and", - "llvm.hexagon.S2.asr.r.p.nac" => "__builtin_HEXAGON_S2_asr_r_p_nac", - "llvm.hexagon.S2.asr.r.p.or" => "__builtin_HEXAGON_S2_asr_r_p_or", - "llvm.hexagon.S2.asr.r.p.xor" => "__builtin_HEXAGON_S2_asr_r_p_xor", - "llvm.hexagon.S2.asr.r.r" => "__builtin_HEXAGON_S2_asr_r_r", - "llvm.hexagon.S2.asr.r.r.acc" => "__builtin_HEXAGON_S2_asr_r_r_acc", - "llvm.hexagon.S2.asr.r.r.and" => "__builtin_HEXAGON_S2_asr_r_r_and", - "llvm.hexagon.S2.asr.r.r.nac" => "__builtin_HEXAGON_S2_asr_r_r_nac", - "llvm.hexagon.S2.asr.r.r.or" => "__builtin_HEXAGON_S2_asr_r_r_or", - "llvm.hexagon.S2.asr.r.r.sat" => "__builtin_HEXAGON_S2_asr_r_r_sat", - "llvm.hexagon.S2.asr.r.svw.trun" => "__builtin_HEXAGON_S2_asr_r_svw_trun", - "llvm.hexagon.S2.asr.r.vh" => "__builtin_HEXAGON_S2_asr_r_vh", - "llvm.hexagon.S2.asr.r.vw" => "__builtin_HEXAGON_S2_asr_r_vw", - "llvm.hexagon.S2.brev" => "__builtin_HEXAGON_S2_brev", - "llvm.hexagon.S2.brevp" => "__builtin_HEXAGON_S2_brevp", - "llvm.hexagon.S2.cabacencbin" => "__builtin_HEXAGON_S2_cabacencbin", - "llvm.hexagon.S2.cl0" => "__builtin_HEXAGON_S2_cl0", - "llvm.hexagon.S2.cl0p" => "__builtin_HEXAGON_S2_cl0p", - "llvm.hexagon.S2.cl1" => "__builtin_HEXAGON_S2_cl1", - "llvm.hexagon.S2.cl1p" => "__builtin_HEXAGON_S2_cl1p", - "llvm.hexagon.S2.clb" => "__builtin_HEXAGON_S2_clb", - "llvm.hexagon.S2.clbnorm" => "__builtin_HEXAGON_S2_clbnorm", - "llvm.hexagon.S2.clbp" => "__builtin_HEXAGON_S2_clbp", - "llvm.hexagon.S2.clrbit.i" => "__builtin_HEXAGON_S2_clrbit_i", - "llvm.hexagon.S2.clrbit.r" => "__builtin_HEXAGON_S2_clrbit_r", - "llvm.hexagon.S2.ct0" => "__builtin_HEXAGON_S2_ct0", - "llvm.hexagon.S2.ct0p" => 
"__builtin_HEXAGON_S2_ct0p", - "llvm.hexagon.S2.ct1" => "__builtin_HEXAGON_S2_ct1", - "llvm.hexagon.S2.ct1p" => "__builtin_HEXAGON_S2_ct1p", - "llvm.hexagon.S2.deinterleave" => "__builtin_HEXAGON_S2_deinterleave", - "llvm.hexagon.S2.extractu" => "__builtin_HEXAGON_S2_extractu", - "llvm.hexagon.S2.extractu.rp" => "__builtin_HEXAGON_S2_extractu_rp", - "llvm.hexagon.S2.extractup" => "__builtin_HEXAGON_S2_extractup", - "llvm.hexagon.S2.extractup.rp" => "__builtin_HEXAGON_S2_extractup_rp", - "llvm.hexagon.S2.insert" => "__builtin_HEXAGON_S2_insert", - "llvm.hexagon.S2.insert.rp" => "__builtin_HEXAGON_S2_insert_rp", - "llvm.hexagon.S2.insertp" => "__builtin_HEXAGON_S2_insertp", - "llvm.hexagon.S2.insertp.rp" => "__builtin_HEXAGON_S2_insertp_rp", - "llvm.hexagon.S2.interleave" => "__builtin_HEXAGON_S2_interleave", - "llvm.hexagon.S2.lfsp" => "__builtin_HEXAGON_S2_lfsp", - "llvm.hexagon.S2.lsl.r.p" => "__builtin_HEXAGON_S2_lsl_r_p", - "llvm.hexagon.S2.lsl.r.p.acc" => "__builtin_HEXAGON_S2_lsl_r_p_acc", - "llvm.hexagon.S2.lsl.r.p.and" => "__builtin_HEXAGON_S2_lsl_r_p_and", - "llvm.hexagon.S2.lsl.r.p.nac" => "__builtin_HEXAGON_S2_lsl_r_p_nac", - "llvm.hexagon.S2.lsl.r.p.or" => "__builtin_HEXAGON_S2_lsl_r_p_or", - "llvm.hexagon.S2.lsl.r.p.xor" => "__builtin_HEXAGON_S2_lsl_r_p_xor", - "llvm.hexagon.S2.lsl.r.r" => "__builtin_HEXAGON_S2_lsl_r_r", - "llvm.hexagon.S2.lsl.r.r.acc" => "__builtin_HEXAGON_S2_lsl_r_r_acc", - "llvm.hexagon.S2.lsl.r.r.and" => "__builtin_HEXAGON_S2_lsl_r_r_and", - "llvm.hexagon.S2.lsl.r.r.nac" => "__builtin_HEXAGON_S2_lsl_r_r_nac", - "llvm.hexagon.S2.lsl.r.r.or" => "__builtin_HEXAGON_S2_lsl_r_r_or", - "llvm.hexagon.S2.lsl.r.vh" => "__builtin_HEXAGON_S2_lsl_r_vh", - "llvm.hexagon.S2.lsl.r.vw" => "__builtin_HEXAGON_S2_lsl_r_vw", - "llvm.hexagon.S2.lsr.i.p" => "__builtin_HEXAGON_S2_lsr_i_p", - "llvm.hexagon.S2.lsr.i.p.acc" => "__builtin_HEXAGON_S2_lsr_i_p_acc", - "llvm.hexagon.S2.lsr.i.p.and" => "__builtin_HEXAGON_S2_lsr_i_p_and", - "llvm.hexagon.S2.lsr.i.p.nac" => "__builtin_HEXAGON_S2_lsr_i_p_nac", - "llvm.hexagon.S2.lsr.i.p.or" => "__builtin_HEXAGON_S2_lsr_i_p_or", - "llvm.hexagon.S2.lsr.i.p.xacc" => "__builtin_HEXAGON_S2_lsr_i_p_xacc", - "llvm.hexagon.S2.lsr.i.r" => "__builtin_HEXAGON_S2_lsr_i_r", - "llvm.hexagon.S2.lsr.i.r.acc" => "__builtin_HEXAGON_S2_lsr_i_r_acc", - "llvm.hexagon.S2.lsr.i.r.and" => "__builtin_HEXAGON_S2_lsr_i_r_and", - "llvm.hexagon.S2.lsr.i.r.nac" => "__builtin_HEXAGON_S2_lsr_i_r_nac", - "llvm.hexagon.S2.lsr.i.r.or" => "__builtin_HEXAGON_S2_lsr_i_r_or", - "llvm.hexagon.S2.lsr.i.r.xacc" => "__builtin_HEXAGON_S2_lsr_i_r_xacc", - "llvm.hexagon.S2.lsr.i.vh" => "__builtin_HEXAGON_S2_lsr_i_vh", - "llvm.hexagon.S2.lsr.i.vw" => "__builtin_HEXAGON_S2_lsr_i_vw", - "llvm.hexagon.S2.lsr.r.p" => "__builtin_HEXAGON_S2_lsr_r_p", - "llvm.hexagon.S2.lsr.r.p.acc" => "__builtin_HEXAGON_S2_lsr_r_p_acc", - "llvm.hexagon.S2.lsr.r.p.and" => "__builtin_HEXAGON_S2_lsr_r_p_and", - "llvm.hexagon.S2.lsr.r.p.nac" => "__builtin_HEXAGON_S2_lsr_r_p_nac", - "llvm.hexagon.S2.lsr.r.p.or" => "__builtin_HEXAGON_S2_lsr_r_p_or", - "llvm.hexagon.S2.lsr.r.p.xor" => "__builtin_HEXAGON_S2_lsr_r_p_xor", - "llvm.hexagon.S2.lsr.r.r" => "__builtin_HEXAGON_S2_lsr_r_r", - "llvm.hexagon.S2.lsr.r.r.acc" => "__builtin_HEXAGON_S2_lsr_r_r_acc", - "llvm.hexagon.S2.lsr.r.r.and" => "__builtin_HEXAGON_S2_lsr_r_r_and", - "llvm.hexagon.S2.lsr.r.r.nac" => "__builtin_HEXAGON_S2_lsr_r_r_nac", - "llvm.hexagon.S2.lsr.r.r.or" => "__builtin_HEXAGON_S2_lsr_r_r_or", - "llvm.hexagon.S2.lsr.r.vh" => 
"__builtin_HEXAGON_S2_lsr_r_vh", - "llvm.hexagon.S2.lsr.r.vw" => "__builtin_HEXAGON_S2_lsr_r_vw", - "llvm.hexagon.S2.mask" => "__builtin_HEXAGON_S2_mask", - "llvm.hexagon.S2.packhl" => "__builtin_HEXAGON_S2_packhl", - "llvm.hexagon.S2.parityp" => "__builtin_HEXAGON_S2_parityp", - "llvm.hexagon.S2.setbit.i" => "__builtin_HEXAGON_S2_setbit_i", - "llvm.hexagon.S2.setbit.r" => "__builtin_HEXAGON_S2_setbit_r", - "llvm.hexagon.S2.shuffeb" => "__builtin_HEXAGON_S2_shuffeb", - "llvm.hexagon.S2.shuffeh" => "__builtin_HEXAGON_S2_shuffeh", - "llvm.hexagon.S2.shuffob" => "__builtin_HEXAGON_S2_shuffob", - "llvm.hexagon.S2.shuffoh" => "__builtin_HEXAGON_S2_shuffoh", - "llvm.hexagon.S2.storerb.pbr" => "__builtin_brev_stb", - "llvm.hexagon.S2.storerd.pbr" => "__builtin_brev_std", - "llvm.hexagon.S2.storerf.pbr" => "__builtin_brev_sthhi", - "llvm.hexagon.S2.storerh.pbr" => "__builtin_brev_sth", - "llvm.hexagon.S2.storeri.pbr" => "__builtin_brev_stw", - "llvm.hexagon.S2.storew.locked" => "__builtin_HEXAGON_S2_storew_locked", - "llvm.hexagon.S2.svsathb" => "__builtin_HEXAGON_S2_svsathb", - "llvm.hexagon.S2.svsathub" => "__builtin_HEXAGON_S2_svsathub", - "llvm.hexagon.S2.tableidxb.goodsyntax" => "__builtin_HEXAGON_S2_tableidxb_goodsyntax", - "llvm.hexagon.S2.tableidxd.goodsyntax" => "__builtin_HEXAGON_S2_tableidxd_goodsyntax", - "llvm.hexagon.S2.tableidxh.goodsyntax" => "__builtin_HEXAGON_S2_tableidxh_goodsyntax", - "llvm.hexagon.S2.tableidxw.goodsyntax" => "__builtin_HEXAGON_S2_tableidxw_goodsyntax", - "llvm.hexagon.S2.togglebit.i" => "__builtin_HEXAGON_S2_togglebit_i", - "llvm.hexagon.S2.togglebit.r" => "__builtin_HEXAGON_S2_togglebit_r", - "llvm.hexagon.S2.tstbit.i" => "__builtin_HEXAGON_S2_tstbit_i", - "llvm.hexagon.S2.tstbit.r" => "__builtin_HEXAGON_S2_tstbit_r", - "llvm.hexagon.S2.valignib" => "__builtin_HEXAGON_S2_valignib", - "llvm.hexagon.S2.valignrb" => "__builtin_HEXAGON_S2_valignrb", - "llvm.hexagon.S2.vcnegh" => "__builtin_HEXAGON_S2_vcnegh", - "llvm.hexagon.S2.vcrotate" => "__builtin_HEXAGON_S2_vcrotate", - "llvm.hexagon.S2.vrcnegh" => "__builtin_HEXAGON_S2_vrcnegh", - "llvm.hexagon.S2.vrndpackwh" => "__builtin_HEXAGON_S2_vrndpackwh", - "llvm.hexagon.S2.vrndpackwhs" => "__builtin_HEXAGON_S2_vrndpackwhs", - "llvm.hexagon.S2.vsathb" => "__builtin_HEXAGON_S2_vsathb", - "llvm.hexagon.S2.vsathb.nopack" => "__builtin_HEXAGON_S2_vsathb_nopack", - "llvm.hexagon.S2.vsathub" => "__builtin_HEXAGON_S2_vsathub", - "llvm.hexagon.S2.vsathub.nopack" => "__builtin_HEXAGON_S2_vsathub_nopack", - "llvm.hexagon.S2.vsatwh" => "__builtin_HEXAGON_S2_vsatwh", - "llvm.hexagon.S2.vsatwh.nopack" => "__builtin_HEXAGON_S2_vsatwh_nopack", - "llvm.hexagon.S2.vsatwuh" => "__builtin_HEXAGON_S2_vsatwuh", - "llvm.hexagon.S2.vsatwuh.nopack" => "__builtin_HEXAGON_S2_vsatwuh_nopack", - "llvm.hexagon.S2.vsplatrb" => "__builtin_HEXAGON_S2_vsplatrb", - "llvm.hexagon.S2.vsplatrh" => "__builtin_HEXAGON_S2_vsplatrh", - "llvm.hexagon.S2.vspliceib" => "__builtin_HEXAGON_S2_vspliceib", - "llvm.hexagon.S2.vsplicerb" => "__builtin_HEXAGON_S2_vsplicerb", - "llvm.hexagon.S2.vsxtbh" => "__builtin_HEXAGON_S2_vsxtbh", - "llvm.hexagon.S2.vsxthw" => "__builtin_HEXAGON_S2_vsxthw", - "llvm.hexagon.S2.vtrunehb" => "__builtin_HEXAGON_S2_vtrunehb", - "llvm.hexagon.S2.vtrunewh" => "__builtin_HEXAGON_S2_vtrunewh", - "llvm.hexagon.S2.vtrunohb" => "__builtin_HEXAGON_S2_vtrunohb", - "llvm.hexagon.S2.vtrunowh" => "__builtin_HEXAGON_S2_vtrunowh", - "llvm.hexagon.S2.vzxtbh" => "__builtin_HEXAGON_S2_vzxtbh", - "llvm.hexagon.S2.vzxthw" => 
"__builtin_HEXAGON_S2_vzxthw", - "llvm.hexagon.S4.addaddi" => "__builtin_HEXAGON_S4_addaddi", - "llvm.hexagon.S4.addi.asl.ri" => "__builtin_HEXAGON_S4_addi_asl_ri", - "llvm.hexagon.S4.addi.lsr.ri" => "__builtin_HEXAGON_S4_addi_lsr_ri", - "llvm.hexagon.S4.andi.asl.ri" => "__builtin_HEXAGON_S4_andi_asl_ri", - "llvm.hexagon.S4.andi.lsr.ri" => "__builtin_HEXAGON_S4_andi_lsr_ri", - "llvm.hexagon.S4.clbaddi" => "__builtin_HEXAGON_S4_clbaddi", - "llvm.hexagon.S4.clbpaddi" => "__builtin_HEXAGON_S4_clbpaddi", - "llvm.hexagon.S4.clbpnorm" => "__builtin_HEXAGON_S4_clbpnorm", - "llvm.hexagon.S4.extract" => "__builtin_HEXAGON_S4_extract", - "llvm.hexagon.S4.extract.rp" => "__builtin_HEXAGON_S4_extract_rp", - "llvm.hexagon.S4.extractp" => "__builtin_HEXAGON_S4_extractp", - "llvm.hexagon.S4.extractp.rp" => "__builtin_HEXAGON_S4_extractp_rp", - "llvm.hexagon.S4.lsli" => "__builtin_HEXAGON_S4_lsli", - "llvm.hexagon.S4.ntstbit.i" => "__builtin_HEXAGON_S4_ntstbit_i", - "llvm.hexagon.S4.ntstbit.r" => "__builtin_HEXAGON_S4_ntstbit_r", - "llvm.hexagon.S4.or.andi" => "__builtin_HEXAGON_S4_or_andi", - "llvm.hexagon.S4.or.andix" => "__builtin_HEXAGON_S4_or_andix", - "llvm.hexagon.S4.or.ori" => "__builtin_HEXAGON_S4_or_ori", - "llvm.hexagon.S4.ori.asl.ri" => "__builtin_HEXAGON_S4_ori_asl_ri", - "llvm.hexagon.S4.ori.lsr.ri" => "__builtin_HEXAGON_S4_ori_lsr_ri", - "llvm.hexagon.S4.parity" => "__builtin_HEXAGON_S4_parity", - "llvm.hexagon.S4.stored.locked" => "__builtin_HEXAGON_S4_stored_locked", - "llvm.hexagon.S4.subaddi" => "__builtin_HEXAGON_S4_subaddi", - "llvm.hexagon.S4.subi.asl.ri" => "__builtin_HEXAGON_S4_subi_asl_ri", - "llvm.hexagon.S4.subi.lsr.ri" => "__builtin_HEXAGON_S4_subi_lsr_ri", - "llvm.hexagon.S4.vrcrotate" => "__builtin_HEXAGON_S4_vrcrotate", - "llvm.hexagon.S4.vrcrotate.acc" => "__builtin_HEXAGON_S4_vrcrotate_acc", - "llvm.hexagon.S4.vxaddsubh" => "__builtin_HEXAGON_S4_vxaddsubh", - "llvm.hexagon.S4.vxaddsubhr" => "__builtin_HEXAGON_S4_vxaddsubhr", - "llvm.hexagon.S4.vxaddsubw" => "__builtin_HEXAGON_S4_vxaddsubw", - "llvm.hexagon.S4.vxsubaddh" => "__builtin_HEXAGON_S4_vxsubaddh", - "llvm.hexagon.S4.vxsubaddhr" => "__builtin_HEXAGON_S4_vxsubaddhr", - "llvm.hexagon.S4.vxsubaddw" => "__builtin_HEXAGON_S4_vxsubaddw", - "llvm.hexagon.S5.asrhub.rnd.sat.goodsyntax" => "__builtin_HEXAGON_S5_asrhub_rnd_sat_goodsyntax", - "llvm.hexagon.S5.asrhub.sat" => "__builtin_HEXAGON_S5_asrhub_sat", - "llvm.hexagon.S5.popcountp" => "__builtin_HEXAGON_S5_popcountp", - "llvm.hexagon.S5.vasrhrnd.goodsyntax" => "__builtin_HEXAGON_S5_vasrhrnd_goodsyntax", - "llvm.hexagon.S6.rol.i.p" => "__builtin_HEXAGON_S6_rol_i_p", - "llvm.hexagon.S6.rol.i.p.acc" => "__builtin_HEXAGON_S6_rol_i_p_acc", - "llvm.hexagon.S6.rol.i.p.and" => "__builtin_HEXAGON_S6_rol_i_p_and", - "llvm.hexagon.S6.rol.i.p.nac" => "__builtin_HEXAGON_S6_rol_i_p_nac", - "llvm.hexagon.S6.rol.i.p.or" => "__builtin_HEXAGON_S6_rol_i_p_or", - "llvm.hexagon.S6.rol.i.p.xacc" => "__builtin_HEXAGON_S6_rol_i_p_xacc", - "llvm.hexagon.S6.rol.i.r" => "__builtin_HEXAGON_S6_rol_i_r", - "llvm.hexagon.S6.rol.i.r.acc" => "__builtin_HEXAGON_S6_rol_i_r_acc", - "llvm.hexagon.S6.rol.i.r.and" => "__builtin_HEXAGON_S6_rol_i_r_and", - "llvm.hexagon.S6.rol.i.r.nac" => "__builtin_HEXAGON_S6_rol_i_r_nac", - "llvm.hexagon.S6.rol.i.r.or" => "__builtin_HEXAGON_S6_rol_i_r_or", - "llvm.hexagon.S6.rol.i.r.xacc" => "__builtin_HEXAGON_S6_rol_i_r_xacc", - "llvm.hexagon.S6.vsplatrbp" => "__builtin_HEXAGON_S6_vsplatrbp", - "llvm.hexagon.S6.vtrunehb.ppp" => "__builtin_HEXAGON_S6_vtrunehb_ppp", - 
"llvm.hexagon.S6.vtrunohb.ppp" => "__builtin_HEXAGON_S6_vtrunohb_ppp", - "llvm.hexagon.SI.to.SXTHI.asrh" => "__builtin_SI_to_SXTHI_asrh", - "llvm.hexagon.V6.extractw" => "__builtin_HEXAGON_V6_extractw", - "llvm.hexagon.V6.extractw.128B" => "__builtin_HEXAGON_V6_extractw_128B", - "llvm.hexagon.V6.get.qfext" => "__builtin_HEXAGON_V6_get_qfext", - "llvm.hexagon.V6.get.qfext.128B" => "__builtin_HEXAGON_V6_get_qfext_128B", - "llvm.hexagon.V6.get.qfext.oracc" => "__builtin_HEXAGON_V6_get_qfext_oracc", - "llvm.hexagon.V6.get.qfext.oracc.128B" => "__builtin_HEXAGON_V6_get_qfext_oracc_128B", - "llvm.hexagon.V6.hi" => "__builtin_HEXAGON_V6_hi", - "llvm.hexagon.V6.hi.128B" => "__builtin_HEXAGON_V6_hi_128B", - "llvm.hexagon.V6.lo" => "__builtin_HEXAGON_V6_lo", - "llvm.hexagon.V6.lo.128B" => "__builtin_HEXAGON_V6_lo_128B", - "llvm.hexagon.V6.lvsplatb" => "__builtin_HEXAGON_V6_lvsplatb", - "llvm.hexagon.V6.lvsplatb.128B" => "__builtin_HEXAGON_V6_lvsplatb_128B", - "llvm.hexagon.V6.lvsplath" => "__builtin_HEXAGON_V6_lvsplath", - "llvm.hexagon.V6.lvsplath.128B" => "__builtin_HEXAGON_V6_lvsplath_128B", - "llvm.hexagon.V6.lvsplatw" => "__builtin_HEXAGON_V6_lvsplatw", - "llvm.hexagon.V6.lvsplatw.128B" => "__builtin_HEXAGON_V6_lvsplatw_128B", - "llvm.hexagon.V6.pred.and" => "__builtin_HEXAGON_V6_pred_and", - "llvm.hexagon.V6.pred.and.128B" => "__builtin_HEXAGON_V6_pred_and_128B", - "llvm.hexagon.V6.pred.and.n" => "__builtin_HEXAGON_V6_pred_and_n", - "llvm.hexagon.V6.pred.and.n.128B" => "__builtin_HEXAGON_V6_pred_and_n_128B", - "llvm.hexagon.V6.pred.not" => "__builtin_HEXAGON_V6_pred_not", - "llvm.hexagon.V6.pred.not.128B" => "__builtin_HEXAGON_V6_pred_not_128B", - "llvm.hexagon.V6.pred.or" => "__builtin_HEXAGON_V6_pred_or", - "llvm.hexagon.V6.pred.or.128B" => "__builtin_HEXAGON_V6_pred_or_128B", - "llvm.hexagon.V6.pred.or.n" => "__builtin_HEXAGON_V6_pred_or_n", - "llvm.hexagon.V6.pred.or.n.128B" => "__builtin_HEXAGON_V6_pred_or_n_128B", - "llvm.hexagon.V6.pred.scalar2" => "__builtin_HEXAGON_V6_pred_scalar2", - "llvm.hexagon.V6.pred.scalar2.128B" => "__builtin_HEXAGON_V6_pred_scalar2_128B", - "llvm.hexagon.V6.pred.scalar2v2" => "__builtin_HEXAGON_V6_pred_scalar2v2", - "llvm.hexagon.V6.pred.scalar2v2.128B" => "__builtin_HEXAGON_V6_pred_scalar2v2_128B", - "llvm.hexagon.V6.pred.xor" => "__builtin_HEXAGON_V6_pred_xor", - "llvm.hexagon.V6.pred.xor.128B" => "__builtin_HEXAGON_V6_pred_xor_128B", - "llvm.hexagon.V6.set.qfext" => "__builtin_HEXAGON_V6_set_qfext", - "llvm.hexagon.V6.set.qfext.128B" => "__builtin_HEXAGON_V6_set_qfext_128B", - "llvm.hexagon.V6.shuffeqh" => "__builtin_HEXAGON_V6_shuffeqh", - "llvm.hexagon.V6.shuffeqh.128B" => "__builtin_HEXAGON_V6_shuffeqh_128B", - "llvm.hexagon.V6.shuffeqw" => "__builtin_HEXAGON_V6_shuffeqw", - "llvm.hexagon.V6.shuffeqw.128B" => "__builtin_HEXAGON_V6_shuffeqw_128B", - "llvm.hexagon.V6.v6mpyhubs10" => "__builtin_HEXAGON_V6_v6mpyhubs10", - "llvm.hexagon.V6.v6mpyhubs10.128B" => "__builtin_HEXAGON_V6_v6mpyhubs10_128B", - "llvm.hexagon.V6.v6mpyhubs10.vxx" => "__builtin_HEXAGON_V6_v6mpyhubs10_vxx", - "llvm.hexagon.V6.v6mpyhubs10.vxx.128B" => "__builtin_HEXAGON_V6_v6mpyhubs10_vxx_128B", - "llvm.hexagon.V6.v6mpyvubs10" => "__builtin_HEXAGON_V6_v6mpyvubs10", - "llvm.hexagon.V6.v6mpyvubs10.128B" => "__builtin_HEXAGON_V6_v6mpyvubs10_128B", - "llvm.hexagon.V6.v6mpyvubs10.vxx" => "__builtin_HEXAGON_V6_v6mpyvubs10_vxx", - "llvm.hexagon.V6.v6mpyvubs10.vxx.128B" => "__builtin_HEXAGON_V6_v6mpyvubs10_vxx_128B", - "llvm.hexagon.V6.vS32b.nqpred.ai" => "__builtin_HEXAGON_V6_vS32b_nqpred_ai", - 
"llvm.hexagon.V6.vS32b.nqpred.ai.128B" => "__builtin_HEXAGON_V6_vS32b_nqpred_ai_128B", - "llvm.hexagon.V6.vS32b.nt.nqpred.ai" => "__builtin_HEXAGON_V6_vS32b_nt_nqpred_ai", - "llvm.hexagon.V6.vS32b.nt.nqpred.ai.128B" => "__builtin_HEXAGON_V6_vS32b_nt_nqpred_ai_128B", - "llvm.hexagon.V6.vS32b.nt.qpred.ai" => "__builtin_HEXAGON_V6_vS32b_nt_qpred_ai", - "llvm.hexagon.V6.vS32b.nt.qpred.ai.128B" => "__builtin_HEXAGON_V6_vS32b_nt_qpred_ai_128B", - "llvm.hexagon.V6.vS32b.qpred.ai" => "__builtin_HEXAGON_V6_vS32b_qpred_ai", - "llvm.hexagon.V6.vS32b.qpred.ai.128B" => "__builtin_HEXAGON_V6_vS32b_qpred_ai_128B", - "llvm.hexagon.V6.vabs.f8" => "__builtin_HEXAGON_V6_vabs_f8", - "llvm.hexagon.V6.vabs.f8.128B" => "__builtin_HEXAGON_V6_vabs_f8_128B", - "llvm.hexagon.V6.vabs.hf" => "__builtin_HEXAGON_V6_vabs_hf", - "llvm.hexagon.V6.vabs.hf.128B" => "__builtin_HEXAGON_V6_vabs_hf_128B", - "llvm.hexagon.V6.vabs.sf" => "__builtin_HEXAGON_V6_vabs_sf", - "llvm.hexagon.V6.vabs.sf.128B" => "__builtin_HEXAGON_V6_vabs_sf_128B", - "llvm.hexagon.V6.vabsb" => "__builtin_HEXAGON_V6_vabsb", - "llvm.hexagon.V6.vabsb.128B" => "__builtin_HEXAGON_V6_vabsb_128B", - "llvm.hexagon.V6.vabsb.sat" => "__builtin_HEXAGON_V6_vabsb_sat", - "llvm.hexagon.V6.vabsb.sat.128B" => "__builtin_HEXAGON_V6_vabsb_sat_128B", - "llvm.hexagon.V6.vabsdiffh" => "__builtin_HEXAGON_V6_vabsdiffh", - "llvm.hexagon.V6.vabsdiffh.128B" => "__builtin_HEXAGON_V6_vabsdiffh_128B", - "llvm.hexagon.V6.vabsdiffub" => "__builtin_HEXAGON_V6_vabsdiffub", - "llvm.hexagon.V6.vabsdiffub.128B" => "__builtin_HEXAGON_V6_vabsdiffub_128B", - "llvm.hexagon.V6.vabsdiffuh" => "__builtin_HEXAGON_V6_vabsdiffuh", - "llvm.hexagon.V6.vabsdiffuh.128B" => "__builtin_HEXAGON_V6_vabsdiffuh_128B", - "llvm.hexagon.V6.vabsdiffw" => "__builtin_HEXAGON_V6_vabsdiffw", - "llvm.hexagon.V6.vabsdiffw.128B" => "__builtin_HEXAGON_V6_vabsdiffw_128B", - "llvm.hexagon.V6.vabsh" => "__builtin_HEXAGON_V6_vabsh", - "llvm.hexagon.V6.vabsh.128B" => "__builtin_HEXAGON_V6_vabsh_128B", - "llvm.hexagon.V6.vabsh.sat" => "__builtin_HEXAGON_V6_vabsh_sat", - "llvm.hexagon.V6.vabsh.sat.128B" => "__builtin_HEXAGON_V6_vabsh_sat_128B", - "llvm.hexagon.V6.vabsw" => "__builtin_HEXAGON_V6_vabsw", - "llvm.hexagon.V6.vabsw.128B" => "__builtin_HEXAGON_V6_vabsw_128B", - "llvm.hexagon.V6.vabsw.sat" => "__builtin_HEXAGON_V6_vabsw_sat", - "llvm.hexagon.V6.vabsw.sat.128B" => "__builtin_HEXAGON_V6_vabsw_sat_128B", - "llvm.hexagon.V6.vadd.hf" => "__builtin_HEXAGON_V6_vadd_hf", - "llvm.hexagon.V6.vadd.hf.128B" => "__builtin_HEXAGON_V6_vadd_hf_128B", - "llvm.hexagon.V6.vadd.hf.f8" => "__builtin_HEXAGON_V6_vadd_hf_f8", - "llvm.hexagon.V6.vadd.hf.f8.128B" => "__builtin_HEXAGON_V6_vadd_hf_f8_128B", - "llvm.hexagon.V6.vadd.hf.hf" => "__builtin_HEXAGON_V6_vadd_hf_hf", - "llvm.hexagon.V6.vadd.hf.hf.128B" => "__builtin_HEXAGON_V6_vadd_hf_hf_128B", - "llvm.hexagon.V6.vadd.qf16" => "__builtin_HEXAGON_V6_vadd_qf16", - "llvm.hexagon.V6.vadd.qf16.128B" => "__builtin_HEXAGON_V6_vadd_qf16_128B", - "llvm.hexagon.V6.vadd.qf16.mix" => "__builtin_HEXAGON_V6_vadd_qf16_mix", - "llvm.hexagon.V6.vadd.qf16.mix.128B" => "__builtin_HEXAGON_V6_vadd_qf16_mix_128B", - "llvm.hexagon.V6.vadd.qf32" => "__builtin_HEXAGON_V6_vadd_qf32", - "llvm.hexagon.V6.vadd.qf32.128B" => "__builtin_HEXAGON_V6_vadd_qf32_128B", - "llvm.hexagon.V6.vadd.qf32.mix" => "__builtin_HEXAGON_V6_vadd_qf32_mix", - "llvm.hexagon.V6.vadd.qf32.mix.128B" => "__builtin_HEXAGON_V6_vadd_qf32_mix_128B", - "llvm.hexagon.V6.vadd.sf" => "__builtin_HEXAGON_V6_vadd_sf", - "llvm.hexagon.V6.vadd.sf.128B" => 
"__builtin_HEXAGON_V6_vadd_sf_128B", - "llvm.hexagon.V6.vadd.sf.bf" => "__builtin_HEXAGON_V6_vadd_sf_bf", - "llvm.hexagon.V6.vadd.sf.bf.128B" => "__builtin_HEXAGON_V6_vadd_sf_bf_128B", - "llvm.hexagon.V6.vadd.sf.hf" => "__builtin_HEXAGON_V6_vadd_sf_hf", - "llvm.hexagon.V6.vadd.sf.hf.128B" => "__builtin_HEXAGON_V6_vadd_sf_hf_128B", - "llvm.hexagon.V6.vadd.sf.sf" => "__builtin_HEXAGON_V6_vadd_sf_sf", - "llvm.hexagon.V6.vadd.sf.sf.128B" => "__builtin_HEXAGON_V6_vadd_sf_sf_128B", - "llvm.hexagon.V6.vaddb" => "__builtin_HEXAGON_V6_vaddb", - "llvm.hexagon.V6.vaddb.128B" => "__builtin_HEXAGON_V6_vaddb_128B", - "llvm.hexagon.V6.vaddb.dv" => "__builtin_HEXAGON_V6_vaddb_dv", - "llvm.hexagon.V6.vaddb.dv.128B" => "__builtin_HEXAGON_V6_vaddb_dv_128B", - "llvm.hexagon.V6.vaddbnq" => "__builtin_HEXAGON_V6_vaddbnq", - "llvm.hexagon.V6.vaddbnq.128B" => "__builtin_HEXAGON_V6_vaddbnq_128B", - "llvm.hexagon.V6.vaddbq" => "__builtin_HEXAGON_V6_vaddbq", - "llvm.hexagon.V6.vaddbq.128B" => "__builtin_HEXAGON_V6_vaddbq_128B", - "llvm.hexagon.V6.vaddbsat" => "__builtin_HEXAGON_V6_vaddbsat", - "llvm.hexagon.V6.vaddbsat.128B" => "__builtin_HEXAGON_V6_vaddbsat_128B", - "llvm.hexagon.V6.vaddbsat.dv" => "__builtin_HEXAGON_V6_vaddbsat_dv", - "llvm.hexagon.V6.vaddbsat.dv.128B" => "__builtin_HEXAGON_V6_vaddbsat_dv_128B", - "llvm.hexagon.V6.vaddcarrysat" => "__builtin_HEXAGON_V6_vaddcarrysat", - "llvm.hexagon.V6.vaddcarrysat.128B" => "__builtin_HEXAGON_V6_vaddcarrysat_128B", - "llvm.hexagon.V6.vaddclbh" => "__builtin_HEXAGON_V6_vaddclbh", - "llvm.hexagon.V6.vaddclbh.128B" => "__builtin_HEXAGON_V6_vaddclbh_128B", - "llvm.hexagon.V6.vaddclbw" => "__builtin_HEXAGON_V6_vaddclbw", - "llvm.hexagon.V6.vaddclbw.128B" => "__builtin_HEXAGON_V6_vaddclbw_128B", - "llvm.hexagon.V6.vaddh" => "__builtin_HEXAGON_V6_vaddh", - "llvm.hexagon.V6.vaddh.128B" => "__builtin_HEXAGON_V6_vaddh_128B", - "llvm.hexagon.V6.vaddh.dv" => "__builtin_HEXAGON_V6_vaddh_dv", - "llvm.hexagon.V6.vaddh.dv.128B" => "__builtin_HEXAGON_V6_vaddh_dv_128B", - "llvm.hexagon.V6.vaddhnq" => "__builtin_HEXAGON_V6_vaddhnq", - "llvm.hexagon.V6.vaddhnq.128B" => "__builtin_HEXAGON_V6_vaddhnq_128B", - "llvm.hexagon.V6.vaddhq" => "__builtin_HEXAGON_V6_vaddhq", - "llvm.hexagon.V6.vaddhq.128B" => "__builtin_HEXAGON_V6_vaddhq_128B", - "llvm.hexagon.V6.vaddhsat" => "__builtin_HEXAGON_V6_vaddhsat", - "llvm.hexagon.V6.vaddhsat.128B" => "__builtin_HEXAGON_V6_vaddhsat_128B", - "llvm.hexagon.V6.vaddhsat.dv" => "__builtin_HEXAGON_V6_vaddhsat_dv", - "llvm.hexagon.V6.vaddhsat.dv.128B" => "__builtin_HEXAGON_V6_vaddhsat_dv_128B", - "llvm.hexagon.V6.vaddhw" => "__builtin_HEXAGON_V6_vaddhw", - "llvm.hexagon.V6.vaddhw.128B" => "__builtin_HEXAGON_V6_vaddhw_128B", - "llvm.hexagon.V6.vaddhw.acc" => "__builtin_HEXAGON_V6_vaddhw_acc", - "llvm.hexagon.V6.vaddhw.acc.128B" => "__builtin_HEXAGON_V6_vaddhw_acc_128B", - "llvm.hexagon.V6.vaddubh" => "__builtin_HEXAGON_V6_vaddubh", - "llvm.hexagon.V6.vaddubh.128B" => "__builtin_HEXAGON_V6_vaddubh_128B", - "llvm.hexagon.V6.vaddubh.acc" => "__builtin_HEXAGON_V6_vaddubh_acc", - "llvm.hexagon.V6.vaddubh.acc.128B" => "__builtin_HEXAGON_V6_vaddubh_acc_128B", - "llvm.hexagon.V6.vaddubsat" => "__builtin_HEXAGON_V6_vaddubsat", - "llvm.hexagon.V6.vaddubsat.128B" => "__builtin_HEXAGON_V6_vaddubsat_128B", - "llvm.hexagon.V6.vaddubsat.dv" => "__builtin_HEXAGON_V6_vaddubsat_dv", - "llvm.hexagon.V6.vaddubsat.dv.128B" => "__builtin_HEXAGON_V6_vaddubsat_dv_128B", - "llvm.hexagon.V6.vaddububb.sat" => "__builtin_HEXAGON_V6_vaddububb_sat", - "llvm.hexagon.V6.vaddububb.sat.128B" 
=> "__builtin_HEXAGON_V6_vaddububb_sat_128B", - "llvm.hexagon.V6.vadduhsat" => "__builtin_HEXAGON_V6_vadduhsat", - "llvm.hexagon.V6.vadduhsat.128B" => "__builtin_HEXAGON_V6_vadduhsat_128B", - "llvm.hexagon.V6.vadduhsat.dv" => "__builtin_HEXAGON_V6_vadduhsat_dv", - "llvm.hexagon.V6.vadduhsat.dv.128B" => "__builtin_HEXAGON_V6_vadduhsat_dv_128B", - "llvm.hexagon.V6.vadduhw" => "__builtin_HEXAGON_V6_vadduhw", - "llvm.hexagon.V6.vadduhw.128B" => "__builtin_HEXAGON_V6_vadduhw_128B", - "llvm.hexagon.V6.vadduhw.acc" => "__builtin_HEXAGON_V6_vadduhw_acc", - "llvm.hexagon.V6.vadduhw.acc.128B" => "__builtin_HEXAGON_V6_vadduhw_acc_128B", - "llvm.hexagon.V6.vadduwsat" => "__builtin_HEXAGON_V6_vadduwsat", - "llvm.hexagon.V6.vadduwsat.128B" => "__builtin_HEXAGON_V6_vadduwsat_128B", - "llvm.hexagon.V6.vadduwsat.dv" => "__builtin_HEXAGON_V6_vadduwsat_dv", - "llvm.hexagon.V6.vadduwsat.dv.128B" => "__builtin_HEXAGON_V6_vadduwsat_dv_128B", - "llvm.hexagon.V6.vaddw" => "__builtin_HEXAGON_V6_vaddw", - "llvm.hexagon.V6.vaddw.128B" => "__builtin_HEXAGON_V6_vaddw_128B", - "llvm.hexagon.V6.vaddw.dv" => "__builtin_HEXAGON_V6_vaddw_dv", - "llvm.hexagon.V6.vaddw.dv.128B" => "__builtin_HEXAGON_V6_vaddw_dv_128B", - "llvm.hexagon.V6.vaddwnq" => "__builtin_HEXAGON_V6_vaddwnq", - "llvm.hexagon.V6.vaddwnq.128B" => "__builtin_HEXAGON_V6_vaddwnq_128B", - "llvm.hexagon.V6.vaddwq" => "__builtin_HEXAGON_V6_vaddwq", - "llvm.hexagon.V6.vaddwq.128B" => "__builtin_HEXAGON_V6_vaddwq_128B", - "llvm.hexagon.V6.vaddwsat" => "__builtin_HEXAGON_V6_vaddwsat", - "llvm.hexagon.V6.vaddwsat.128B" => "__builtin_HEXAGON_V6_vaddwsat_128B", - "llvm.hexagon.V6.vaddwsat.dv" => "__builtin_HEXAGON_V6_vaddwsat_dv", - "llvm.hexagon.V6.vaddwsat.dv.128B" => "__builtin_HEXAGON_V6_vaddwsat_dv_128B", - "llvm.hexagon.V6.valignb" => "__builtin_HEXAGON_V6_valignb", - "llvm.hexagon.V6.valignb.128B" => "__builtin_HEXAGON_V6_valignb_128B", - "llvm.hexagon.V6.valignbi" => "__builtin_HEXAGON_V6_valignbi", - "llvm.hexagon.V6.valignbi.128B" => "__builtin_HEXAGON_V6_valignbi_128B", - "llvm.hexagon.V6.vand" => "__builtin_HEXAGON_V6_vand", - "llvm.hexagon.V6.vand.128B" => "__builtin_HEXAGON_V6_vand_128B", - "llvm.hexagon.V6.vandnqrt" => "__builtin_HEXAGON_V6_vandnqrt", - "llvm.hexagon.V6.vandnqrt.128B" => "__builtin_HEXAGON_V6_vandnqrt_128B", - "llvm.hexagon.V6.vandnqrt.acc" => "__builtin_HEXAGON_V6_vandnqrt_acc", - "llvm.hexagon.V6.vandnqrt.acc.128B" => "__builtin_HEXAGON_V6_vandnqrt_acc_128B", - "llvm.hexagon.V6.vandqrt" => "__builtin_HEXAGON_V6_vandqrt", - "llvm.hexagon.V6.vandqrt.128B" => "__builtin_HEXAGON_V6_vandqrt_128B", - "llvm.hexagon.V6.vandqrt.acc" => "__builtin_HEXAGON_V6_vandqrt_acc", - "llvm.hexagon.V6.vandqrt.acc.128B" => "__builtin_HEXAGON_V6_vandqrt_acc_128B", - "llvm.hexagon.V6.vandvnqv" => "__builtin_HEXAGON_V6_vandvnqv", - "llvm.hexagon.V6.vandvnqv.128B" => "__builtin_HEXAGON_V6_vandvnqv_128B", - "llvm.hexagon.V6.vandvqv" => "__builtin_HEXAGON_V6_vandvqv", - "llvm.hexagon.V6.vandvqv.128B" => "__builtin_HEXAGON_V6_vandvqv_128B", - "llvm.hexagon.V6.vandvrt" => "__builtin_HEXAGON_V6_vandvrt", - "llvm.hexagon.V6.vandvrt.128B" => "__builtin_HEXAGON_V6_vandvrt_128B", - "llvm.hexagon.V6.vandvrt.acc" => "__builtin_HEXAGON_V6_vandvrt_acc", - "llvm.hexagon.V6.vandvrt.acc.128B" => "__builtin_HEXAGON_V6_vandvrt_acc_128B", - "llvm.hexagon.V6.vaslh" => "__builtin_HEXAGON_V6_vaslh", - "llvm.hexagon.V6.vaslh.128B" => "__builtin_HEXAGON_V6_vaslh_128B", - "llvm.hexagon.V6.vaslh.acc" => "__builtin_HEXAGON_V6_vaslh_acc", - "llvm.hexagon.V6.vaslh.acc.128B" => 
"__builtin_HEXAGON_V6_vaslh_acc_128B", - "llvm.hexagon.V6.vaslhv" => "__builtin_HEXAGON_V6_vaslhv", - "llvm.hexagon.V6.vaslhv.128B" => "__builtin_HEXAGON_V6_vaslhv_128B", - "llvm.hexagon.V6.vaslw" => "__builtin_HEXAGON_V6_vaslw", - "llvm.hexagon.V6.vaslw.128B" => "__builtin_HEXAGON_V6_vaslw_128B", - "llvm.hexagon.V6.vaslw.acc" => "__builtin_HEXAGON_V6_vaslw_acc", - "llvm.hexagon.V6.vaslw.acc.128B" => "__builtin_HEXAGON_V6_vaslw_acc_128B", - "llvm.hexagon.V6.vaslwv" => "__builtin_HEXAGON_V6_vaslwv", - "llvm.hexagon.V6.vaslwv.128B" => "__builtin_HEXAGON_V6_vaslwv_128B", - "llvm.hexagon.V6.vasr.into" => "__builtin_HEXAGON_V6_vasr_into", - "llvm.hexagon.V6.vasr.into.128B" => "__builtin_HEXAGON_V6_vasr_into_128B", - "llvm.hexagon.V6.vasrh" => "__builtin_HEXAGON_V6_vasrh", - "llvm.hexagon.V6.vasrh.128B" => "__builtin_HEXAGON_V6_vasrh_128B", - "llvm.hexagon.V6.vasrh.acc" => "__builtin_HEXAGON_V6_vasrh_acc", - "llvm.hexagon.V6.vasrh.acc.128B" => "__builtin_HEXAGON_V6_vasrh_acc_128B", - "llvm.hexagon.V6.vasrhbrndsat" => "__builtin_HEXAGON_V6_vasrhbrndsat", - "llvm.hexagon.V6.vasrhbrndsat.128B" => "__builtin_HEXAGON_V6_vasrhbrndsat_128B", - "llvm.hexagon.V6.vasrhbsat" => "__builtin_HEXAGON_V6_vasrhbsat", - "llvm.hexagon.V6.vasrhbsat.128B" => "__builtin_HEXAGON_V6_vasrhbsat_128B", - "llvm.hexagon.V6.vasrhubrndsat" => "__builtin_HEXAGON_V6_vasrhubrndsat", - "llvm.hexagon.V6.vasrhubrndsat.128B" => "__builtin_HEXAGON_V6_vasrhubrndsat_128B", - "llvm.hexagon.V6.vasrhubsat" => "__builtin_HEXAGON_V6_vasrhubsat", - "llvm.hexagon.V6.vasrhubsat.128B" => "__builtin_HEXAGON_V6_vasrhubsat_128B", - "llvm.hexagon.V6.vasrhv" => "__builtin_HEXAGON_V6_vasrhv", - "llvm.hexagon.V6.vasrhv.128B" => "__builtin_HEXAGON_V6_vasrhv_128B", - "llvm.hexagon.V6.vasruhubrndsat" => "__builtin_HEXAGON_V6_vasruhubrndsat", - "llvm.hexagon.V6.vasruhubrndsat.128B" => "__builtin_HEXAGON_V6_vasruhubrndsat_128B", - "llvm.hexagon.V6.vasruhubsat" => "__builtin_HEXAGON_V6_vasruhubsat", - "llvm.hexagon.V6.vasruhubsat.128B" => "__builtin_HEXAGON_V6_vasruhubsat_128B", - "llvm.hexagon.V6.vasruwuhrndsat" => "__builtin_HEXAGON_V6_vasruwuhrndsat", - "llvm.hexagon.V6.vasruwuhrndsat.128B" => "__builtin_HEXAGON_V6_vasruwuhrndsat_128B", - "llvm.hexagon.V6.vasruwuhsat" => "__builtin_HEXAGON_V6_vasruwuhsat", - "llvm.hexagon.V6.vasruwuhsat.128B" => "__builtin_HEXAGON_V6_vasruwuhsat_128B", - "llvm.hexagon.V6.vasrvuhubrndsat" => "__builtin_HEXAGON_V6_vasrvuhubrndsat", - "llvm.hexagon.V6.vasrvuhubrndsat.128B" => "__builtin_HEXAGON_V6_vasrvuhubrndsat_128B", - "llvm.hexagon.V6.vasrvuhubsat" => "__builtin_HEXAGON_V6_vasrvuhubsat", - "llvm.hexagon.V6.vasrvuhubsat.128B" => "__builtin_HEXAGON_V6_vasrvuhubsat_128B", - "llvm.hexagon.V6.vasrvwuhrndsat" => "__builtin_HEXAGON_V6_vasrvwuhrndsat", - "llvm.hexagon.V6.vasrvwuhrndsat.128B" => "__builtin_HEXAGON_V6_vasrvwuhrndsat_128B", - "llvm.hexagon.V6.vasrvwuhsat" => "__builtin_HEXAGON_V6_vasrvwuhsat", - "llvm.hexagon.V6.vasrvwuhsat.128B" => "__builtin_HEXAGON_V6_vasrvwuhsat_128B", - "llvm.hexagon.V6.vasrw" => "__builtin_HEXAGON_V6_vasrw", - "llvm.hexagon.V6.vasrw.128B" => "__builtin_HEXAGON_V6_vasrw_128B", - "llvm.hexagon.V6.vasrw.acc" => "__builtin_HEXAGON_V6_vasrw_acc", - "llvm.hexagon.V6.vasrw.acc.128B" => "__builtin_HEXAGON_V6_vasrw_acc_128B", - "llvm.hexagon.V6.vasrwh" => "__builtin_HEXAGON_V6_vasrwh", - "llvm.hexagon.V6.vasrwh.128B" => "__builtin_HEXAGON_V6_vasrwh_128B", - "llvm.hexagon.V6.vasrwhrndsat" => "__builtin_HEXAGON_V6_vasrwhrndsat", - "llvm.hexagon.V6.vasrwhrndsat.128B" => 
"__builtin_HEXAGON_V6_vasrwhrndsat_128B", - "llvm.hexagon.V6.vasrwhsat" => "__builtin_HEXAGON_V6_vasrwhsat", - "llvm.hexagon.V6.vasrwhsat.128B" => "__builtin_HEXAGON_V6_vasrwhsat_128B", - "llvm.hexagon.V6.vasrwuhrndsat" => "__builtin_HEXAGON_V6_vasrwuhrndsat", - "llvm.hexagon.V6.vasrwuhrndsat.128B" => "__builtin_HEXAGON_V6_vasrwuhrndsat_128B", - "llvm.hexagon.V6.vasrwuhsat" => "__builtin_HEXAGON_V6_vasrwuhsat", - "llvm.hexagon.V6.vasrwuhsat.128B" => "__builtin_HEXAGON_V6_vasrwuhsat_128B", - "llvm.hexagon.V6.vasrwv" => "__builtin_HEXAGON_V6_vasrwv", - "llvm.hexagon.V6.vasrwv.128B" => "__builtin_HEXAGON_V6_vasrwv_128B", - "llvm.hexagon.V6.vassign" => "__builtin_HEXAGON_V6_vassign", - "llvm.hexagon.V6.vassign.128B" => "__builtin_HEXAGON_V6_vassign_128B", - "llvm.hexagon.V6.vassign.fp" => "__builtin_HEXAGON_V6_vassign_fp", - "llvm.hexagon.V6.vassign.fp.128B" => "__builtin_HEXAGON_V6_vassign_fp_128B", - "llvm.hexagon.V6.vassignp" => "__builtin_HEXAGON_V6_vassignp", - "llvm.hexagon.V6.vassignp.128B" => "__builtin_HEXAGON_V6_vassignp_128B", - "llvm.hexagon.V6.vavgb" => "__builtin_HEXAGON_V6_vavgb", - "llvm.hexagon.V6.vavgb.128B" => "__builtin_HEXAGON_V6_vavgb_128B", - "llvm.hexagon.V6.vavgbrnd" => "__builtin_HEXAGON_V6_vavgbrnd", - "llvm.hexagon.V6.vavgbrnd.128B" => "__builtin_HEXAGON_V6_vavgbrnd_128B", - "llvm.hexagon.V6.vavgh" => "__builtin_HEXAGON_V6_vavgh", - "llvm.hexagon.V6.vavgh.128B" => "__builtin_HEXAGON_V6_vavgh_128B", - "llvm.hexagon.V6.vavghrnd" => "__builtin_HEXAGON_V6_vavghrnd", - "llvm.hexagon.V6.vavghrnd.128B" => "__builtin_HEXAGON_V6_vavghrnd_128B", - "llvm.hexagon.V6.vavgub" => "__builtin_HEXAGON_V6_vavgub", - "llvm.hexagon.V6.vavgub.128B" => "__builtin_HEXAGON_V6_vavgub_128B", - "llvm.hexagon.V6.vavgubrnd" => "__builtin_HEXAGON_V6_vavgubrnd", - "llvm.hexagon.V6.vavgubrnd.128B" => "__builtin_HEXAGON_V6_vavgubrnd_128B", - "llvm.hexagon.V6.vavguh" => "__builtin_HEXAGON_V6_vavguh", - "llvm.hexagon.V6.vavguh.128B" => "__builtin_HEXAGON_V6_vavguh_128B", - "llvm.hexagon.V6.vavguhrnd" => "__builtin_HEXAGON_V6_vavguhrnd", - "llvm.hexagon.V6.vavguhrnd.128B" => "__builtin_HEXAGON_V6_vavguhrnd_128B", - "llvm.hexagon.V6.vavguw" => "__builtin_HEXAGON_V6_vavguw", - "llvm.hexagon.V6.vavguw.128B" => "__builtin_HEXAGON_V6_vavguw_128B", - "llvm.hexagon.V6.vavguwrnd" => "__builtin_HEXAGON_V6_vavguwrnd", - "llvm.hexagon.V6.vavguwrnd.128B" => "__builtin_HEXAGON_V6_vavguwrnd_128B", - "llvm.hexagon.V6.vavgw" => "__builtin_HEXAGON_V6_vavgw", - "llvm.hexagon.V6.vavgw.128B" => "__builtin_HEXAGON_V6_vavgw_128B", - "llvm.hexagon.V6.vavgwrnd" => "__builtin_HEXAGON_V6_vavgwrnd", - "llvm.hexagon.V6.vavgwrnd.128B" => "__builtin_HEXAGON_V6_vavgwrnd_128B", - "llvm.hexagon.V6.vcl0h" => "__builtin_HEXAGON_V6_vcl0h", - "llvm.hexagon.V6.vcl0h.128B" => "__builtin_HEXAGON_V6_vcl0h_128B", - "llvm.hexagon.V6.vcl0w" => "__builtin_HEXAGON_V6_vcl0w", - "llvm.hexagon.V6.vcl0w.128B" => "__builtin_HEXAGON_V6_vcl0w_128B", - "llvm.hexagon.V6.vcombine" => "__builtin_HEXAGON_V6_vcombine", - "llvm.hexagon.V6.vcombine.128B" => "__builtin_HEXAGON_V6_vcombine_128B", - "llvm.hexagon.V6.vconv.h.hf" => "__builtin_HEXAGON_V6_vconv_h_hf", - "llvm.hexagon.V6.vconv.h.hf.128B" => "__builtin_HEXAGON_V6_vconv_h_hf_128B", - "llvm.hexagon.V6.vconv.hf.h" => "__builtin_HEXAGON_V6_vconv_hf_h", - "llvm.hexagon.V6.vconv.hf.h.128B" => "__builtin_HEXAGON_V6_vconv_hf_h_128B", - "llvm.hexagon.V6.vconv.hf.qf16" => "__builtin_HEXAGON_V6_vconv_hf_qf16", - "llvm.hexagon.V6.vconv.hf.qf16.128B" => "__builtin_HEXAGON_V6_vconv_hf_qf16_128B", - 
"llvm.hexagon.V6.vconv.hf.qf32" => "__builtin_HEXAGON_V6_vconv_hf_qf32", - "llvm.hexagon.V6.vconv.hf.qf32.128B" => "__builtin_HEXAGON_V6_vconv_hf_qf32_128B", - "llvm.hexagon.V6.vconv.sf.qf32" => "__builtin_HEXAGON_V6_vconv_sf_qf32", - "llvm.hexagon.V6.vconv.sf.qf32.128B" => "__builtin_HEXAGON_V6_vconv_sf_qf32_128B", - "llvm.hexagon.V6.vconv.sf.w" => "__builtin_HEXAGON_V6_vconv_sf_w", - "llvm.hexagon.V6.vconv.sf.w.128B" => "__builtin_HEXAGON_V6_vconv_sf_w_128B", - "llvm.hexagon.V6.vconv.w.sf" => "__builtin_HEXAGON_V6_vconv_w_sf", - "llvm.hexagon.V6.vconv.w.sf.128B" => "__builtin_HEXAGON_V6_vconv_w_sf_128B", - "llvm.hexagon.V6.vcvt.b.hf" => "__builtin_HEXAGON_V6_vcvt_b_hf", - "llvm.hexagon.V6.vcvt.b.hf.128B" => "__builtin_HEXAGON_V6_vcvt_b_hf_128B", - "llvm.hexagon.V6.vcvt.bf.sf" => "__builtin_HEXAGON_V6_vcvt_bf_sf", - "llvm.hexagon.V6.vcvt.bf.sf.128B" => "__builtin_HEXAGON_V6_vcvt_bf_sf_128B", - "llvm.hexagon.V6.vcvt.f8.hf" => "__builtin_HEXAGON_V6_vcvt_f8_hf", - "llvm.hexagon.V6.vcvt.f8.hf.128B" => "__builtin_HEXAGON_V6_vcvt_f8_hf_128B", - "llvm.hexagon.V6.vcvt.h.hf" => "__builtin_HEXAGON_V6_vcvt_h_hf", - "llvm.hexagon.V6.vcvt.h.hf.128B" => "__builtin_HEXAGON_V6_vcvt_h_hf_128B", - "llvm.hexagon.V6.vcvt.hf.b" => "__builtin_HEXAGON_V6_vcvt_hf_b", - "llvm.hexagon.V6.vcvt.hf.b.128B" => "__builtin_HEXAGON_V6_vcvt_hf_b_128B", - "llvm.hexagon.V6.vcvt.hf.f8" => "__builtin_HEXAGON_V6_vcvt_hf_f8", - "llvm.hexagon.V6.vcvt.hf.f8.128B" => "__builtin_HEXAGON_V6_vcvt_hf_f8_128B", - "llvm.hexagon.V6.vcvt.hf.h" => "__builtin_HEXAGON_V6_vcvt_hf_h", - "llvm.hexagon.V6.vcvt.hf.h.128B" => "__builtin_HEXAGON_V6_vcvt_hf_h_128B", - "llvm.hexagon.V6.vcvt.hf.sf" => "__builtin_HEXAGON_V6_vcvt_hf_sf", - "llvm.hexagon.V6.vcvt.hf.sf.128B" => "__builtin_HEXAGON_V6_vcvt_hf_sf_128B", - "llvm.hexagon.V6.vcvt.hf.ub" => "__builtin_HEXAGON_V6_vcvt_hf_ub", - "llvm.hexagon.V6.vcvt.hf.ub.128B" => "__builtin_HEXAGON_V6_vcvt_hf_ub_128B", - "llvm.hexagon.V6.vcvt.hf.uh" => "__builtin_HEXAGON_V6_vcvt_hf_uh", - "llvm.hexagon.V6.vcvt.hf.uh.128B" => "__builtin_HEXAGON_V6_vcvt_hf_uh_128B", - "llvm.hexagon.V6.vcvt.sf.hf" => "__builtin_HEXAGON_V6_vcvt_sf_hf", - "llvm.hexagon.V6.vcvt.sf.hf.128B" => "__builtin_HEXAGON_V6_vcvt_sf_hf_128B", - "llvm.hexagon.V6.vcvt.ub.hf" => "__builtin_HEXAGON_V6_vcvt_ub_hf", - "llvm.hexagon.V6.vcvt.ub.hf.128B" => "__builtin_HEXAGON_V6_vcvt_ub_hf_128B", - "llvm.hexagon.V6.vcvt.uh.hf" => "__builtin_HEXAGON_V6_vcvt_uh_hf", - "llvm.hexagon.V6.vcvt.uh.hf.128B" => "__builtin_HEXAGON_V6_vcvt_uh_hf_128B", - "llvm.hexagon.V6.vcvt2.b.hf" => "__builtin_HEXAGON_V6_vcvt2_b_hf", - "llvm.hexagon.V6.vcvt2.b.hf.128B" => "__builtin_HEXAGON_V6_vcvt2_b_hf_128B", - "llvm.hexagon.V6.vcvt2.hf.b" => "__builtin_HEXAGON_V6_vcvt2_hf_b", - "llvm.hexagon.V6.vcvt2.hf.b.128B" => "__builtin_HEXAGON_V6_vcvt2_hf_b_128B", - "llvm.hexagon.V6.vcvt2.hf.ub" => "__builtin_HEXAGON_V6_vcvt2_hf_ub", - "llvm.hexagon.V6.vcvt2.hf.ub.128B" => "__builtin_HEXAGON_V6_vcvt2_hf_ub_128B", - "llvm.hexagon.V6.vcvt2.ub.hf" => "__builtin_HEXAGON_V6_vcvt2_ub_hf", - "llvm.hexagon.V6.vcvt2.ub.hf.128B" => "__builtin_HEXAGON_V6_vcvt2_ub_hf_128B", - "llvm.hexagon.V6.vd0" => "__builtin_HEXAGON_V6_vd0", - "llvm.hexagon.V6.vd0.128B" => "__builtin_HEXAGON_V6_vd0_128B", - "llvm.hexagon.V6.vdd0" => "__builtin_HEXAGON_V6_vdd0", - "llvm.hexagon.V6.vdd0.128B" => "__builtin_HEXAGON_V6_vdd0_128B", - "llvm.hexagon.V6.vdealb" => "__builtin_HEXAGON_V6_vdealb", - "llvm.hexagon.V6.vdealb.128B" => "__builtin_HEXAGON_V6_vdealb_128B", - "llvm.hexagon.V6.vdealb4w" => 
"__builtin_HEXAGON_V6_vdealb4w", - "llvm.hexagon.V6.vdealb4w.128B" => "__builtin_HEXAGON_V6_vdealb4w_128B", - "llvm.hexagon.V6.vdealh" => "__builtin_HEXAGON_V6_vdealh", - "llvm.hexagon.V6.vdealh.128B" => "__builtin_HEXAGON_V6_vdealh_128B", - "llvm.hexagon.V6.vdealvdd" => "__builtin_HEXAGON_V6_vdealvdd", - "llvm.hexagon.V6.vdealvdd.128B" => "__builtin_HEXAGON_V6_vdealvdd_128B", - "llvm.hexagon.V6.vdelta" => "__builtin_HEXAGON_V6_vdelta", - "llvm.hexagon.V6.vdelta.128B" => "__builtin_HEXAGON_V6_vdelta_128B", - "llvm.hexagon.V6.vdmpy.sf.hf" => "__builtin_HEXAGON_V6_vdmpy_sf_hf", - "llvm.hexagon.V6.vdmpy.sf.hf.128B" => "__builtin_HEXAGON_V6_vdmpy_sf_hf_128B", - "llvm.hexagon.V6.vdmpy.sf.hf.acc" => "__builtin_HEXAGON_V6_vdmpy_sf_hf_acc", - "llvm.hexagon.V6.vdmpy.sf.hf.acc.128B" => "__builtin_HEXAGON_V6_vdmpy_sf_hf_acc_128B", - "llvm.hexagon.V6.vdmpybus" => "__builtin_HEXAGON_V6_vdmpybus", - "llvm.hexagon.V6.vdmpybus.128B" => "__builtin_HEXAGON_V6_vdmpybus_128B", - "llvm.hexagon.V6.vdmpybus.acc" => "__builtin_HEXAGON_V6_vdmpybus_acc", - "llvm.hexagon.V6.vdmpybus.acc.128B" => "__builtin_HEXAGON_V6_vdmpybus_acc_128B", - "llvm.hexagon.V6.vdmpybus.dv" => "__builtin_HEXAGON_V6_vdmpybus_dv", - "llvm.hexagon.V6.vdmpybus.dv.128B" => "__builtin_HEXAGON_V6_vdmpybus_dv_128B", - "llvm.hexagon.V6.vdmpybus.dv.acc" => "__builtin_HEXAGON_V6_vdmpybus_dv_acc", - "llvm.hexagon.V6.vdmpybus.dv.acc.128B" => "__builtin_HEXAGON_V6_vdmpybus_dv_acc_128B", - "llvm.hexagon.V6.vdmpyhb" => "__builtin_HEXAGON_V6_vdmpyhb", - "llvm.hexagon.V6.vdmpyhb.128B" => "__builtin_HEXAGON_V6_vdmpyhb_128B", - "llvm.hexagon.V6.vdmpyhb.acc" => "__builtin_HEXAGON_V6_vdmpyhb_acc", - "llvm.hexagon.V6.vdmpyhb.acc.128B" => "__builtin_HEXAGON_V6_vdmpyhb_acc_128B", - "llvm.hexagon.V6.vdmpyhb.dv" => "__builtin_HEXAGON_V6_vdmpyhb_dv", - "llvm.hexagon.V6.vdmpyhb.dv.128B" => "__builtin_HEXAGON_V6_vdmpyhb_dv_128B", - "llvm.hexagon.V6.vdmpyhb.dv.acc" => "__builtin_HEXAGON_V6_vdmpyhb_dv_acc", - "llvm.hexagon.V6.vdmpyhb.dv.acc.128B" => "__builtin_HEXAGON_V6_vdmpyhb_dv_acc_128B", - "llvm.hexagon.V6.vdmpyhisat" => "__builtin_HEXAGON_V6_vdmpyhisat", - "llvm.hexagon.V6.vdmpyhisat.128B" => "__builtin_HEXAGON_V6_vdmpyhisat_128B", - "llvm.hexagon.V6.vdmpyhisat.acc" => "__builtin_HEXAGON_V6_vdmpyhisat_acc", - "llvm.hexagon.V6.vdmpyhisat.acc.128B" => "__builtin_HEXAGON_V6_vdmpyhisat_acc_128B", - "llvm.hexagon.V6.vdmpyhsat" => "__builtin_HEXAGON_V6_vdmpyhsat", - "llvm.hexagon.V6.vdmpyhsat.128B" => "__builtin_HEXAGON_V6_vdmpyhsat_128B", - "llvm.hexagon.V6.vdmpyhsat.acc" => "__builtin_HEXAGON_V6_vdmpyhsat_acc", - "llvm.hexagon.V6.vdmpyhsat.acc.128B" => "__builtin_HEXAGON_V6_vdmpyhsat_acc_128B", - "llvm.hexagon.V6.vdmpyhsuisat" => "__builtin_HEXAGON_V6_vdmpyhsuisat", - "llvm.hexagon.V6.vdmpyhsuisat.128B" => "__builtin_HEXAGON_V6_vdmpyhsuisat_128B", - "llvm.hexagon.V6.vdmpyhsuisat.acc" => "__builtin_HEXAGON_V6_vdmpyhsuisat_acc", - "llvm.hexagon.V6.vdmpyhsuisat.acc.128B" => "__builtin_HEXAGON_V6_vdmpyhsuisat_acc_128B", - "llvm.hexagon.V6.vdmpyhsusat" => "__builtin_HEXAGON_V6_vdmpyhsusat", - "llvm.hexagon.V6.vdmpyhsusat.128B" => "__builtin_HEXAGON_V6_vdmpyhsusat_128B", - "llvm.hexagon.V6.vdmpyhsusat.acc" => "__builtin_HEXAGON_V6_vdmpyhsusat_acc", - "llvm.hexagon.V6.vdmpyhsusat.acc.128B" => "__builtin_HEXAGON_V6_vdmpyhsusat_acc_128B", - "llvm.hexagon.V6.vdmpyhvsat" => "__builtin_HEXAGON_V6_vdmpyhvsat", - "llvm.hexagon.V6.vdmpyhvsat.128B" => "__builtin_HEXAGON_V6_vdmpyhvsat_128B", - "llvm.hexagon.V6.vdmpyhvsat.acc" => "__builtin_HEXAGON_V6_vdmpyhvsat_acc", - 
"llvm.hexagon.V6.vdmpyhvsat.acc.128B" => "__builtin_HEXAGON_V6_vdmpyhvsat_acc_128B", - "llvm.hexagon.V6.vdsaduh" => "__builtin_HEXAGON_V6_vdsaduh", - "llvm.hexagon.V6.vdsaduh.128B" => "__builtin_HEXAGON_V6_vdsaduh_128B", - "llvm.hexagon.V6.vdsaduh.acc" => "__builtin_HEXAGON_V6_vdsaduh_acc", - "llvm.hexagon.V6.vdsaduh.acc.128B" => "__builtin_HEXAGON_V6_vdsaduh_acc_128B", - "llvm.hexagon.V6.veqb" => "__builtin_HEXAGON_V6_veqb", - "llvm.hexagon.V6.veqb.128B" => "__builtin_HEXAGON_V6_veqb_128B", - "llvm.hexagon.V6.veqb.and" => "__builtin_HEXAGON_V6_veqb_and", - "llvm.hexagon.V6.veqb.and.128B" => "__builtin_HEXAGON_V6_veqb_and_128B", - "llvm.hexagon.V6.veqb.or" => "__builtin_HEXAGON_V6_veqb_or", - "llvm.hexagon.V6.veqb.or.128B" => "__builtin_HEXAGON_V6_veqb_or_128B", - "llvm.hexagon.V6.veqb.xor" => "__builtin_HEXAGON_V6_veqb_xor", - "llvm.hexagon.V6.veqb.xor.128B" => "__builtin_HEXAGON_V6_veqb_xor_128B", - "llvm.hexagon.V6.veqh" => "__builtin_HEXAGON_V6_veqh", - "llvm.hexagon.V6.veqh.128B" => "__builtin_HEXAGON_V6_veqh_128B", - "llvm.hexagon.V6.veqh.and" => "__builtin_HEXAGON_V6_veqh_and", - "llvm.hexagon.V6.veqh.and.128B" => "__builtin_HEXAGON_V6_veqh_and_128B", - "llvm.hexagon.V6.veqh.or" => "__builtin_HEXAGON_V6_veqh_or", - "llvm.hexagon.V6.veqh.or.128B" => "__builtin_HEXAGON_V6_veqh_or_128B", - "llvm.hexagon.V6.veqh.xor" => "__builtin_HEXAGON_V6_veqh_xor", - "llvm.hexagon.V6.veqh.xor.128B" => "__builtin_HEXAGON_V6_veqh_xor_128B", - "llvm.hexagon.V6.veqw" => "__builtin_HEXAGON_V6_veqw", - "llvm.hexagon.V6.veqw.128B" => "__builtin_HEXAGON_V6_veqw_128B", - "llvm.hexagon.V6.veqw.and" => "__builtin_HEXAGON_V6_veqw_and", - "llvm.hexagon.V6.veqw.and.128B" => "__builtin_HEXAGON_V6_veqw_and_128B", - "llvm.hexagon.V6.veqw.or" => "__builtin_HEXAGON_V6_veqw_or", - "llvm.hexagon.V6.veqw.or.128B" => "__builtin_HEXAGON_V6_veqw_or_128B", - "llvm.hexagon.V6.veqw.xor" => "__builtin_HEXAGON_V6_veqw_xor", - "llvm.hexagon.V6.veqw.xor.128B" => "__builtin_HEXAGON_V6_veqw_xor_128B", - "llvm.hexagon.V6.vfmax.f8" => "__builtin_HEXAGON_V6_vfmax_f8", - "llvm.hexagon.V6.vfmax.f8.128B" => "__builtin_HEXAGON_V6_vfmax_f8_128B", - "llvm.hexagon.V6.vfmax.hf" => "__builtin_HEXAGON_V6_vfmax_hf", - "llvm.hexagon.V6.vfmax.hf.128B" => "__builtin_HEXAGON_V6_vfmax_hf_128B", - "llvm.hexagon.V6.vfmax.sf" => "__builtin_HEXAGON_V6_vfmax_sf", - "llvm.hexagon.V6.vfmax.sf.128B" => "__builtin_HEXAGON_V6_vfmax_sf_128B", - "llvm.hexagon.V6.vfmin.f8" => "__builtin_HEXAGON_V6_vfmin_f8", - "llvm.hexagon.V6.vfmin.f8.128B" => "__builtin_HEXAGON_V6_vfmin_f8_128B", - "llvm.hexagon.V6.vfmin.hf" => "__builtin_HEXAGON_V6_vfmin_hf", - "llvm.hexagon.V6.vfmin.hf.128B" => "__builtin_HEXAGON_V6_vfmin_hf_128B", - "llvm.hexagon.V6.vfmin.sf" => "__builtin_HEXAGON_V6_vfmin_sf", - "llvm.hexagon.V6.vfmin.sf.128B" => "__builtin_HEXAGON_V6_vfmin_sf_128B", - "llvm.hexagon.V6.vfneg.f8" => "__builtin_HEXAGON_V6_vfneg_f8", - "llvm.hexagon.V6.vfneg.f8.128B" => "__builtin_HEXAGON_V6_vfneg_f8_128B", - "llvm.hexagon.V6.vfneg.hf" => "__builtin_HEXAGON_V6_vfneg_hf", - "llvm.hexagon.V6.vfneg.hf.128B" => "__builtin_HEXAGON_V6_vfneg_hf_128B", - "llvm.hexagon.V6.vfneg.sf" => "__builtin_HEXAGON_V6_vfneg_sf", - "llvm.hexagon.V6.vfneg.sf.128B" => "__builtin_HEXAGON_V6_vfneg_sf_128B", - "llvm.hexagon.V6.vgathermh" => "__builtin_HEXAGON_V6_vgathermh", - "llvm.hexagon.V6.vgathermh.128B" => "__builtin_HEXAGON_V6_vgathermh_128B", - "llvm.hexagon.V6.vgathermhq" => "__builtin_HEXAGON_V6_vgathermhq", - "llvm.hexagon.V6.vgathermhq.128B" => "__builtin_HEXAGON_V6_vgathermhq_128B", - 
"llvm.hexagon.V6.vgathermhw" => "__builtin_HEXAGON_V6_vgathermhw", - "llvm.hexagon.V6.vgathermhw.128B" => "__builtin_HEXAGON_V6_vgathermhw_128B", - "llvm.hexagon.V6.vgathermhwq" => "__builtin_HEXAGON_V6_vgathermhwq", - "llvm.hexagon.V6.vgathermhwq.128B" => "__builtin_HEXAGON_V6_vgathermhwq_128B", - "llvm.hexagon.V6.vgathermw" => "__builtin_HEXAGON_V6_vgathermw", - "llvm.hexagon.V6.vgathermw.128B" => "__builtin_HEXAGON_V6_vgathermw_128B", - "llvm.hexagon.V6.vgathermwq" => "__builtin_HEXAGON_V6_vgathermwq", - "llvm.hexagon.V6.vgathermwq.128B" => "__builtin_HEXAGON_V6_vgathermwq_128B", - "llvm.hexagon.V6.vgtb" => "__builtin_HEXAGON_V6_vgtb", - "llvm.hexagon.V6.vgtb.128B" => "__builtin_HEXAGON_V6_vgtb_128B", - "llvm.hexagon.V6.vgtb.and" => "__builtin_HEXAGON_V6_vgtb_and", - "llvm.hexagon.V6.vgtb.and.128B" => "__builtin_HEXAGON_V6_vgtb_and_128B", - "llvm.hexagon.V6.vgtb.or" => "__builtin_HEXAGON_V6_vgtb_or", - "llvm.hexagon.V6.vgtb.or.128B" => "__builtin_HEXAGON_V6_vgtb_or_128B", - "llvm.hexagon.V6.vgtb.xor" => "__builtin_HEXAGON_V6_vgtb_xor", - "llvm.hexagon.V6.vgtb.xor.128B" => "__builtin_HEXAGON_V6_vgtb_xor_128B", - "llvm.hexagon.V6.vgtbf" => "__builtin_HEXAGON_V6_vgtbf", - "llvm.hexagon.V6.vgtbf.128B" => "__builtin_HEXAGON_V6_vgtbf_128B", - "llvm.hexagon.V6.vgtbf.and" => "__builtin_HEXAGON_V6_vgtbf_and", - "llvm.hexagon.V6.vgtbf.and.128B" => "__builtin_HEXAGON_V6_vgtbf_and_128B", - "llvm.hexagon.V6.vgtbf.or" => "__builtin_HEXAGON_V6_vgtbf_or", - "llvm.hexagon.V6.vgtbf.or.128B" => "__builtin_HEXAGON_V6_vgtbf_or_128B", - "llvm.hexagon.V6.vgtbf.xor" => "__builtin_HEXAGON_V6_vgtbf_xor", - "llvm.hexagon.V6.vgtbf.xor.128B" => "__builtin_HEXAGON_V6_vgtbf_xor_128B", - "llvm.hexagon.V6.vgth" => "__builtin_HEXAGON_V6_vgth", - "llvm.hexagon.V6.vgth.128B" => "__builtin_HEXAGON_V6_vgth_128B", - "llvm.hexagon.V6.vgth.and" => "__builtin_HEXAGON_V6_vgth_and", - "llvm.hexagon.V6.vgth.and.128B" => "__builtin_HEXAGON_V6_vgth_and_128B", - "llvm.hexagon.V6.vgth.or" => "__builtin_HEXAGON_V6_vgth_or", - "llvm.hexagon.V6.vgth.or.128B" => "__builtin_HEXAGON_V6_vgth_or_128B", - "llvm.hexagon.V6.vgth.xor" => "__builtin_HEXAGON_V6_vgth_xor", - "llvm.hexagon.V6.vgth.xor.128B" => "__builtin_HEXAGON_V6_vgth_xor_128B", - "llvm.hexagon.V6.vgthf" => "__builtin_HEXAGON_V6_vgthf", - "llvm.hexagon.V6.vgthf.128B" => "__builtin_HEXAGON_V6_vgthf_128B", - "llvm.hexagon.V6.vgthf.and" => "__builtin_HEXAGON_V6_vgthf_and", - "llvm.hexagon.V6.vgthf.and.128B" => "__builtin_HEXAGON_V6_vgthf_and_128B", - "llvm.hexagon.V6.vgthf.or" => "__builtin_HEXAGON_V6_vgthf_or", - "llvm.hexagon.V6.vgthf.or.128B" => "__builtin_HEXAGON_V6_vgthf_or_128B", - "llvm.hexagon.V6.vgthf.xor" => "__builtin_HEXAGON_V6_vgthf_xor", - "llvm.hexagon.V6.vgthf.xor.128B" => "__builtin_HEXAGON_V6_vgthf_xor_128B", - "llvm.hexagon.V6.vgtsf" => "__builtin_HEXAGON_V6_vgtsf", - "llvm.hexagon.V6.vgtsf.128B" => "__builtin_HEXAGON_V6_vgtsf_128B", - "llvm.hexagon.V6.vgtsf.and" => "__builtin_HEXAGON_V6_vgtsf_and", - "llvm.hexagon.V6.vgtsf.and.128B" => "__builtin_HEXAGON_V6_vgtsf_and_128B", - "llvm.hexagon.V6.vgtsf.or" => "__builtin_HEXAGON_V6_vgtsf_or", - "llvm.hexagon.V6.vgtsf.or.128B" => "__builtin_HEXAGON_V6_vgtsf_or_128B", - "llvm.hexagon.V6.vgtsf.xor" => "__builtin_HEXAGON_V6_vgtsf_xor", - "llvm.hexagon.V6.vgtsf.xor.128B" => "__builtin_HEXAGON_V6_vgtsf_xor_128B", - "llvm.hexagon.V6.vgtub" => "__builtin_HEXAGON_V6_vgtub", - "llvm.hexagon.V6.vgtub.128B" => "__builtin_HEXAGON_V6_vgtub_128B", - "llvm.hexagon.V6.vgtub.and" => "__builtin_HEXAGON_V6_vgtub_and", - 
"llvm.hexagon.V6.vgtub.and.128B" => "__builtin_HEXAGON_V6_vgtub_and_128B", - "llvm.hexagon.V6.vgtub.or" => "__builtin_HEXAGON_V6_vgtub_or", - "llvm.hexagon.V6.vgtub.or.128B" => "__builtin_HEXAGON_V6_vgtub_or_128B", - "llvm.hexagon.V6.vgtub.xor" => "__builtin_HEXAGON_V6_vgtub_xor", - "llvm.hexagon.V6.vgtub.xor.128B" => "__builtin_HEXAGON_V6_vgtub_xor_128B", - "llvm.hexagon.V6.vgtuh" => "__builtin_HEXAGON_V6_vgtuh", - "llvm.hexagon.V6.vgtuh.128B" => "__builtin_HEXAGON_V6_vgtuh_128B", - "llvm.hexagon.V6.vgtuh.and" => "__builtin_HEXAGON_V6_vgtuh_and", - "llvm.hexagon.V6.vgtuh.and.128B" => "__builtin_HEXAGON_V6_vgtuh_and_128B", - "llvm.hexagon.V6.vgtuh.or" => "__builtin_HEXAGON_V6_vgtuh_or", - "llvm.hexagon.V6.vgtuh.or.128B" => "__builtin_HEXAGON_V6_vgtuh_or_128B", - "llvm.hexagon.V6.vgtuh.xor" => "__builtin_HEXAGON_V6_vgtuh_xor", - "llvm.hexagon.V6.vgtuh.xor.128B" => "__builtin_HEXAGON_V6_vgtuh_xor_128B", - "llvm.hexagon.V6.vgtuw" => "__builtin_HEXAGON_V6_vgtuw", - "llvm.hexagon.V6.vgtuw.128B" => "__builtin_HEXAGON_V6_vgtuw_128B", - "llvm.hexagon.V6.vgtuw.and" => "__builtin_HEXAGON_V6_vgtuw_and", - "llvm.hexagon.V6.vgtuw.and.128B" => "__builtin_HEXAGON_V6_vgtuw_and_128B", - "llvm.hexagon.V6.vgtuw.or" => "__builtin_HEXAGON_V6_vgtuw_or", - "llvm.hexagon.V6.vgtuw.or.128B" => "__builtin_HEXAGON_V6_vgtuw_or_128B", - "llvm.hexagon.V6.vgtuw.xor" => "__builtin_HEXAGON_V6_vgtuw_xor", - "llvm.hexagon.V6.vgtuw.xor.128B" => "__builtin_HEXAGON_V6_vgtuw_xor_128B", - "llvm.hexagon.V6.vgtw" => "__builtin_HEXAGON_V6_vgtw", - "llvm.hexagon.V6.vgtw.128B" => "__builtin_HEXAGON_V6_vgtw_128B", - "llvm.hexagon.V6.vgtw.and" => "__builtin_HEXAGON_V6_vgtw_and", - "llvm.hexagon.V6.vgtw.and.128B" => "__builtin_HEXAGON_V6_vgtw_and_128B", - "llvm.hexagon.V6.vgtw.or" => "__builtin_HEXAGON_V6_vgtw_or", - "llvm.hexagon.V6.vgtw.or.128B" => "__builtin_HEXAGON_V6_vgtw_or_128B", - "llvm.hexagon.V6.vgtw.xor" => "__builtin_HEXAGON_V6_vgtw_xor", - "llvm.hexagon.V6.vgtw.xor.128B" => "__builtin_HEXAGON_V6_vgtw_xor_128B", - "llvm.hexagon.V6.vinsertwr" => "__builtin_HEXAGON_V6_vinsertwr", - "llvm.hexagon.V6.vinsertwr.128B" => "__builtin_HEXAGON_V6_vinsertwr_128B", - "llvm.hexagon.V6.vlalignb" => "__builtin_HEXAGON_V6_vlalignb", - "llvm.hexagon.V6.vlalignb.128B" => "__builtin_HEXAGON_V6_vlalignb_128B", - "llvm.hexagon.V6.vlalignbi" => "__builtin_HEXAGON_V6_vlalignbi", - "llvm.hexagon.V6.vlalignbi.128B" => "__builtin_HEXAGON_V6_vlalignbi_128B", - "llvm.hexagon.V6.vlsrb" => "__builtin_HEXAGON_V6_vlsrb", - "llvm.hexagon.V6.vlsrb.128B" => "__builtin_HEXAGON_V6_vlsrb_128B", - "llvm.hexagon.V6.vlsrh" => "__builtin_HEXAGON_V6_vlsrh", - "llvm.hexagon.V6.vlsrh.128B" => "__builtin_HEXAGON_V6_vlsrh_128B", - "llvm.hexagon.V6.vlsrhv" => "__builtin_HEXAGON_V6_vlsrhv", - "llvm.hexagon.V6.vlsrhv.128B" => "__builtin_HEXAGON_V6_vlsrhv_128B", - "llvm.hexagon.V6.vlsrw" => "__builtin_HEXAGON_V6_vlsrw", - "llvm.hexagon.V6.vlsrw.128B" => "__builtin_HEXAGON_V6_vlsrw_128B", - "llvm.hexagon.V6.vlsrwv" => "__builtin_HEXAGON_V6_vlsrwv", - "llvm.hexagon.V6.vlsrwv.128B" => "__builtin_HEXAGON_V6_vlsrwv_128B", - "llvm.hexagon.V6.vlut4" => "__builtin_HEXAGON_V6_vlut4", - "llvm.hexagon.V6.vlut4.128B" => "__builtin_HEXAGON_V6_vlut4_128B", - "llvm.hexagon.V6.vlutb" => "__builtin_HEXAGON_V6_vlutb", - "llvm.hexagon.V6.vlutb.128B" => "__builtin_HEXAGON_V6_vlutb_128B", - "llvm.hexagon.V6.vlutb.acc" => "__builtin_HEXAGON_V6_vlutb_acc", - "llvm.hexagon.V6.vlutb.acc.128B" => "__builtin_HEXAGON_V6_vlutb_acc_128B", - "llvm.hexagon.V6.vlutb.dv" => "__builtin_HEXAGON_V6_vlutb_dv", - 
"llvm.hexagon.V6.vlutb.dv.128B" => "__builtin_HEXAGON_V6_vlutb_dv_128B", - "llvm.hexagon.V6.vlutb.dv.acc" => "__builtin_HEXAGON_V6_vlutb_dv_acc", - "llvm.hexagon.V6.vlutb.dv.acc.128B" => "__builtin_HEXAGON_V6_vlutb_dv_acc_128B", - "llvm.hexagon.V6.vlutvvb" => "__builtin_HEXAGON_V6_vlutvvb", - "llvm.hexagon.V6.vlutvvb.128B" => "__builtin_HEXAGON_V6_vlutvvb_128B", - "llvm.hexagon.V6.vlutvvb.nm" => "__builtin_HEXAGON_V6_vlutvvb_nm", - "llvm.hexagon.V6.vlutvvb.nm.128B" => "__builtin_HEXAGON_V6_vlutvvb_nm_128B", - "llvm.hexagon.V6.vlutvvb.oracc" => "__builtin_HEXAGON_V6_vlutvvb_oracc", - "llvm.hexagon.V6.vlutvvb.oracc.128B" => "__builtin_HEXAGON_V6_vlutvvb_oracc_128B", - "llvm.hexagon.V6.vlutvvb.oracci" => "__builtin_HEXAGON_V6_vlutvvb_oracci", - "llvm.hexagon.V6.vlutvvb.oracci.128B" => "__builtin_HEXAGON_V6_vlutvvb_oracci_128B", - "llvm.hexagon.V6.vlutvvbi" => "__builtin_HEXAGON_V6_vlutvvbi", - "llvm.hexagon.V6.vlutvvbi.128B" => "__builtin_HEXAGON_V6_vlutvvbi_128B", - "llvm.hexagon.V6.vlutvwh" => "__builtin_HEXAGON_V6_vlutvwh", - "llvm.hexagon.V6.vlutvwh.128B" => "__builtin_HEXAGON_V6_vlutvwh_128B", - "llvm.hexagon.V6.vlutvwh.nm" => "__builtin_HEXAGON_V6_vlutvwh_nm", - "llvm.hexagon.V6.vlutvwh.nm.128B" => "__builtin_HEXAGON_V6_vlutvwh_nm_128B", - "llvm.hexagon.V6.vlutvwh.oracc" => "__builtin_HEXAGON_V6_vlutvwh_oracc", - "llvm.hexagon.V6.vlutvwh.oracc.128B" => "__builtin_HEXAGON_V6_vlutvwh_oracc_128B", - "llvm.hexagon.V6.vlutvwh.oracci" => "__builtin_HEXAGON_V6_vlutvwh_oracci", - "llvm.hexagon.V6.vlutvwh.oracci.128B" => "__builtin_HEXAGON_V6_vlutvwh_oracci_128B", - "llvm.hexagon.V6.vlutvwhi" => "__builtin_HEXAGON_V6_vlutvwhi", - "llvm.hexagon.V6.vlutvwhi.128B" => "__builtin_HEXAGON_V6_vlutvwhi_128B", - "llvm.hexagon.V6.vmax.bf" => "__builtin_HEXAGON_V6_vmax_bf", - "llvm.hexagon.V6.vmax.bf.128B" => "__builtin_HEXAGON_V6_vmax_bf_128B", - "llvm.hexagon.V6.vmax.hf" => "__builtin_HEXAGON_V6_vmax_hf", - "llvm.hexagon.V6.vmax.hf.128B" => "__builtin_HEXAGON_V6_vmax_hf_128B", - "llvm.hexagon.V6.vmax.sf" => "__builtin_HEXAGON_V6_vmax_sf", - "llvm.hexagon.V6.vmax.sf.128B" => "__builtin_HEXAGON_V6_vmax_sf_128B", - "llvm.hexagon.V6.vmaxb" => "__builtin_HEXAGON_V6_vmaxb", - "llvm.hexagon.V6.vmaxb.128B" => "__builtin_HEXAGON_V6_vmaxb_128B", - "llvm.hexagon.V6.vmaxh" => "__builtin_HEXAGON_V6_vmaxh", - "llvm.hexagon.V6.vmaxh.128B" => "__builtin_HEXAGON_V6_vmaxh_128B", - "llvm.hexagon.V6.vmaxub" => "__builtin_HEXAGON_V6_vmaxub", - "llvm.hexagon.V6.vmaxub.128B" => "__builtin_HEXAGON_V6_vmaxub_128B", - "llvm.hexagon.V6.vmaxuh" => "__builtin_HEXAGON_V6_vmaxuh", - "llvm.hexagon.V6.vmaxuh.128B" => "__builtin_HEXAGON_V6_vmaxuh_128B", - "llvm.hexagon.V6.vmaxw" => "__builtin_HEXAGON_V6_vmaxw", - "llvm.hexagon.V6.vmaxw.128B" => "__builtin_HEXAGON_V6_vmaxw_128B", - "llvm.hexagon.V6.vmerge.qf" => "__builtin_HEXAGON_V6_vmerge_qf", - "llvm.hexagon.V6.vmerge.qf.128B" => "__builtin_HEXAGON_V6_vmerge_qf_128B", - "llvm.hexagon.V6.vmin.bf" => "__builtin_HEXAGON_V6_vmin_bf", - "llvm.hexagon.V6.vmin.bf.128B" => "__builtin_HEXAGON_V6_vmin_bf_128B", - "llvm.hexagon.V6.vmin.hf" => "__builtin_HEXAGON_V6_vmin_hf", - "llvm.hexagon.V6.vmin.hf.128B" => "__builtin_HEXAGON_V6_vmin_hf_128B", - "llvm.hexagon.V6.vmin.sf" => "__builtin_HEXAGON_V6_vmin_sf", - "llvm.hexagon.V6.vmin.sf.128B" => "__builtin_HEXAGON_V6_vmin_sf_128B", - "llvm.hexagon.V6.vminb" => "__builtin_HEXAGON_V6_vminb", - "llvm.hexagon.V6.vminb.128B" => "__builtin_HEXAGON_V6_vminb_128B", - "llvm.hexagon.V6.vminh" => "__builtin_HEXAGON_V6_vminh", - "llvm.hexagon.V6.vminh.128B" => 
"__builtin_HEXAGON_V6_vminh_128B", - "llvm.hexagon.V6.vminub" => "__builtin_HEXAGON_V6_vminub", - "llvm.hexagon.V6.vminub.128B" => "__builtin_HEXAGON_V6_vminub_128B", - "llvm.hexagon.V6.vminuh" => "__builtin_HEXAGON_V6_vminuh", - "llvm.hexagon.V6.vminuh.128B" => "__builtin_HEXAGON_V6_vminuh_128B", - "llvm.hexagon.V6.vminw" => "__builtin_HEXAGON_V6_vminw", - "llvm.hexagon.V6.vminw.128B" => "__builtin_HEXAGON_V6_vminw_128B", - "llvm.hexagon.V6.vmpabus" => "__builtin_HEXAGON_V6_vmpabus", - "llvm.hexagon.V6.vmpabus.128B" => "__builtin_HEXAGON_V6_vmpabus_128B", - "llvm.hexagon.V6.vmpabus.acc" => "__builtin_HEXAGON_V6_vmpabus_acc", - "llvm.hexagon.V6.vmpabus.acc.128B" => "__builtin_HEXAGON_V6_vmpabus_acc_128B", - "llvm.hexagon.V6.vmpabusv" => "__builtin_HEXAGON_V6_vmpabusv", - "llvm.hexagon.V6.vmpabusv.128B" => "__builtin_HEXAGON_V6_vmpabusv_128B", - "llvm.hexagon.V6.vmpabuu" => "__builtin_HEXAGON_V6_vmpabuu", - "llvm.hexagon.V6.vmpabuu.128B" => "__builtin_HEXAGON_V6_vmpabuu_128B", - "llvm.hexagon.V6.vmpabuu.acc" => "__builtin_HEXAGON_V6_vmpabuu_acc", - "llvm.hexagon.V6.vmpabuu.acc.128B" => "__builtin_HEXAGON_V6_vmpabuu_acc_128B", - "llvm.hexagon.V6.vmpabuuv" => "__builtin_HEXAGON_V6_vmpabuuv", - "llvm.hexagon.V6.vmpabuuv.128B" => "__builtin_HEXAGON_V6_vmpabuuv_128B", - "llvm.hexagon.V6.vmpahb" => "__builtin_HEXAGON_V6_vmpahb", - "llvm.hexagon.V6.vmpahb.128B" => "__builtin_HEXAGON_V6_vmpahb_128B", - "llvm.hexagon.V6.vmpahb.acc" => "__builtin_HEXAGON_V6_vmpahb_acc", - "llvm.hexagon.V6.vmpahb.acc.128B" => "__builtin_HEXAGON_V6_vmpahb_acc_128B", - "llvm.hexagon.V6.vmpahhsat" => "__builtin_HEXAGON_V6_vmpahhsat", - "llvm.hexagon.V6.vmpahhsat.128B" => "__builtin_HEXAGON_V6_vmpahhsat_128B", - "llvm.hexagon.V6.vmpauhb" => "__builtin_HEXAGON_V6_vmpauhb", - "llvm.hexagon.V6.vmpauhb.128B" => "__builtin_HEXAGON_V6_vmpauhb_128B", - "llvm.hexagon.V6.vmpauhb.acc" => "__builtin_HEXAGON_V6_vmpauhb_acc", - "llvm.hexagon.V6.vmpauhb.acc.128B" => "__builtin_HEXAGON_V6_vmpauhb_acc_128B", - "llvm.hexagon.V6.vmpauhuhsat" => "__builtin_HEXAGON_V6_vmpauhuhsat", - "llvm.hexagon.V6.vmpauhuhsat.128B" => "__builtin_HEXAGON_V6_vmpauhuhsat_128B", - "llvm.hexagon.V6.vmpsuhuhsat" => "__builtin_HEXAGON_V6_vmpsuhuhsat", - "llvm.hexagon.V6.vmpsuhuhsat.128B" => "__builtin_HEXAGON_V6_vmpsuhuhsat_128B", - "llvm.hexagon.V6.vmpy.hf.f8" => "__builtin_HEXAGON_V6_vmpy_hf_f8", - "llvm.hexagon.V6.vmpy.hf.f8.128B" => "__builtin_HEXAGON_V6_vmpy_hf_f8_128B", - "llvm.hexagon.V6.vmpy.hf.f8.acc" => "__builtin_HEXAGON_V6_vmpy_hf_f8_acc", - "llvm.hexagon.V6.vmpy.hf.f8.acc.128B" => "__builtin_HEXAGON_V6_vmpy_hf_f8_acc_128B", - "llvm.hexagon.V6.vmpy.hf.hf" => "__builtin_HEXAGON_V6_vmpy_hf_hf", - "llvm.hexagon.V6.vmpy.hf.hf.128B" => "__builtin_HEXAGON_V6_vmpy_hf_hf_128B", - "llvm.hexagon.V6.vmpy.hf.hf.acc" => "__builtin_HEXAGON_V6_vmpy_hf_hf_acc", - "llvm.hexagon.V6.vmpy.hf.hf.acc.128B" => "__builtin_HEXAGON_V6_vmpy_hf_hf_acc_128B", - "llvm.hexagon.V6.vmpy.qf16" => "__builtin_HEXAGON_V6_vmpy_qf16", - "llvm.hexagon.V6.vmpy.qf16.128B" => "__builtin_HEXAGON_V6_vmpy_qf16_128B", - "llvm.hexagon.V6.vmpy.qf16.hf" => "__builtin_HEXAGON_V6_vmpy_qf16_hf", - "llvm.hexagon.V6.vmpy.qf16.hf.128B" => "__builtin_HEXAGON_V6_vmpy_qf16_hf_128B", - "llvm.hexagon.V6.vmpy.qf16.mix.hf" => "__builtin_HEXAGON_V6_vmpy_qf16_mix_hf", - "llvm.hexagon.V6.vmpy.qf16.mix.hf.128B" => "__builtin_HEXAGON_V6_vmpy_qf16_mix_hf_128B", - "llvm.hexagon.V6.vmpy.qf32" => "__builtin_HEXAGON_V6_vmpy_qf32", - "llvm.hexagon.V6.vmpy.qf32.128B" => "__builtin_HEXAGON_V6_vmpy_qf32_128B", - 
"llvm.hexagon.V6.vmpy.qf32.hf" => "__builtin_HEXAGON_V6_vmpy_qf32_hf", - "llvm.hexagon.V6.vmpy.qf32.hf.128B" => "__builtin_HEXAGON_V6_vmpy_qf32_hf_128B", - "llvm.hexagon.V6.vmpy.qf32.mix.hf" => "__builtin_HEXAGON_V6_vmpy_qf32_mix_hf", - "llvm.hexagon.V6.vmpy.qf32.mix.hf.128B" => "__builtin_HEXAGON_V6_vmpy_qf32_mix_hf_128B", - "llvm.hexagon.V6.vmpy.qf32.qf16" => "__builtin_HEXAGON_V6_vmpy_qf32_qf16", - "llvm.hexagon.V6.vmpy.qf32.qf16.128B" => "__builtin_HEXAGON_V6_vmpy_qf32_qf16_128B", - "llvm.hexagon.V6.vmpy.qf32.sf" => "__builtin_HEXAGON_V6_vmpy_qf32_sf", - "llvm.hexagon.V6.vmpy.qf32.sf.128B" => "__builtin_HEXAGON_V6_vmpy_qf32_sf_128B", - "llvm.hexagon.V6.vmpy.rt.hf" => "__builtin_HEXAGON_V6_vmpy_rt_hf", - "llvm.hexagon.V6.vmpy.rt.hf.128B" => "__builtin_HEXAGON_V6_vmpy_rt_hf_128B", - "llvm.hexagon.V6.vmpy.rt.qf16" => "__builtin_HEXAGON_V6_vmpy_rt_qf16", - "llvm.hexagon.V6.vmpy.rt.qf16.128B" => "__builtin_HEXAGON_V6_vmpy_rt_qf16_128B", - "llvm.hexagon.V6.vmpy.rt.sf" => "__builtin_HEXAGON_V6_vmpy_rt_sf", - "llvm.hexagon.V6.vmpy.rt.sf.128B" => "__builtin_HEXAGON_V6_vmpy_rt_sf_128B", - "llvm.hexagon.V6.vmpy.sf.bf" => "__builtin_HEXAGON_V6_vmpy_sf_bf", - "llvm.hexagon.V6.vmpy.sf.bf.128B" => "__builtin_HEXAGON_V6_vmpy_sf_bf_128B", - "llvm.hexagon.V6.vmpy.sf.bf.acc" => "__builtin_HEXAGON_V6_vmpy_sf_bf_acc", - "llvm.hexagon.V6.vmpy.sf.bf.acc.128B" => "__builtin_HEXAGON_V6_vmpy_sf_bf_acc_128B", - "llvm.hexagon.V6.vmpy.sf.hf" => "__builtin_HEXAGON_V6_vmpy_sf_hf", - "llvm.hexagon.V6.vmpy.sf.hf.128B" => "__builtin_HEXAGON_V6_vmpy_sf_hf_128B", - "llvm.hexagon.V6.vmpy.sf.hf.acc" => "__builtin_HEXAGON_V6_vmpy_sf_hf_acc", - "llvm.hexagon.V6.vmpy.sf.hf.acc.128B" => "__builtin_HEXAGON_V6_vmpy_sf_hf_acc_128B", - "llvm.hexagon.V6.vmpy.sf.sf" => "__builtin_HEXAGON_V6_vmpy_sf_sf", - "llvm.hexagon.V6.vmpy.sf.sf.128B" => "__builtin_HEXAGON_V6_vmpy_sf_sf_128B", - "llvm.hexagon.V6.vmpybus" => "__builtin_HEXAGON_V6_vmpybus", - "llvm.hexagon.V6.vmpybus.128B" => "__builtin_HEXAGON_V6_vmpybus_128B", - "llvm.hexagon.V6.vmpybus.acc" => "__builtin_HEXAGON_V6_vmpybus_acc", - "llvm.hexagon.V6.vmpybus.acc.128B" => "__builtin_HEXAGON_V6_vmpybus_acc_128B", - "llvm.hexagon.V6.vmpybusv" => "__builtin_HEXAGON_V6_vmpybusv", - "llvm.hexagon.V6.vmpybusv.128B" => "__builtin_HEXAGON_V6_vmpybusv_128B", - "llvm.hexagon.V6.vmpybusv.acc" => "__builtin_HEXAGON_V6_vmpybusv_acc", - "llvm.hexagon.V6.vmpybusv.acc.128B" => "__builtin_HEXAGON_V6_vmpybusv_acc_128B", - "llvm.hexagon.V6.vmpybv" => "__builtin_HEXAGON_V6_vmpybv", - "llvm.hexagon.V6.vmpybv.128B" => "__builtin_HEXAGON_V6_vmpybv_128B", - "llvm.hexagon.V6.vmpybv.acc" => "__builtin_HEXAGON_V6_vmpybv_acc", - "llvm.hexagon.V6.vmpybv.acc.128B" => "__builtin_HEXAGON_V6_vmpybv_acc_128B", - "llvm.hexagon.V6.vmpyewuh" => "__builtin_HEXAGON_V6_vmpyewuh", - "llvm.hexagon.V6.vmpyewuh.128B" => "__builtin_HEXAGON_V6_vmpyewuh_128B", - "llvm.hexagon.V6.vmpyewuh.64" => "__builtin_HEXAGON_V6_vmpyewuh_64", - "llvm.hexagon.V6.vmpyewuh.64.128B" => "__builtin_HEXAGON_V6_vmpyewuh_64_128B", - "llvm.hexagon.V6.vmpyh" => "__builtin_HEXAGON_V6_vmpyh", - "llvm.hexagon.V6.vmpyh.128B" => "__builtin_HEXAGON_V6_vmpyh_128B", - "llvm.hexagon.V6.vmpyh.acc" => "__builtin_HEXAGON_V6_vmpyh_acc", - "llvm.hexagon.V6.vmpyh.acc.128B" => "__builtin_HEXAGON_V6_vmpyh_acc_128B", - "llvm.hexagon.V6.vmpyhsat.acc" => "__builtin_HEXAGON_V6_vmpyhsat_acc", - "llvm.hexagon.V6.vmpyhsat.acc.128B" => "__builtin_HEXAGON_V6_vmpyhsat_acc_128B", - "llvm.hexagon.V6.vmpyhsrs" => "__builtin_HEXAGON_V6_vmpyhsrs", - "llvm.hexagon.V6.vmpyhsrs.128B" 
=> "__builtin_HEXAGON_V6_vmpyhsrs_128B", - "llvm.hexagon.V6.vmpyhss" => "__builtin_HEXAGON_V6_vmpyhss", - "llvm.hexagon.V6.vmpyhss.128B" => "__builtin_HEXAGON_V6_vmpyhss_128B", - "llvm.hexagon.V6.vmpyhus" => "__builtin_HEXAGON_V6_vmpyhus", - "llvm.hexagon.V6.vmpyhus.128B" => "__builtin_HEXAGON_V6_vmpyhus_128B", - "llvm.hexagon.V6.vmpyhus.acc" => "__builtin_HEXAGON_V6_vmpyhus_acc", - "llvm.hexagon.V6.vmpyhus.acc.128B" => "__builtin_HEXAGON_V6_vmpyhus_acc_128B", - "llvm.hexagon.V6.vmpyhv" => "__builtin_HEXAGON_V6_vmpyhv", - "llvm.hexagon.V6.vmpyhv.128B" => "__builtin_HEXAGON_V6_vmpyhv_128B", - "llvm.hexagon.V6.vmpyhv.acc" => "__builtin_HEXAGON_V6_vmpyhv_acc", - "llvm.hexagon.V6.vmpyhv.acc.128B" => "__builtin_HEXAGON_V6_vmpyhv_acc_128B", - "llvm.hexagon.V6.vmpyhvsrs" => "__builtin_HEXAGON_V6_vmpyhvsrs", - "llvm.hexagon.V6.vmpyhvsrs.128B" => "__builtin_HEXAGON_V6_vmpyhvsrs_128B", - "llvm.hexagon.V6.vmpyieoh" => "__builtin_HEXAGON_V6_vmpyieoh", - "llvm.hexagon.V6.vmpyieoh.128B" => "__builtin_HEXAGON_V6_vmpyieoh_128B", - "llvm.hexagon.V6.vmpyiewh.acc" => "__builtin_HEXAGON_V6_vmpyiewh_acc", - "llvm.hexagon.V6.vmpyiewh.acc.128B" => "__builtin_HEXAGON_V6_vmpyiewh_acc_128B", - "llvm.hexagon.V6.vmpyiewuh" => "__builtin_HEXAGON_V6_vmpyiewuh", - "llvm.hexagon.V6.vmpyiewuh.128B" => "__builtin_HEXAGON_V6_vmpyiewuh_128B", - "llvm.hexagon.V6.vmpyiewuh.acc" => "__builtin_HEXAGON_V6_vmpyiewuh_acc", - "llvm.hexagon.V6.vmpyiewuh.acc.128B" => "__builtin_HEXAGON_V6_vmpyiewuh_acc_128B", - "llvm.hexagon.V6.vmpyih" => "__builtin_HEXAGON_V6_vmpyih", - "llvm.hexagon.V6.vmpyih.128B" => "__builtin_HEXAGON_V6_vmpyih_128B", - "llvm.hexagon.V6.vmpyih.acc" => "__builtin_HEXAGON_V6_vmpyih_acc", - "llvm.hexagon.V6.vmpyih.acc.128B" => "__builtin_HEXAGON_V6_vmpyih_acc_128B", - "llvm.hexagon.V6.vmpyihb" => "__builtin_HEXAGON_V6_vmpyihb", - "llvm.hexagon.V6.vmpyihb.128B" => "__builtin_HEXAGON_V6_vmpyihb_128B", - "llvm.hexagon.V6.vmpyihb.acc" => "__builtin_HEXAGON_V6_vmpyihb_acc", - "llvm.hexagon.V6.vmpyihb.acc.128B" => "__builtin_HEXAGON_V6_vmpyihb_acc_128B", - "llvm.hexagon.V6.vmpyiowh" => "__builtin_HEXAGON_V6_vmpyiowh", - "llvm.hexagon.V6.vmpyiowh.128B" => "__builtin_HEXAGON_V6_vmpyiowh_128B", - "llvm.hexagon.V6.vmpyiwb" => "__builtin_HEXAGON_V6_vmpyiwb", - "llvm.hexagon.V6.vmpyiwb.128B" => "__builtin_HEXAGON_V6_vmpyiwb_128B", - "llvm.hexagon.V6.vmpyiwb.acc" => "__builtin_HEXAGON_V6_vmpyiwb_acc", - "llvm.hexagon.V6.vmpyiwb.acc.128B" => "__builtin_HEXAGON_V6_vmpyiwb_acc_128B", - "llvm.hexagon.V6.vmpyiwh" => "__builtin_HEXAGON_V6_vmpyiwh", - "llvm.hexagon.V6.vmpyiwh.128B" => "__builtin_HEXAGON_V6_vmpyiwh_128B", - "llvm.hexagon.V6.vmpyiwh.acc" => "__builtin_HEXAGON_V6_vmpyiwh_acc", - "llvm.hexagon.V6.vmpyiwh.acc.128B" => "__builtin_HEXAGON_V6_vmpyiwh_acc_128B", - "llvm.hexagon.V6.vmpyiwub" => "__builtin_HEXAGON_V6_vmpyiwub", - "llvm.hexagon.V6.vmpyiwub.128B" => "__builtin_HEXAGON_V6_vmpyiwub_128B", - "llvm.hexagon.V6.vmpyiwub.acc" => "__builtin_HEXAGON_V6_vmpyiwub_acc", - "llvm.hexagon.V6.vmpyiwub.acc.128B" => "__builtin_HEXAGON_V6_vmpyiwub_acc_128B", - "llvm.hexagon.V6.vmpyowh" => "__builtin_HEXAGON_V6_vmpyowh", - "llvm.hexagon.V6.vmpyowh.128B" => "__builtin_HEXAGON_V6_vmpyowh_128B", - "llvm.hexagon.V6.vmpyowh.64.acc" => "__builtin_HEXAGON_V6_vmpyowh_64_acc", - "llvm.hexagon.V6.vmpyowh.64.acc.128B" => "__builtin_HEXAGON_V6_vmpyowh_64_acc_128B", - "llvm.hexagon.V6.vmpyowh.rnd" => "__builtin_HEXAGON_V6_vmpyowh_rnd", - "llvm.hexagon.V6.vmpyowh.rnd.128B" => "__builtin_HEXAGON_V6_vmpyowh_rnd_128B", - 
"llvm.hexagon.V6.vmpyowh.rnd.sacc" => "__builtin_HEXAGON_V6_vmpyowh_rnd_sacc", - "llvm.hexagon.V6.vmpyowh.rnd.sacc.128B" => "__builtin_HEXAGON_V6_vmpyowh_rnd_sacc_128B", - "llvm.hexagon.V6.vmpyowh.sacc" => "__builtin_HEXAGON_V6_vmpyowh_sacc", - "llvm.hexagon.V6.vmpyowh.sacc.128B" => "__builtin_HEXAGON_V6_vmpyowh_sacc_128B", - "llvm.hexagon.V6.vmpyub" => "__builtin_HEXAGON_V6_vmpyub", - "llvm.hexagon.V6.vmpyub.128B" => "__builtin_HEXAGON_V6_vmpyub_128B", - "llvm.hexagon.V6.vmpyub.acc" => "__builtin_HEXAGON_V6_vmpyub_acc", - "llvm.hexagon.V6.vmpyub.acc.128B" => "__builtin_HEXAGON_V6_vmpyub_acc_128B", - "llvm.hexagon.V6.vmpyubv" => "__builtin_HEXAGON_V6_vmpyubv", - "llvm.hexagon.V6.vmpyubv.128B" => "__builtin_HEXAGON_V6_vmpyubv_128B", - "llvm.hexagon.V6.vmpyubv.acc" => "__builtin_HEXAGON_V6_vmpyubv_acc", - "llvm.hexagon.V6.vmpyubv.acc.128B" => "__builtin_HEXAGON_V6_vmpyubv_acc_128B", - "llvm.hexagon.V6.vmpyuh" => "__builtin_HEXAGON_V6_vmpyuh", - "llvm.hexagon.V6.vmpyuh.128B" => "__builtin_HEXAGON_V6_vmpyuh_128B", - "llvm.hexagon.V6.vmpyuh.acc" => "__builtin_HEXAGON_V6_vmpyuh_acc", - "llvm.hexagon.V6.vmpyuh.acc.128B" => "__builtin_HEXAGON_V6_vmpyuh_acc_128B", - "llvm.hexagon.V6.vmpyuhe" => "__builtin_HEXAGON_V6_vmpyuhe", - "llvm.hexagon.V6.vmpyuhe.128B" => "__builtin_HEXAGON_V6_vmpyuhe_128B", - "llvm.hexagon.V6.vmpyuhe.acc" => "__builtin_HEXAGON_V6_vmpyuhe_acc", - "llvm.hexagon.V6.vmpyuhe.acc.128B" => "__builtin_HEXAGON_V6_vmpyuhe_acc_128B", - "llvm.hexagon.V6.vmpyuhv" => "__builtin_HEXAGON_V6_vmpyuhv", - "llvm.hexagon.V6.vmpyuhv.128B" => "__builtin_HEXAGON_V6_vmpyuhv_128B", - "llvm.hexagon.V6.vmpyuhv.acc" => "__builtin_HEXAGON_V6_vmpyuhv_acc", - "llvm.hexagon.V6.vmpyuhv.acc.128B" => "__builtin_HEXAGON_V6_vmpyuhv_acc_128B", - "llvm.hexagon.V6.vmpyuhvs" => "__builtin_HEXAGON_V6_vmpyuhvs", - "llvm.hexagon.V6.vmpyuhvs.128B" => "__builtin_HEXAGON_V6_vmpyuhvs_128B", - "llvm.hexagon.V6.vmux" => "__builtin_HEXAGON_V6_vmux", - "llvm.hexagon.V6.vmux.128B" => "__builtin_HEXAGON_V6_vmux_128B", - "llvm.hexagon.V6.vnavgb" => "__builtin_HEXAGON_V6_vnavgb", - "llvm.hexagon.V6.vnavgb.128B" => "__builtin_HEXAGON_V6_vnavgb_128B", - "llvm.hexagon.V6.vnavgh" => "__builtin_HEXAGON_V6_vnavgh", - "llvm.hexagon.V6.vnavgh.128B" => "__builtin_HEXAGON_V6_vnavgh_128B", - "llvm.hexagon.V6.vnavgub" => "__builtin_HEXAGON_V6_vnavgub", - "llvm.hexagon.V6.vnavgub.128B" => "__builtin_HEXAGON_V6_vnavgub_128B", - "llvm.hexagon.V6.vnavgw" => "__builtin_HEXAGON_V6_vnavgw", - "llvm.hexagon.V6.vnavgw.128B" => "__builtin_HEXAGON_V6_vnavgw_128B", - "llvm.hexagon.V6.vnormamth" => "__builtin_HEXAGON_V6_vnormamth", - "llvm.hexagon.V6.vnormamth.128B" => "__builtin_HEXAGON_V6_vnormamth_128B", - "llvm.hexagon.V6.vnormamtw" => "__builtin_HEXAGON_V6_vnormamtw", - "llvm.hexagon.V6.vnormamtw.128B" => "__builtin_HEXAGON_V6_vnormamtw_128B", - "llvm.hexagon.V6.vnot" => "__builtin_HEXAGON_V6_vnot", - "llvm.hexagon.V6.vnot.128B" => "__builtin_HEXAGON_V6_vnot_128B", - "llvm.hexagon.V6.vor" => "__builtin_HEXAGON_V6_vor", - "llvm.hexagon.V6.vor.128B" => "__builtin_HEXAGON_V6_vor_128B", - "llvm.hexagon.V6.vpackeb" => "__builtin_HEXAGON_V6_vpackeb", - "llvm.hexagon.V6.vpackeb.128B" => "__builtin_HEXAGON_V6_vpackeb_128B", - "llvm.hexagon.V6.vpackeh" => "__builtin_HEXAGON_V6_vpackeh", - "llvm.hexagon.V6.vpackeh.128B" => "__builtin_HEXAGON_V6_vpackeh_128B", - "llvm.hexagon.V6.vpackhb.sat" => "__builtin_HEXAGON_V6_vpackhb_sat", - "llvm.hexagon.V6.vpackhb.sat.128B" => "__builtin_HEXAGON_V6_vpackhb_sat_128B", - "llvm.hexagon.V6.vpackhub.sat" => 
"__builtin_HEXAGON_V6_vpackhub_sat", - "llvm.hexagon.V6.vpackhub.sat.128B" => "__builtin_HEXAGON_V6_vpackhub_sat_128B", - "llvm.hexagon.V6.vpackob" => "__builtin_HEXAGON_V6_vpackob", - "llvm.hexagon.V6.vpackob.128B" => "__builtin_HEXAGON_V6_vpackob_128B", - "llvm.hexagon.V6.vpackoh" => "__builtin_HEXAGON_V6_vpackoh", - "llvm.hexagon.V6.vpackoh.128B" => "__builtin_HEXAGON_V6_vpackoh_128B", - "llvm.hexagon.V6.vpackwh.sat" => "__builtin_HEXAGON_V6_vpackwh_sat", - "llvm.hexagon.V6.vpackwh.sat.128B" => "__builtin_HEXAGON_V6_vpackwh_sat_128B", - "llvm.hexagon.V6.vpackwuh.sat" => "__builtin_HEXAGON_V6_vpackwuh_sat", - "llvm.hexagon.V6.vpackwuh.sat.128B" => "__builtin_HEXAGON_V6_vpackwuh_sat_128B", - "llvm.hexagon.V6.vpopcounth" => "__builtin_HEXAGON_V6_vpopcounth", - "llvm.hexagon.V6.vpopcounth.128B" => "__builtin_HEXAGON_V6_vpopcounth_128B", - "llvm.hexagon.V6.vprefixqb" => "__builtin_HEXAGON_V6_vprefixqb", - "llvm.hexagon.V6.vprefixqb.128B" => "__builtin_HEXAGON_V6_vprefixqb_128B", - "llvm.hexagon.V6.vprefixqh" => "__builtin_HEXAGON_V6_vprefixqh", - "llvm.hexagon.V6.vprefixqh.128B" => "__builtin_HEXAGON_V6_vprefixqh_128B", - "llvm.hexagon.V6.vprefixqw" => "__builtin_HEXAGON_V6_vprefixqw", - "llvm.hexagon.V6.vprefixqw.128B" => "__builtin_HEXAGON_V6_vprefixqw_128B", - "llvm.hexagon.V6.vrdelta" => "__builtin_HEXAGON_V6_vrdelta", - "llvm.hexagon.V6.vrdelta.128B" => "__builtin_HEXAGON_V6_vrdelta_128B", - "llvm.hexagon.V6.vrmpybub.rtt" => "__builtin_HEXAGON_V6_vrmpybub_rtt", - "llvm.hexagon.V6.vrmpybub.rtt.128B" => "__builtin_HEXAGON_V6_vrmpybub_rtt_128B", - "llvm.hexagon.V6.vrmpybub.rtt.acc" => "__builtin_HEXAGON_V6_vrmpybub_rtt_acc", - "llvm.hexagon.V6.vrmpybub.rtt.acc.128B" => "__builtin_HEXAGON_V6_vrmpybub_rtt_acc_128B", - "llvm.hexagon.V6.vrmpybus" => "__builtin_HEXAGON_V6_vrmpybus", - "llvm.hexagon.V6.vrmpybus.128B" => "__builtin_HEXAGON_V6_vrmpybus_128B", - "llvm.hexagon.V6.vrmpybus.acc" => "__builtin_HEXAGON_V6_vrmpybus_acc", - "llvm.hexagon.V6.vrmpybus.acc.128B" => "__builtin_HEXAGON_V6_vrmpybus_acc_128B", - "llvm.hexagon.V6.vrmpybusi" => "__builtin_HEXAGON_V6_vrmpybusi", - "llvm.hexagon.V6.vrmpybusi.128B" => "__builtin_HEXAGON_V6_vrmpybusi_128B", - "llvm.hexagon.V6.vrmpybusi.acc" => "__builtin_HEXAGON_V6_vrmpybusi_acc", - "llvm.hexagon.V6.vrmpybusi.acc.128B" => "__builtin_HEXAGON_V6_vrmpybusi_acc_128B", - "llvm.hexagon.V6.vrmpybusv" => "__builtin_HEXAGON_V6_vrmpybusv", - "llvm.hexagon.V6.vrmpybusv.128B" => "__builtin_HEXAGON_V6_vrmpybusv_128B", - "llvm.hexagon.V6.vrmpybusv.acc" => "__builtin_HEXAGON_V6_vrmpybusv_acc", - "llvm.hexagon.V6.vrmpybusv.acc.128B" => "__builtin_HEXAGON_V6_vrmpybusv_acc_128B", - "llvm.hexagon.V6.vrmpybv" => "__builtin_HEXAGON_V6_vrmpybv", - "llvm.hexagon.V6.vrmpybv.128B" => "__builtin_HEXAGON_V6_vrmpybv_128B", - "llvm.hexagon.V6.vrmpybv.acc" => "__builtin_HEXAGON_V6_vrmpybv_acc", - "llvm.hexagon.V6.vrmpybv.acc.128B" => "__builtin_HEXAGON_V6_vrmpybv_acc_128B", - "llvm.hexagon.V6.vrmpyub" => "__builtin_HEXAGON_V6_vrmpyub", - "llvm.hexagon.V6.vrmpyub.128B" => "__builtin_HEXAGON_V6_vrmpyub_128B", - "llvm.hexagon.V6.vrmpyub.acc" => "__builtin_HEXAGON_V6_vrmpyub_acc", - "llvm.hexagon.V6.vrmpyub.acc.128B" => "__builtin_HEXAGON_V6_vrmpyub_acc_128B", - "llvm.hexagon.V6.vrmpyub.rtt" => "__builtin_HEXAGON_V6_vrmpyub_rtt", - "llvm.hexagon.V6.vrmpyub.rtt.128B" => "__builtin_HEXAGON_V6_vrmpyub_rtt_128B", - "llvm.hexagon.V6.vrmpyub.rtt.acc" => "__builtin_HEXAGON_V6_vrmpyub_rtt_acc", - "llvm.hexagon.V6.vrmpyub.rtt.acc.128B" => "__builtin_HEXAGON_V6_vrmpyub_rtt_acc_128B", - 
"llvm.hexagon.V6.vrmpyubi" => "__builtin_HEXAGON_V6_vrmpyubi", - "llvm.hexagon.V6.vrmpyubi.128B" => "__builtin_HEXAGON_V6_vrmpyubi_128B", - "llvm.hexagon.V6.vrmpyubi.acc" => "__builtin_HEXAGON_V6_vrmpyubi_acc", - "llvm.hexagon.V6.vrmpyubi.acc.128B" => "__builtin_HEXAGON_V6_vrmpyubi_acc_128B", - "llvm.hexagon.V6.vrmpyubv" => "__builtin_HEXAGON_V6_vrmpyubv", - "llvm.hexagon.V6.vrmpyubv.128B" => "__builtin_HEXAGON_V6_vrmpyubv_128B", - "llvm.hexagon.V6.vrmpyubv.acc" => "__builtin_HEXAGON_V6_vrmpyubv_acc", - "llvm.hexagon.V6.vrmpyubv.acc.128B" => "__builtin_HEXAGON_V6_vrmpyubv_acc_128B", - "llvm.hexagon.V6.vror" => "__builtin_HEXAGON_V6_vror", - "llvm.hexagon.V6.vror.128B" => "__builtin_HEXAGON_V6_vror_128B", - "llvm.hexagon.V6.vrotr" => "__builtin_HEXAGON_V6_vrotr", - "llvm.hexagon.V6.vrotr.128B" => "__builtin_HEXAGON_V6_vrotr_128B", - "llvm.hexagon.V6.vroundhb" => "__builtin_HEXAGON_V6_vroundhb", - "llvm.hexagon.V6.vroundhb.128B" => "__builtin_HEXAGON_V6_vroundhb_128B", - "llvm.hexagon.V6.vroundhub" => "__builtin_HEXAGON_V6_vroundhub", - "llvm.hexagon.V6.vroundhub.128B" => "__builtin_HEXAGON_V6_vroundhub_128B", - "llvm.hexagon.V6.vrounduhub" => "__builtin_HEXAGON_V6_vrounduhub", - "llvm.hexagon.V6.vrounduhub.128B" => "__builtin_HEXAGON_V6_vrounduhub_128B", - "llvm.hexagon.V6.vrounduwuh" => "__builtin_HEXAGON_V6_vrounduwuh", - "llvm.hexagon.V6.vrounduwuh.128B" => "__builtin_HEXAGON_V6_vrounduwuh_128B", - "llvm.hexagon.V6.vroundwh" => "__builtin_HEXAGON_V6_vroundwh", - "llvm.hexagon.V6.vroundwh.128B" => "__builtin_HEXAGON_V6_vroundwh_128B", - "llvm.hexagon.V6.vroundwuh" => "__builtin_HEXAGON_V6_vroundwuh", - "llvm.hexagon.V6.vroundwuh.128B" => "__builtin_HEXAGON_V6_vroundwuh_128B", - "llvm.hexagon.V6.vrsadubi" => "__builtin_HEXAGON_V6_vrsadubi", - "llvm.hexagon.V6.vrsadubi.128B" => "__builtin_HEXAGON_V6_vrsadubi_128B", - "llvm.hexagon.V6.vrsadubi.acc" => "__builtin_HEXAGON_V6_vrsadubi_acc", - "llvm.hexagon.V6.vrsadubi.acc.128B" => "__builtin_HEXAGON_V6_vrsadubi_acc_128B", - "llvm.hexagon.V6.vsatdw" => "__builtin_HEXAGON_V6_vsatdw", - "llvm.hexagon.V6.vsatdw.128B" => "__builtin_HEXAGON_V6_vsatdw_128B", - "llvm.hexagon.V6.vsathub" => "__builtin_HEXAGON_V6_vsathub", - "llvm.hexagon.V6.vsathub.128B" => "__builtin_HEXAGON_V6_vsathub_128B", - "llvm.hexagon.V6.vsatuwuh" => "__builtin_HEXAGON_V6_vsatuwuh", - "llvm.hexagon.V6.vsatuwuh.128B" => "__builtin_HEXAGON_V6_vsatuwuh_128B", - "llvm.hexagon.V6.vsatwh" => "__builtin_HEXAGON_V6_vsatwh", - "llvm.hexagon.V6.vsatwh.128B" => "__builtin_HEXAGON_V6_vsatwh_128B", - "llvm.hexagon.V6.vsb" => "__builtin_HEXAGON_V6_vsb", - "llvm.hexagon.V6.vsb.128B" => "__builtin_HEXAGON_V6_vsb_128B", - "llvm.hexagon.V6.vscattermh" => "__builtin_HEXAGON_V6_vscattermh", - "llvm.hexagon.V6.vscattermh.128B" => "__builtin_HEXAGON_V6_vscattermh_128B", - "llvm.hexagon.V6.vscattermh.add" => "__builtin_HEXAGON_V6_vscattermh_add", - "llvm.hexagon.V6.vscattermh.add.128B" => "__builtin_HEXAGON_V6_vscattermh_add_128B", - "llvm.hexagon.V6.vscattermhq" => "__builtin_HEXAGON_V6_vscattermhq", - "llvm.hexagon.V6.vscattermhq.128B" => "__builtin_HEXAGON_V6_vscattermhq_128B", - "llvm.hexagon.V6.vscattermhw" => "__builtin_HEXAGON_V6_vscattermhw", - "llvm.hexagon.V6.vscattermhw.128B" => "__builtin_HEXAGON_V6_vscattermhw_128B", - "llvm.hexagon.V6.vscattermhw.add" => "__builtin_HEXAGON_V6_vscattermhw_add", - "llvm.hexagon.V6.vscattermhw.add.128B" => "__builtin_HEXAGON_V6_vscattermhw_add_128B", - "llvm.hexagon.V6.vscattermhwq" => "__builtin_HEXAGON_V6_vscattermhwq", - 
"llvm.hexagon.V6.vscattermhwq.128B" => "__builtin_HEXAGON_V6_vscattermhwq_128B", - "llvm.hexagon.V6.vscattermw" => "__builtin_HEXAGON_V6_vscattermw", - "llvm.hexagon.V6.vscattermw.128B" => "__builtin_HEXAGON_V6_vscattermw_128B", - "llvm.hexagon.V6.vscattermw.add" => "__builtin_HEXAGON_V6_vscattermw_add", - "llvm.hexagon.V6.vscattermw.add.128B" => "__builtin_HEXAGON_V6_vscattermw_add_128B", - "llvm.hexagon.V6.vscattermwq" => "__builtin_HEXAGON_V6_vscattermwq", - "llvm.hexagon.V6.vscattermwq.128B" => "__builtin_HEXAGON_V6_vscattermwq_128B", - "llvm.hexagon.V6.vsh" => "__builtin_HEXAGON_V6_vsh", - "llvm.hexagon.V6.vsh.128B" => "__builtin_HEXAGON_V6_vsh_128B", - "llvm.hexagon.V6.vshufeh" => "__builtin_HEXAGON_V6_vshufeh", - "llvm.hexagon.V6.vshufeh.128B" => "__builtin_HEXAGON_V6_vshufeh_128B", - "llvm.hexagon.V6.vshuffb" => "__builtin_HEXAGON_V6_vshuffb", - "llvm.hexagon.V6.vshuffb.128B" => "__builtin_HEXAGON_V6_vshuffb_128B", - "llvm.hexagon.V6.vshuffeb" => "__builtin_HEXAGON_V6_vshuffeb", - "llvm.hexagon.V6.vshuffeb.128B" => "__builtin_HEXAGON_V6_vshuffeb_128B", - "llvm.hexagon.V6.vshuffh" => "__builtin_HEXAGON_V6_vshuffh", - "llvm.hexagon.V6.vshuffh.128B" => "__builtin_HEXAGON_V6_vshuffh_128B", - "llvm.hexagon.V6.vshuffob" => "__builtin_HEXAGON_V6_vshuffob", - "llvm.hexagon.V6.vshuffob.128B" => "__builtin_HEXAGON_V6_vshuffob_128B", - "llvm.hexagon.V6.vshuffvdd" => "__builtin_HEXAGON_V6_vshuffvdd", - "llvm.hexagon.V6.vshuffvdd.128B" => "__builtin_HEXAGON_V6_vshuffvdd_128B", - "llvm.hexagon.V6.vshufoeb" => "__builtin_HEXAGON_V6_vshufoeb", - "llvm.hexagon.V6.vshufoeb.128B" => "__builtin_HEXAGON_V6_vshufoeb_128B", - "llvm.hexagon.V6.vshufoeh" => "__builtin_HEXAGON_V6_vshufoeh", - "llvm.hexagon.V6.vshufoeh.128B" => "__builtin_HEXAGON_V6_vshufoeh_128B", - "llvm.hexagon.V6.vshufoh" => "__builtin_HEXAGON_V6_vshufoh", - "llvm.hexagon.V6.vshufoh.128B" => "__builtin_HEXAGON_V6_vshufoh_128B", - "llvm.hexagon.V6.vsub.hf" => "__builtin_HEXAGON_V6_vsub_hf", - "llvm.hexagon.V6.vsub.hf.128B" => "__builtin_HEXAGON_V6_vsub_hf_128B", - "llvm.hexagon.V6.vsub.hf.f8" => "__builtin_HEXAGON_V6_vsub_hf_f8", - "llvm.hexagon.V6.vsub.hf.f8.128B" => "__builtin_HEXAGON_V6_vsub_hf_f8_128B", - "llvm.hexagon.V6.vsub.hf.hf" => "__builtin_HEXAGON_V6_vsub_hf_hf", - "llvm.hexagon.V6.vsub.hf.hf.128B" => "__builtin_HEXAGON_V6_vsub_hf_hf_128B", - "llvm.hexagon.V6.vsub.qf16" => "__builtin_HEXAGON_V6_vsub_qf16", - "llvm.hexagon.V6.vsub.qf16.128B" => "__builtin_HEXAGON_V6_vsub_qf16_128B", - "llvm.hexagon.V6.vsub.qf16.mix" => "__builtin_HEXAGON_V6_vsub_qf16_mix", - "llvm.hexagon.V6.vsub.qf16.mix.128B" => "__builtin_HEXAGON_V6_vsub_qf16_mix_128B", - "llvm.hexagon.V6.vsub.qf32" => "__builtin_HEXAGON_V6_vsub_qf32", - "llvm.hexagon.V6.vsub.qf32.128B" => "__builtin_HEXAGON_V6_vsub_qf32_128B", - "llvm.hexagon.V6.vsub.qf32.mix" => "__builtin_HEXAGON_V6_vsub_qf32_mix", - "llvm.hexagon.V6.vsub.qf32.mix.128B" => "__builtin_HEXAGON_V6_vsub_qf32_mix_128B", - "llvm.hexagon.V6.vsub.sf" => "__builtin_HEXAGON_V6_vsub_sf", - "llvm.hexagon.V6.vsub.sf.128B" => "__builtin_HEXAGON_V6_vsub_sf_128B", - "llvm.hexagon.V6.vsub.sf.bf" => "__builtin_HEXAGON_V6_vsub_sf_bf", - "llvm.hexagon.V6.vsub.sf.bf.128B" => "__builtin_HEXAGON_V6_vsub_sf_bf_128B", - "llvm.hexagon.V6.vsub.sf.hf" => "__builtin_HEXAGON_V6_vsub_sf_hf", - "llvm.hexagon.V6.vsub.sf.hf.128B" => "__builtin_HEXAGON_V6_vsub_sf_hf_128B", - "llvm.hexagon.V6.vsub.sf.sf" => "__builtin_HEXAGON_V6_vsub_sf_sf", - "llvm.hexagon.V6.vsub.sf.sf.128B" => "__builtin_HEXAGON_V6_vsub_sf_sf_128B", - 
"llvm.hexagon.V6.vsubb" => "__builtin_HEXAGON_V6_vsubb", - "llvm.hexagon.V6.vsubb.128B" => "__builtin_HEXAGON_V6_vsubb_128B", - "llvm.hexagon.V6.vsubb.dv" => "__builtin_HEXAGON_V6_vsubb_dv", - "llvm.hexagon.V6.vsubb.dv.128B" => "__builtin_HEXAGON_V6_vsubb_dv_128B", - "llvm.hexagon.V6.vsubbnq" => "__builtin_HEXAGON_V6_vsubbnq", - "llvm.hexagon.V6.vsubbnq.128B" => "__builtin_HEXAGON_V6_vsubbnq_128B", - "llvm.hexagon.V6.vsubbq" => "__builtin_HEXAGON_V6_vsubbq", - "llvm.hexagon.V6.vsubbq.128B" => "__builtin_HEXAGON_V6_vsubbq_128B", - "llvm.hexagon.V6.vsubbsat" => "__builtin_HEXAGON_V6_vsubbsat", - "llvm.hexagon.V6.vsubbsat.128B" => "__builtin_HEXAGON_V6_vsubbsat_128B", - "llvm.hexagon.V6.vsubbsat.dv" => "__builtin_HEXAGON_V6_vsubbsat_dv", - "llvm.hexagon.V6.vsubbsat.dv.128B" => "__builtin_HEXAGON_V6_vsubbsat_dv_128B", - "llvm.hexagon.V6.vsubh" => "__builtin_HEXAGON_V6_vsubh", - "llvm.hexagon.V6.vsubh.128B" => "__builtin_HEXAGON_V6_vsubh_128B", - "llvm.hexagon.V6.vsubh.dv" => "__builtin_HEXAGON_V6_vsubh_dv", - "llvm.hexagon.V6.vsubh.dv.128B" => "__builtin_HEXAGON_V6_vsubh_dv_128B", - "llvm.hexagon.V6.vsubhnq" => "__builtin_HEXAGON_V6_vsubhnq", - "llvm.hexagon.V6.vsubhnq.128B" => "__builtin_HEXAGON_V6_vsubhnq_128B", - "llvm.hexagon.V6.vsubhq" => "__builtin_HEXAGON_V6_vsubhq", - "llvm.hexagon.V6.vsubhq.128B" => "__builtin_HEXAGON_V6_vsubhq_128B", - "llvm.hexagon.V6.vsubhsat" => "__builtin_HEXAGON_V6_vsubhsat", - "llvm.hexagon.V6.vsubhsat.128B" => "__builtin_HEXAGON_V6_vsubhsat_128B", - "llvm.hexagon.V6.vsubhsat.dv" => "__builtin_HEXAGON_V6_vsubhsat_dv", - "llvm.hexagon.V6.vsubhsat.dv.128B" => "__builtin_HEXAGON_V6_vsubhsat_dv_128B", - "llvm.hexagon.V6.vsubhw" => "__builtin_HEXAGON_V6_vsubhw", - "llvm.hexagon.V6.vsubhw.128B" => "__builtin_HEXAGON_V6_vsubhw_128B", - "llvm.hexagon.V6.vsububh" => "__builtin_HEXAGON_V6_vsububh", - "llvm.hexagon.V6.vsububh.128B" => "__builtin_HEXAGON_V6_vsububh_128B", - "llvm.hexagon.V6.vsububsat" => "__builtin_HEXAGON_V6_vsububsat", - "llvm.hexagon.V6.vsububsat.128B" => "__builtin_HEXAGON_V6_vsububsat_128B", - "llvm.hexagon.V6.vsububsat.dv" => "__builtin_HEXAGON_V6_vsububsat_dv", - "llvm.hexagon.V6.vsububsat.dv.128B" => "__builtin_HEXAGON_V6_vsububsat_dv_128B", - "llvm.hexagon.V6.vsubububb.sat" => "__builtin_HEXAGON_V6_vsubububb_sat", - "llvm.hexagon.V6.vsubububb.sat.128B" => "__builtin_HEXAGON_V6_vsubububb_sat_128B", - "llvm.hexagon.V6.vsubuhsat" => "__builtin_HEXAGON_V6_vsubuhsat", - "llvm.hexagon.V6.vsubuhsat.128B" => "__builtin_HEXAGON_V6_vsubuhsat_128B", - "llvm.hexagon.V6.vsubuhsat.dv" => "__builtin_HEXAGON_V6_vsubuhsat_dv", - "llvm.hexagon.V6.vsubuhsat.dv.128B" => "__builtin_HEXAGON_V6_vsubuhsat_dv_128B", - "llvm.hexagon.V6.vsubuhw" => "__builtin_HEXAGON_V6_vsubuhw", - "llvm.hexagon.V6.vsubuhw.128B" => "__builtin_HEXAGON_V6_vsubuhw_128B", - "llvm.hexagon.V6.vsubuwsat" => "__builtin_HEXAGON_V6_vsubuwsat", - "llvm.hexagon.V6.vsubuwsat.128B" => "__builtin_HEXAGON_V6_vsubuwsat_128B", - "llvm.hexagon.V6.vsubuwsat.dv" => "__builtin_HEXAGON_V6_vsubuwsat_dv", - "llvm.hexagon.V6.vsubuwsat.dv.128B" => "__builtin_HEXAGON_V6_vsubuwsat_dv_128B", - "llvm.hexagon.V6.vsubw" => "__builtin_HEXAGON_V6_vsubw", - "llvm.hexagon.V6.vsubw.128B" => "__builtin_HEXAGON_V6_vsubw_128B", - "llvm.hexagon.V6.vsubw.dv" => "__builtin_HEXAGON_V6_vsubw_dv", - "llvm.hexagon.V6.vsubw.dv.128B" => "__builtin_HEXAGON_V6_vsubw_dv_128B", - "llvm.hexagon.V6.vsubwnq" => "__builtin_HEXAGON_V6_vsubwnq", - "llvm.hexagon.V6.vsubwnq.128B" => "__builtin_HEXAGON_V6_vsubwnq_128B", - "llvm.hexagon.V6.vsubwq" => 
"__builtin_HEXAGON_V6_vsubwq", - "llvm.hexagon.V6.vsubwq.128B" => "__builtin_HEXAGON_V6_vsubwq_128B", - "llvm.hexagon.V6.vsubwsat" => "__builtin_HEXAGON_V6_vsubwsat", - "llvm.hexagon.V6.vsubwsat.128B" => "__builtin_HEXAGON_V6_vsubwsat_128B", - "llvm.hexagon.V6.vsubwsat.dv" => "__builtin_HEXAGON_V6_vsubwsat_dv", - "llvm.hexagon.V6.vsubwsat.dv.128B" => "__builtin_HEXAGON_V6_vsubwsat_dv_128B", - "llvm.hexagon.V6.vswap" => "__builtin_HEXAGON_V6_vswap", - "llvm.hexagon.V6.vswap.128B" => "__builtin_HEXAGON_V6_vswap_128B", - "llvm.hexagon.V6.vtmpyb" => "__builtin_HEXAGON_V6_vtmpyb", - "llvm.hexagon.V6.vtmpyb.128B" => "__builtin_HEXAGON_V6_vtmpyb_128B", - "llvm.hexagon.V6.vtmpyb.acc" => "__builtin_HEXAGON_V6_vtmpyb_acc", - "llvm.hexagon.V6.vtmpyb.acc.128B" => "__builtin_HEXAGON_V6_vtmpyb_acc_128B", - "llvm.hexagon.V6.vtmpybus" => "__builtin_HEXAGON_V6_vtmpybus", - "llvm.hexagon.V6.vtmpybus.128B" => "__builtin_HEXAGON_V6_vtmpybus_128B", - "llvm.hexagon.V6.vtmpybus.acc" => "__builtin_HEXAGON_V6_vtmpybus_acc", - "llvm.hexagon.V6.vtmpybus.acc.128B" => "__builtin_HEXAGON_V6_vtmpybus_acc_128B", - "llvm.hexagon.V6.vtmpyhb" => "__builtin_HEXAGON_V6_vtmpyhb", - "llvm.hexagon.V6.vtmpyhb.128B" => "__builtin_HEXAGON_V6_vtmpyhb_128B", - "llvm.hexagon.V6.vtmpyhb.acc" => "__builtin_HEXAGON_V6_vtmpyhb_acc", - "llvm.hexagon.V6.vtmpyhb.acc.128B" => "__builtin_HEXAGON_V6_vtmpyhb_acc_128B", - "llvm.hexagon.V6.vunpackb" => "__builtin_HEXAGON_V6_vunpackb", - "llvm.hexagon.V6.vunpackb.128B" => "__builtin_HEXAGON_V6_vunpackb_128B", - "llvm.hexagon.V6.vunpackh" => "__builtin_HEXAGON_V6_vunpackh", - "llvm.hexagon.V6.vunpackh.128B" => "__builtin_HEXAGON_V6_vunpackh_128B", - "llvm.hexagon.V6.vunpackob" => "__builtin_HEXAGON_V6_vunpackob", - "llvm.hexagon.V6.vunpackob.128B" => "__builtin_HEXAGON_V6_vunpackob_128B", - "llvm.hexagon.V6.vunpackoh" => "__builtin_HEXAGON_V6_vunpackoh", - "llvm.hexagon.V6.vunpackoh.128B" => "__builtin_HEXAGON_V6_vunpackoh_128B", - "llvm.hexagon.V6.vunpackub" => "__builtin_HEXAGON_V6_vunpackub", - "llvm.hexagon.V6.vunpackub.128B" => "__builtin_HEXAGON_V6_vunpackub_128B", - "llvm.hexagon.V6.vunpackuh" => "__builtin_HEXAGON_V6_vunpackuh", - "llvm.hexagon.V6.vunpackuh.128B" => "__builtin_HEXAGON_V6_vunpackuh_128B", - "llvm.hexagon.V6.vxor" => "__builtin_HEXAGON_V6_vxor", - "llvm.hexagon.V6.vxor.128B" => "__builtin_HEXAGON_V6_vxor_128B", - "llvm.hexagon.V6.vzb" => "__builtin_HEXAGON_V6_vzb", - "llvm.hexagon.V6.vzb.128B" => "__builtin_HEXAGON_V6_vzb_128B", - "llvm.hexagon.V6.vzh" => "__builtin_HEXAGON_V6_vzh", - "llvm.hexagon.V6.vzh.128B" => "__builtin_HEXAGON_V6_vzh_128B", - "llvm.hexagon.Y2.dccleana" => "__builtin_HEXAGON_Y2_dccleana", - "llvm.hexagon.Y2.dccleaninva" => "__builtin_HEXAGON_Y2_dccleaninva", - "llvm.hexagon.Y2.dcfetch" => "__builtin_HEXAGON_Y2_dcfetch", - "llvm.hexagon.Y2.dcinva" => "__builtin_HEXAGON_Y2_dcinva", - "llvm.hexagon.Y2.dczeroa" => "__builtin_HEXAGON_Y2_dczeroa", - "llvm.hexagon.Y4.l2fetch" => "__builtin_HEXAGON_Y4_l2fetch", - "llvm.hexagon.Y5.l2fetch" => "__builtin_HEXAGON_Y5_l2fetch", - "llvm.hexagon.Y6.dmlink" => "__builtin_HEXAGON_Y6_dmlink", - "llvm.hexagon.Y6.dmpause" => "__builtin_HEXAGON_Y6_dmpause", - "llvm.hexagon.Y6.dmpoll" => "__builtin_HEXAGON_Y6_dmpoll", - "llvm.hexagon.Y6.dmresume" => "__builtin_HEXAGON_Y6_dmresume", - "llvm.hexagon.Y6.dmstart" => "__builtin_HEXAGON_Y6_dmstart", - "llvm.hexagon.Y6.dmwait" => "__builtin_HEXAGON_Y6_dmwait", - "llvm.hexagon.brev.ldb" => "__builtin_brev_ldb", - "llvm.hexagon.brev.ldd" => "__builtin_brev_ldd", - 
"llvm.hexagon.brev.ldh" => "__builtin_brev_ldh", - "llvm.hexagon.brev.ldub" => "__builtin_brev_ldub", - "llvm.hexagon.brev.lduh" => "__builtin_brev_lduh", - "llvm.hexagon.brev.ldw" => "__builtin_brev_ldw", - "llvm.hexagon.brev.stb" => "__builtin_brev_stb", - "llvm.hexagon.brev.std" => "__builtin_brev_std", - "llvm.hexagon.brev.sth" => "__builtin_brev_sth", - "llvm.hexagon.brev.sthhi" => "__builtin_brev_sthhi", - "llvm.hexagon.brev.stw" => "__builtin_brev_stw", - "llvm.hexagon.circ.ldb" => "__builtin_circ_ldb", - "llvm.hexagon.circ.ldd" => "__builtin_circ_ldd", - "llvm.hexagon.circ.ldh" => "__builtin_circ_ldh", - "llvm.hexagon.circ.ldub" => "__builtin_circ_ldub", - "llvm.hexagon.circ.lduh" => "__builtin_circ_lduh", - "llvm.hexagon.circ.ldw" => "__builtin_circ_ldw", - "llvm.hexagon.circ.stb" => "__builtin_circ_stb", - "llvm.hexagon.circ.std" => "__builtin_circ_std", - "llvm.hexagon.circ.sth" => "__builtin_circ_sth", - "llvm.hexagon.circ.sthhi" => "__builtin_circ_sthhi", - "llvm.hexagon.circ.stw" => "__builtin_circ_stw", - "llvm.hexagon.mm256i.vaddw" => "__builtin__mm256i_vaddw", - "llvm.hexagon.prefetch" => "__builtin_HEXAGON_prefetch", - "llvm.hexagon.vmemcpy" => "__builtin_hexagon_vmemcpy", - "llvm.hexagon.vmemset" => "__builtin_hexagon_vmemset", - // loongarch - "llvm.loongarch.asrtgt.d" => "__builtin_loongarch_asrtgt_d", - "llvm.loongarch.asrtle.d" => "__builtin_loongarch_asrtle_d", - "llvm.loongarch.break" => "__builtin_loongarch_break", - "llvm.loongarch.cacop.d" => "__builtin_loongarch_cacop_d", - "llvm.loongarch.cacop.w" => "__builtin_loongarch_cacop_w", - "llvm.loongarch.cpucfg" => "__builtin_loongarch_cpucfg", - "llvm.loongarch.crc.w.b.w" => "__builtin_loongarch_crc_w_b_w", - "llvm.loongarch.crc.w.d.w" => "__builtin_loongarch_crc_w_d_w", - "llvm.loongarch.crc.w.h.w" => "__builtin_loongarch_crc_w_h_w", - "llvm.loongarch.crc.w.w.w" => "__builtin_loongarch_crc_w_w_w", - "llvm.loongarch.crcc.w.b.w" => "__builtin_loongarch_crcc_w_b_w", - "llvm.loongarch.crcc.w.d.w" => "__builtin_loongarch_crcc_w_d_w", - "llvm.loongarch.crcc.w.h.w" => "__builtin_loongarch_crcc_w_h_w", - "llvm.loongarch.crcc.w.w.w" => "__builtin_loongarch_crcc_w_w_w", - "llvm.loongarch.csrrd.d" => "__builtin_loongarch_csrrd_d", - "llvm.loongarch.csrrd.w" => "__builtin_loongarch_csrrd_w", - "llvm.loongarch.csrwr.d" => "__builtin_loongarch_csrwr_d", - "llvm.loongarch.csrwr.w" => "__builtin_loongarch_csrwr_w", - "llvm.loongarch.csrxchg.d" => "__builtin_loongarch_csrxchg_d", - "llvm.loongarch.csrxchg.w" => "__builtin_loongarch_csrxchg_w", - "llvm.loongarch.dbar" => "__builtin_loongarch_dbar", - "llvm.loongarch.frecipe.d" => "__builtin_loongarch_frecipe_d", - "llvm.loongarch.frecipe.s" => "__builtin_loongarch_frecipe_s", - "llvm.loongarch.frsqrte.d" => "__builtin_loongarch_frsqrte_d", - "llvm.loongarch.frsqrte.s" => "__builtin_loongarch_frsqrte_s", - "llvm.loongarch.ibar" => "__builtin_loongarch_ibar", - "llvm.loongarch.iocsrrd.b" => "__builtin_loongarch_iocsrrd_b", - "llvm.loongarch.iocsrrd.d" => "__builtin_loongarch_iocsrrd_d", - "llvm.loongarch.iocsrrd.h" => "__builtin_loongarch_iocsrrd_h", - "llvm.loongarch.iocsrrd.w" => "__builtin_loongarch_iocsrrd_w", - "llvm.loongarch.iocsrwr.b" => "__builtin_loongarch_iocsrwr_b", - "llvm.loongarch.iocsrwr.d" => "__builtin_loongarch_iocsrwr_d", - "llvm.loongarch.iocsrwr.h" => "__builtin_loongarch_iocsrwr_h", - "llvm.loongarch.iocsrwr.w" => "__builtin_loongarch_iocsrwr_w", - "llvm.loongarch.lasx.vext2xv.d.b" => "__builtin_lasx_vext2xv_d_b", - "llvm.loongarch.lasx.vext2xv.d.h" => 
"__builtin_lasx_vext2xv_d_h", - "llvm.loongarch.lasx.vext2xv.d.w" => "__builtin_lasx_vext2xv_d_w", - "llvm.loongarch.lasx.vext2xv.du.bu" => "__builtin_lasx_vext2xv_du_bu", - "llvm.loongarch.lasx.vext2xv.du.hu" => "__builtin_lasx_vext2xv_du_hu", - "llvm.loongarch.lasx.vext2xv.du.wu" => "__builtin_lasx_vext2xv_du_wu", - "llvm.loongarch.lasx.vext2xv.h.b" => "__builtin_lasx_vext2xv_h_b", - "llvm.loongarch.lasx.vext2xv.hu.bu" => "__builtin_lasx_vext2xv_hu_bu", - "llvm.loongarch.lasx.vext2xv.w.b" => "__builtin_lasx_vext2xv_w_b", - "llvm.loongarch.lasx.vext2xv.w.h" => "__builtin_lasx_vext2xv_w_h", - "llvm.loongarch.lasx.vext2xv.wu.bu" => "__builtin_lasx_vext2xv_wu_bu", - "llvm.loongarch.lasx.vext2xv.wu.hu" => "__builtin_lasx_vext2xv_wu_hu", - "llvm.loongarch.lasx.xbnz.b" => "__builtin_lasx_xbnz_b", - "llvm.loongarch.lasx.xbnz.d" => "__builtin_lasx_xbnz_d", - "llvm.loongarch.lasx.xbnz.h" => "__builtin_lasx_xbnz_h", - "llvm.loongarch.lasx.xbnz.v" => "__builtin_lasx_xbnz_v", - "llvm.loongarch.lasx.xbnz.w" => "__builtin_lasx_xbnz_w", - "llvm.loongarch.lasx.xbz.b" => "__builtin_lasx_xbz_b", - "llvm.loongarch.lasx.xbz.d" => "__builtin_lasx_xbz_d", - "llvm.loongarch.lasx.xbz.h" => "__builtin_lasx_xbz_h", - "llvm.loongarch.lasx.xbz.v" => "__builtin_lasx_xbz_v", - "llvm.loongarch.lasx.xbz.w" => "__builtin_lasx_xbz_w", - "llvm.loongarch.lasx.xvabsd.b" => "__builtin_lasx_xvabsd_b", - "llvm.loongarch.lasx.xvabsd.bu" => "__builtin_lasx_xvabsd_bu", - "llvm.loongarch.lasx.xvabsd.d" => "__builtin_lasx_xvabsd_d", - "llvm.loongarch.lasx.xvabsd.du" => "__builtin_lasx_xvabsd_du", - "llvm.loongarch.lasx.xvabsd.h" => "__builtin_lasx_xvabsd_h", - "llvm.loongarch.lasx.xvabsd.hu" => "__builtin_lasx_xvabsd_hu", - "llvm.loongarch.lasx.xvabsd.w" => "__builtin_lasx_xvabsd_w", - "llvm.loongarch.lasx.xvabsd.wu" => "__builtin_lasx_xvabsd_wu", - "llvm.loongarch.lasx.xvadd.b" => "__builtin_lasx_xvadd_b", - "llvm.loongarch.lasx.xvadd.d" => "__builtin_lasx_xvadd_d", - "llvm.loongarch.lasx.xvadd.h" => "__builtin_lasx_xvadd_h", - "llvm.loongarch.lasx.xvadd.q" => "__builtin_lasx_xvadd_q", - "llvm.loongarch.lasx.xvadd.w" => "__builtin_lasx_xvadd_w", - "llvm.loongarch.lasx.xvadda.b" => "__builtin_lasx_xvadda_b", - "llvm.loongarch.lasx.xvadda.d" => "__builtin_lasx_xvadda_d", - "llvm.loongarch.lasx.xvadda.h" => "__builtin_lasx_xvadda_h", - "llvm.loongarch.lasx.xvadda.w" => "__builtin_lasx_xvadda_w", - "llvm.loongarch.lasx.xvaddi.bu" => "__builtin_lasx_xvaddi_bu", - "llvm.loongarch.lasx.xvaddi.du" => "__builtin_lasx_xvaddi_du", - "llvm.loongarch.lasx.xvaddi.hu" => "__builtin_lasx_xvaddi_hu", - "llvm.loongarch.lasx.xvaddi.wu" => "__builtin_lasx_xvaddi_wu", - "llvm.loongarch.lasx.xvaddwev.d.w" => "__builtin_lasx_xvaddwev_d_w", - "llvm.loongarch.lasx.xvaddwev.d.wu" => "__builtin_lasx_xvaddwev_d_wu", - "llvm.loongarch.lasx.xvaddwev.d.wu.w" => "__builtin_lasx_xvaddwev_d_wu_w", - "llvm.loongarch.lasx.xvaddwev.h.b" => "__builtin_lasx_xvaddwev_h_b", - "llvm.loongarch.lasx.xvaddwev.h.bu" => "__builtin_lasx_xvaddwev_h_bu", - "llvm.loongarch.lasx.xvaddwev.h.bu.b" => "__builtin_lasx_xvaddwev_h_bu_b", - "llvm.loongarch.lasx.xvaddwev.q.d" => "__builtin_lasx_xvaddwev_q_d", - "llvm.loongarch.lasx.xvaddwev.q.du" => "__builtin_lasx_xvaddwev_q_du", - "llvm.loongarch.lasx.xvaddwev.q.du.d" => "__builtin_lasx_xvaddwev_q_du_d", - "llvm.loongarch.lasx.xvaddwev.w.h" => "__builtin_lasx_xvaddwev_w_h", - "llvm.loongarch.lasx.xvaddwev.w.hu" => "__builtin_lasx_xvaddwev_w_hu", - "llvm.loongarch.lasx.xvaddwev.w.hu.h" => "__builtin_lasx_xvaddwev_w_hu_h", - 
"llvm.loongarch.lasx.xvaddwod.d.w" => "__builtin_lasx_xvaddwod_d_w", - "llvm.loongarch.lasx.xvaddwod.d.wu" => "__builtin_lasx_xvaddwod_d_wu", - "llvm.loongarch.lasx.xvaddwod.d.wu.w" => "__builtin_lasx_xvaddwod_d_wu_w", - "llvm.loongarch.lasx.xvaddwod.h.b" => "__builtin_lasx_xvaddwod_h_b", - "llvm.loongarch.lasx.xvaddwod.h.bu" => "__builtin_lasx_xvaddwod_h_bu", - "llvm.loongarch.lasx.xvaddwod.h.bu.b" => "__builtin_lasx_xvaddwod_h_bu_b", - "llvm.loongarch.lasx.xvaddwod.q.d" => "__builtin_lasx_xvaddwod_q_d", - "llvm.loongarch.lasx.xvaddwod.q.du" => "__builtin_lasx_xvaddwod_q_du", - "llvm.loongarch.lasx.xvaddwod.q.du.d" => "__builtin_lasx_xvaddwod_q_du_d", - "llvm.loongarch.lasx.xvaddwod.w.h" => "__builtin_lasx_xvaddwod_w_h", - "llvm.loongarch.lasx.xvaddwod.w.hu" => "__builtin_lasx_xvaddwod_w_hu", - "llvm.loongarch.lasx.xvaddwod.w.hu.h" => "__builtin_lasx_xvaddwod_w_hu_h", - "llvm.loongarch.lasx.xvand.v" => "__builtin_lasx_xvand_v", - "llvm.loongarch.lasx.xvandi.b" => "__builtin_lasx_xvandi_b", - "llvm.loongarch.lasx.xvandn.v" => "__builtin_lasx_xvandn_v", - "llvm.loongarch.lasx.xvavg.b" => "__builtin_lasx_xvavg_b", - "llvm.loongarch.lasx.xvavg.bu" => "__builtin_lasx_xvavg_bu", - "llvm.loongarch.lasx.xvavg.d" => "__builtin_lasx_xvavg_d", - "llvm.loongarch.lasx.xvavg.du" => "__builtin_lasx_xvavg_du", - "llvm.loongarch.lasx.xvavg.h" => "__builtin_lasx_xvavg_h", - "llvm.loongarch.lasx.xvavg.hu" => "__builtin_lasx_xvavg_hu", - "llvm.loongarch.lasx.xvavg.w" => "__builtin_lasx_xvavg_w", - "llvm.loongarch.lasx.xvavg.wu" => "__builtin_lasx_xvavg_wu", - "llvm.loongarch.lasx.xvavgr.b" => "__builtin_lasx_xvavgr_b", - "llvm.loongarch.lasx.xvavgr.bu" => "__builtin_lasx_xvavgr_bu", - "llvm.loongarch.lasx.xvavgr.d" => "__builtin_lasx_xvavgr_d", - "llvm.loongarch.lasx.xvavgr.du" => "__builtin_lasx_xvavgr_du", - "llvm.loongarch.lasx.xvavgr.h" => "__builtin_lasx_xvavgr_h", - "llvm.loongarch.lasx.xvavgr.hu" => "__builtin_lasx_xvavgr_hu", - "llvm.loongarch.lasx.xvavgr.w" => "__builtin_lasx_xvavgr_w", - "llvm.loongarch.lasx.xvavgr.wu" => "__builtin_lasx_xvavgr_wu", - "llvm.loongarch.lasx.xvbitclr.b" => "__builtin_lasx_xvbitclr_b", - "llvm.loongarch.lasx.xvbitclr.d" => "__builtin_lasx_xvbitclr_d", - "llvm.loongarch.lasx.xvbitclr.h" => "__builtin_lasx_xvbitclr_h", - "llvm.loongarch.lasx.xvbitclr.w" => "__builtin_lasx_xvbitclr_w", - "llvm.loongarch.lasx.xvbitclri.b" => "__builtin_lasx_xvbitclri_b", - "llvm.loongarch.lasx.xvbitclri.d" => "__builtin_lasx_xvbitclri_d", - "llvm.loongarch.lasx.xvbitclri.h" => "__builtin_lasx_xvbitclri_h", - "llvm.loongarch.lasx.xvbitclri.w" => "__builtin_lasx_xvbitclri_w", - "llvm.loongarch.lasx.xvbitrev.b" => "__builtin_lasx_xvbitrev_b", - "llvm.loongarch.lasx.xvbitrev.d" => "__builtin_lasx_xvbitrev_d", - "llvm.loongarch.lasx.xvbitrev.h" => "__builtin_lasx_xvbitrev_h", - "llvm.loongarch.lasx.xvbitrev.w" => "__builtin_lasx_xvbitrev_w", - "llvm.loongarch.lasx.xvbitrevi.b" => "__builtin_lasx_xvbitrevi_b", - "llvm.loongarch.lasx.xvbitrevi.d" => "__builtin_lasx_xvbitrevi_d", - "llvm.loongarch.lasx.xvbitrevi.h" => "__builtin_lasx_xvbitrevi_h", - "llvm.loongarch.lasx.xvbitrevi.w" => "__builtin_lasx_xvbitrevi_w", - "llvm.loongarch.lasx.xvbitsel.v" => "__builtin_lasx_xvbitsel_v", - "llvm.loongarch.lasx.xvbitseli.b" => "__builtin_lasx_xvbitseli_b", - "llvm.loongarch.lasx.xvbitset.b" => "__builtin_lasx_xvbitset_b", - "llvm.loongarch.lasx.xvbitset.d" => "__builtin_lasx_xvbitset_d", - "llvm.loongarch.lasx.xvbitset.h" => "__builtin_lasx_xvbitset_h", - "llvm.loongarch.lasx.xvbitset.w" => 
"__builtin_lasx_xvbitset_w", - "llvm.loongarch.lasx.xvbitseti.b" => "__builtin_lasx_xvbitseti_b", - "llvm.loongarch.lasx.xvbitseti.d" => "__builtin_lasx_xvbitseti_d", - "llvm.loongarch.lasx.xvbitseti.h" => "__builtin_lasx_xvbitseti_h", - "llvm.loongarch.lasx.xvbitseti.w" => "__builtin_lasx_xvbitseti_w", - "llvm.loongarch.lasx.xvbsll.v" => "__builtin_lasx_xvbsll_v", - "llvm.loongarch.lasx.xvbsrl.v" => "__builtin_lasx_xvbsrl_v", - "llvm.loongarch.lasx.xvclo.b" => "__builtin_lasx_xvclo_b", - "llvm.loongarch.lasx.xvclo.d" => "__builtin_lasx_xvclo_d", - "llvm.loongarch.lasx.xvclo.h" => "__builtin_lasx_xvclo_h", - "llvm.loongarch.lasx.xvclo.w" => "__builtin_lasx_xvclo_w", - "llvm.loongarch.lasx.xvclz.b" => "__builtin_lasx_xvclz_b", - "llvm.loongarch.lasx.xvclz.d" => "__builtin_lasx_xvclz_d", - "llvm.loongarch.lasx.xvclz.h" => "__builtin_lasx_xvclz_h", - "llvm.loongarch.lasx.xvclz.w" => "__builtin_lasx_xvclz_w", - "llvm.loongarch.lasx.xvdiv.b" => "__builtin_lasx_xvdiv_b", - "llvm.loongarch.lasx.xvdiv.bu" => "__builtin_lasx_xvdiv_bu", - "llvm.loongarch.lasx.xvdiv.d" => "__builtin_lasx_xvdiv_d", - "llvm.loongarch.lasx.xvdiv.du" => "__builtin_lasx_xvdiv_du", - "llvm.loongarch.lasx.xvdiv.h" => "__builtin_lasx_xvdiv_h", - "llvm.loongarch.lasx.xvdiv.hu" => "__builtin_lasx_xvdiv_hu", - "llvm.loongarch.lasx.xvdiv.w" => "__builtin_lasx_xvdiv_w", - "llvm.loongarch.lasx.xvdiv.wu" => "__builtin_lasx_xvdiv_wu", - "llvm.loongarch.lasx.xvexth.d.w" => "__builtin_lasx_xvexth_d_w", - "llvm.loongarch.lasx.xvexth.du.wu" => "__builtin_lasx_xvexth_du_wu", - "llvm.loongarch.lasx.xvexth.h.b" => "__builtin_lasx_xvexth_h_b", - "llvm.loongarch.lasx.xvexth.hu.bu" => "__builtin_lasx_xvexth_hu_bu", - "llvm.loongarch.lasx.xvexth.q.d" => "__builtin_lasx_xvexth_q_d", - "llvm.loongarch.lasx.xvexth.qu.du" => "__builtin_lasx_xvexth_qu_du", - "llvm.loongarch.lasx.xvexth.w.h" => "__builtin_lasx_xvexth_w_h", - "llvm.loongarch.lasx.xvexth.wu.hu" => "__builtin_lasx_xvexth_wu_hu", - "llvm.loongarch.lasx.xvextl.q.d" => "__builtin_lasx_xvextl_q_d", - "llvm.loongarch.lasx.xvextl.qu.du" => "__builtin_lasx_xvextl_qu_du", - "llvm.loongarch.lasx.xvextrins.b" => "__builtin_lasx_xvextrins_b", - "llvm.loongarch.lasx.xvextrins.d" => "__builtin_lasx_xvextrins_d", - "llvm.loongarch.lasx.xvextrins.h" => "__builtin_lasx_xvextrins_h", - "llvm.loongarch.lasx.xvextrins.w" => "__builtin_lasx_xvextrins_w", - "llvm.loongarch.lasx.xvfadd.d" => "__builtin_lasx_xvfadd_d", - "llvm.loongarch.lasx.xvfadd.s" => "__builtin_lasx_xvfadd_s", - "llvm.loongarch.lasx.xvfclass.d" => "__builtin_lasx_xvfclass_d", - "llvm.loongarch.lasx.xvfclass.s" => "__builtin_lasx_xvfclass_s", - "llvm.loongarch.lasx.xvfcmp.caf.d" => "__builtin_lasx_xvfcmp_caf_d", - "llvm.loongarch.lasx.xvfcmp.caf.s" => "__builtin_lasx_xvfcmp_caf_s", - "llvm.loongarch.lasx.xvfcmp.ceq.d" => "__builtin_lasx_xvfcmp_ceq_d", - "llvm.loongarch.lasx.xvfcmp.ceq.s" => "__builtin_lasx_xvfcmp_ceq_s", - "llvm.loongarch.lasx.xvfcmp.cle.d" => "__builtin_lasx_xvfcmp_cle_d", - "llvm.loongarch.lasx.xvfcmp.cle.s" => "__builtin_lasx_xvfcmp_cle_s", - "llvm.loongarch.lasx.xvfcmp.clt.d" => "__builtin_lasx_xvfcmp_clt_d", - "llvm.loongarch.lasx.xvfcmp.clt.s" => "__builtin_lasx_xvfcmp_clt_s", - "llvm.loongarch.lasx.xvfcmp.cne.d" => "__builtin_lasx_xvfcmp_cne_d", - "llvm.loongarch.lasx.xvfcmp.cne.s" => "__builtin_lasx_xvfcmp_cne_s", - "llvm.loongarch.lasx.xvfcmp.cor.d" => "__builtin_lasx_xvfcmp_cor_d", - "llvm.loongarch.lasx.xvfcmp.cor.s" => "__builtin_lasx_xvfcmp_cor_s", - "llvm.loongarch.lasx.xvfcmp.cueq.d" => 
"__builtin_lasx_xvfcmp_cueq_d", - "llvm.loongarch.lasx.xvfcmp.cueq.s" => "__builtin_lasx_xvfcmp_cueq_s", - "llvm.loongarch.lasx.xvfcmp.cule.d" => "__builtin_lasx_xvfcmp_cule_d", - "llvm.loongarch.lasx.xvfcmp.cule.s" => "__builtin_lasx_xvfcmp_cule_s", - "llvm.loongarch.lasx.xvfcmp.cult.d" => "__builtin_lasx_xvfcmp_cult_d", - "llvm.loongarch.lasx.xvfcmp.cult.s" => "__builtin_lasx_xvfcmp_cult_s", - "llvm.loongarch.lasx.xvfcmp.cun.d" => "__builtin_lasx_xvfcmp_cun_d", - "llvm.loongarch.lasx.xvfcmp.cun.s" => "__builtin_lasx_xvfcmp_cun_s", - "llvm.loongarch.lasx.xvfcmp.cune.d" => "__builtin_lasx_xvfcmp_cune_d", - "llvm.loongarch.lasx.xvfcmp.cune.s" => "__builtin_lasx_xvfcmp_cune_s", - "llvm.loongarch.lasx.xvfcmp.saf.d" => "__builtin_lasx_xvfcmp_saf_d", - "llvm.loongarch.lasx.xvfcmp.saf.s" => "__builtin_lasx_xvfcmp_saf_s", - "llvm.loongarch.lasx.xvfcmp.seq.d" => "__builtin_lasx_xvfcmp_seq_d", - "llvm.loongarch.lasx.xvfcmp.seq.s" => "__builtin_lasx_xvfcmp_seq_s", - "llvm.loongarch.lasx.xvfcmp.sle.d" => "__builtin_lasx_xvfcmp_sle_d", - "llvm.loongarch.lasx.xvfcmp.sle.s" => "__builtin_lasx_xvfcmp_sle_s", - "llvm.loongarch.lasx.xvfcmp.slt.d" => "__builtin_lasx_xvfcmp_slt_d", - "llvm.loongarch.lasx.xvfcmp.slt.s" => "__builtin_lasx_xvfcmp_slt_s", - "llvm.loongarch.lasx.xvfcmp.sne.d" => "__builtin_lasx_xvfcmp_sne_d", - "llvm.loongarch.lasx.xvfcmp.sne.s" => "__builtin_lasx_xvfcmp_sne_s", - "llvm.loongarch.lasx.xvfcmp.sor.d" => "__builtin_lasx_xvfcmp_sor_d", - "llvm.loongarch.lasx.xvfcmp.sor.s" => "__builtin_lasx_xvfcmp_sor_s", - "llvm.loongarch.lasx.xvfcmp.sueq.d" => "__builtin_lasx_xvfcmp_sueq_d", - "llvm.loongarch.lasx.xvfcmp.sueq.s" => "__builtin_lasx_xvfcmp_sueq_s", - "llvm.loongarch.lasx.xvfcmp.sule.d" => "__builtin_lasx_xvfcmp_sule_d", - "llvm.loongarch.lasx.xvfcmp.sule.s" => "__builtin_lasx_xvfcmp_sule_s", - "llvm.loongarch.lasx.xvfcmp.sult.d" => "__builtin_lasx_xvfcmp_sult_d", - "llvm.loongarch.lasx.xvfcmp.sult.s" => "__builtin_lasx_xvfcmp_sult_s", - "llvm.loongarch.lasx.xvfcmp.sun.d" => "__builtin_lasx_xvfcmp_sun_d", - "llvm.loongarch.lasx.xvfcmp.sun.s" => "__builtin_lasx_xvfcmp_sun_s", - "llvm.loongarch.lasx.xvfcmp.sune.d" => "__builtin_lasx_xvfcmp_sune_d", - "llvm.loongarch.lasx.xvfcmp.sune.s" => "__builtin_lasx_xvfcmp_sune_s", - "llvm.loongarch.lasx.xvfcvt.h.s" => "__builtin_lasx_xvfcvt_h_s", - "llvm.loongarch.lasx.xvfcvt.s.d" => "__builtin_lasx_xvfcvt_s_d", - "llvm.loongarch.lasx.xvfcvth.d.s" => "__builtin_lasx_xvfcvth_d_s", - "llvm.loongarch.lasx.xvfcvth.s.h" => "__builtin_lasx_xvfcvth_s_h", - "llvm.loongarch.lasx.xvfcvtl.d.s" => "__builtin_lasx_xvfcvtl_d_s", - "llvm.loongarch.lasx.xvfcvtl.s.h" => "__builtin_lasx_xvfcvtl_s_h", - "llvm.loongarch.lasx.xvfdiv.d" => "__builtin_lasx_xvfdiv_d", - "llvm.loongarch.lasx.xvfdiv.s" => "__builtin_lasx_xvfdiv_s", - "llvm.loongarch.lasx.xvffint.d.l" => "__builtin_lasx_xvffint_d_l", - "llvm.loongarch.lasx.xvffint.d.lu" => "__builtin_lasx_xvffint_d_lu", - "llvm.loongarch.lasx.xvffint.s.l" => "__builtin_lasx_xvffint_s_l", - "llvm.loongarch.lasx.xvffint.s.w" => "__builtin_lasx_xvffint_s_w", - "llvm.loongarch.lasx.xvffint.s.wu" => "__builtin_lasx_xvffint_s_wu", - "llvm.loongarch.lasx.xvffinth.d.w" => "__builtin_lasx_xvffinth_d_w", - "llvm.loongarch.lasx.xvffintl.d.w" => "__builtin_lasx_xvffintl_d_w", - "llvm.loongarch.lasx.xvflogb.d" => "__builtin_lasx_xvflogb_d", - "llvm.loongarch.lasx.xvflogb.s" => "__builtin_lasx_xvflogb_s", - "llvm.loongarch.lasx.xvfmadd.d" => "__builtin_lasx_xvfmadd_d", - "llvm.loongarch.lasx.xvfmadd.s" => "__builtin_lasx_xvfmadd_s", - 
"llvm.loongarch.lasx.xvfmax.d" => "__builtin_lasx_xvfmax_d", - "llvm.loongarch.lasx.xvfmax.s" => "__builtin_lasx_xvfmax_s", - "llvm.loongarch.lasx.xvfmaxa.d" => "__builtin_lasx_xvfmaxa_d", - "llvm.loongarch.lasx.xvfmaxa.s" => "__builtin_lasx_xvfmaxa_s", - "llvm.loongarch.lasx.xvfmin.d" => "__builtin_lasx_xvfmin_d", - "llvm.loongarch.lasx.xvfmin.s" => "__builtin_lasx_xvfmin_s", - "llvm.loongarch.lasx.xvfmina.d" => "__builtin_lasx_xvfmina_d", - "llvm.loongarch.lasx.xvfmina.s" => "__builtin_lasx_xvfmina_s", - "llvm.loongarch.lasx.xvfmsub.d" => "__builtin_lasx_xvfmsub_d", - "llvm.loongarch.lasx.xvfmsub.s" => "__builtin_lasx_xvfmsub_s", - "llvm.loongarch.lasx.xvfmul.d" => "__builtin_lasx_xvfmul_d", - "llvm.loongarch.lasx.xvfmul.s" => "__builtin_lasx_xvfmul_s", - "llvm.loongarch.lasx.xvfnmadd.d" => "__builtin_lasx_xvfnmadd_d", - "llvm.loongarch.lasx.xvfnmadd.s" => "__builtin_lasx_xvfnmadd_s", - "llvm.loongarch.lasx.xvfnmsub.d" => "__builtin_lasx_xvfnmsub_d", - "llvm.loongarch.lasx.xvfnmsub.s" => "__builtin_lasx_xvfnmsub_s", - "llvm.loongarch.lasx.xvfrecip.d" => "__builtin_lasx_xvfrecip_d", - "llvm.loongarch.lasx.xvfrecip.s" => "__builtin_lasx_xvfrecip_s", - "llvm.loongarch.lasx.xvfrecipe.d" => "__builtin_lasx_xvfrecipe_d", - "llvm.loongarch.lasx.xvfrecipe.s" => "__builtin_lasx_xvfrecipe_s", - "llvm.loongarch.lasx.xvfrint.d" => "__builtin_lasx_xvfrint_d", - "llvm.loongarch.lasx.xvfrint.s" => "__builtin_lasx_xvfrint_s", - "llvm.loongarch.lasx.xvfrintrm.d" => "__builtin_lasx_xvfrintrm_d", - "llvm.loongarch.lasx.xvfrintrm.s" => "__builtin_lasx_xvfrintrm_s", - "llvm.loongarch.lasx.xvfrintrne.d" => "__builtin_lasx_xvfrintrne_d", - "llvm.loongarch.lasx.xvfrintrne.s" => "__builtin_lasx_xvfrintrne_s", - "llvm.loongarch.lasx.xvfrintrp.d" => "__builtin_lasx_xvfrintrp_d", - "llvm.loongarch.lasx.xvfrintrp.s" => "__builtin_lasx_xvfrintrp_s", - "llvm.loongarch.lasx.xvfrintrz.d" => "__builtin_lasx_xvfrintrz_d", - "llvm.loongarch.lasx.xvfrintrz.s" => "__builtin_lasx_xvfrintrz_s", - "llvm.loongarch.lasx.xvfrsqrt.d" => "__builtin_lasx_xvfrsqrt_d", - "llvm.loongarch.lasx.xvfrsqrt.s" => "__builtin_lasx_xvfrsqrt_s", - "llvm.loongarch.lasx.xvfrsqrte.d" => "__builtin_lasx_xvfrsqrte_d", - "llvm.loongarch.lasx.xvfrsqrte.s" => "__builtin_lasx_xvfrsqrte_s", - "llvm.loongarch.lasx.xvfrstp.b" => "__builtin_lasx_xvfrstp_b", - "llvm.loongarch.lasx.xvfrstp.h" => "__builtin_lasx_xvfrstp_h", - "llvm.loongarch.lasx.xvfrstpi.b" => "__builtin_lasx_xvfrstpi_b", - "llvm.loongarch.lasx.xvfrstpi.h" => "__builtin_lasx_xvfrstpi_h", - "llvm.loongarch.lasx.xvfsqrt.d" => "__builtin_lasx_xvfsqrt_d", - "llvm.loongarch.lasx.xvfsqrt.s" => "__builtin_lasx_xvfsqrt_s", - "llvm.loongarch.lasx.xvfsub.d" => "__builtin_lasx_xvfsub_d", - "llvm.loongarch.lasx.xvfsub.s" => "__builtin_lasx_xvfsub_s", - "llvm.loongarch.lasx.xvftint.l.d" => "__builtin_lasx_xvftint_l_d", - "llvm.loongarch.lasx.xvftint.lu.d" => "__builtin_lasx_xvftint_lu_d", - "llvm.loongarch.lasx.xvftint.w.d" => "__builtin_lasx_xvftint_w_d", - "llvm.loongarch.lasx.xvftint.w.s" => "__builtin_lasx_xvftint_w_s", - "llvm.loongarch.lasx.xvftint.wu.s" => "__builtin_lasx_xvftint_wu_s", - "llvm.loongarch.lasx.xvftinth.l.s" => "__builtin_lasx_xvftinth_l_s", - "llvm.loongarch.lasx.xvftintl.l.s" => "__builtin_lasx_xvftintl_l_s", - "llvm.loongarch.lasx.xvftintrm.l.d" => "__builtin_lasx_xvftintrm_l_d", - "llvm.loongarch.lasx.xvftintrm.w.d" => "__builtin_lasx_xvftintrm_w_d", - "llvm.loongarch.lasx.xvftintrm.w.s" => "__builtin_lasx_xvftintrm_w_s", - "llvm.loongarch.lasx.xvftintrmh.l.s" => 
"__builtin_lasx_xvftintrmh_l_s", - "llvm.loongarch.lasx.xvftintrml.l.s" => "__builtin_lasx_xvftintrml_l_s", - "llvm.loongarch.lasx.xvftintrne.l.d" => "__builtin_lasx_xvftintrne_l_d", - "llvm.loongarch.lasx.xvftintrne.w.d" => "__builtin_lasx_xvftintrne_w_d", - "llvm.loongarch.lasx.xvftintrne.w.s" => "__builtin_lasx_xvftintrne_w_s", - "llvm.loongarch.lasx.xvftintrneh.l.s" => "__builtin_lasx_xvftintrneh_l_s", - "llvm.loongarch.lasx.xvftintrnel.l.s" => "__builtin_lasx_xvftintrnel_l_s", - "llvm.loongarch.lasx.xvftintrp.l.d" => "__builtin_lasx_xvftintrp_l_d", - "llvm.loongarch.lasx.xvftintrp.w.d" => "__builtin_lasx_xvftintrp_w_d", - "llvm.loongarch.lasx.xvftintrp.w.s" => "__builtin_lasx_xvftintrp_w_s", - "llvm.loongarch.lasx.xvftintrph.l.s" => "__builtin_lasx_xvftintrph_l_s", - "llvm.loongarch.lasx.xvftintrpl.l.s" => "__builtin_lasx_xvftintrpl_l_s", - "llvm.loongarch.lasx.xvftintrz.l.d" => "__builtin_lasx_xvftintrz_l_d", - "llvm.loongarch.lasx.xvftintrz.lu.d" => "__builtin_lasx_xvftintrz_lu_d", - "llvm.loongarch.lasx.xvftintrz.w.d" => "__builtin_lasx_xvftintrz_w_d", - "llvm.loongarch.lasx.xvftintrz.w.s" => "__builtin_lasx_xvftintrz_w_s", - "llvm.loongarch.lasx.xvftintrz.wu.s" => "__builtin_lasx_xvftintrz_wu_s", - "llvm.loongarch.lasx.xvftintrzh.l.s" => "__builtin_lasx_xvftintrzh_l_s", - "llvm.loongarch.lasx.xvftintrzl.l.s" => "__builtin_lasx_xvftintrzl_l_s", - "llvm.loongarch.lasx.xvhaddw.d.w" => "__builtin_lasx_xvhaddw_d_w", - "llvm.loongarch.lasx.xvhaddw.du.wu" => "__builtin_lasx_xvhaddw_du_wu", - "llvm.loongarch.lasx.xvhaddw.h.b" => "__builtin_lasx_xvhaddw_h_b", - "llvm.loongarch.lasx.xvhaddw.hu.bu" => "__builtin_lasx_xvhaddw_hu_bu", - "llvm.loongarch.lasx.xvhaddw.q.d" => "__builtin_lasx_xvhaddw_q_d", - "llvm.loongarch.lasx.xvhaddw.qu.du" => "__builtin_lasx_xvhaddw_qu_du", - "llvm.loongarch.lasx.xvhaddw.w.h" => "__builtin_lasx_xvhaddw_w_h", - "llvm.loongarch.lasx.xvhaddw.wu.hu" => "__builtin_lasx_xvhaddw_wu_hu", - "llvm.loongarch.lasx.xvhsubw.d.w" => "__builtin_lasx_xvhsubw_d_w", - "llvm.loongarch.lasx.xvhsubw.du.wu" => "__builtin_lasx_xvhsubw_du_wu", - "llvm.loongarch.lasx.xvhsubw.h.b" => "__builtin_lasx_xvhsubw_h_b", - "llvm.loongarch.lasx.xvhsubw.hu.bu" => "__builtin_lasx_xvhsubw_hu_bu", - "llvm.loongarch.lasx.xvhsubw.q.d" => "__builtin_lasx_xvhsubw_q_d", - "llvm.loongarch.lasx.xvhsubw.qu.du" => "__builtin_lasx_xvhsubw_qu_du", - "llvm.loongarch.lasx.xvhsubw.w.h" => "__builtin_lasx_xvhsubw_w_h", - "llvm.loongarch.lasx.xvhsubw.wu.hu" => "__builtin_lasx_xvhsubw_wu_hu", - "llvm.loongarch.lasx.xvilvh.b" => "__builtin_lasx_xvilvh_b", - "llvm.loongarch.lasx.xvilvh.d" => "__builtin_lasx_xvilvh_d", - "llvm.loongarch.lasx.xvilvh.h" => "__builtin_lasx_xvilvh_h", - "llvm.loongarch.lasx.xvilvh.w" => "__builtin_lasx_xvilvh_w", - "llvm.loongarch.lasx.xvilvl.b" => "__builtin_lasx_xvilvl_b", - "llvm.loongarch.lasx.xvilvl.d" => "__builtin_lasx_xvilvl_d", - "llvm.loongarch.lasx.xvilvl.h" => "__builtin_lasx_xvilvl_h", - "llvm.loongarch.lasx.xvilvl.w" => "__builtin_lasx_xvilvl_w", - "llvm.loongarch.lasx.xvinsgr2vr.d" => "__builtin_lasx_xvinsgr2vr_d", - "llvm.loongarch.lasx.xvinsgr2vr.w" => "__builtin_lasx_xvinsgr2vr_w", - "llvm.loongarch.lasx.xvinsve0.d" => "__builtin_lasx_xvinsve0_d", - "llvm.loongarch.lasx.xvinsve0.w" => "__builtin_lasx_xvinsve0_w", - "llvm.loongarch.lasx.xvld" => "__builtin_lasx_xvld", - "llvm.loongarch.lasx.xvldi" => "__builtin_lasx_xvldi", - "llvm.loongarch.lasx.xvldrepl.b" => "__builtin_lasx_xvldrepl_b", - "llvm.loongarch.lasx.xvldrepl.d" => "__builtin_lasx_xvldrepl_d", - 
"llvm.loongarch.lasx.xvldrepl.h" => "__builtin_lasx_xvldrepl_h", - "llvm.loongarch.lasx.xvldrepl.w" => "__builtin_lasx_xvldrepl_w", - "llvm.loongarch.lasx.xvldx" => "__builtin_lasx_xvldx", - "llvm.loongarch.lasx.xvmadd.b" => "__builtin_lasx_xvmadd_b", - "llvm.loongarch.lasx.xvmadd.d" => "__builtin_lasx_xvmadd_d", - "llvm.loongarch.lasx.xvmadd.h" => "__builtin_lasx_xvmadd_h", - "llvm.loongarch.lasx.xvmadd.w" => "__builtin_lasx_xvmadd_w", - "llvm.loongarch.lasx.xvmaddwev.d.w" => "__builtin_lasx_xvmaddwev_d_w", - "llvm.loongarch.lasx.xvmaddwev.d.wu" => "__builtin_lasx_xvmaddwev_d_wu", - "llvm.loongarch.lasx.xvmaddwev.d.wu.w" => "__builtin_lasx_xvmaddwev_d_wu_w", - "llvm.loongarch.lasx.xvmaddwev.h.b" => "__builtin_lasx_xvmaddwev_h_b", - "llvm.loongarch.lasx.xvmaddwev.h.bu" => "__builtin_lasx_xvmaddwev_h_bu", - "llvm.loongarch.lasx.xvmaddwev.h.bu.b" => "__builtin_lasx_xvmaddwev_h_bu_b", - "llvm.loongarch.lasx.xvmaddwev.q.d" => "__builtin_lasx_xvmaddwev_q_d", - "llvm.loongarch.lasx.xvmaddwev.q.du" => "__builtin_lasx_xvmaddwev_q_du", - "llvm.loongarch.lasx.xvmaddwev.q.du.d" => "__builtin_lasx_xvmaddwev_q_du_d", - "llvm.loongarch.lasx.xvmaddwev.w.h" => "__builtin_lasx_xvmaddwev_w_h", - "llvm.loongarch.lasx.xvmaddwev.w.hu" => "__builtin_lasx_xvmaddwev_w_hu", - "llvm.loongarch.lasx.xvmaddwev.w.hu.h" => "__builtin_lasx_xvmaddwev_w_hu_h", - "llvm.loongarch.lasx.xvmaddwod.d.w" => "__builtin_lasx_xvmaddwod_d_w", - "llvm.loongarch.lasx.xvmaddwod.d.wu" => "__builtin_lasx_xvmaddwod_d_wu", - "llvm.loongarch.lasx.xvmaddwod.d.wu.w" => "__builtin_lasx_xvmaddwod_d_wu_w", - "llvm.loongarch.lasx.xvmaddwod.h.b" => "__builtin_lasx_xvmaddwod_h_b", - "llvm.loongarch.lasx.xvmaddwod.h.bu" => "__builtin_lasx_xvmaddwod_h_bu", - "llvm.loongarch.lasx.xvmaddwod.h.bu.b" => "__builtin_lasx_xvmaddwod_h_bu_b", - "llvm.loongarch.lasx.xvmaddwod.q.d" => "__builtin_lasx_xvmaddwod_q_d", - "llvm.loongarch.lasx.xvmaddwod.q.du" => "__builtin_lasx_xvmaddwod_q_du", - "llvm.loongarch.lasx.xvmaddwod.q.du.d" => "__builtin_lasx_xvmaddwod_q_du_d", - "llvm.loongarch.lasx.xvmaddwod.w.h" => "__builtin_lasx_xvmaddwod_w_h", - "llvm.loongarch.lasx.xvmaddwod.w.hu" => "__builtin_lasx_xvmaddwod_w_hu", - "llvm.loongarch.lasx.xvmaddwod.w.hu.h" => "__builtin_lasx_xvmaddwod_w_hu_h", - "llvm.loongarch.lasx.xvmax.b" => "__builtin_lasx_xvmax_b", - "llvm.loongarch.lasx.xvmax.bu" => "__builtin_lasx_xvmax_bu", - "llvm.loongarch.lasx.xvmax.d" => "__builtin_lasx_xvmax_d", - "llvm.loongarch.lasx.xvmax.du" => "__builtin_lasx_xvmax_du", - "llvm.loongarch.lasx.xvmax.h" => "__builtin_lasx_xvmax_h", - "llvm.loongarch.lasx.xvmax.hu" => "__builtin_lasx_xvmax_hu", - "llvm.loongarch.lasx.xvmax.w" => "__builtin_lasx_xvmax_w", - "llvm.loongarch.lasx.xvmax.wu" => "__builtin_lasx_xvmax_wu", - "llvm.loongarch.lasx.xvmaxi.b" => "__builtin_lasx_xvmaxi_b", - "llvm.loongarch.lasx.xvmaxi.bu" => "__builtin_lasx_xvmaxi_bu", - "llvm.loongarch.lasx.xvmaxi.d" => "__builtin_lasx_xvmaxi_d", - "llvm.loongarch.lasx.xvmaxi.du" => "__builtin_lasx_xvmaxi_du", - "llvm.loongarch.lasx.xvmaxi.h" => "__builtin_lasx_xvmaxi_h", - "llvm.loongarch.lasx.xvmaxi.hu" => "__builtin_lasx_xvmaxi_hu", - "llvm.loongarch.lasx.xvmaxi.w" => "__builtin_lasx_xvmaxi_w", - "llvm.loongarch.lasx.xvmaxi.wu" => "__builtin_lasx_xvmaxi_wu", - "llvm.loongarch.lasx.xvmin.b" => "__builtin_lasx_xvmin_b", - "llvm.loongarch.lasx.xvmin.bu" => "__builtin_lasx_xvmin_bu", - "llvm.loongarch.lasx.xvmin.d" => "__builtin_lasx_xvmin_d", - "llvm.loongarch.lasx.xvmin.du" => "__builtin_lasx_xvmin_du", - "llvm.loongarch.lasx.xvmin.h" => 
"__builtin_lasx_xvmin_h", - "llvm.loongarch.lasx.xvmin.hu" => "__builtin_lasx_xvmin_hu", - "llvm.loongarch.lasx.xvmin.w" => "__builtin_lasx_xvmin_w", - "llvm.loongarch.lasx.xvmin.wu" => "__builtin_lasx_xvmin_wu", - "llvm.loongarch.lasx.xvmini.b" => "__builtin_lasx_xvmini_b", - "llvm.loongarch.lasx.xvmini.bu" => "__builtin_lasx_xvmini_bu", - "llvm.loongarch.lasx.xvmini.d" => "__builtin_lasx_xvmini_d", - "llvm.loongarch.lasx.xvmini.du" => "__builtin_lasx_xvmini_du", - "llvm.loongarch.lasx.xvmini.h" => "__builtin_lasx_xvmini_h", - "llvm.loongarch.lasx.xvmini.hu" => "__builtin_lasx_xvmini_hu", - "llvm.loongarch.lasx.xvmini.w" => "__builtin_lasx_xvmini_w", - "llvm.loongarch.lasx.xvmini.wu" => "__builtin_lasx_xvmini_wu", - "llvm.loongarch.lasx.xvmod.b" => "__builtin_lasx_xvmod_b", - "llvm.loongarch.lasx.xvmod.bu" => "__builtin_lasx_xvmod_bu", - "llvm.loongarch.lasx.xvmod.d" => "__builtin_lasx_xvmod_d", - "llvm.loongarch.lasx.xvmod.du" => "__builtin_lasx_xvmod_du", - "llvm.loongarch.lasx.xvmod.h" => "__builtin_lasx_xvmod_h", - "llvm.loongarch.lasx.xvmod.hu" => "__builtin_lasx_xvmod_hu", - "llvm.loongarch.lasx.xvmod.w" => "__builtin_lasx_xvmod_w", - "llvm.loongarch.lasx.xvmod.wu" => "__builtin_lasx_xvmod_wu", - "llvm.loongarch.lasx.xvmskgez.b" => "__builtin_lasx_xvmskgez_b", - "llvm.loongarch.lasx.xvmskltz.b" => "__builtin_lasx_xvmskltz_b", - "llvm.loongarch.lasx.xvmskltz.d" => "__builtin_lasx_xvmskltz_d", - "llvm.loongarch.lasx.xvmskltz.h" => "__builtin_lasx_xvmskltz_h", - "llvm.loongarch.lasx.xvmskltz.w" => "__builtin_lasx_xvmskltz_w", - "llvm.loongarch.lasx.xvmsknz.b" => "__builtin_lasx_xvmsknz_b", - "llvm.loongarch.lasx.xvmsub.b" => "__builtin_lasx_xvmsub_b", - "llvm.loongarch.lasx.xvmsub.d" => "__builtin_lasx_xvmsub_d", - "llvm.loongarch.lasx.xvmsub.h" => "__builtin_lasx_xvmsub_h", - "llvm.loongarch.lasx.xvmsub.w" => "__builtin_lasx_xvmsub_w", - "llvm.loongarch.lasx.xvmuh.b" => "__builtin_lasx_xvmuh_b", - "llvm.loongarch.lasx.xvmuh.bu" => "__builtin_lasx_xvmuh_bu", - "llvm.loongarch.lasx.xvmuh.d" => "__builtin_lasx_xvmuh_d", - "llvm.loongarch.lasx.xvmuh.du" => "__builtin_lasx_xvmuh_du", - "llvm.loongarch.lasx.xvmuh.h" => "__builtin_lasx_xvmuh_h", - "llvm.loongarch.lasx.xvmuh.hu" => "__builtin_lasx_xvmuh_hu", - "llvm.loongarch.lasx.xvmuh.w" => "__builtin_lasx_xvmuh_w", - "llvm.loongarch.lasx.xvmuh.wu" => "__builtin_lasx_xvmuh_wu", - "llvm.loongarch.lasx.xvmul.b" => "__builtin_lasx_xvmul_b", - "llvm.loongarch.lasx.xvmul.d" => "__builtin_lasx_xvmul_d", - "llvm.loongarch.lasx.xvmul.h" => "__builtin_lasx_xvmul_h", - "llvm.loongarch.lasx.xvmul.w" => "__builtin_lasx_xvmul_w", - "llvm.loongarch.lasx.xvmulwev.d.w" => "__builtin_lasx_xvmulwev_d_w", - "llvm.loongarch.lasx.xvmulwev.d.wu" => "__builtin_lasx_xvmulwev_d_wu", - "llvm.loongarch.lasx.xvmulwev.d.wu.w" => "__builtin_lasx_xvmulwev_d_wu_w", - "llvm.loongarch.lasx.xvmulwev.h.b" => "__builtin_lasx_xvmulwev_h_b", - "llvm.loongarch.lasx.xvmulwev.h.bu" => "__builtin_lasx_xvmulwev_h_bu", - "llvm.loongarch.lasx.xvmulwev.h.bu.b" => "__builtin_lasx_xvmulwev_h_bu_b", - "llvm.loongarch.lasx.xvmulwev.q.d" => "__builtin_lasx_xvmulwev_q_d", - "llvm.loongarch.lasx.xvmulwev.q.du" => "__builtin_lasx_xvmulwev_q_du", - "llvm.loongarch.lasx.xvmulwev.q.du.d" => "__builtin_lasx_xvmulwev_q_du_d", - "llvm.loongarch.lasx.xvmulwev.w.h" => "__builtin_lasx_xvmulwev_w_h", - "llvm.loongarch.lasx.xvmulwev.w.hu" => "__builtin_lasx_xvmulwev_w_hu", - "llvm.loongarch.lasx.xvmulwev.w.hu.h" => "__builtin_lasx_xvmulwev_w_hu_h", - "llvm.loongarch.lasx.xvmulwod.d.w" => 
"__builtin_lasx_xvmulwod_d_w", - "llvm.loongarch.lasx.xvmulwod.d.wu" => "__builtin_lasx_xvmulwod_d_wu", - "llvm.loongarch.lasx.xvmulwod.d.wu.w" => "__builtin_lasx_xvmulwod_d_wu_w", - "llvm.loongarch.lasx.xvmulwod.h.b" => "__builtin_lasx_xvmulwod_h_b", - "llvm.loongarch.lasx.xvmulwod.h.bu" => "__builtin_lasx_xvmulwod_h_bu", - "llvm.loongarch.lasx.xvmulwod.h.bu.b" => "__builtin_lasx_xvmulwod_h_bu_b", - "llvm.loongarch.lasx.xvmulwod.q.d" => "__builtin_lasx_xvmulwod_q_d", - "llvm.loongarch.lasx.xvmulwod.q.du" => "__builtin_lasx_xvmulwod_q_du", - "llvm.loongarch.lasx.xvmulwod.q.du.d" => "__builtin_lasx_xvmulwod_q_du_d", - "llvm.loongarch.lasx.xvmulwod.w.h" => "__builtin_lasx_xvmulwod_w_h", - "llvm.loongarch.lasx.xvmulwod.w.hu" => "__builtin_lasx_xvmulwod_w_hu", - "llvm.loongarch.lasx.xvmulwod.w.hu.h" => "__builtin_lasx_xvmulwod_w_hu_h", - "llvm.loongarch.lasx.xvneg.b" => "__builtin_lasx_xvneg_b", - "llvm.loongarch.lasx.xvneg.d" => "__builtin_lasx_xvneg_d", - "llvm.loongarch.lasx.xvneg.h" => "__builtin_lasx_xvneg_h", - "llvm.loongarch.lasx.xvneg.w" => "__builtin_lasx_xvneg_w", - "llvm.loongarch.lasx.xvnor.v" => "__builtin_lasx_xvnor_v", - "llvm.loongarch.lasx.xvnori.b" => "__builtin_lasx_xvnori_b", - "llvm.loongarch.lasx.xvor.v" => "__builtin_lasx_xvor_v", - "llvm.loongarch.lasx.xvori.b" => "__builtin_lasx_xvori_b", - "llvm.loongarch.lasx.xvorn.v" => "__builtin_lasx_xvorn_v", - "llvm.loongarch.lasx.xvpackev.b" => "__builtin_lasx_xvpackev_b", - "llvm.loongarch.lasx.xvpackev.d" => "__builtin_lasx_xvpackev_d", - "llvm.loongarch.lasx.xvpackev.h" => "__builtin_lasx_xvpackev_h", - "llvm.loongarch.lasx.xvpackev.w" => "__builtin_lasx_xvpackev_w", - "llvm.loongarch.lasx.xvpackod.b" => "__builtin_lasx_xvpackod_b", - "llvm.loongarch.lasx.xvpackod.d" => "__builtin_lasx_xvpackod_d", - "llvm.loongarch.lasx.xvpackod.h" => "__builtin_lasx_xvpackod_h", - "llvm.loongarch.lasx.xvpackod.w" => "__builtin_lasx_xvpackod_w", - "llvm.loongarch.lasx.xvpcnt.b" => "__builtin_lasx_xvpcnt_b", - "llvm.loongarch.lasx.xvpcnt.d" => "__builtin_lasx_xvpcnt_d", - "llvm.loongarch.lasx.xvpcnt.h" => "__builtin_lasx_xvpcnt_h", - "llvm.loongarch.lasx.xvpcnt.w" => "__builtin_lasx_xvpcnt_w", - "llvm.loongarch.lasx.xvperm.w" => "__builtin_lasx_xvperm_w", - "llvm.loongarch.lasx.xvpermi.d" => "__builtin_lasx_xvpermi_d", - "llvm.loongarch.lasx.xvpermi.q" => "__builtin_lasx_xvpermi_q", - "llvm.loongarch.lasx.xvpermi.w" => "__builtin_lasx_xvpermi_w", - "llvm.loongarch.lasx.xvpickev.b" => "__builtin_lasx_xvpickev_b", - "llvm.loongarch.lasx.xvpickev.d" => "__builtin_lasx_xvpickev_d", - "llvm.loongarch.lasx.xvpickev.h" => "__builtin_lasx_xvpickev_h", - "llvm.loongarch.lasx.xvpickev.w" => "__builtin_lasx_xvpickev_w", - "llvm.loongarch.lasx.xvpickod.b" => "__builtin_lasx_xvpickod_b", - "llvm.loongarch.lasx.xvpickod.d" => "__builtin_lasx_xvpickod_d", - "llvm.loongarch.lasx.xvpickod.h" => "__builtin_lasx_xvpickod_h", - "llvm.loongarch.lasx.xvpickod.w" => "__builtin_lasx_xvpickod_w", - "llvm.loongarch.lasx.xvpickve.d" => "__builtin_lasx_xvpickve_d", - "llvm.loongarch.lasx.xvpickve.d.f" => "__builtin_lasx_xvpickve_d_f", - "llvm.loongarch.lasx.xvpickve.w" => "__builtin_lasx_xvpickve_w", - "llvm.loongarch.lasx.xvpickve.w.f" => "__builtin_lasx_xvpickve_w_f", - "llvm.loongarch.lasx.xvpickve2gr.d" => "__builtin_lasx_xvpickve2gr_d", - "llvm.loongarch.lasx.xvpickve2gr.du" => "__builtin_lasx_xvpickve2gr_du", - "llvm.loongarch.lasx.xvpickve2gr.w" => "__builtin_lasx_xvpickve2gr_w", - "llvm.loongarch.lasx.xvpickve2gr.wu" => "__builtin_lasx_xvpickve2gr_wu", - 
"llvm.loongarch.lasx.xvrepl128vei.b" => "__builtin_lasx_xvrepl128vei_b", - "llvm.loongarch.lasx.xvrepl128vei.d" => "__builtin_lasx_xvrepl128vei_d", - "llvm.loongarch.lasx.xvrepl128vei.h" => "__builtin_lasx_xvrepl128vei_h", - "llvm.loongarch.lasx.xvrepl128vei.w" => "__builtin_lasx_xvrepl128vei_w", - "llvm.loongarch.lasx.xvreplgr2vr.b" => "__builtin_lasx_xvreplgr2vr_b", - "llvm.loongarch.lasx.xvreplgr2vr.d" => "__builtin_lasx_xvreplgr2vr_d", - "llvm.loongarch.lasx.xvreplgr2vr.h" => "__builtin_lasx_xvreplgr2vr_h", - "llvm.loongarch.lasx.xvreplgr2vr.w" => "__builtin_lasx_xvreplgr2vr_w", - "llvm.loongarch.lasx.xvrepli.b" => "__builtin_lasx_xvrepli_b", - "llvm.loongarch.lasx.xvrepli.d" => "__builtin_lasx_xvrepli_d", - "llvm.loongarch.lasx.xvrepli.h" => "__builtin_lasx_xvrepli_h", - "llvm.loongarch.lasx.xvrepli.w" => "__builtin_lasx_xvrepli_w", - "llvm.loongarch.lasx.xvreplve.b" => "__builtin_lasx_xvreplve_b", - "llvm.loongarch.lasx.xvreplve.d" => "__builtin_lasx_xvreplve_d", - "llvm.loongarch.lasx.xvreplve.h" => "__builtin_lasx_xvreplve_h", - "llvm.loongarch.lasx.xvreplve.w" => "__builtin_lasx_xvreplve_w", - "llvm.loongarch.lasx.xvreplve0.b" => "__builtin_lasx_xvreplve0_b", - "llvm.loongarch.lasx.xvreplve0.d" => "__builtin_lasx_xvreplve0_d", - "llvm.loongarch.lasx.xvreplve0.h" => "__builtin_lasx_xvreplve0_h", - "llvm.loongarch.lasx.xvreplve0.q" => "__builtin_lasx_xvreplve0_q", - "llvm.loongarch.lasx.xvreplve0.w" => "__builtin_lasx_xvreplve0_w", - "llvm.loongarch.lasx.xvrotr.b" => "__builtin_lasx_xvrotr_b", - "llvm.loongarch.lasx.xvrotr.d" => "__builtin_lasx_xvrotr_d", - "llvm.loongarch.lasx.xvrotr.h" => "__builtin_lasx_xvrotr_h", - "llvm.loongarch.lasx.xvrotr.w" => "__builtin_lasx_xvrotr_w", - "llvm.loongarch.lasx.xvrotri.b" => "__builtin_lasx_xvrotri_b", - "llvm.loongarch.lasx.xvrotri.d" => "__builtin_lasx_xvrotri_d", - "llvm.loongarch.lasx.xvrotri.h" => "__builtin_lasx_xvrotri_h", - "llvm.loongarch.lasx.xvrotri.w" => "__builtin_lasx_xvrotri_w", - "llvm.loongarch.lasx.xvsadd.b" => "__builtin_lasx_xvsadd_b", - "llvm.loongarch.lasx.xvsadd.bu" => "__builtin_lasx_xvsadd_bu", - "llvm.loongarch.lasx.xvsadd.d" => "__builtin_lasx_xvsadd_d", - "llvm.loongarch.lasx.xvsadd.du" => "__builtin_lasx_xvsadd_du", - "llvm.loongarch.lasx.xvsadd.h" => "__builtin_lasx_xvsadd_h", - "llvm.loongarch.lasx.xvsadd.hu" => "__builtin_lasx_xvsadd_hu", - "llvm.loongarch.lasx.xvsadd.w" => "__builtin_lasx_xvsadd_w", - "llvm.loongarch.lasx.xvsadd.wu" => "__builtin_lasx_xvsadd_wu", - "llvm.loongarch.lasx.xvsat.b" => "__builtin_lasx_xvsat_b", - "llvm.loongarch.lasx.xvsat.bu" => "__builtin_lasx_xvsat_bu", - "llvm.loongarch.lasx.xvsat.d" => "__builtin_lasx_xvsat_d", - "llvm.loongarch.lasx.xvsat.du" => "__builtin_lasx_xvsat_du", - "llvm.loongarch.lasx.xvsat.h" => "__builtin_lasx_xvsat_h", - "llvm.loongarch.lasx.xvsat.hu" => "__builtin_lasx_xvsat_hu", - "llvm.loongarch.lasx.xvsat.w" => "__builtin_lasx_xvsat_w", - "llvm.loongarch.lasx.xvsat.wu" => "__builtin_lasx_xvsat_wu", - "llvm.loongarch.lasx.xvseq.b" => "__builtin_lasx_xvseq_b", - "llvm.loongarch.lasx.xvseq.d" => "__builtin_lasx_xvseq_d", - "llvm.loongarch.lasx.xvseq.h" => "__builtin_lasx_xvseq_h", - "llvm.loongarch.lasx.xvseq.w" => "__builtin_lasx_xvseq_w", - "llvm.loongarch.lasx.xvseqi.b" => "__builtin_lasx_xvseqi_b", - "llvm.loongarch.lasx.xvseqi.d" => "__builtin_lasx_xvseqi_d", - "llvm.loongarch.lasx.xvseqi.h" => "__builtin_lasx_xvseqi_h", - "llvm.loongarch.lasx.xvseqi.w" => "__builtin_lasx_xvseqi_w", - "llvm.loongarch.lasx.xvshuf.b" => "__builtin_lasx_xvshuf_b", - 
"llvm.loongarch.lasx.xvshuf.d" => "__builtin_lasx_xvshuf_d", - "llvm.loongarch.lasx.xvshuf.h" => "__builtin_lasx_xvshuf_h", - "llvm.loongarch.lasx.xvshuf.w" => "__builtin_lasx_xvshuf_w", - "llvm.loongarch.lasx.xvshuf4i.b" => "__builtin_lasx_xvshuf4i_b", - "llvm.loongarch.lasx.xvshuf4i.d" => "__builtin_lasx_xvshuf4i_d", - "llvm.loongarch.lasx.xvshuf4i.h" => "__builtin_lasx_xvshuf4i_h", - "llvm.loongarch.lasx.xvshuf4i.w" => "__builtin_lasx_xvshuf4i_w", - "llvm.loongarch.lasx.xvsigncov.b" => "__builtin_lasx_xvsigncov_b", - "llvm.loongarch.lasx.xvsigncov.d" => "__builtin_lasx_xvsigncov_d", - "llvm.loongarch.lasx.xvsigncov.h" => "__builtin_lasx_xvsigncov_h", - "llvm.loongarch.lasx.xvsigncov.w" => "__builtin_lasx_xvsigncov_w", - "llvm.loongarch.lasx.xvsle.b" => "__builtin_lasx_xvsle_b", - "llvm.loongarch.lasx.xvsle.bu" => "__builtin_lasx_xvsle_bu", - "llvm.loongarch.lasx.xvsle.d" => "__builtin_lasx_xvsle_d", - "llvm.loongarch.lasx.xvsle.du" => "__builtin_lasx_xvsle_du", - "llvm.loongarch.lasx.xvsle.h" => "__builtin_lasx_xvsle_h", - "llvm.loongarch.lasx.xvsle.hu" => "__builtin_lasx_xvsle_hu", - "llvm.loongarch.lasx.xvsle.w" => "__builtin_lasx_xvsle_w", - "llvm.loongarch.lasx.xvsle.wu" => "__builtin_lasx_xvsle_wu", - "llvm.loongarch.lasx.xvslei.b" => "__builtin_lasx_xvslei_b", - "llvm.loongarch.lasx.xvslei.bu" => "__builtin_lasx_xvslei_bu", - "llvm.loongarch.lasx.xvslei.d" => "__builtin_lasx_xvslei_d", - "llvm.loongarch.lasx.xvslei.du" => "__builtin_lasx_xvslei_du", - "llvm.loongarch.lasx.xvslei.h" => "__builtin_lasx_xvslei_h", - "llvm.loongarch.lasx.xvslei.hu" => "__builtin_lasx_xvslei_hu", - "llvm.loongarch.lasx.xvslei.w" => "__builtin_lasx_xvslei_w", - "llvm.loongarch.lasx.xvslei.wu" => "__builtin_lasx_xvslei_wu", - "llvm.loongarch.lasx.xvsll.b" => "__builtin_lasx_xvsll_b", - "llvm.loongarch.lasx.xvsll.d" => "__builtin_lasx_xvsll_d", - "llvm.loongarch.lasx.xvsll.h" => "__builtin_lasx_xvsll_h", - "llvm.loongarch.lasx.xvsll.w" => "__builtin_lasx_xvsll_w", - "llvm.loongarch.lasx.xvslli.b" => "__builtin_lasx_xvslli_b", - "llvm.loongarch.lasx.xvslli.d" => "__builtin_lasx_xvslli_d", - "llvm.loongarch.lasx.xvslli.h" => "__builtin_lasx_xvslli_h", - "llvm.loongarch.lasx.xvslli.w" => "__builtin_lasx_xvslli_w", - "llvm.loongarch.lasx.xvsllwil.d.w" => "__builtin_lasx_xvsllwil_d_w", - "llvm.loongarch.lasx.xvsllwil.du.wu" => "__builtin_lasx_xvsllwil_du_wu", - "llvm.loongarch.lasx.xvsllwil.h.b" => "__builtin_lasx_xvsllwil_h_b", - "llvm.loongarch.lasx.xvsllwil.hu.bu" => "__builtin_lasx_xvsllwil_hu_bu", - "llvm.loongarch.lasx.xvsllwil.w.h" => "__builtin_lasx_xvsllwil_w_h", - "llvm.loongarch.lasx.xvsllwil.wu.hu" => "__builtin_lasx_xvsllwil_wu_hu", - "llvm.loongarch.lasx.xvslt.b" => "__builtin_lasx_xvslt_b", - "llvm.loongarch.lasx.xvslt.bu" => "__builtin_lasx_xvslt_bu", - "llvm.loongarch.lasx.xvslt.d" => "__builtin_lasx_xvslt_d", - "llvm.loongarch.lasx.xvslt.du" => "__builtin_lasx_xvslt_du", - "llvm.loongarch.lasx.xvslt.h" => "__builtin_lasx_xvslt_h", - "llvm.loongarch.lasx.xvslt.hu" => "__builtin_lasx_xvslt_hu", - "llvm.loongarch.lasx.xvslt.w" => "__builtin_lasx_xvslt_w", - "llvm.loongarch.lasx.xvslt.wu" => "__builtin_lasx_xvslt_wu", - "llvm.loongarch.lasx.xvslti.b" => "__builtin_lasx_xvslti_b", - "llvm.loongarch.lasx.xvslti.bu" => "__builtin_lasx_xvslti_bu", - "llvm.loongarch.lasx.xvslti.d" => "__builtin_lasx_xvslti_d", - "llvm.loongarch.lasx.xvslti.du" => "__builtin_lasx_xvslti_du", - "llvm.loongarch.lasx.xvslti.h" => "__builtin_lasx_xvslti_h", - "llvm.loongarch.lasx.xvslti.hu" => "__builtin_lasx_xvslti_hu", - 
"llvm.loongarch.lasx.xvslti.w" => "__builtin_lasx_xvslti_w", - "llvm.loongarch.lasx.xvslti.wu" => "__builtin_lasx_xvslti_wu", - "llvm.loongarch.lasx.xvsra.b" => "__builtin_lasx_xvsra_b", - "llvm.loongarch.lasx.xvsra.d" => "__builtin_lasx_xvsra_d", - "llvm.loongarch.lasx.xvsra.h" => "__builtin_lasx_xvsra_h", - "llvm.loongarch.lasx.xvsra.w" => "__builtin_lasx_xvsra_w", - "llvm.loongarch.lasx.xvsrai.b" => "__builtin_lasx_xvsrai_b", - "llvm.loongarch.lasx.xvsrai.d" => "__builtin_lasx_xvsrai_d", - "llvm.loongarch.lasx.xvsrai.h" => "__builtin_lasx_xvsrai_h", - "llvm.loongarch.lasx.xvsrai.w" => "__builtin_lasx_xvsrai_w", - "llvm.loongarch.lasx.xvsran.b.h" => "__builtin_lasx_xvsran_b_h", - "llvm.loongarch.lasx.xvsran.h.w" => "__builtin_lasx_xvsran_h_w", - "llvm.loongarch.lasx.xvsran.w.d" => "__builtin_lasx_xvsran_w_d", - "llvm.loongarch.lasx.xvsrani.b.h" => "__builtin_lasx_xvsrani_b_h", - "llvm.loongarch.lasx.xvsrani.d.q" => "__builtin_lasx_xvsrani_d_q", - "llvm.loongarch.lasx.xvsrani.h.w" => "__builtin_lasx_xvsrani_h_w", - "llvm.loongarch.lasx.xvsrani.w.d" => "__builtin_lasx_xvsrani_w_d", - "llvm.loongarch.lasx.xvsrar.b" => "__builtin_lasx_xvsrar_b", - "llvm.loongarch.lasx.xvsrar.d" => "__builtin_lasx_xvsrar_d", - "llvm.loongarch.lasx.xvsrar.h" => "__builtin_lasx_xvsrar_h", - "llvm.loongarch.lasx.xvsrar.w" => "__builtin_lasx_xvsrar_w", - "llvm.loongarch.lasx.xvsrari.b" => "__builtin_lasx_xvsrari_b", - "llvm.loongarch.lasx.xvsrari.d" => "__builtin_lasx_xvsrari_d", - "llvm.loongarch.lasx.xvsrari.h" => "__builtin_lasx_xvsrari_h", - "llvm.loongarch.lasx.xvsrari.w" => "__builtin_lasx_xvsrari_w", - "llvm.loongarch.lasx.xvsrarn.b.h" => "__builtin_lasx_xvsrarn_b_h", - "llvm.loongarch.lasx.xvsrarn.h.w" => "__builtin_lasx_xvsrarn_h_w", - "llvm.loongarch.lasx.xvsrarn.w.d" => "__builtin_lasx_xvsrarn_w_d", - "llvm.loongarch.lasx.xvsrarni.b.h" => "__builtin_lasx_xvsrarni_b_h", - "llvm.loongarch.lasx.xvsrarni.d.q" => "__builtin_lasx_xvsrarni_d_q", - "llvm.loongarch.lasx.xvsrarni.h.w" => "__builtin_lasx_xvsrarni_h_w", - "llvm.loongarch.lasx.xvsrarni.w.d" => "__builtin_lasx_xvsrarni_w_d", - "llvm.loongarch.lasx.xvsrl.b" => "__builtin_lasx_xvsrl_b", - "llvm.loongarch.lasx.xvsrl.d" => "__builtin_lasx_xvsrl_d", - "llvm.loongarch.lasx.xvsrl.h" => "__builtin_lasx_xvsrl_h", - "llvm.loongarch.lasx.xvsrl.w" => "__builtin_lasx_xvsrl_w", - "llvm.loongarch.lasx.xvsrli.b" => "__builtin_lasx_xvsrli_b", - "llvm.loongarch.lasx.xvsrli.d" => "__builtin_lasx_xvsrli_d", - "llvm.loongarch.lasx.xvsrli.h" => "__builtin_lasx_xvsrli_h", - "llvm.loongarch.lasx.xvsrli.w" => "__builtin_lasx_xvsrli_w", - "llvm.loongarch.lasx.xvsrln.b.h" => "__builtin_lasx_xvsrln_b_h", - "llvm.loongarch.lasx.xvsrln.h.w" => "__builtin_lasx_xvsrln_h_w", - "llvm.loongarch.lasx.xvsrln.w.d" => "__builtin_lasx_xvsrln_w_d", - "llvm.loongarch.lasx.xvsrlni.b.h" => "__builtin_lasx_xvsrlni_b_h", - "llvm.loongarch.lasx.xvsrlni.d.q" => "__builtin_lasx_xvsrlni_d_q", - "llvm.loongarch.lasx.xvsrlni.h.w" => "__builtin_lasx_xvsrlni_h_w", - "llvm.loongarch.lasx.xvsrlni.w.d" => "__builtin_lasx_xvsrlni_w_d", - "llvm.loongarch.lasx.xvsrlr.b" => "__builtin_lasx_xvsrlr_b", - "llvm.loongarch.lasx.xvsrlr.d" => "__builtin_lasx_xvsrlr_d", - "llvm.loongarch.lasx.xvsrlr.h" => "__builtin_lasx_xvsrlr_h", - "llvm.loongarch.lasx.xvsrlr.w" => "__builtin_lasx_xvsrlr_w", - "llvm.loongarch.lasx.xvsrlri.b" => "__builtin_lasx_xvsrlri_b", - "llvm.loongarch.lasx.xvsrlri.d" => "__builtin_lasx_xvsrlri_d", - "llvm.loongarch.lasx.xvsrlri.h" => "__builtin_lasx_xvsrlri_h", - 
"llvm.loongarch.lasx.xvsrlri.w" => "__builtin_lasx_xvsrlri_w", - "llvm.loongarch.lasx.xvsrlrn.b.h" => "__builtin_lasx_xvsrlrn_b_h", - "llvm.loongarch.lasx.xvsrlrn.h.w" => "__builtin_lasx_xvsrlrn_h_w", - "llvm.loongarch.lasx.xvsrlrn.w.d" => "__builtin_lasx_xvsrlrn_w_d", - "llvm.loongarch.lasx.xvsrlrni.b.h" => "__builtin_lasx_xvsrlrni_b_h", - "llvm.loongarch.lasx.xvsrlrni.d.q" => "__builtin_lasx_xvsrlrni_d_q", - "llvm.loongarch.lasx.xvsrlrni.h.w" => "__builtin_lasx_xvsrlrni_h_w", - "llvm.loongarch.lasx.xvsrlrni.w.d" => "__builtin_lasx_xvsrlrni_w_d", - "llvm.loongarch.lasx.xvssran.b.h" => "__builtin_lasx_xvssran_b_h", - "llvm.loongarch.lasx.xvssran.bu.h" => "__builtin_lasx_xvssran_bu_h", - "llvm.loongarch.lasx.xvssran.h.w" => "__builtin_lasx_xvssran_h_w", - "llvm.loongarch.lasx.xvssran.hu.w" => "__builtin_lasx_xvssran_hu_w", - "llvm.loongarch.lasx.xvssran.w.d" => "__builtin_lasx_xvssran_w_d", - "llvm.loongarch.lasx.xvssran.wu.d" => "__builtin_lasx_xvssran_wu_d", - "llvm.loongarch.lasx.xvssrani.b.h" => "__builtin_lasx_xvssrani_b_h", - "llvm.loongarch.lasx.xvssrani.bu.h" => "__builtin_lasx_xvssrani_bu_h", - "llvm.loongarch.lasx.xvssrani.d.q" => "__builtin_lasx_xvssrani_d_q", - "llvm.loongarch.lasx.xvssrani.du.q" => "__builtin_lasx_xvssrani_du_q", - "llvm.loongarch.lasx.xvssrani.h.w" => "__builtin_lasx_xvssrani_h_w", - "llvm.loongarch.lasx.xvssrani.hu.w" => "__builtin_lasx_xvssrani_hu_w", - "llvm.loongarch.lasx.xvssrani.w.d" => "__builtin_lasx_xvssrani_w_d", - "llvm.loongarch.lasx.xvssrani.wu.d" => "__builtin_lasx_xvssrani_wu_d", - "llvm.loongarch.lasx.xvssrarn.b.h" => "__builtin_lasx_xvssrarn_b_h", - "llvm.loongarch.lasx.xvssrarn.bu.h" => "__builtin_lasx_xvssrarn_bu_h", - "llvm.loongarch.lasx.xvssrarn.h.w" => "__builtin_lasx_xvssrarn_h_w", - "llvm.loongarch.lasx.xvssrarn.hu.w" => "__builtin_lasx_xvssrarn_hu_w", - "llvm.loongarch.lasx.xvssrarn.w.d" => "__builtin_lasx_xvssrarn_w_d", - "llvm.loongarch.lasx.xvssrarn.wu.d" => "__builtin_lasx_xvssrarn_wu_d", - "llvm.loongarch.lasx.xvssrarni.b.h" => "__builtin_lasx_xvssrarni_b_h", - "llvm.loongarch.lasx.xvssrarni.bu.h" => "__builtin_lasx_xvssrarni_bu_h", - "llvm.loongarch.lasx.xvssrarni.d.q" => "__builtin_lasx_xvssrarni_d_q", - "llvm.loongarch.lasx.xvssrarni.du.q" => "__builtin_lasx_xvssrarni_du_q", - "llvm.loongarch.lasx.xvssrarni.h.w" => "__builtin_lasx_xvssrarni_h_w", - "llvm.loongarch.lasx.xvssrarni.hu.w" => "__builtin_lasx_xvssrarni_hu_w", - "llvm.loongarch.lasx.xvssrarni.w.d" => "__builtin_lasx_xvssrarni_w_d", - "llvm.loongarch.lasx.xvssrarni.wu.d" => "__builtin_lasx_xvssrarni_wu_d", - "llvm.loongarch.lasx.xvssrln.b.h" => "__builtin_lasx_xvssrln_b_h", - "llvm.loongarch.lasx.xvssrln.bu.h" => "__builtin_lasx_xvssrln_bu_h", - "llvm.loongarch.lasx.xvssrln.h.w" => "__builtin_lasx_xvssrln_h_w", - "llvm.loongarch.lasx.xvssrln.hu.w" => "__builtin_lasx_xvssrln_hu_w", - "llvm.loongarch.lasx.xvssrln.w.d" => "__builtin_lasx_xvssrln_w_d", - "llvm.loongarch.lasx.xvssrln.wu.d" => "__builtin_lasx_xvssrln_wu_d", - "llvm.loongarch.lasx.xvssrlni.b.h" => "__builtin_lasx_xvssrlni_b_h", - "llvm.loongarch.lasx.xvssrlni.bu.h" => "__builtin_lasx_xvssrlni_bu_h", - "llvm.loongarch.lasx.xvssrlni.d.q" => "__builtin_lasx_xvssrlni_d_q", - "llvm.loongarch.lasx.xvssrlni.du.q" => "__builtin_lasx_xvssrlni_du_q", - "llvm.loongarch.lasx.xvssrlni.h.w" => "__builtin_lasx_xvssrlni_h_w", - "llvm.loongarch.lasx.xvssrlni.hu.w" => "__builtin_lasx_xvssrlni_hu_w", - "llvm.loongarch.lasx.xvssrlni.w.d" => "__builtin_lasx_xvssrlni_w_d", - "llvm.loongarch.lasx.xvssrlni.wu.d" => 
"__builtin_lasx_xvssrlni_wu_d", - "llvm.loongarch.lasx.xvssrlrn.b.h" => "__builtin_lasx_xvssrlrn_b_h", - "llvm.loongarch.lasx.xvssrlrn.bu.h" => "__builtin_lasx_xvssrlrn_bu_h", - "llvm.loongarch.lasx.xvssrlrn.h.w" => "__builtin_lasx_xvssrlrn_h_w", - "llvm.loongarch.lasx.xvssrlrn.hu.w" => "__builtin_lasx_xvssrlrn_hu_w", - "llvm.loongarch.lasx.xvssrlrn.w.d" => "__builtin_lasx_xvssrlrn_w_d", - "llvm.loongarch.lasx.xvssrlrn.wu.d" => "__builtin_lasx_xvssrlrn_wu_d", - "llvm.loongarch.lasx.xvssrlrni.b.h" => "__builtin_lasx_xvssrlrni_b_h", - "llvm.loongarch.lasx.xvssrlrni.bu.h" => "__builtin_lasx_xvssrlrni_bu_h", - "llvm.loongarch.lasx.xvssrlrni.d.q" => "__builtin_lasx_xvssrlrni_d_q", - "llvm.loongarch.lasx.xvssrlrni.du.q" => "__builtin_lasx_xvssrlrni_du_q", - "llvm.loongarch.lasx.xvssrlrni.h.w" => "__builtin_lasx_xvssrlrni_h_w", - "llvm.loongarch.lasx.xvssrlrni.hu.w" => "__builtin_lasx_xvssrlrni_hu_w", - "llvm.loongarch.lasx.xvssrlrni.w.d" => "__builtin_lasx_xvssrlrni_w_d", - "llvm.loongarch.lasx.xvssrlrni.wu.d" => "__builtin_lasx_xvssrlrni_wu_d", - "llvm.loongarch.lasx.xvssub.b" => "__builtin_lasx_xvssub_b", - "llvm.loongarch.lasx.xvssub.bu" => "__builtin_lasx_xvssub_bu", - "llvm.loongarch.lasx.xvssub.d" => "__builtin_lasx_xvssub_d", - "llvm.loongarch.lasx.xvssub.du" => "__builtin_lasx_xvssub_du", - "llvm.loongarch.lasx.xvssub.h" => "__builtin_lasx_xvssub_h", - "llvm.loongarch.lasx.xvssub.hu" => "__builtin_lasx_xvssub_hu", - "llvm.loongarch.lasx.xvssub.w" => "__builtin_lasx_xvssub_w", - "llvm.loongarch.lasx.xvssub.wu" => "__builtin_lasx_xvssub_wu", - "llvm.loongarch.lasx.xvst" => "__builtin_lasx_xvst", - "llvm.loongarch.lasx.xvstelm.b" => "__builtin_lasx_xvstelm_b", - "llvm.loongarch.lasx.xvstelm.d" => "__builtin_lasx_xvstelm_d", - "llvm.loongarch.lasx.xvstelm.h" => "__builtin_lasx_xvstelm_h", - "llvm.loongarch.lasx.xvstelm.w" => "__builtin_lasx_xvstelm_w", - "llvm.loongarch.lasx.xvstx" => "__builtin_lasx_xvstx", - "llvm.loongarch.lasx.xvsub.b" => "__builtin_lasx_xvsub_b", - "llvm.loongarch.lasx.xvsub.d" => "__builtin_lasx_xvsub_d", - "llvm.loongarch.lasx.xvsub.h" => "__builtin_lasx_xvsub_h", - "llvm.loongarch.lasx.xvsub.q" => "__builtin_lasx_xvsub_q", - "llvm.loongarch.lasx.xvsub.w" => "__builtin_lasx_xvsub_w", - "llvm.loongarch.lasx.xvsubi.bu" => "__builtin_lasx_xvsubi_bu", - "llvm.loongarch.lasx.xvsubi.du" => "__builtin_lasx_xvsubi_du", - "llvm.loongarch.lasx.xvsubi.hu" => "__builtin_lasx_xvsubi_hu", - "llvm.loongarch.lasx.xvsubi.wu" => "__builtin_lasx_xvsubi_wu", - "llvm.loongarch.lasx.xvsubwev.d.w" => "__builtin_lasx_xvsubwev_d_w", - "llvm.loongarch.lasx.xvsubwev.d.wu" => "__builtin_lasx_xvsubwev_d_wu", - "llvm.loongarch.lasx.xvsubwev.h.b" => "__builtin_lasx_xvsubwev_h_b", - "llvm.loongarch.lasx.xvsubwev.h.bu" => "__builtin_lasx_xvsubwev_h_bu", - "llvm.loongarch.lasx.xvsubwev.q.d" => "__builtin_lasx_xvsubwev_q_d", - "llvm.loongarch.lasx.xvsubwev.q.du" => "__builtin_lasx_xvsubwev_q_du", - "llvm.loongarch.lasx.xvsubwev.w.h" => "__builtin_lasx_xvsubwev_w_h", - "llvm.loongarch.lasx.xvsubwev.w.hu" => "__builtin_lasx_xvsubwev_w_hu", - "llvm.loongarch.lasx.xvsubwod.d.w" => "__builtin_lasx_xvsubwod_d_w", - "llvm.loongarch.lasx.xvsubwod.d.wu" => "__builtin_lasx_xvsubwod_d_wu", - "llvm.loongarch.lasx.xvsubwod.h.b" => "__builtin_lasx_xvsubwod_h_b", - "llvm.loongarch.lasx.xvsubwod.h.bu" => "__builtin_lasx_xvsubwod_h_bu", - "llvm.loongarch.lasx.xvsubwod.q.d" => "__builtin_lasx_xvsubwod_q_d", - "llvm.loongarch.lasx.xvsubwod.q.du" => "__builtin_lasx_xvsubwod_q_du", - "llvm.loongarch.lasx.xvsubwod.w.h" => 
"__builtin_lasx_xvsubwod_w_h", - "llvm.loongarch.lasx.xvsubwod.w.hu" => "__builtin_lasx_xvsubwod_w_hu", - "llvm.loongarch.lasx.xvxor.v" => "__builtin_lasx_xvxor_v", - "llvm.loongarch.lasx.xvxori.b" => "__builtin_lasx_xvxori_b", - "llvm.loongarch.lddir.d" => "__builtin_loongarch_lddir_d", - "llvm.loongarch.ldpte.d" => "__builtin_loongarch_ldpte_d", - "llvm.loongarch.lsx.bnz.b" => "__builtin_lsx_bnz_b", - "llvm.loongarch.lsx.bnz.d" => "__builtin_lsx_bnz_d", - "llvm.loongarch.lsx.bnz.h" => "__builtin_lsx_bnz_h", - "llvm.loongarch.lsx.bnz.v" => "__builtin_lsx_bnz_v", - "llvm.loongarch.lsx.bnz.w" => "__builtin_lsx_bnz_w", - "llvm.loongarch.lsx.bz.b" => "__builtin_lsx_bz_b", - "llvm.loongarch.lsx.bz.d" => "__builtin_lsx_bz_d", - "llvm.loongarch.lsx.bz.h" => "__builtin_lsx_bz_h", - "llvm.loongarch.lsx.bz.v" => "__builtin_lsx_bz_v", - "llvm.loongarch.lsx.bz.w" => "__builtin_lsx_bz_w", - "llvm.loongarch.lsx.vabsd.b" => "__builtin_lsx_vabsd_b", - "llvm.loongarch.lsx.vabsd.bu" => "__builtin_lsx_vabsd_bu", - "llvm.loongarch.lsx.vabsd.d" => "__builtin_lsx_vabsd_d", - "llvm.loongarch.lsx.vabsd.du" => "__builtin_lsx_vabsd_du", - "llvm.loongarch.lsx.vabsd.h" => "__builtin_lsx_vabsd_h", - "llvm.loongarch.lsx.vabsd.hu" => "__builtin_lsx_vabsd_hu", - "llvm.loongarch.lsx.vabsd.w" => "__builtin_lsx_vabsd_w", - "llvm.loongarch.lsx.vabsd.wu" => "__builtin_lsx_vabsd_wu", - "llvm.loongarch.lsx.vadd.b" => "__builtin_lsx_vadd_b", - "llvm.loongarch.lsx.vadd.d" => "__builtin_lsx_vadd_d", - "llvm.loongarch.lsx.vadd.h" => "__builtin_lsx_vadd_h", - "llvm.loongarch.lsx.vadd.q" => "__builtin_lsx_vadd_q", - "llvm.loongarch.lsx.vadd.w" => "__builtin_lsx_vadd_w", - "llvm.loongarch.lsx.vadda.b" => "__builtin_lsx_vadda_b", - "llvm.loongarch.lsx.vadda.d" => "__builtin_lsx_vadda_d", - "llvm.loongarch.lsx.vadda.h" => "__builtin_lsx_vadda_h", - "llvm.loongarch.lsx.vadda.w" => "__builtin_lsx_vadda_w", - "llvm.loongarch.lsx.vaddi.bu" => "__builtin_lsx_vaddi_bu", - "llvm.loongarch.lsx.vaddi.du" => "__builtin_lsx_vaddi_du", - "llvm.loongarch.lsx.vaddi.hu" => "__builtin_lsx_vaddi_hu", - "llvm.loongarch.lsx.vaddi.wu" => "__builtin_lsx_vaddi_wu", - "llvm.loongarch.lsx.vaddwev.d.w" => "__builtin_lsx_vaddwev_d_w", - "llvm.loongarch.lsx.vaddwev.d.wu" => "__builtin_lsx_vaddwev_d_wu", - "llvm.loongarch.lsx.vaddwev.d.wu.w" => "__builtin_lsx_vaddwev_d_wu_w", - "llvm.loongarch.lsx.vaddwev.h.b" => "__builtin_lsx_vaddwev_h_b", - "llvm.loongarch.lsx.vaddwev.h.bu" => "__builtin_lsx_vaddwev_h_bu", - "llvm.loongarch.lsx.vaddwev.h.bu.b" => "__builtin_lsx_vaddwev_h_bu_b", - "llvm.loongarch.lsx.vaddwev.q.d" => "__builtin_lsx_vaddwev_q_d", - "llvm.loongarch.lsx.vaddwev.q.du" => "__builtin_lsx_vaddwev_q_du", - "llvm.loongarch.lsx.vaddwev.q.du.d" => "__builtin_lsx_vaddwev_q_du_d", - "llvm.loongarch.lsx.vaddwev.w.h" => "__builtin_lsx_vaddwev_w_h", - "llvm.loongarch.lsx.vaddwev.w.hu" => "__builtin_lsx_vaddwev_w_hu", - "llvm.loongarch.lsx.vaddwev.w.hu.h" => "__builtin_lsx_vaddwev_w_hu_h", - "llvm.loongarch.lsx.vaddwod.d.w" => "__builtin_lsx_vaddwod_d_w", - "llvm.loongarch.lsx.vaddwod.d.wu" => "__builtin_lsx_vaddwod_d_wu", - "llvm.loongarch.lsx.vaddwod.d.wu.w" => "__builtin_lsx_vaddwod_d_wu_w", - "llvm.loongarch.lsx.vaddwod.h.b" => "__builtin_lsx_vaddwod_h_b", - "llvm.loongarch.lsx.vaddwod.h.bu" => "__builtin_lsx_vaddwod_h_bu", - "llvm.loongarch.lsx.vaddwod.h.bu.b" => "__builtin_lsx_vaddwod_h_bu_b", - "llvm.loongarch.lsx.vaddwod.q.d" => "__builtin_lsx_vaddwod_q_d", - "llvm.loongarch.lsx.vaddwod.q.du" => "__builtin_lsx_vaddwod_q_du", - 
"llvm.loongarch.lsx.vaddwod.q.du.d" => "__builtin_lsx_vaddwod_q_du_d", - "llvm.loongarch.lsx.vaddwod.w.h" => "__builtin_lsx_vaddwod_w_h", - "llvm.loongarch.lsx.vaddwod.w.hu" => "__builtin_lsx_vaddwod_w_hu", - "llvm.loongarch.lsx.vaddwod.w.hu.h" => "__builtin_lsx_vaddwod_w_hu_h", - "llvm.loongarch.lsx.vand.v" => "__builtin_lsx_vand_v", - "llvm.loongarch.lsx.vandi.b" => "__builtin_lsx_vandi_b", - "llvm.loongarch.lsx.vandn.v" => "__builtin_lsx_vandn_v", - "llvm.loongarch.lsx.vavg.b" => "__builtin_lsx_vavg_b", - "llvm.loongarch.lsx.vavg.bu" => "__builtin_lsx_vavg_bu", - "llvm.loongarch.lsx.vavg.d" => "__builtin_lsx_vavg_d", - "llvm.loongarch.lsx.vavg.du" => "__builtin_lsx_vavg_du", - "llvm.loongarch.lsx.vavg.h" => "__builtin_lsx_vavg_h", - "llvm.loongarch.lsx.vavg.hu" => "__builtin_lsx_vavg_hu", - "llvm.loongarch.lsx.vavg.w" => "__builtin_lsx_vavg_w", - "llvm.loongarch.lsx.vavg.wu" => "__builtin_lsx_vavg_wu", - "llvm.loongarch.lsx.vavgr.b" => "__builtin_lsx_vavgr_b", - "llvm.loongarch.lsx.vavgr.bu" => "__builtin_lsx_vavgr_bu", - "llvm.loongarch.lsx.vavgr.d" => "__builtin_lsx_vavgr_d", - "llvm.loongarch.lsx.vavgr.du" => "__builtin_lsx_vavgr_du", - "llvm.loongarch.lsx.vavgr.h" => "__builtin_lsx_vavgr_h", - "llvm.loongarch.lsx.vavgr.hu" => "__builtin_lsx_vavgr_hu", - "llvm.loongarch.lsx.vavgr.w" => "__builtin_lsx_vavgr_w", - "llvm.loongarch.lsx.vavgr.wu" => "__builtin_lsx_vavgr_wu", - "llvm.loongarch.lsx.vbitclr.b" => "__builtin_lsx_vbitclr_b", - "llvm.loongarch.lsx.vbitclr.d" => "__builtin_lsx_vbitclr_d", - "llvm.loongarch.lsx.vbitclr.h" => "__builtin_lsx_vbitclr_h", - "llvm.loongarch.lsx.vbitclr.w" => "__builtin_lsx_vbitclr_w", - "llvm.loongarch.lsx.vbitclri.b" => "__builtin_lsx_vbitclri_b", - "llvm.loongarch.lsx.vbitclri.d" => "__builtin_lsx_vbitclri_d", - "llvm.loongarch.lsx.vbitclri.h" => "__builtin_lsx_vbitclri_h", - "llvm.loongarch.lsx.vbitclri.w" => "__builtin_lsx_vbitclri_w", - "llvm.loongarch.lsx.vbitrev.b" => "__builtin_lsx_vbitrev_b", - "llvm.loongarch.lsx.vbitrev.d" => "__builtin_lsx_vbitrev_d", - "llvm.loongarch.lsx.vbitrev.h" => "__builtin_lsx_vbitrev_h", - "llvm.loongarch.lsx.vbitrev.w" => "__builtin_lsx_vbitrev_w", - "llvm.loongarch.lsx.vbitrevi.b" => "__builtin_lsx_vbitrevi_b", - "llvm.loongarch.lsx.vbitrevi.d" => "__builtin_lsx_vbitrevi_d", - "llvm.loongarch.lsx.vbitrevi.h" => "__builtin_lsx_vbitrevi_h", - "llvm.loongarch.lsx.vbitrevi.w" => "__builtin_lsx_vbitrevi_w", - "llvm.loongarch.lsx.vbitsel.v" => "__builtin_lsx_vbitsel_v", - "llvm.loongarch.lsx.vbitseli.b" => "__builtin_lsx_vbitseli_b", - "llvm.loongarch.lsx.vbitset.b" => "__builtin_lsx_vbitset_b", - "llvm.loongarch.lsx.vbitset.d" => "__builtin_lsx_vbitset_d", - "llvm.loongarch.lsx.vbitset.h" => "__builtin_lsx_vbitset_h", - "llvm.loongarch.lsx.vbitset.w" => "__builtin_lsx_vbitset_w", - "llvm.loongarch.lsx.vbitseti.b" => "__builtin_lsx_vbitseti_b", - "llvm.loongarch.lsx.vbitseti.d" => "__builtin_lsx_vbitseti_d", - "llvm.loongarch.lsx.vbitseti.h" => "__builtin_lsx_vbitseti_h", - "llvm.loongarch.lsx.vbitseti.w" => "__builtin_lsx_vbitseti_w", - "llvm.loongarch.lsx.vbsll.v" => "__builtin_lsx_vbsll_v", - "llvm.loongarch.lsx.vbsrl.v" => "__builtin_lsx_vbsrl_v", - "llvm.loongarch.lsx.vclo.b" => "__builtin_lsx_vclo_b", - "llvm.loongarch.lsx.vclo.d" => "__builtin_lsx_vclo_d", - "llvm.loongarch.lsx.vclo.h" => "__builtin_lsx_vclo_h", - "llvm.loongarch.lsx.vclo.w" => "__builtin_lsx_vclo_w", - "llvm.loongarch.lsx.vclz.b" => "__builtin_lsx_vclz_b", - "llvm.loongarch.lsx.vclz.d" => "__builtin_lsx_vclz_d", - "llvm.loongarch.lsx.vclz.h" 
=> "__builtin_lsx_vclz_h", - "llvm.loongarch.lsx.vclz.w" => "__builtin_lsx_vclz_w", - "llvm.loongarch.lsx.vdiv.b" => "__builtin_lsx_vdiv_b", - "llvm.loongarch.lsx.vdiv.bu" => "__builtin_lsx_vdiv_bu", - "llvm.loongarch.lsx.vdiv.d" => "__builtin_lsx_vdiv_d", - "llvm.loongarch.lsx.vdiv.du" => "__builtin_lsx_vdiv_du", - "llvm.loongarch.lsx.vdiv.h" => "__builtin_lsx_vdiv_h", - "llvm.loongarch.lsx.vdiv.hu" => "__builtin_lsx_vdiv_hu", - "llvm.loongarch.lsx.vdiv.w" => "__builtin_lsx_vdiv_w", - "llvm.loongarch.lsx.vdiv.wu" => "__builtin_lsx_vdiv_wu", - "llvm.loongarch.lsx.vexth.d.w" => "__builtin_lsx_vexth_d_w", - "llvm.loongarch.lsx.vexth.du.wu" => "__builtin_lsx_vexth_du_wu", - "llvm.loongarch.lsx.vexth.h.b" => "__builtin_lsx_vexth_h_b", - "llvm.loongarch.lsx.vexth.hu.bu" => "__builtin_lsx_vexth_hu_bu", - "llvm.loongarch.lsx.vexth.q.d" => "__builtin_lsx_vexth_q_d", - "llvm.loongarch.lsx.vexth.qu.du" => "__builtin_lsx_vexth_qu_du", - "llvm.loongarch.lsx.vexth.w.h" => "__builtin_lsx_vexth_w_h", - "llvm.loongarch.lsx.vexth.wu.hu" => "__builtin_lsx_vexth_wu_hu", - "llvm.loongarch.lsx.vextl.q.d" => "__builtin_lsx_vextl_q_d", - "llvm.loongarch.lsx.vextl.qu.du" => "__builtin_lsx_vextl_qu_du", - "llvm.loongarch.lsx.vextrins.b" => "__builtin_lsx_vextrins_b", - "llvm.loongarch.lsx.vextrins.d" => "__builtin_lsx_vextrins_d", - "llvm.loongarch.lsx.vextrins.h" => "__builtin_lsx_vextrins_h", - "llvm.loongarch.lsx.vextrins.w" => "__builtin_lsx_vextrins_w", - "llvm.loongarch.lsx.vfadd.d" => "__builtin_lsx_vfadd_d", - "llvm.loongarch.lsx.vfadd.s" => "__builtin_lsx_vfadd_s", - "llvm.loongarch.lsx.vfclass.d" => "__builtin_lsx_vfclass_d", - "llvm.loongarch.lsx.vfclass.s" => "__builtin_lsx_vfclass_s", - "llvm.loongarch.lsx.vfcmp.caf.d" => "__builtin_lsx_vfcmp_caf_d", - "llvm.loongarch.lsx.vfcmp.caf.s" => "__builtin_lsx_vfcmp_caf_s", - "llvm.loongarch.lsx.vfcmp.ceq.d" => "__builtin_lsx_vfcmp_ceq_d", - "llvm.loongarch.lsx.vfcmp.ceq.s" => "__builtin_lsx_vfcmp_ceq_s", - "llvm.loongarch.lsx.vfcmp.cle.d" => "__builtin_lsx_vfcmp_cle_d", - "llvm.loongarch.lsx.vfcmp.cle.s" => "__builtin_lsx_vfcmp_cle_s", - "llvm.loongarch.lsx.vfcmp.clt.d" => "__builtin_lsx_vfcmp_clt_d", - "llvm.loongarch.lsx.vfcmp.clt.s" => "__builtin_lsx_vfcmp_clt_s", - "llvm.loongarch.lsx.vfcmp.cne.d" => "__builtin_lsx_vfcmp_cne_d", - "llvm.loongarch.lsx.vfcmp.cne.s" => "__builtin_lsx_vfcmp_cne_s", - "llvm.loongarch.lsx.vfcmp.cor.d" => "__builtin_lsx_vfcmp_cor_d", - "llvm.loongarch.lsx.vfcmp.cor.s" => "__builtin_lsx_vfcmp_cor_s", - "llvm.loongarch.lsx.vfcmp.cueq.d" => "__builtin_lsx_vfcmp_cueq_d", - "llvm.loongarch.lsx.vfcmp.cueq.s" => "__builtin_lsx_vfcmp_cueq_s", - "llvm.loongarch.lsx.vfcmp.cule.d" => "__builtin_lsx_vfcmp_cule_d", - "llvm.loongarch.lsx.vfcmp.cule.s" => "__builtin_lsx_vfcmp_cule_s", - "llvm.loongarch.lsx.vfcmp.cult.d" => "__builtin_lsx_vfcmp_cult_d", - "llvm.loongarch.lsx.vfcmp.cult.s" => "__builtin_lsx_vfcmp_cult_s", - "llvm.loongarch.lsx.vfcmp.cun.d" => "__builtin_lsx_vfcmp_cun_d", - "llvm.loongarch.lsx.vfcmp.cun.s" => "__builtin_lsx_vfcmp_cun_s", - "llvm.loongarch.lsx.vfcmp.cune.d" => "__builtin_lsx_vfcmp_cune_d", - "llvm.loongarch.lsx.vfcmp.cune.s" => "__builtin_lsx_vfcmp_cune_s", - "llvm.loongarch.lsx.vfcmp.saf.d" => "__builtin_lsx_vfcmp_saf_d", - "llvm.loongarch.lsx.vfcmp.saf.s" => "__builtin_lsx_vfcmp_saf_s", - "llvm.loongarch.lsx.vfcmp.seq.d" => "__builtin_lsx_vfcmp_seq_d", - "llvm.loongarch.lsx.vfcmp.seq.s" => "__builtin_lsx_vfcmp_seq_s", - "llvm.loongarch.lsx.vfcmp.sle.d" => "__builtin_lsx_vfcmp_sle_d", - 
"llvm.loongarch.lsx.vfcmp.sle.s" => "__builtin_lsx_vfcmp_sle_s", - "llvm.loongarch.lsx.vfcmp.slt.d" => "__builtin_lsx_vfcmp_slt_d", - "llvm.loongarch.lsx.vfcmp.slt.s" => "__builtin_lsx_vfcmp_slt_s", - "llvm.loongarch.lsx.vfcmp.sne.d" => "__builtin_lsx_vfcmp_sne_d", - "llvm.loongarch.lsx.vfcmp.sne.s" => "__builtin_lsx_vfcmp_sne_s", - "llvm.loongarch.lsx.vfcmp.sor.d" => "__builtin_lsx_vfcmp_sor_d", - "llvm.loongarch.lsx.vfcmp.sor.s" => "__builtin_lsx_vfcmp_sor_s", - "llvm.loongarch.lsx.vfcmp.sueq.d" => "__builtin_lsx_vfcmp_sueq_d", - "llvm.loongarch.lsx.vfcmp.sueq.s" => "__builtin_lsx_vfcmp_sueq_s", - "llvm.loongarch.lsx.vfcmp.sule.d" => "__builtin_lsx_vfcmp_sule_d", - "llvm.loongarch.lsx.vfcmp.sule.s" => "__builtin_lsx_vfcmp_sule_s", - "llvm.loongarch.lsx.vfcmp.sult.d" => "__builtin_lsx_vfcmp_sult_d", - "llvm.loongarch.lsx.vfcmp.sult.s" => "__builtin_lsx_vfcmp_sult_s", - "llvm.loongarch.lsx.vfcmp.sun.d" => "__builtin_lsx_vfcmp_sun_d", - "llvm.loongarch.lsx.vfcmp.sun.s" => "__builtin_lsx_vfcmp_sun_s", - "llvm.loongarch.lsx.vfcmp.sune.d" => "__builtin_lsx_vfcmp_sune_d", - "llvm.loongarch.lsx.vfcmp.sune.s" => "__builtin_lsx_vfcmp_sune_s", - "llvm.loongarch.lsx.vfcvt.h.s" => "__builtin_lsx_vfcvt_h_s", - "llvm.loongarch.lsx.vfcvt.s.d" => "__builtin_lsx_vfcvt_s_d", - "llvm.loongarch.lsx.vfcvth.d.s" => "__builtin_lsx_vfcvth_d_s", - "llvm.loongarch.lsx.vfcvth.s.h" => "__builtin_lsx_vfcvth_s_h", - "llvm.loongarch.lsx.vfcvtl.d.s" => "__builtin_lsx_vfcvtl_d_s", - "llvm.loongarch.lsx.vfcvtl.s.h" => "__builtin_lsx_vfcvtl_s_h", - "llvm.loongarch.lsx.vfdiv.d" => "__builtin_lsx_vfdiv_d", - "llvm.loongarch.lsx.vfdiv.s" => "__builtin_lsx_vfdiv_s", - "llvm.loongarch.lsx.vffint.d.l" => "__builtin_lsx_vffint_d_l", - "llvm.loongarch.lsx.vffint.d.lu" => "__builtin_lsx_vffint_d_lu", - "llvm.loongarch.lsx.vffint.s.l" => "__builtin_lsx_vffint_s_l", - "llvm.loongarch.lsx.vffint.s.w" => "__builtin_lsx_vffint_s_w", - "llvm.loongarch.lsx.vffint.s.wu" => "__builtin_lsx_vffint_s_wu", - "llvm.loongarch.lsx.vffinth.d.w" => "__builtin_lsx_vffinth_d_w", - "llvm.loongarch.lsx.vffintl.d.w" => "__builtin_lsx_vffintl_d_w", - "llvm.loongarch.lsx.vflogb.d" => "__builtin_lsx_vflogb_d", - "llvm.loongarch.lsx.vflogb.s" => "__builtin_lsx_vflogb_s", - "llvm.loongarch.lsx.vfmadd.d" => "__builtin_lsx_vfmadd_d", - "llvm.loongarch.lsx.vfmadd.s" => "__builtin_lsx_vfmadd_s", - "llvm.loongarch.lsx.vfmax.d" => "__builtin_lsx_vfmax_d", - "llvm.loongarch.lsx.vfmax.s" => "__builtin_lsx_vfmax_s", - "llvm.loongarch.lsx.vfmaxa.d" => "__builtin_lsx_vfmaxa_d", - "llvm.loongarch.lsx.vfmaxa.s" => "__builtin_lsx_vfmaxa_s", - "llvm.loongarch.lsx.vfmin.d" => "__builtin_lsx_vfmin_d", - "llvm.loongarch.lsx.vfmin.s" => "__builtin_lsx_vfmin_s", - "llvm.loongarch.lsx.vfmina.d" => "__builtin_lsx_vfmina_d", - "llvm.loongarch.lsx.vfmina.s" => "__builtin_lsx_vfmina_s", - "llvm.loongarch.lsx.vfmsub.d" => "__builtin_lsx_vfmsub_d", - "llvm.loongarch.lsx.vfmsub.s" => "__builtin_lsx_vfmsub_s", - "llvm.loongarch.lsx.vfmul.d" => "__builtin_lsx_vfmul_d", - "llvm.loongarch.lsx.vfmul.s" => "__builtin_lsx_vfmul_s", - "llvm.loongarch.lsx.vfnmadd.d" => "__builtin_lsx_vfnmadd_d", - "llvm.loongarch.lsx.vfnmadd.s" => "__builtin_lsx_vfnmadd_s", - "llvm.loongarch.lsx.vfnmsub.d" => "__builtin_lsx_vfnmsub_d", - "llvm.loongarch.lsx.vfnmsub.s" => "__builtin_lsx_vfnmsub_s", - "llvm.loongarch.lsx.vfrecip.d" => "__builtin_lsx_vfrecip_d", - "llvm.loongarch.lsx.vfrecip.s" => "__builtin_lsx_vfrecip_s", - "llvm.loongarch.lsx.vfrecipe.d" => "__builtin_lsx_vfrecipe_d", - 
"llvm.loongarch.lsx.vfrecipe.s" => "__builtin_lsx_vfrecipe_s", - "llvm.loongarch.lsx.vfrint.d" => "__builtin_lsx_vfrint_d", - "llvm.loongarch.lsx.vfrint.s" => "__builtin_lsx_vfrint_s", - "llvm.loongarch.lsx.vfrintrm.d" => "__builtin_lsx_vfrintrm_d", - "llvm.loongarch.lsx.vfrintrm.s" => "__builtin_lsx_vfrintrm_s", - "llvm.loongarch.lsx.vfrintrne.d" => "__builtin_lsx_vfrintrne_d", - "llvm.loongarch.lsx.vfrintrne.s" => "__builtin_lsx_vfrintrne_s", - "llvm.loongarch.lsx.vfrintrp.d" => "__builtin_lsx_vfrintrp_d", - "llvm.loongarch.lsx.vfrintrp.s" => "__builtin_lsx_vfrintrp_s", - "llvm.loongarch.lsx.vfrintrz.d" => "__builtin_lsx_vfrintrz_d", - "llvm.loongarch.lsx.vfrintrz.s" => "__builtin_lsx_vfrintrz_s", - "llvm.loongarch.lsx.vfrsqrt.d" => "__builtin_lsx_vfrsqrt_d", - "llvm.loongarch.lsx.vfrsqrt.s" => "__builtin_lsx_vfrsqrt_s", - "llvm.loongarch.lsx.vfrsqrte.d" => "__builtin_lsx_vfrsqrte_d", - "llvm.loongarch.lsx.vfrsqrte.s" => "__builtin_lsx_vfrsqrte_s", - "llvm.loongarch.lsx.vfrstp.b" => "__builtin_lsx_vfrstp_b", - "llvm.loongarch.lsx.vfrstp.h" => "__builtin_lsx_vfrstp_h", - "llvm.loongarch.lsx.vfrstpi.b" => "__builtin_lsx_vfrstpi_b", - "llvm.loongarch.lsx.vfrstpi.h" => "__builtin_lsx_vfrstpi_h", - "llvm.loongarch.lsx.vfsqrt.d" => "__builtin_lsx_vfsqrt_d", - "llvm.loongarch.lsx.vfsqrt.s" => "__builtin_lsx_vfsqrt_s", - "llvm.loongarch.lsx.vfsub.d" => "__builtin_lsx_vfsub_d", - "llvm.loongarch.lsx.vfsub.s" => "__builtin_lsx_vfsub_s", - "llvm.loongarch.lsx.vftint.l.d" => "__builtin_lsx_vftint_l_d", - "llvm.loongarch.lsx.vftint.lu.d" => "__builtin_lsx_vftint_lu_d", - "llvm.loongarch.lsx.vftint.w.d" => "__builtin_lsx_vftint_w_d", - "llvm.loongarch.lsx.vftint.w.s" => "__builtin_lsx_vftint_w_s", - "llvm.loongarch.lsx.vftint.wu.s" => "__builtin_lsx_vftint_wu_s", - "llvm.loongarch.lsx.vftinth.l.s" => "__builtin_lsx_vftinth_l_s", - "llvm.loongarch.lsx.vftintl.l.s" => "__builtin_lsx_vftintl_l_s", - "llvm.loongarch.lsx.vftintrm.l.d" => "__builtin_lsx_vftintrm_l_d", - "llvm.loongarch.lsx.vftintrm.w.d" => "__builtin_lsx_vftintrm_w_d", - "llvm.loongarch.lsx.vftintrm.w.s" => "__builtin_lsx_vftintrm_w_s", - "llvm.loongarch.lsx.vftintrmh.l.s" => "__builtin_lsx_vftintrmh_l_s", - "llvm.loongarch.lsx.vftintrml.l.s" => "__builtin_lsx_vftintrml_l_s", - "llvm.loongarch.lsx.vftintrne.l.d" => "__builtin_lsx_vftintrne_l_d", - "llvm.loongarch.lsx.vftintrne.w.d" => "__builtin_lsx_vftintrne_w_d", - "llvm.loongarch.lsx.vftintrne.w.s" => "__builtin_lsx_vftintrne_w_s", - "llvm.loongarch.lsx.vftintrneh.l.s" => "__builtin_lsx_vftintrneh_l_s", - "llvm.loongarch.lsx.vftintrnel.l.s" => "__builtin_lsx_vftintrnel_l_s", - "llvm.loongarch.lsx.vftintrp.l.d" => "__builtin_lsx_vftintrp_l_d", - "llvm.loongarch.lsx.vftintrp.w.d" => "__builtin_lsx_vftintrp_w_d", - "llvm.loongarch.lsx.vftintrp.w.s" => "__builtin_lsx_vftintrp_w_s", - "llvm.loongarch.lsx.vftintrph.l.s" => "__builtin_lsx_vftintrph_l_s", - "llvm.loongarch.lsx.vftintrpl.l.s" => "__builtin_lsx_vftintrpl_l_s", - "llvm.loongarch.lsx.vftintrz.l.d" => "__builtin_lsx_vftintrz_l_d", - "llvm.loongarch.lsx.vftintrz.lu.d" => "__builtin_lsx_vftintrz_lu_d", - "llvm.loongarch.lsx.vftintrz.w.d" => "__builtin_lsx_vftintrz_w_d", - "llvm.loongarch.lsx.vftintrz.w.s" => "__builtin_lsx_vftintrz_w_s", - "llvm.loongarch.lsx.vftintrz.wu.s" => "__builtin_lsx_vftintrz_wu_s", - "llvm.loongarch.lsx.vftintrzh.l.s" => "__builtin_lsx_vftintrzh_l_s", - "llvm.loongarch.lsx.vftintrzl.l.s" => "__builtin_lsx_vftintrzl_l_s", - "llvm.loongarch.lsx.vhaddw.d.w" => "__builtin_lsx_vhaddw_d_w", - 
"llvm.loongarch.lsx.vhaddw.du.wu" => "__builtin_lsx_vhaddw_du_wu", - "llvm.loongarch.lsx.vhaddw.h.b" => "__builtin_lsx_vhaddw_h_b", - "llvm.loongarch.lsx.vhaddw.hu.bu" => "__builtin_lsx_vhaddw_hu_bu", - "llvm.loongarch.lsx.vhaddw.q.d" => "__builtin_lsx_vhaddw_q_d", - "llvm.loongarch.lsx.vhaddw.qu.du" => "__builtin_lsx_vhaddw_qu_du", - "llvm.loongarch.lsx.vhaddw.w.h" => "__builtin_lsx_vhaddw_w_h", - "llvm.loongarch.lsx.vhaddw.wu.hu" => "__builtin_lsx_vhaddw_wu_hu", - "llvm.loongarch.lsx.vhsubw.d.w" => "__builtin_lsx_vhsubw_d_w", - "llvm.loongarch.lsx.vhsubw.du.wu" => "__builtin_lsx_vhsubw_du_wu", - "llvm.loongarch.lsx.vhsubw.h.b" => "__builtin_lsx_vhsubw_h_b", - "llvm.loongarch.lsx.vhsubw.hu.bu" => "__builtin_lsx_vhsubw_hu_bu", - "llvm.loongarch.lsx.vhsubw.q.d" => "__builtin_lsx_vhsubw_q_d", - "llvm.loongarch.lsx.vhsubw.qu.du" => "__builtin_lsx_vhsubw_qu_du", - "llvm.loongarch.lsx.vhsubw.w.h" => "__builtin_lsx_vhsubw_w_h", - "llvm.loongarch.lsx.vhsubw.wu.hu" => "__builtin_lsx_vhsubw_wu_hu", - "llvm.loongarch.lsx.vilvh.b" => "__builtin_lsx_vilvh_b", - "llvm.loongarch.lsx.vilvh.d" => "__builtin_lsx_vilvh_d", - "llvm.loongarch.lsx.vilvh.h" => "__builtin_lsx_vilvh_h", - "llvm.loongarch.lsx.vilvh.w" => "__builtin_lsx_vilvh_w", - "llvm.loongarch.lsx.vilvl.b" => "__builtin_lsx_vilvl_b", - "llvm.loongarch.lsx.vilvl.d" => "__builtin_lsx_vilvl_d", - "llvm.loongarch.lsx.vilvl.h" => "__builtin_lsx_vilvl_h", - "llvm.loongarch.lsx.vilvl.w" => "__builtin_lsx_vilvl_w", - "llvm.loongarch.lsx.vinsgr2vr.b" => "__builtin_lsx_vinsgr2vr_b", - "llvm.loongarch.lsx.vinsgr2vr.d" => "__builtin_lsx_vinsgr2vr_d", - "llvm.loongarch.lsx.vinsgr2vr.h" => "__builtin_lsx_vinsgr2vr_h", - "llvm.loongarch.lsx.vinsgr2vr.w" => "__builtin_lsx_vinsgr2vr_w", - "llvm.loongarch.lsx.vld" => "__builtin_lsx_vld", - "llvm.loongarch.lsx.vldi" => "__builtin_lsx_vldi", - "llvm.loongarch.lsx.vldrepl.b" => "__builtin_lsx_vldrepl_b", - "llvm.loongarch.lsx.vldrepl.d" => "__builtin_lsx_vldrepl_d", - "llvm.loongarch.lsx.vldrepl.h" => "__builtin_lsx_vldrepl_h", - "llvm.loongarch.lsx.vldrepl.w" => "__builtin_lsx_vldrepl_w", - "llvm.loongarch.lsx.vldx" => "__builtin_lsx_vldx", - "llvm.loongarch.lsx.vmadd.b" => "__builtin_lsx_vmadd_b", - "llvm.loongarch.lsx.vmadd.d" => "__builtin_lsx_vmadd_d", - "llvm.loongarch.lsx.vmadd.h" => "__builtin_lsx_vmadd_h", - "llvm.loongarch.lsx.vmadd.w" => "__builtin_lsx_vmadd_w", - "llvm.loongarch.lsx.vmaddwev.d.w" => "__builtin_lsx_vmaddwev_d_w", - "llvm.loongarch.lsx.vmaddwev.d.wu" => "__builtin_lsx_vmaddwev_d_wu", - "llvm.loongarch.lsx.vmaddwev.d.wu.w" => "__builtin_lsx_vmaddwev_d_wu_w", - "llvm.loongarch.lsx.vmaddwev.h.b" => "__builtin_lsx_vmaddwev_h_b", - "llvm.loongarch.lsx.vmaddwev.h.bu" => "__builtin_lsx_vmaddwev_h_bu", - "llvm.loongarch.lsx.vmaddwev.h.bu.b" => "__builtin_lsx_vmaddwev_h_bu_b", - "llvm.loongarch.lsx.vmaddwev.q.d" => "__builtin_lsx_vmaddwev_q_d", - "llvm.loongarch.lsx.vmaddwev.q.du" => "__builtin_lsx_vmaddwev_q_du", - "llvm.loongarch.lsx.vmaddwev.q.du.d" => "__builtin_lsx_vmaddwev_q_du_d", - "llvm.loongarch.lsx.vmaddwev.w.h" => "__builtin_lsx_vmaddwev_w_h", - "llvm.loongarch.lsx.vmaddwev.w.hu" => "__builtin_lsx_vmaddwev_w_hu", - "llvm.loongarch.lsx.vmaddwev.w.hu.h" => "__builtin_lsx_vmaddwev_w_hu_h", - "llvm.loongarch.lsx.vmaddwod.d.w" => "__builtin_lsx_vmaddwod_d_w", - "llvm.loongarch.lsx.vmaddwod.d.wu" => "__builtin_lsx_vmaddwod_d_wu", - "llvm.loongarch.lsx.vmaddwod.d.wu.w" => "__builtin_lsx_vmaddwod_d_wu_w", - "llvm.loongarch.lsx.vmaddwod.h.b" => "__builtin_lsx_vmaddwod_h_b", - 
"llvm.loongarch.lsx.vmaddwod.h.bu" => "__builtin_lsx_vmaddwod_h_bu", - "llvm.loongarch.lsx.vmaddwod.h.bu.b" => "__builtin_lsx_vmaddwod_h_bu_b", - "llvm.loongarch.lsx.vmaddwod.q.d" => "__builtin_lsx_vmaddwod_q_d", - "llvm.loongarch.lsx.vmaddwod.q.du" => "__builtin_lsx_vmaddwod_q_du", - "llvm.loongarch.lsx.vmaddwod.q.du.d" => "__builtin_lsx_vmaddwod_q_du_d", - "llvm.loongarch.lsx.vmaddwod.w.h" => "__builtin_lsx_vmaddwod_w_h", - "llvm.loongarch.lsx.vmaddwod.w.hu" => "__builtin_lsx_vmaddwod_w_hu", - "llvm.loongarch.lsx.vmaddwod.w.hu.h" => "__builtin_lsx_vmaddwod_w_hu_h", - "llvm.loongarch.lsx.vmax.b" => "__builtin_lsx_vmax_b", - "llvm.loongarch.lsx.vmax.bu" => "__builtin_lsx_vmax_bu", - "llvm.loongarch.lsx.vmax.d" => "__builtin_lsx_vmax_d", - "llvm.loongarch.lsx.vmax.du" => "__builtin_lsx_vmax_du", - "llvm.loongarch.lsx.vmax.h" => "__builtin_lsx_vmax_h", - "llvm.loongarch.lsx.vmax.hu" => "__builtin_lsx_vmax_hu", - "llvm.loongarch.lsx.vmax.w" => "__builtin_lsx_vmax_w", - "llvm.loongarch.lsx.vmax.wu" => "__builtin_lsx_vmax_wu", - "llvm.loongarch.lsx.vmaxi.b" => "__builtin_lsx_vmaxi_b", - "llvm.loongarch.lsx.vmaxi.bu" => "__builtin_lsx_vmaxi_bu", - "llvm.loongarch.lsx.vmaxi.d" => "__builtin_lsx_vmaxi_d", - "llvm.loongarch.lsx.vmaxi.du" => "__builtin_lsx_vmaxi_du", - "llvm.loongarch.lsx.vmaxi.h" => "__builtin_lsx_vmaxi_h", - "llvm.loongarch.lsx.vmaxi.hu" => "__builtin_lsx_vmaxi_hu", - "llvm.loongarch.lsx.vmaxi.w" => "__builtin_lsx_vmaxi_w", - "llvm.loongarch.lsx.vmaxi.wu" => "__builtin_lsx_vmaxi_wu", - "llvm.loongarch.lsx.vmin.b" => "__builtin_lsx_vmin_b", - "llvm.loongarch.lsx.vmin.bu" => "__builtin_lsx_vmin_bu", - "llvm.loongarch.lsx.vmin.d" => "__builtin_lsx_vmin_d", - "llvm.loongarch.lsx.vmin.du" => "__builtin_lsx_vmin_du", - "llvm.loongarch.lsx.vmin.h" => "__builtin_lsx_vmin_h", - "llvm.loongarch.lsx.vmin.hu" => "__builtin_lsx_vmin_hu", - "llvm.loongarch.lsx.vmin.w" => "__builtin_lsx_vmin_w", - "llvm.loongarch.lsx.vmin.wu" => "__builtin_lsx_vmin_wu", - "llvm.loongarch.lsx.vmini.b" => "__builtin_lsx_vmini_b", - "llvm.loongarch.lsx.vmini.bu" => "__builtin_lsx_vmini_bu", - "llvm.loongarch.lsx.vmini.d" => "__builtin_lsx_vmini_d", - "llvm.loongarch.lsx.vmini.du" => "__builtin_lsx_vmini_du", - "llvm.loongarch.lsx.vmini.h" => "__builtin_lsx_vmini_h", - "llvm.loongarch.lsx.vmini.hu" => "__builtin_lsx_vmini_hu", - "llvm.loongarch.lsx.vmini.w" => "__builtin_lsx_vmini_w", - "llvm.loongarch.lsx.vmini.wu" => "__builtin_lsx_vmini_wu", - "llvm.loongarch.lsx.vmod.b" => "__builtin_lsx_vmod_b", - "llvm.loongarch.lsx.vmod.bu" => "__builtin_lsx_vmod_bu", - "llvm.loongarch.lsx.vmod.d" => "__builtin_lsx_vmod_d", - "llvm.loongarch.lsx.vmod.du" => "__builtin_lsx_vmod_du", - "llvm.loongarch.lsx.vmod.h" => "__builtin_lsx_vmod_h", - "llvm.loongarch.lsx.vmod.hu" => "__builtin_lsx_vmod_hu", - "llvm.loongarch.lsx.vmod.w" => "__builtin_lsx_vmod_w", - "llvm.loongarch.lsx.vmod.wu" => "__builtin_lsx_vmod_wu", - "llvm.loongarch.lsx.vmskgez.b" => "__builtin_lsx_vmskgez_b", - "llvm.loongarch.lsx.vmskltz.b" => "__builtin_lsx_vmskltz_b", - "llvm.loongarch.lsx.vmskltz.d" => "__builtin_lsx_vmskltz_d", - "llvm.loongarch.lsx.vmskltz.h" => "__builtin_lsx_vmskltz_h", - "llvm.loongarch.lsx.vmskltz.w" => "__builtin_lsx_vmskltz_w", - "llvm.loongarch.lsx.vmsknz.b" => "__builtin_lsx_vmsknz_b", - "llvm.loongarch.lsx.vmsub.b" => "__builtin_lsx_vmsub_b", - "llvm.loongarch.lsx.vmsub.d" => "__builtin_lsx_vmsub_d", - "llvm.loongarch.lsx.vmsub.h" => "__builtin_lsx_vmsub_h", - "llvm.loongarch.lsx.vmsub.w" => "__builtin_lsx_vmsub_w", - 
"llvm.loongarch.lsx.vmuh.b" => "__builtin_lsx_vmuh_b", - "llvm.loongarch.lsx.vmuh.bu" => "__builtin_lsx_vmuh_bu", - "llvm.loongarch.lsx.vmuh.d" => "__builtin_lsx_vmuh_d", - "llvm.loongarch.lsx.vmuh.du" => "__builtin_lsx_vmuh_du", - "llvm.loongarch.lsx.vmuh.h" => "__builtin_lsx_vmuh_h", - "llvm.loongarch.lsx.vmuh.hu" => "__builtin_lsx_vmuh_hu", - "llvm.loongarch.lsx.vmuh.w" => "__builtin_lsx_vmuh_w", - "llvm.loongarch.lsx.vmuh.wu" => "__builtin_lsx_vmuh_wu", - "llvm.loongarch.lsx.vmul.b" => "__builtin_lsx_vmul_b", - "llvm.loongarch.lsx.vmul.d" => "__builtin_lsx_vmul_d", - "llvm.loongarch.lsx.vmul.h" => "__builtin_lsx_vmul_h", - "llvm.loongarch.lsx.vmul.w" => "__builtin_lsx_vmul_w", - "llvm.loongarch.lsx.vmulwev.d.w" => "__builtin_lsx_vmulwev_d_w", - "llvm.loongarch.lsx.vmulwev.d.wu" => "__builtin_lsx_vmulwev_d_wu", - "llvm.loongarch.lsx.vmulwev.d.wu.w" => "__builtin_lsx_vmulwev_d_wu_w", - "llvm.loongarch.lsx.vmulwev.h.b" => "__builtin_lsx_vmulwev_h_b", - "llvm.loongarch.lsx.vmulwev.h.bu" => "__builtin_lsx_vmulwev_h_bu", - "llvm.loongarch.lsx.vmulwev.h.bu.b" => "__builtin_lsx_vmulwev_h_bu_b", - "llvm.loongarch.lsx.vmulwev.q.d" => "__builtin_lsx_vmulwev_q_d", - "llvm.loongarch.lsx.vmulwev.q.du" => "__builtin_lsx_vmulwev_q_du", - "llvm.loongarch.lsx.vmulwev.q.du.d" => "__builtin_lsx_vmulwev_q_du_d", - "llvm.loongarch.lsx.vmulwev.w.h" => "__builtin_lsx_vmulwev_w_h", - "llvm.loongarch.lsx.vmulwev.w.hu" => "__builtin_lsx_vmulwev_w_hu", - "llvm.loongarch.lsx.vmulwev.w.hu.h" => "__builtin_lsx_vmulwev_w_hu_h", - "llvm.loongarch.lsx.vmulwod.d.w" => "__builtin_lsx_vmulwod_d_w", - "llvm.loongarch.lsx.vmulwod.d.wu" => "__builtin_lsx_vmulwod_d_wu", - "llvm.loongarch.lsx.vmulwod.d.wu.w" => "__builtin_lsx_vmulwod_d_wu_w", - "llvm.loongarch.lsx.vmulwod.h.b" => "__builtin_lsx_vmulwod_h_b", - "llvm.loongarch.lsx.vmulwod.h.bu" => "__builtin_lsx_vmulwod_h_bu", - "llvm.loongarch.lsx.vmulwod.h.bu.b" => "__builtin_lsx_vmulwod_h_bu_b", - "llvm.loongarch.lsx.vmulwod.q.d" => "__builtin_lsx_vmulwod_q_d", - "llvm.loongarch.lsx.vmulwod.q.du" => "__builtin_lsx_vmulwod_q_du", - "llvm.loongarch.lsx.vmulwod.q.du.d" => "__builtin_lsx_vmulwod_q_du_d", - "llvm.loongarch.lsx.vmulwod.w.h" => "__builtin_lsx_vmulwod_w_h", - "llvm.loongarch.lsx.vmulwod.w.hu" => "__builtin_lsx_vmulwod_w_hu", - "llvm.loongarch.lsx.vmulwod.w.hu.h" => "__builtin_lsx_vmulwod_w_hu_h", - "llvm.loongarch.lsx.vneg.b" => "__builtin_lsx_vneg_b", - "llvm.loongarch.lsx.vneg.d" => "__builtin_lsx_vneg_d", - "llvm.loongarch.lsx.vneg.h" => "__builtin_lsx_vneg_h", - "llvm.loongarch.lsx.vneg.w" => "__builtin_lsx_vneg_w", - "llvm.loongarch.lsx.vnor.v" => "__builtin_lsx_vnor_v", - "llvm.loongarch.lsx.vnori.b" => "__builtin_lsx_vnori_b", - "llvm.loongarch.lsx.vor.v" => "__builtin_lsx_vor_v", - "llvm.loongarch.lsx.vori.b" => "__builtin_lsx_vori_b", - "llvm.loongarch.lsx.vorn.v" => "__builtin_lsx_vorn_v", - "llvm.loongarch.lsx.vpackev.b" => "__builtin_lsx_vpackev_b", - "llvm.loongarch.lsx.vpackev.d" => "__builtin_lsx_vpackev_d", - "llvm.loongarch.lsx.vpackev.h" => "__builtin_lsx_vpackev_h", - "llvm.loongarch.lsx.vpackev.w" => "__builtin_lsx_vpackev_w", - "llvm.loongarch.lsx.vpackod.b" => "__builtin_lsx_vpackod_b", - "llvm.loongarch.lsx.vpackod.d" => "__builtin_lsx_vpackod_d", - "llvm.loongarch.lsx.vpackod.h" => "__builtin_lsx_vpackod_h", - "llvm.loongarch.lsx.vpackod.w" => "__builtin_lsx_vpackod_w", - "llvm.loongarch.lsx.vpcnt.b" => "__builtin_lsx_vpcnt_b", - "llvm.loongarch.lsx.vpcnt.d" => "__builtin_lsx_vpcnt_d", - "llvm.loongarch.lsx.vpcnt.h" => 
"__builtin_lsx_vpcnt_h", - "llvm.loongarch.lsx.vpcnt.w" => "__builtin_lsx_vpcnt_w", - "llvm.loongarch.lsx.vpermi.w" => "__builtin_lsx_vpermi_w", - "llvm.loongarch.lsx.vpickev.b" => "__builtin_lsx_vpickev_b", - "llvm.loongarch.lsx.vpickev.d" => "__builtin_lsx_vpickev_d", - "llvm.loongarch.lsx.vpickev.h" => "__builtin_lsx_vpickev_h", - "llvm.loongarch.lsx.vpickev.w" => "__builtin_lsx_vpickev_w", - "llvm.loongarch.lsx.vpickod.b" => "__builtin_lsx_vpickod_b", - "llvm.loongarch.lsx.vpickod.d" => "__builtin_lsx_vpickod_d", - "llvm.loongarch.lsx.vpickod.h" => "__builtin_lsx_vpickod_h", - "llvm.loongarch.lsx.vpickod.w" => "__builtin_lsx_vpickod_w", - "llvm.loongarch.lsx.vpickve2gr.b" => "__builtin_lsx_vpickve2gr_b", - "llvm.loongarch.lsx.vpickve2gr.bu" => "__builtin_lsx_vpickve2gr_bu", - "llvm.loongarch.lsx.vpickve2gr.d" => "__builtin_lsx_vpickve2gr_d", - "llvm.loongarch.lsx.vpickve2gr.du" => "__builtin_lsx_vpickve2gr_du", - "llvm.loongarch.lsx.vpickve2gr.h" => "__builtin_lsx_vpickve2gr_h", - "llvm.loongarch.lsx.vpickve2gr.hu" => "__builtin_lsx_vpickve2gr_hu", - "llvm.loongarch.lsx.vpickve2gr.w" => "__builtin_lsx_vpickve2gr_w", - "llvm.loongarch.lsx.vpickve2gr.wu" => "__builtin_lsx_vpickve2gr_wu", - "llvm.loongarch.lsx.vreplgr2vr.b" => "__builtin_lsx_vreplgr2vr_b", - "llvm.loongarch.lsx.vreplgr2vr.d" => "__builtin_lsx_vreplgr2vr_d", - "llvm.loongarch.lsx.vreplgr2vr.h" => "__builtin_lsx_vreplgr2vr_h", - "llvm.loongarch.lsx.vreplgr2vr.w" => "__builtin_lsx_vreplgr2vr_w", - "llvm.loongarch.lsx.vrepli.b" => "__builtin_lsx_vrepli_b", - "llvm.loongarch.lsx.vrepli.d" => "__builtin_lsx_vrepli_d", - "llvm.loongarch.lsx.vrepli.h" => "__builtin_lsx_vrepli_h", - "llvm.loongarch.lsx.vrepli.w" => "__builtin_lsx_vrepli_w", - "llvm.loongarch.lsx.vreplve.b" => "__builtin_lsx_vreplve_b", - "llvm.loongarch.lsx.vreplve.d" => "__builtin_lsx_vreplve_d", - "llvm.loongarch.lsx.vreplve.h" => "__builtin_lsx_vreplve_h", - "llvm.loongarch.lsx.vreplve.w" => "__builtin_lsx_vreplve_w", - "llvm.loongarch.lsx.vreplvei.b" => "__builtin_lsx_vreplvei_b", - "llvm.loongarch.lsx.vreplvei.d" => "__builtin_lsx_vreplvei_d", - "llvm.loongarch.lsx.vreplvei.h" => "__builtin_lsx_vreplvei_h", - "llvm.loongarch.lsx.vreplvei.w" => "__builtin_lsx_vreplvei_w", - "llvm.loongarch.lsx.vrotr.b" => "__builtin_lsx_vrotr_b", - "llvm.loongarch.lsx.vrotr.d" => "__builtin_lsx_vrotr_d", - "llvm.loongarch.lsx.vrotr.h" => "__builtin_lsx_vrotr_h", - "llvm.loongarch.lsx.vrotr.w" => "__builtin_lsx_vrotr_w", - "llvm.loongarch.lsx.vrotri.b" => "__builtin_lsx_vrotri_b", - "llvm.loongarch.lsx.vrotri.d" => "__builtin_lsx_vrotri_d", - "llvm.loongarch.lsx.vrotri.h" => "__builtin_lsx_vrotri_h", - "llvm.loongarch.lsx.vrotri.w" => "__builtin_lsx_vrotri_w", - "llvm.loongarch.lsx.vsadd.b" => "__builtin_lsx_vsadd_b", - "llvm.loongarch.lsx.vsadd.bu" => "__builtin_lsx_vsadd_bu", - "llvm.loongarch.lsx.vsadd.d" => "__builtin_lsx_vsadd_d", - "llvm.loongarch.lsx.vsadd.du" => "__builtin_lsx_vsadd_du", - "llvm.loongarch.lsx.vsadd.h" => "__builtin_lsx_vsadd_h", - "llvm.loongarch.lsx.vsadd.hu" => "__builtin_lsx_vsadd_hu", - "llvm.loongarch.lsx.vsadd.w" => "__builtin_lsx_vsadd_w", - "llvm.loongarch.lsx.vsadd.wu" => "__builtin_lsx_vsadd_wu", - "llvm.loongarch.lsx.vsat.b" => "__builtin_lsx_vsat_b", - "llvm.loongarch.lsx.vsat.bu" => "__builtin_lsx_vsat_bu", - "llvm.loongarch.lsx.vsat.d" => "__builtin_lsx_vsat_d", - "llvm.loongarch.lsx.vsat.du" => "__builtin_lsx_vsat_du", - "llvm.loongarch.lsx.vsat.h" => "__builtin_lsx_vsat_h", - "llvm.loongarch.lsx.vsat.hu" => "__builtin_lsx_vsat_hu", - 
"llvm.loongarch.lsx.vsat.w" => "__builtin_lsx_vsat_w", - "llvm.loongarch.lsx.vsat.wu" => "__builtin_lsx_vsat_wu", - "llvm.loongarch.lsx.vseq.b" => "__builtin_lsx_vseq_b", - "llvm.loongarch.lsx.vseq.d" => "__builtin_lsx_vseq_d", - "llvm.loongarch.lsx.vseq.h" => "__builtin_lsx_vseq_h", - "llvm.loongarch.lsx.vseq.w" => "__builtin_lsx_vseq_w", - "llvm.loongarch.lsx.vseqi.b" => "__builtin_lsx_vseqi_b", - "llvm.loongarch.lsx.vseqi.d" => "__builtin_lsx_vseqi_d", - "llvm.loongarch.lsx.vseqi.h" => "__builtin_lsx_vseqi_h", - "llvm.loongarch.lsx.vseqi.w" => "__builtin_lsx_vseqi_w", - "llvm.loongarch.lsx.vshuf.b" => "__builtin_lsx_vshuf_b", - "llvm.loongarch.lsx.vshuf.d" => "__builtin_lsx_vshuf_d", - "llvm.loongarch.lsx.vshuf.h" => "__builtin_lsx_vshuf_h", - "llvm.loongarch.lsx.vshuf.w" => "__builtin_lsx_vshuf_w", - "llvm.loongarch.lsx.vshuf4i.b" => "__builtin_lsx_vshuf4i_b", - "llvm.loongarch.lsx.vshuf4i.d" => "__builtin_lsx_vshuf4i_d", - "llvm.loongarch.lsx.vshuf4i.h" => "__builtin_lsx_vshuf4i_h", - "llvm.loongarch.lsx.vshuf4i.w" => "__builtin_lsx_vshuf4i_w", - "llvm.loongarch.lsx.vsigncov.b" => "__builtin_lsx_vsigncov_b", - "llvm.loongarch.lsx.vsigncov.d" => "__builtin_lsx_vsigncov_d", - "llvm.loongarch.lsx.vsigncov.h" => "__builtin_lsx_vsigncov_h", - "llvm.loongarch.lsx.vsigncov.w" => "__builtin_lsx_vsigncov_w", - "llvm.loongarch.lsx.vsle.b" => "__builtin_lsx_vsle_b", - "llvm.loongarch.lsx.vsle.bu" => "__builtin_lsx_vsle_bu", - "llvm.loongarch.lsx.vsle.d" => "__builtin_lsx_vsle_d", - "llvm.loongarch.lsx.vsle.du" => "__builtin_lsx_vsle_du", - "llvm.loongarch.lsx.vsle.h" => "__builtin_lsx_vsle_h", - "llvm.loongarch.lsx.vsle.hu" => "__builtin_lsx_vsle_hu", - "llvm.loongarch.lsx.vsle.w" => "__builtin_lsx_vsle_w", - "llvm.loongarch.lsx.vsle.wu" => "__builtin_lsx_vsle_wu", - "llvm.loongarch.lsx.vslei.b" => "__builtin_lsx_vslei_b", - "llvm.loongarch.lsx.vslei.bu" => "__builtin_lsx_vslei_bu", - "llvm.loongarch.lsx.vslei.d" => "__builtin_lsx_vslei_d", - "llvm.loongarch.lsx.vslei.du" => "__builtin_lsx_vslei_du", - "llvm.loongarch.lsx.vslei.h" => "__builtin_lsx_vslei_h", - "llvm.loongarch.lsx.vslei.hu" => "__builtin_lsx_vslei_hu", - "llvm.loongarch.lsx.vslei.w" => "__builtin_lsx_vslei_w", - "llvm.loongarch.lsx.vslei.wu" => "__builtin_lsx_vslei_wu", - "llvm.loongarch.lsx.vsll.b" => "__builtin_lsx_vsll_b", - "llvm.loongarch.lsx.vsll.d" => "__builtin_lsx_vsll_d", - "llvm.loongarch.lsx.vsll.h" => "__builtin_lsx_vsll_h", - "llvm.loongarch.lsx.vsll.w" => "__builtin_lsx_vsll_w", - "llvm.loongarch.lsx.vslli.b" => "__builtin_lsx_vslli_b", - "llvm.loongarch.lsx.vslli.d" => "__builtin_lsx_vslli_d", - "llvm.loongarch.lsx.vslli.h" => "__builtin_lsx_vslli_h", - "llvm.loongarch.lsx.vslli.w" => "__builtin_lsx_vslli_w", - "llvm.loongarch.lsx.vsllwil.d.w" => "__builtin_lsx_vsllwil_d_w", - "llvm.loongarch.lsx.vsllwil.du.wu" => "__builtin_lsx_vsllwil_du_wu", - "llvm.loongarch.lsx.vsllwil.h.b" => "__builtin_lsx_vsllwil_h_b", - "llvm.loongarch.lsx.vsllwil.hu.bu" => "__builtin_lsx_vsllwil_hu_bu", - "llvm.loongarch.lsx.vsllwil.w.h" => "__builtin_lsx_vsllwil_w_h", - "llvm.loongarch.lsx.vsllwil.wu.hu" => "__builtin_lsx_vsllwil_wu_hu", - "llvm.loongarch.lsx.vslt.b" => "__builtin_lsx_vslt_b", - "llvm.loongarch.lsx.vslt.bu" => "__builtin_lsx_vslt_bu", - "llvm.loongarch.lsx.vslt.d" => "__builtin_lsx_vslt_d", - "llvm.loongarch.lsx.vslt.du" => "__builtin_lsx_vslt_du", - "llvm.loongarch.lsx.vslt.h" => "__builtin_lsx_vslt_h", - "llvm.loongarch.lsx.vslt.hu" => "__builtin_lsx_vslt_hu", - "llvm.loongarch.lsx.vslt.w" => "__builtin_lsx_vslt_w", - 
"llvm.loongarch.lsx.vslt.wu" => "__builtin_lsx_vslt_wu", - "llvm.loongarch.lsx.vslti.b" => "__builtin_lsx_vslti_b", - "llvm.loongarch.lsx.vslti.bu" => "__builtin_lsx_vslti_bu", - "llvm.loongarch.lsx.vslti.d" => "__builtin_lsx_vslti_d", - "llvm.loongarch.lsx.vslti.du" => "__builtin_lsx_vslti_du", - "llvm.loongarch.lsx.vslti.h" => "__builtin_lsx_vslti_h", - "llvm.loongarch.lsx.vslti.hu" => "__builtin_lsx_vslti_hu", - "llvm.loongarch.lsx.vslti.w" => "__builtin_lsx_vslti_w", - "llvm.loongarch.lsx.vslti.wu" => "__builtin_lsx_vslti_wu", - "llvm.loongarch.lsx.vsra.b" => "__builtin_lsx_vsra_b", - "llvm.loongarch.lsx.vsra.d" => "__builtin_lsx_vsra_d", - "llvm.loongarch.lsx.vsra.h" => "__builtin_lsx_vsra_h", - "llvm.loongarch.lsx.vsra.w" => "__builtin_lsx_vsra_w", - "llvm.loongarch.lsx.vsrai.b" => "__builtin_lsx_vsrai_b", - "llvm.loongarch.lsx.vsrai.d" => "__builtin_lsx_vsrai_d", - "llvm.loongarch.lsx.vsrai.h" => "__builtin_lsx_vsrai_h", - "llvm.loongarch.lsx.vsrai.w" => "__builtin_lsx_vsrai_w", - "llvm.loongarch.lsx.vsran.b.h" => "__builtin_lsx_vsran_b_h", - "llvm.loongarch.lsx.vsran.h.w" => "__builtin_lsx_vsran_h_w", - "llvm.loongarch.lsx.vsran.w.d" => "__builtin_lsx_vsran_w_d", - "llvm.loongarch.lsx.vsrani.b.h" => "__builtin_lsx_vsrani_b_h", - "llvm.loongarch.lsx.vsrani.d.q" => "__builtin_lsx_vsrani_d_q", - "llvm.loongarch.lsx.vsrani.h.w" => "__builtin_lsx_vsrani_h_w", - "llvm.loongarch.lsx.vsrani.w.d" => "__builtin_lsx_vsrani_w_d", - "llvm.loongarch.lsx.vsrar.b" => "__builtin_lsx_vsrar_b", - "llvm.loongarch.lsx.vsrar.d" => "__builtin_lsx_vsrar_d", - "llvm.loongarch.lsx.vsrar.h" => "__builtin_lsx_vsrar_h", - "llvm.loongarch.lsx.vsrar.w" => "__builtin_lsx_vsrar_w", - "llvm.loongarch.lsx.vsrari.b" => "__builtin_lsx_vsrari_b", - "llvm.loongarch.lsx.vsrari.d" => "__builtin_lsx_vsrari_d", - "llvm.loongarch.lsx.vsrari.h" => "__builtin_lsx_vsrari_h", - "llvm.loongarch.lsx.vsrari.w" => "__builtin_lsx_vsrari_w", - "llvm.loongarch.lsx.vsrarn.b.h" => "__builtin_lsx_vsrarn_b_h", - "llvm.loongarch.lsx.vsrarn.h.w" => "__builtin_lsx_vsrarn_h_w", - "llvm.loongarch.lsx.vsrarn.w.d" => "__builtin_lsx_vsrarn_w_d", - "llvm.loongarch.lsx.vsrarni.b.h" => "__builtin_lsx_vsrarni_b_h", - "llvm.loongarch.lsx.vsrarni.d.q" => "__builtin_lsx_vsrarni_d_q", - "llvm.loongarch.lsx.vsrarni.h.w" => "__builtin_lsx_vsrarni_h_w", - "llvm.loongarch.lsx.vsrarni.w.d" => "__builtin_lsx_vsrarni_w_d", - "llvm.loongarch.lsx.vsrl.b" => "__builtin_lsx_vsrl_b", - "llvm.loongarch.lsx.vsrl.d" => "__builtin_lsx_vsrl_d", - "llvm.loongarch.lsx.vsrl.h" => "__builtin_lsx_vsrl_h", - "llvm.loongarch.lsx.vsrl.w" => "__builtin_lsx_vsrl_w", - "llvm.loongarch.lsx.vsrli.b" => "__builtin_lsx_vsrli_b", - "llvm.loongarch.lsx.vsrli.d" => "__builtin_lsx_vsrli_d", - "llvm.loongarch.lsx.vsrli.h" => "__builtin_lsx_vsrli_h", - "llvm.loongarch.lsx.vsrli.w" => "__builtin_lsx_vsrli_w", - "llvm.loongarch.lsx.vsrln.b.h" => "__builtin_lsx_vsrln_b_h", - "llvm.loongarch.lsx.vsrln.h.w" => "__builtin_lsx_vsrln_h_w", - "llvm.loongarch.lsx.vsrln.w.d" => "__builtin_lsx_vsrln_w_d", - "llvm.loongarch.lsx.vsrlni.b.h" => "__builtin_lsx_vsrlni_b_h", - "llvm.loongarch.lsx.vsrlni.d.q" => "__builtin_lsx_vsrlni_d_q", - "llvm.loongarch.lsx.vsrlni.h.w" => "__builtin_lsx_vsrlni_h_w", - "llvm.loongarch.lsx.vsrlni.w.d" => "__builtin_lsx_vsrlni_w_d", - "llvm.loongarch.lsx.vsrlr.b" => "__builtin_lsx_vsrlr_b", - "llvm.loongarch.lsx.vsrlr.d" => "__builtin_lsx_vsrlr_d", - "llvm.loongarch.lsx.vsrlr.h" => "__builtin_lsx_vsrlr_h", - "llvm.loongarch.lsx.vsrlr.w" => "__builtin_lsx_vsrlr_w", - 
"llvm.loongarch.lsx.vsrlri.b" => "__builtin_lsx_vsrlri_b", - "llvm.loongarch.lsx.vsrlri.d" => "__builtin_lsx_vsrlri_d", - "llvm.loongarch.lsx.vsrlri.h" => "__builtin_lsx_vsrlri_h", - "llvm.loongarch.lsx.vsrlri.w" => "__builtin_lsx_vsrlri_w", - "llvm.loongarch.lsx.vsrlrn.b.h" => "__builtin_lsx_vsrlrn_b_h", - "llvm.loongarch.lsx.vsrlrn.h.w" => "__builtin_lsx_vsrlrn_h_w", - "llvm.loongarch.lsx.vsrlrn.w.d" => "__builtin_lsx_vsrlrn_w_d", - "llvm.loongarch.lsx.vsrlrni.b.h" => "__builtin_lsx_vsrlrni_b_h", - "llvm.loongarch.lsx.vsrlrni.d.q" => "__builtin_lsx_vsrlrni_d_q", - "llvm.loongarch.lsx.vsrlrni.h.w" => "__builtin_lsx_vsrlrni_h_w", - "llvm.loongarch.lsx.vsrlrni.w.d" => "__builtin_lsx_vsrlrni_w_d", - "llvm.loongarch.lsx.vssran.b.h" => "__builtin_lsx_vssran_b_h", - "llvm.loongarch.lsx.vssran.bu.h" => "__builtin_lsx_vssran_bu_h", - "llvm.loongarch.lsx.vssran.h.w" => "__builtin_lsx_vssran_h_w", - "llvm.loongarch.lsx.vssran.hu.w" => "__builtin_lsx_vssran_hu_w", - "llvm.loongarch.lsx.vssran.w.d" => "__builtin_lsx_vssran_w_d", - "llvm.loongarch.lsx.vssran.wu.d" => "__builtin_lsx_vssran_wu_d", - "llvm.loongarch.lsx.vssrani.b.h" => "__builtin_lsx_vssrani_b_h", - "llvm.loongarch.lsx.vssrani.bu.h" => "__builtin_lsx_vssrani_bu_h", - "llvm.loongarch.lsx.vssrani.d.q" => "__builtin_lsx_vssrani_d_q", - "llvm.loongarch.lsx.vssrani.du.q" => "__builtin_lsx_vssrani_du_q", - "llvm.loongarch.lsx.vssrani.h.w" => "__builtin_lsx_vssrani_h_w", - "llvm.loongarch.lsx.vssrani.hu.w" => "__builtin_lsx_vssrani_hu_w", - "llvm.loongarch.lsx.vssrani.w.d" => "__builtin_lsx_vssrani_w_d", - "llvm.loongarch.lsx.vssrani.wu.d" => "__builtin_lsx_vssrani_wu_d", - "llvm.loongarch.lsx.vssrarn.b.h" => "__builtin_lsx_vssrarn_b_h", - "llvm.loongarch.lsx.vssrarn.bu.h" => "__builtin_lsx_vssrarn_bu_h", - "llvm.loongarch.lsx.vssrarn.h.w" => "__builtin_lsx_vssrarn_h_w", - "llvm.loongarch.lsx.vssrarn.hu.w" => "__builtin_lsx_vssrarn_hu_w", - "llvm.loongarch.lsx.vssrarn.w.d" => "__builtin_lsx_vssrarn_w_d", - "llvm.loongarch.lsx.vssrarn.wu.d" => "__builtin_lsx_vssrarn_wu_d", - "llvm.loongarch.lsx.vssrarni.b.h" => "__builtin_lsx_vssrarni_b_h", - "llvm.loongarch.lsx.vssrarni.bu.h" => "__builtin_lsx_vssrarni_bu_h", - "llvm.loongarch.lsx.vssrarni.d.q" => "__builtin_lsx_vssrarni_d_q", - "llvm.loongarch.lsx.vssrarni.du.q" => "__builtin_lsx_vssrarni_du_q", - "llvm.loongarch.lsx.vssrarni.h.w" => "__builtin_lsx_vssrarni_h_w", - "llvm.loongarch.lsx.vssrarni.hu.w" => "__builtin_lsx_vssrarni_hu_w", - "llvm.loongarch.lsx.vssrarni.w.d" => "__builtin_lsx_vssrarni_w_d", - "llvm.loongarch.lsx.vssrarni.wu.d" => "__builtin_lsx_vssrarni_wu_d", - "llvm.loongarch.lsx.vssrln.b.h" => "__builtin_lsx_vssrln_b_h", - "llvm.loongarch.lsx.vssrln.bu.h" => "__builtin_lsx_vssrln_bu_h", - "llvm.loongarch.lsx.vssrln.h.w" => "__builtin_lsx_vssrln_h_w", - "llvm.loongarch.lsx.vssrln.hu.w" => "__builtin_lsx_vssrln_hu_w", - "llvm.loongarch.lsx.vssrln.w.d" => "__builtin_lsx_vssrln_w_d", - "llvm.loongarch.lsx.vssrln.wu.d" => "__builtin_lsx_vssrln_wu_d", - "llvm.loongarch.lsx.vssrlni.b.h" => "__builtin_lsx_vssrlni_b_h", - "llvm.loongarch.lsx.vssrlni.bu.h" => "__builtin_lsx_vssrlni_bu_h", - "llvm.loongarch.lsx.vssrlni.d.q" => "__builtin_lsx_vssrlni_d_q", - "llvm.loongarch.lsx.vssrlni.du.q" => "__builtin_lsx_vssrlni_du_q", - "llvm.loongarch.lsx.vssrlni.h.w" => "__builtin_lsx_vssrlni_h_w", - "llvm.loongarch.lsx.vssrlni.hu.w" => "__builtin_lsx_vssrlni_hu_w", - "llvm.loongarch.lsx.vssrlni.w.d" => "__builtin_lsx_vssrlni_w_d", - "llvm.loongarch.lsx.vssrlni.wu.d" => "__builtin_lsx_vssrlni_wu_d", - 
"llvm.loongarch.lsx.vssrlrn.b.h" => "__builtin_lsx_vssrlrn_b_h", - "llvm.loongarch.lsx.vssrlrn.bu.h" => "__builtin_lsx_vssrlrn_bu_h", - "llvm.loongarch.lsx.vssrlrn.h.w" => "__builtin_lsx_vssrlrn_h_w", - "llvm.loongarch.lsx.vssrlrn.hu.w" => "__builtin_lsx_vssrlrn_hu_w", - "llvm.loongarch.lsx.vssrlrn.w.d" => "__builtin_lsx_vssrlrn_w_d", - "llvm.loongarch.lsx.vssrlrn.wu.d" => "__builtin_lsx_vssrlrn_wu_d", - "llvm.loongarch.lsx.vssrlrni.b.h" => "__builtin_lsx_vssrlrni_b_h", - "llvm.loongarch.lsx.vssrlrni.bu.h" => "__builtin_lsx_vssrlrni_bu_h", - "llvm.loongarch.lsx.vssrlrni.d.q" => "__builtin_lsx_vssrlrni_d_q", - "llvm.loongarch.lsx.vssrlrni.du.q" => "__builtin_lsx_vssrlrni_du_q", - "llvm.loongarch.lsx.vssrlrni.h.w" => "__builtin_lsx_vssrlrni_h_w", - "llvm.loongarch.lsx.vssrlrni.hu.w" => "__builtin_lsx_vssrlrni_hu_w", - "llvm.loongarch.lsx.vssrlrni.w.d" => "__builtin_lsx_vssrlrni_w_d", - "llvm.loongarch.lsx.vssrlrni.wu.d" => "__builtin_lsx_vssrlrni_wu_d", - "llvm.loongarch.lsx.vssub.b" => "__builtin_lsx_vssub_b", - "llvm.loongarch.lsx.vssub.bu" => "__builtin_lsx_vssub_bu", - "llvm.loongarch.lsx.vssub.d" => "__builtin_lsx_vssub_d", - "llvm.loongarch.lsx.vssub.du" => "__builtin_lsx_vssub_du", - "llvm.loongarch.lsx.vssub.h" => "__builtin_lsx_vssub_h", - "llvm.loongarch.lsx.vssub.hu" => "__builtin_lsx_vssub_hu", - "llvm.loongarch.lsx.vssub.w" => "__builtin_lsx_vssub_w", - "llvm.loongarch.lsx.vssub.wu" => "__builtin_lsx_vssub_wu", - "llvm.loongarch.lsx.vst" => "__builtin_lsx_vst", - "llvm.loongarch.lsx.vstelm.b" => "__builtin_lsx_vstelm_b", - "llvm.loongarch.lsx.vstelm.d" => "__builtin_lsx_vstelm_d", - "llvm.loongarch.lsx.vstelm.h" => "__builtin_lsx_vstelm_h", - "llvm.loongarch.lsx.vstelm.w" => "__builtin_lsx_vstelm_w", - "llvm.loongarch.lsx.vstx" => "__builtin_lsx_vstx", - "llvm.loongarch.lsx.vsub.b" => "__builtin_lsx_vsub_b", - "llvm.loongarch.lsx.vsub.d" => "__builtin_lsx_vsub_d", - "llvm.loongarch.lsx.vsub.h" => "__builtin_lsx_vsub_h", - "llvm.loongarch.lsx.vsub.q" => "__builtin_lsx_vsub_q", - "llvm.loongarch.lsx.vsub.w" => "__builtin_lsx_vsub_w", - "llvm.loongarch.lsx.vsubi.bu" => "__builtin_lsx_vsubi_bu", - "llvm.loongarch.lsx.vsubi.du" => "__builtin_lsx_vsubi_du", - "llvm.loongarch.lsx.vsubi.hu" => "__builtin_lsx_vsubi_hu", - "llvm.loongarch.lsx.vsubi.wu" => "__builtin_lsx_vsubi_wu", - "llvm.loongarch.lsx.vsubwev.d.w" => "__builtin_lsx_vsubwev_d_w", - "llvm.loongarch.lsx.vsubwev.d.wu" => "__builtin_lsx_vsubwev_d_wu", - "llvm.loongarch.lsx.vsubwev.h.b" => "__builtin_lsx_vsubwev_h_b", - "llvm.loongarch.lsx.vsubwev.h.bu" => "__builtin_lsx_vsubwev_h_bu", - "llvm.loongarch.lsx.vsubwev.q.d" => "__builtin_lsx_vsubwev_q_d", - "llvm.loongarch.lsx.vsubwev.q.du" => "__builtin_lsx_vsubwev_q_du", - "llvm.loongarch.lsx.vsubwev.w.h" => "__builtin_lsx_vsubwev_w_h", - "llvm.loongarch.lsx.vsubwev.w.hu" => "__builtin_lsx_vsubwev_w_hu", - "llvm.loongarch.lsx.vsubwod.d.w" => "__builtin_lsx_vsubwod_d_w", - "llvm.loongarch.lsx.vsubwod.d.wu" => "__builtin_lsx_vsubwod_d_wu", - "llvm.loongarch.lsx.vsubwod.h.b" => "__builtin_lsx_vsubwod_h_b", - "llvm.loongarch.lsx.vsubwod.h.bu" => "__builtin_lsx_vsubwod_h_bu", - "llvm.loongarch.lsx.vsubwod.q.d" => "__builtin_lsx_vsubwod_q_d", - "llvm.loongarch.lsx.vsubwod.q.du" => "__builtin_lsx_vsubwod_q_du", - "llvm.loongarch.lsx.vsubwod.w.h" => "__builtin_lsx_vsubwod_w_h", - "llvm.loongarch.lsx.vsubwod.w.hu" => "__builtin_lsx_vsubwod_w_hu", - "llvm.loongarch.lsx.vxor.v" => "__builtin_lsx_vxor_v", - "llvm.loongarch.lsx.vxori.b" => "__builtin_lsx_vxori_b", - 
"llvm.loongarch.movfcsr2gr" => "__builtin_loongarch_movfcsr2gr", - "llvm.loongarch.movgr2fcsr" => "__builtin_loongarch_movgr2fcsr", - "llvm.loongarch.syscall" => "__builtin_loongarch_syscall", - // mips - "llvm.mips.absq.s.ph" => "__builtin_mips_absq_s_ph", - "llvm.mips.absq.s.qb" => "__builtin_mips_absq_s_qb", - "llvm.mips.absq.s.w" => "__builtin_mips_absq_s_w", - "llvm.mips.add.a.b" => "__builtin_msa_add_a_b", - "llvm.mips.add.a.d" => "__builtin_msa_add_a_d", - "llvm.mips.add.a.h" => "__builtin_msa_add_a_h", - "llvm.mips.add.a.w" => "__builtin_msa_add_a_w", - "llvm.mips.addq.ph" => "__builtin_mips_addq_ph", - "llvm.mips.addq.s.ph" => "__builtin_mips_addq_s_ph", - "llvm.mips.addq.s.w" => "__builtin_mips_addq_s_w", - "llvm.mips.addqh.ph" => "__builtin_mips_addqh_ph", - "llvm.mips.addqh.r.ph" => "__builtin_mips_addqh_r_ph", - "llvm.mips.addqh.r.w" => "__builtin_mips_addqh_r_w", - "llvm.mips.addqh.w" => "__builtin_mips_addqh_w", - "llvm.mips.adds.a.b" => "__builtin_msa_adds_a_b", - "llvm.mips.adds.a.d" => "__builtin_msa_adds_a_d", - "llvm.mips.adds.a.h" => "__builtin_msa_adds_a_h", - "llvm.mips.adds.a.w" => "__builtin_msa_adds_a_w", - "llvm.mips.adds.s.b" => "__builtin_msa_adds_s_b", - "llvm.mips.adds.s.d" => "__builtin_msa_adds_s_d", - "llvm.mips.adds.s.h" => "__builtin_msa_adds_s_h", - "llvm.mips.adds.s.w" => "__builtin_msa_adds_s_w", - "llvm.mips.adds.u.b" => "__builtin_msa_adds_u_b", - "llvm.mips.adds.u.d" => "__builtin_msa_adds_u_d", - "llvm.mips.adds.u.h" => "__builtin_msa_adds_u_h", - "llvm.mips.adds.u.w" => "__builtin_msa_adds_u_w", - "llvm.mips.addsc" => "__builtin_mips_addsc", - "llvm.mips.addu.ph" => "__builtin_mips_addu_ph", - "llvm.mips.addu.qb" => "__builtin_mips_addu_qb", - "llvm.mips.addu.s.ph" => "__builtin_mips_addu_s_ph", - "llvm.mips.addu.s.qb" => "__builtin_mips_addu_s_qb", - "llvm.mips.adduh.qb" => "__builtin_mips_adduh_qb", - "llvm.mips.adduh.r.qb" => "__builtin_mips_adduh_r_qb", - "llvm.mips.addv.b" => "__builtin_msa_addv_b", - "llvm.mips.addv.d" => "__builtin_msa_addv_d", - "llvm.mips.addv.h" => "__builtin_msa_addv_h", - "llvm.mips.addv.w" => "__builtin_msa_addv_w", - "llvm.mips.addvi.b" => "__builtin_msa_addvi_b", - "llvm.mips.addvi.d" => "__builtin_msa_addvi_d", - "llvm.mips.addvi.h" => "__builtin_msa_addvi_h", - "llvm.mips.addvi.w" => "__builtin_msa_addvi_w", - "llvm.mips.addwc" => "__builtin_mips_addwc", - "llvm.mips.and.v" => "__builtin_msa_and_v", - "llvm.mips.andi.b" => "__builtin_msa_andi_b", - "llvm.mips.append" => "__builtin_mips_append", - "llvm.mips.asub.s.b" => "__builtin_msa_asub_s_b", - "llvm.mips.asub.s.d" => "__builtin_msa_asub_s_d", - "llvm.mips.asub.s.h" => "__builtin_msa_asub_s_h", - "llvm.mips.asub.s.w" => "__builtin_msa_asub_s_w", - "llvm.mips.asub.u.b" => "__builtin_msa_asub_u_b", - "llvm.mips.asub.u.d" => "__builtin_msa_asub_u_d", - "llvm.mips.asub.u.h" => "__builtin_msa_asub_u_h", - "llvm.mips.asub.u.w" => "__builtin_msa_asub_u_w", - "llvm.mips.ave.s.b" => "__builtin_msa_ave_s_b", - "llvm.mips.ave.s.d" => "__builtin_msa_ave_s_d", - "llvm.mips.ave.s.h" => "__builtin_msa_ave_s_h", - "llvm.mips.ave.s.w" => "__builtin_msa_ave_s_w", - "llvm.mips.ave.u.b" => "__builtin_msa_ave_u_b", - "llvm.mips.ave.u.d" => "__builtin_msa_ave_u_d", - "llvm.mips.ave.u.h" => "__builtin_msa_ave_u_h", - "llvm.mips.ave.u.w" => "__builtin_msa_ave_u_w", - "llvm.mips.aver.s.b" => "__builtin_msa_aver_s_b", - "llvm.mips.aver.s.d" => "__builtin_msa_aver_s_d", - "llvm.mips.aver.s.h" => "__builtin_msa_aver_s_h", - "llvm.mips.aver.s.w" => "__builtin_msa_aver_s_w", - 
"llvm.mips.aver.u.b" => "__builtin_msa_aver_u_b", - "llvm.mips.aver.u.d" => "__builtin_msa_aver_u_d", - "llvm.mips.aver.u.h" => "__builtin_msa_aver_u_h", - "llvm.mips.aver.u.w" => "__builtin_msa_aver_u_w", - "llvm.mips.balign" => "__builtin_mips_balign", - "llvm.mips.bclr.b" => "__builtin_msa_bclr_b", - "llvm.mips.bclr.d" => "__builtin_msa_bclr_d", - "llvm.mips.bclr.h" => "__builtin_msa_bclr_h", - "llvm.mips.bclr.w" => "__builtin_msa_bclr_w", - "llvm.mips.bclri.b" => "__builtin_msa_bclri_b", - "llvm.mips.bclri.d" => "__builtin_msa_bclri_d", - "llvm.mips.bclri.h" => "__builtin_msa_bclri_h", - "llvm.mips.bclri.w" => "__builtin_msa_bclri_w", - "llvm.mips.binsl.b" => "__builtin_msa_binsl_b", - "llvm.mips.binsl.d" => "__builtin_msa_binsl_d", - "llvm.mips.binsl.h" => "__builtin_msa_binsl_h", - "llvm.mips.binsl.w" => "__builtin_msa_binsl_w", - "llvm.mips.binsli.b" => "__builtin_msa_binsli_b", - "llvm.mips.binsli.d" => "__builtin_msa_binsli_d", - "llvm.mips.binsli.h" => "__builtin_msa_binsli_h", - "llvm.mips.binsli.w" => "__builtin_msa_binsli_w", - "llvm.mips.binsr.b" => "__builtin_msa_binsr_b", - "llvm.mips.binsr.d" => "__builtin_msa_binsr_d", - "llvm.mips.binsr.h" => "__builtin_msa_binsr_h", - "llvm.mips.binsr.w" => "__builtin_msa_binsr_w", - "llvm.mips.binsri.b" => "__builtin_msa_binsri_b", - "llvm.mips.binsri.d" => "__builtin_msa_binsri_d", - "llvm.mips.binsri.h" => "__builtin_msa_binsri_h", - "llvm.mips.binsri.w" => "__builtin_msa_binsri_w", - "llvm.mips.bitrev" => "__builtin_mips_bitrev", - "llvm.mips.bmnz.v" => "__builtin_msa_bmnz_v", - "llvm.mips.bmnzi.b" => "__builtin_msa_bmnzi_b", - "llvm.mips.bmz.v" => "__builtin_msa_bmz_v", - "llvm.mips.bmzi.b" => "__builtin_msa_bmzi_b", - "llvm.mips.bneg.b" => "__builtin_msa_bneg_b", - "llvm.mips.bneg.d" => "__builtin_msa_bneg_d", - "llvm.mips.bneg.h" => "__builtin_msa_bneg_h", - "llvm.mips.bneg.w" => "__builtin_msa_bneg_w", - "llvm.mips.bnegi.b" => "__builtin_msa_bnegi_b", - "llvm.mips.bnegi.d" => "__builtin_msa_bnegi_d", - "llvm.mips.bnegi.h" => "__builtin_msa_bnegi_h", - "llvm.mips.bnegi.w" => "__builtin_msa_bnegi_w", - "llvm.mips.bnz.b" => "__builtin_msa_bnz_b", - "llvm.mips.bnz.d" => "__builtin_msa_bnz_d", - "llvm.mips.bnz.h" => "__builtin_msa_bnz_h", - "llvm.mips.bnz.v" => "__builtin_msa_bnz_v", - "llvm.mips.bnz.w" => "__builtin_msa_bnz_w", - "llvm.mips.bposge32" => "__builtin_mips_bposge32", - "llvm.mips.bsel.v" => "__builtin_msa_bsel_v", - "llvm.mips.bseli.b" => "__builtin_msa_bseli_b", - "llvm.mips.bset.b" => "__builtin_msa_bset_b", - "llvm.mips.bset.d" => "__builtin_msa_bset_d", - "llvm.mips.bset.h" => "__builtin_msa_bset_h", - "llvm.mips.bset.w" => "__builtin_msa_bset_w", - "llvm.mips.bseti.b" => "__builtin_msa_bseti_b", - "llvm.mips.bseti.d" => "__builtin_msa_bseti_d", - "llvm.mips.bseti.h" => "__builtin_msa_bseti_h", - "llvm.mips.bseti.w" => "__builtin_msa_bseti_w", - "llvm.mips.bz.b" => "__builtin_msa_bz_b", - "llvm.mips.bz.d" => "__builtin_msa_bz_d", - "llvm.mips.bz.h" => "__builtin_msa_bz_h", - "llvm.mips.bz.v" => "__builtin_msa_bz_v", - "llvm.mips.bz.w" => "__builtin_msa_bz_w", - "llvm.mips.ceq.b" => "__builtin_msa_ceq_b", - "llvm.mips.ceq.d" => "__builtin_msa_ceq_d", - "llvm.mips.ceq.h" => "__builtin_msa_ceq_h", - "llvm.mips.ceq.w" => "__builtin_msa_ceq_w", - "llvm.mips.ceqi.b" => "__builtin_msa_ceqi_b", - "llvm.mips.ceqi.d" => "__builtin_msa_ceqi_d", - "llvm.mips.ceqi.h" => "__builtin_msa_ceqi_h", - "llvm.mips.ceqi.w" => "__builtin_msa_ceqi_w", - "llvm.mips.cfcmsa" => "__builtin_msa_cfcmsa", - "llvm.mips.cle.s.b" => 
"__builtin_msa_cle_s_b", - "llvm.mips.cle.s.d" => "__builtin_msa_cle_s_d", - "llvm.mips.cle.s.h" => "__builtin_msa_cle_s_h", - "llvm.mips.cle.s.w" => "__builtin_msa_cle_s_w", - "llvm.mips.cle.u.b" => "__builtin_msa_cle_u_b", - "llvm.mips.cle.u.d" => "__builtin_msa_cle_u_d", - "llvm.mips.cle.u.h" => "__builtin_msa_cle_u_h", - "llvm.mips.cle.u.w" => "__builtin_msa_cle_u_w", - "llvm.mips.clei.s.b" => "__builtin_msa_clei_s_b", - "llvm.mips.clei.s.d" => "__builtin_msa_clei_s_d", - "llvm.mips.clei.s.h" => "__builtin_msa_clei_s_h", - "llvm.mips.clei.s.w" => "__builtin_msa_clei_s_w", - "llvm.mips.clei.u.b" => "__builtin_msa_clei_u_b", - "llvm.mips.clei.u.d" => "__builtin_msa_clei_u_d", - "llvm.mips.clei.u.h" => "__builtin_msa_clei_u_h", - "llvm.mips.clei.u.w" => "__builtin_msa_clei_u_w", - "llvm.mips.clt.s.b" => "__builtin_msa_clt_s_b", - "llvm.mips.clt.s.d" => "__builtin_msa_clt_s_d", - "llvm.mips.clt.s.h" => "__builtin_msa_clt_s_h", - "llvm.mips.clt.s.w" => "__builtin_msa_clt_s_w", - "llvm.mips.clt.u.b" => "__builtin_msa_clt_u_b", - "llvm.mips.clt.u.d" => "__builtin_msa_clt_u_d", - "llvm.mips.clt.u.h" => "__builtin_msa_clt_u_h", - "llvm.mips.clt.u.w" => "__builtin_msa_clt_u_w", - "llvm.mips.clti.s.b" => "__builtin_msa_clti_s_b", - "llvm.mips.clti.s.d" => "__builtin_msa_clti_s_d", - "llvm.mips.clti.s.h" => "__builtin_msa_clti_s_h", - "llvm.mips.clti.s.w" => "__builtin_msa_clti_s_w", - "llvm.mips.clti.u.b" => "__builtin_msa_clti_u_b", - "llvm.mips.clti.u.d" => "__builtin_msa_clti_u_d", - "llvm.mips.clti.u.h" => "__builtin_msa_clti_u_h", - "llvm.mips.clti.u.w" => "__builtin_msa_clti_u_w", - "llvm.mips.cmp.eq.ph" => "__builtin_mips_cmp_eq_ph", - "llvm.mips.cmp.le.ph" => "__builtin_mips_cmp_le_ph", - "llvm.mips.cmp.lt.ph" => "__builtin_mips_cmp_lt_ph", - "llvm.mips.cmpgdu.eq.qb" => "__builtin_mips_cmpgdu_eq_qb", - "llvm.mips.cmpgdu.le.qb" => "__builtin_mips_cmpgdu_le_qb", - "llvm.mips.cmpgdu.lt.qb" => "__builtin_mips_cmpgdu_lt_qb", - "llvm.mips.cmpgu.eq.qb" => "__builtin_mips_cmpgu_eq_qb", - "llvm.mips.cmpgu.le.qb" => "__builtin_mips_cmpgu_le_qb", - "llvm.mips.cmpgu.lt.qb" => "__builtin_mips_cmpgu_lt_qb", - "llvm.mips.cmpu.eq.qb" => "__builtin_mips_cmpu_eq_qb", - "llvm.mips.cmpu.le.qb" => "__builtin_mips_cmpu_le_qb", - "llvm.mips.cmpu.lt.qb" => "__builtin_mips_cmpu_lt_qb", - "llvm.mips.copy.s.b" => "__builtin_msa_copy_s_b", - "llvm.mips.copy.s.d" => "__builtin_msa_copy_s_d", - "llvm.mips.copy.s.h" => "__builtin_msa_copy_s_h", - "llvm.mips.copy.s.w" => "__builtin_msa_copy_s_w", - "llvm.mips.copy.u.b" => "__builtin_msa_copy_u_b", - "llvm.mips.copy.u.d" => "__builtin_msa_copy_u_d", - "llvm.mips.copy.u.h" => "__builtin_msa_copy_u_h", - "llvm.mips.copy.u.w" => "__builtin_msa_copy_u_w", - "llvm.mips.ctcmsa" => "__builtin_msa_ctcmsa", - "llvm.mips.div.s.b" => "__builtin_msa_div_s_b", - "llvm.mips.div.s.d" => "__builtin_msa_div_s_d", - "llvm.mips.div.s.h" => "__builtin_msa_div_s_h", - "llvm.mips.div.s.w" => "__builtin_msa_div_s_w", - "llvm.mips.div.u.b" => "__builtin_msa_div_u_b", - "llvm.mips.div.u.d" => "__builtin_msa_div_u_d", - "llvm.mips.div.u.h" => "__builtin_msa_div_u_h", - "llvm.mips.div.u.w" => "__builtin_msa_div_u_w", - "llvm.mips.dlsa" => "__builtin_mips_dlsa", - "llvm.mips.dotp.s.d" => "__builtin_msa_dotp_s_d", - "llvm.mips.dotp.s.h" => "__builtin_msa_dotp_s_h", - "llvm.mips.dotp.s.w" => "__builtin_msa_dotp_s_w", - "llvm.mips.dotp.u.d" => "__builtin_msa_dotp_u_d", - "llvm.mips.dotp.u.h" => "__builtin_msa_dotp_u_h", - "llvm.mips.dotp.u.w" => "__builtin_msa_dotp_u_w", - "llvm.mips.dpa.w.ph" => 
"__builtin_mips_dpa_w_ph", - "llvm.mips.dpadd.s.d" => "__builtin_msa_dpadd_s_d", - "llvm.mips.dpadd.s.h" => "__builtin_msa_dpadd_s_h", - "llvm.mips.dpadd.s.w" => "__builtin_msa_dpadd_s_w", - "llvm.mips.dpadd.u.d" => "__builtin_msa_dpadd_u_d", - "llvm.mips.dpadd.u.h" => "__builtin_msa_dpadd_u_h", - "llvm.mips.dpadd.u.w" => "__builtin_msa_dpadd_u_w", - "llvm.mips.dpaq.s.w.ph" => "__builtin_mips_dpaq_s_w_ph", - "llvm.mips.dpaq.sa.l.w" => "__builtin_mips_dpaq_sa_l_w", - "llvm.mips.dpaqx.s.w.ph" => "__builtin_mips_dpaqx_s_w_ph", - "llvm.mips.dpaqx.sa.w.ph" => "__builtin_mips_dpaqx_sa_w_ph", - "llvm.mips.dpau.h.qbl" => "__builtin_mips_dpau_h_qbl", - "llvm.mips.dpau.h.qbr" => "__builtin_mips_dpau_h_qbr", - "llvm.mips.dpax.w.ph" => "__builtin_mips_dpax_w_ph", - "llvm.mips.dps.w.ph" => "__builtin_mips_dps_w_ph", - "llvm.mips.dpsq.s.w.ph" => "__builtin_mips_dpsq_s_w_ph", - "llvm.mips.dpsq.sa.l.w" => "__builtin_mips_dpsq_sa_l_w", - "llvm.mips.dpsqx.s.w.ph" => "__builtin_mips_dpsqx_s_w_ph", - "llvm.mips.dpsqx.sa.w.ph" => "__builtin_mips_dpsqx_sa_w_ph", - "llvm.mips.dpsu.h.qbl" => "__builtin_mips_dpsu_h_qbl", - "llvm.mips.dpsu.h.qbr" => "__builtin_mips_dpsu_h_qbr", - "llvm.mips.dpsub.s.d" => "__builtin_msa_dpsub_s_d", - "llvm.mips.dpsub.s.h" => "__builtin_msa_dpsub_s_h", - "llvm.mips.dpsub.s.w" => "__builtin_msa_dpsub_s_w", - "llvm.mips.dpsub.u.d" => "__builtin_msa_dpsub_u_d", - "llvm.mips.dpsub.u.h" => "__builtin_msa_dpsub_u_h", - "llvm.mips.dpsub.u.w" => "__builtin_msa_dpsub_u_w", - "llvm.mips.dpsx.w.ph" => "__builtin_mips_dpsx_w_ph", - "llvm.mips.extp" => "__builtin_mips_extp", - "llvm.mips.extpdp" => "__builtin_mips_extpdp", - "llvm.mips.extr.r.w" => "__builtin_mips_extr_r_w", - "llvm.mips.extr.rs.w" => "__builtin_mips_extr_rs_w", - "llvm.mips.extr.s.h" => "__builtin_mips_extr_s_h", - "llvm.mips.extr.w" => "__builtin_mips_extr_w", - "llvm.mips.fadd.d" => "__builtin_msa_fadd_d", - "llvm.mips.fadd.w" => "__builtin_msa_fadd_w", - "llvm.mips.fcaf.d" => "__builtin_msa_fcaf_d", - "llvm.mips.fcaf.w" => "__builtin_msa_fcaf_w", - "llvm.mips.fceq.d" => "__builtin_msa_fceq_d", - "llvm.mips.fceq.w" => "__builtin_msa_fceq_w", - "llvm.mips.fclass.d" => "__builtin_msa_fclass_d", - "llvm.mips.fclass.w" => "__builtin_msa_fclass_w", - "llvm.mips.fcle.d" => "__builtin_msa_fcle_d", - "llvm.mips.fcle.w" => "__builtin_msa_fcle_w", - "llvm.mips.fclt.d" => "__builtin_msa_fclt_d", - "llvm.mips.fclt.w" => "__builtin_msa_fclt_w", - "llvm.mips.fcne.d" => "__builtin_msa_fcne_d", - "llvm.mips.fcne.w" => "__builtin_msa_fcne_w", - "llvm.mips.fcor.d" => "__builtin_msa_fcor_d", - "llvm.mips.fcor.w" => "__builtin_msa_fcor_w", - "llvm.mips.fcueq.d" => "__builtin_msa_fcueq_d", - "llvm.mips.fcueq.w" => "__builtin_msa_fcueq_w", - "llvm.mips.fcule.d" => "__builtin_msa_fcule_d", - "llvm.mips.fcule.w" => "__builtin_msa_fcule_w", - "llvm.mips.fcult.d" => "__builtin_msa_fcult_d", - "llvm.mips.fcult.w" => "__builtin_msa_fcult_w", - "llvm.mips.fcun.d" => "__builtin_msa_fcun_d", - "llvm.mips.fcun.w" => "__builtin_msa_fcun_w", - "llvm.mips.fcune.d" => "__builtin_msa_fcune_d", - "llvm.mips.fcune.w" => "__builtin_msa_fcune_w", - "llvm.mips.fdiv.d" => "__builtin_msa_fdiv_d", - "llvm.mips.fdiv.w" => "__builtin_msa_fdiv_w", - "llvm.mips.fexdo.h" => "__builtin_msa_fexdo_h", - "llvm.mips.fexdo.w" => "__builtin_msa_fexdo_w", - "llvm.mips.fexp2.d" => "__builtin_msa_fexp2_d", - "llvm.mips.fexp2.w" => "__builtin_msa_fexp2_w", - "llvm.mips.fexupl.d" => "__builtin_msa_fexupl_d", - "llvm.mips.fexupl.w" => "__builtin_msa_fexupl_w", - "llvm.mips.fexupr.d" => 
"__builtin_msa_fexupr_d", - "llvm.mips.fexupr.w" => "__builtin_msa_fexupr_w", - "llvm.mips.ffint.s.d" => "__builtin_msa_ffint_s_d", - "llvm.mips.ffint.s.w" => "__builtin_msa_ffint_s_w", - "llvm.mips.ffint.u.d" => "__builtin_msa_ffint_u_d", - "llvm.mips.ffint.u.w" => "__builtin_msa_ffint_u_w", - "llvm.mips.ffql.d" => "__builtin_msa_ffql_d", - "llvm.mips.ffql.w" => "__builtin_msa_ffql_w", - "llvm.mips.ffqr.d" => "__builtin_msa_ffqr_d", - "llvm.mips.ffqr.w" => "__builtin_msa_ffqr_w", - "llvm.mips.fill.b" => "__builtin_msa_fill_b", - "llvm.mips.fill.d" => "__builtin_msa_fill_d", - "llvm.mips.fill.h" => "__builtin_msa_fill_h", - "llvm.mips.fill.w" => "__builtin_msa_fill_w", - "llvm.mips.flog2.d" => "__builtin_msa_flog2_d", - "llvm.mips.flog2.w" => "__builtin_msa_flog2_w", - "llvm.mips.fmadd.d" => "__builtin_msa_fmadd_d", - "llvm.mips.fmadd.w" => "__builtin_msa_fmadd_w", - "llvm.mips.fmax.a.d" => "__builtin_msa_fmax_a_d", - "llvm.mips.fmax.a.w" => "__builtin_msa_fmax_a_w", - "llvm.mips.fmax.d" => "__builtin_msa_fmax_d", - "llvm.mips.fmax.w" => "__builtin_msa_fmax_w", - "llvm.mips.fmin.a.d" => "__builtin_msa_fmin_a_d", - "llvm.mips.fmin.a.w" => "__builtin_msa_fmin_a_w", - "llvm.mips.fmin.d" => "__builtin_msa_fmin_d", - "llvm.mips.fmin.w" => "__builtin_msa_fmin_w", - "llvm.mips.fmsub.d" => "__builtin_msa_fmsub_d", - "llvm.mips.fmsub.w" => "__builtin_msa_fmsub_w", - "llvm.mips.fmul.d" => "__builtin_msa_fmul_d", - "llvm.mips.fmul.w" => "__builtin_msa_fmul_w", - "llvm.mips.frcp.d" => "__builtin_msa_frcp_d", - "llvm.mips.frcp.w" => "__builtin_msa_frcp_w", - "llvm.mips.frint.d" => "__builtin_msa_frint_d", - "llvm.mips.frint.w" => "__builtin_msa_frint_w", - "llvm.mips.frsqrt.d" => "__builtin_msa_frsqrt_d", - "llvm.mips.frsqrt.w" => "__builtin_msa_frsqrt_w", - "llvm.mips.fsaf.d" => "__builtin_msa_fsaf_d", - "llvm.mips.fsaf.w" => "__builtin_msa_fsaf_w", - "llvm.mips.fseq.d" => "__builtin_msa_fseq_d", - "llvm.mips.fseq.w" => "__builtin_msa_fseq_w", - "llvm.mips.fsle.d" => "__builtin_msa_fsle_d", - "llvm.mips.fsle.w" => "__builtin_msa_fsle_w", - "llvm.mips.fslt.d" => "__builtin_msa_fslt_d", - "llvm.mips.fslt.w" => "__builtin_msa_fslt_w", - "llvm.mips.fsne.d" => "__builtin_msa_fsne_d", - "llvm.mips.fsne.w" => "__builtin_msa_fsne_w", - "llvm.mips.fsor.d" => "__builtin_msa_fsor_d", - "llvm.mips.fsor.w" => "__builtin_msa_fsor_w", - "llvm.mips.fsqrt.d" => "__builtin_msa_fsqrt_d", - "llvm.mips.fsqrt.w" => "__builtin_msa_fsqrt_w", - "llvm.mips.fsub.d" => "__builtin_msa_fsub_d", - "llvm.mips.fsub.w" => "__builtin_msa_fsub_w", - "llvm.mips.fsueq.d" => "__builtin_msa_fsueq_d", - "llvm.mips.fsueq.w" => "__builtin_msa_fsueq_w", - "llvm.mips.fsule.d" => "__builtin_msa_fsule_d", - "llvm.mips.fsule.w" => "__builtin_msa_fsule_w", - "llvm.mips.fsult.d" => "__builtin_msa_fsult_d", - "llvm.mips.fsult.w" => "__builtin_msa_fsult_w", - "llvm.mips.fsun.d" => "__builtin_msa_fsun_d", - "llvm.mips.fsun.w" => "__builtin_msa_fsun_w", - "llvm.mips.fsune.d" => "__builtin_msa_fsune_d", - "llvm.mips.fsune.w" => "__builtin_msa_fsune_w", - "llvm.mips.ftint.s.d" => "__builtin_msa_ftint_s_d", - "llvm.mips.ftint.s.w" => "__builtin_msa_ftint_s_w", - "llvm.mips.ftint.u.d" => "__builtin_msa_ftint_u_d", - "llvm.mips.ftint.u.w" => "__builtin_msa_ftint_u_w", - "llvm.mips.ftq.h" => "__builtin_msa_ftq_h", - "llvm.mips.ftq.w" => "__builtin_msa_ftq_w", - "llvm.mips.ftrunc.s.d" => "__builtin_msa_ftrunc_s_d", - "llvm.mips.ftrunc.s.w" => "__builtin_msa_ftrunc_s_w", - "llvm.mips.ftrunc.u.d" => "__builtin_msa_ftrunc_u_d", - "llvm.mips.ftrunc.u.w" => 
"__builtin_msa_ftrunc_u_w", - "llvm.mips.hadd.s.d" => "__builtin_msa_hadd_s_d", - "llvm.mips.hadd.s.h" => "__builtin_msa_hadd_s_h", - "llvm.mips.hadd.s.w" => "__builtin_msa_hadd_s_w", - "llvm.mips.hadd.u.d" => "__builtin_msa_hadd_u_d", - "llvm.mips.hadd.u.h" => "__builtin_msa_hadd_u_h", - "llvm.mips.hadd.u.w" => "__builtin_msa_hadd_u_w", - "llvm.mips.hsub.s.d" => "__builtin_msa_hsub_s_d", - "llvm.mips.hsub.s.h" => "__builtin_msa_hsub_s_h", - "llvm.mips.hsub.s.w" => "__builtin_msa_hsub_s_w", - "llvm.mips.hsub.u.d" => "__builtin_msa_hsub_u_d", - "llvm.mips.hsub.u.h" => "__builtin_msa_hsub_u_h", - "llvm.mips.hsub.u.w" => "__builtin_msa_hsub_u_w", - "llvm.mips.ilvev.b" => "__builtin_msa_ilvev_b", - "llvm.mips.ilvev.d" => "__builtin_msa_ilvev_d", - "llvm.mips.ilvev.h" => "__builtin_msa_ilvev_h", - "llvm.mips.ilvev.w" => "__builtin_msa_ilvev_w", - "llvm.mips.ilvl.b" => "__builtin_msa_ilvl_b", - "llvm.mips.ilvl.d" => "__builtin_msa_ilvl_d", - "llvm.mips.ilvl.h" => "__builtin_msa_ilvl_h", - "llvm.mips.ilvl.w" => "__builtin_msa_ilvl_w", - "llvm.mips.ilvod.b" => "__builtin_msa_ilvod_b", - "llvm.mips.ilvod.d" => "__builtin_msa_ilvod_d", - "llvm.mips.ilvod.h" => "__builtin_msa_ilvod_h", - "llvm.mips.ilvod.w" => "__builtin_msa_ilvod_w", - "llvm.mips.ilvr.b" => "__builtin_msa_ilvr_b", - "llvm.mips.ilvr.d" => "__builtin_msa_ilvr_d", - "llvm.mips.ilvr.h" => "__builtin_msa_ilvr_h", - "llvm.mips.ilvr.w" => "__builtin_msa_ilvr_w", - "llvm.mips.insert.b" => "__builtin_msa_insert_b", - "llvm.mips.insert.d" => "__builtin_msa_insert_d", - "llvm.mips.insert.h" => "__builtin_msa_insert_h", - "llvm.mips.insert.w" => "__builtin_msa_insert_w", - "llvm.mips.insv" => "__builtin_mips_insv", - "llvm.mips.insve.b" => "__builtin_msa_insve_b", - "llvm.mips.insve.d" => "__builtin_msa_insve_d", - "llvm.mips.insve.h" => "__builtin_msa_insve_h", - "llvm.mips.insve.w" => "__builtin_msa_insve_w", - "llvm.mips.lbux" => "__builtin_mips_lbux", - "llvm.mips.ld.b" => "__builtin_msa_ld_b", - "llvm.mips.ld.d" => "__builtin_msa_ld_d", - "llvm.mips.ld.h" => "__builtin_msa_ld_h", - "llvm.mips.ld.w" => "__builtin_msa_ld_w", - "llvm.mips.ldi.b" => "__builtin_msa_ldi_b", - "llvm.mips.ldi.d" => "__builtin_msa_ldi_d", - "llvm.mips.ldi.h" => "__builtin_msa_ldi_h", - "llvm.mips.ldi.w" => "__builtin_msa_ldi_w", - "llvm.mips.ldr.d" => "__builtin_msa_ldr_d", - "llvm.mips.ldr.w" => "__builtin_msa_ldr_w", - "llvm.mips.lhx" => "__builtin_mips_lhx", - "llvm.mips.lsa" => "__builtin_mips_lsa", - "llvm.mips.lwx" => "__builtin_mips_lwx", - "llvm.mips.madd" => "__builtin_mips_madd", - "llvm.mips.madd.q.h" => "__builtin_msa_madd_q_h", - "llvm.mips.madd.q.w" => "__builtin_msa_madd_q_w", - "llvm.mips.maddr.q.h" => "__builtin_msa_maddr_q_h", - "llvm.mips.maddr.q.w" => "__builtin_msa_maddr_q_w", - "llvm.mips.maddu" => "__builtin_mips_maddu", - "llvm.mips.maddv.b" => "__builtin_msa_maddv_b", - "llvm.mips.maddv.d" => "__builtin_msa_maddv_d", - "llvm.mips.maddv.h" => "__builtin_msa_maddv_h", - "llvm.mips.maddv.w" => "__builtin_msa_maddv_w", - "llvm.mips.maq.s.w.phl" => "__builtin_mips_maq_s_w_phl", - "llvm.mips.maq.s.w.phr" => "__builtin_mips_maq_s_w_phr", - "llvm.mips.maq.sa.w.phl" => "__builtin_mips_maq_sa_w_phl", - "llvm.mips.maq.sa.w.phr" => "__builtin_mips_maq_sa_w_phr", - "llvm.mips.max.a.b" => "__builtin_msa_max_a_b", - "llvm.mips.max.a.d" => "__builtin_msa_max_a_d", - "llvm.mips.max.a.h" => "__builtin_msa_max_a_h", - "llvm.mips.max.a.w" => "__builtin_msa_max_a_w", - "llvm.mips.max.s.b" => "__builtin_msa_max_s_b", - "llvm.mips.max.s.d" => 
"__builtin_msa_max_s_d", - "llvm.mips.max.s.h" => "__builtin_msa_max_s_h", - "llvm.mips.max.s.w" => "__builtin_msa_max_s_w", - "llvm.mips.max.u.b" => "__builtin_msa_max_u_b", - "llvm.mips.max.u.d" => "__builtin_msa_max_u_d", - "llvm.mips.max.u.h" => "__builtin_msa_max_u_h", - "llvm.mips.max.u.w" => "__builtin_msa_max_u_w", - "llvm.mips.maxi.s.b" => "__builtin_msa_maxi_s_b", - "llvm.mips.maxi.s.d" => "__builtin_msa_maxi_s_d", - "llvm.mips.maxi.s.h" => "__builtin_msa_maxi_s_h", - "llvm.mips.maxi.s.w" => "__builtin_msa_maxi_s_w", - "llvm.mips.maxi.u.b" => "__builtin_msa_maxi_u_b", - "llvm.mips.maxi.u.d" => "__builtin_msa_maxi_u_d", - "llvm.mips.maxi.u.h" => "__builtin_msa_maxi_u_h", - "llvm.mips.maxi.u.w" => "__builtin_msa_maxi_u_w", - "llvm.mips.min.a.b" => "__builtin_msa_min_a_b", - "llvm.mips.min.a.d" => "__builtin_msa_min_a_d", - "llvm.mips.min.a.h" => "__builtin_msa_min_a_h", - "llvm.mips.min.a.w" => "__builtin_msa_min_a_w", - "llvm.mips.min.s.b" => "__builtin_msa_min_s_b", - "llvm.mips.min.s.d" => "__builtin_msa_min_s_d", - "llvm.mips.min.s.h" => "__builtin_msa_min_s_h", - "llvm.mips.min.s.w" => "__builtin_msa_min_s_w", - "llvm.mips.min.u.b" => "__builtin_msa_min_u_b", - "llvm.mips.min.u.d" => "__builtin_msa_min_u_d", - "llvm.mips.min.u.h" => "__builtin_msa_min_u_h", - "llvm.mips.min.u.w" => "__builtin_msa_min_u_w", - "llvm.mips.mini.s.b" => "__builtin_msa_mini_s_b", - "llvm.mips.mini.s.d" => "__builtin_msa_mini_s_d", - "llvm.mips.mini.s.h" => "__builtin_msa_mini_s_h", - "llvm.mips.mini.s.w" => "__builtin_msa_mini_s_w", - "llvm.mips.mini.u.b" => "__builtin_msa_mini_u_b", - "llvm.mips.mini.u.d" => "__builtin_msa_mini_u_d", - "llvm.mips.mini.u.h" => "__builtin_msa_mini_u_h", - "llvm.mips.mini.u.w" => "__builtin_msa_mini_u_w", - "llvm.mips.mod.s.b" => "__builtin_msa_mod_s_b", - "llvm.mips.mod.s.d" => "__builtin_msa_mod_s_d", - "llvm.mips.mod.s.h" => "__builtin_msa_mod_s_h", - "llvm.mips.mod.s.w" => "__builtin_msa_mod_s_w", - "llvm.mips.mod.u.b" => "__builtin_msa_mod_u_b", - "llvm.mips.mod.u.d" => "__builtin_msa_mod_u_d", - "llvm.mips.mod.u.h" => "__builtin_msa_mod_u_h", - "llvm.mips.mod.u.w" => "__builtin_msa_mod_u_w", - "llvm.mips.modsub" => "__builtin_mips_modsub", - "llvm.mips.move.v" => "__builtin_msa_move_v", - "llvm.mips.msub" => "__builtin_mips_msub", - "llvm.mips.msub.q.h" => "__builtin_msa_msub_q_h", - "llvm.mips.msub.q.w" => "__builtin_msa_msub_q_w", - "llvm.mips.msubr.q.h" => "__builtin_msa_msubr_q_h", - "llvm.mips.msubr.q.w" => "__builtin_msa_msubr_q_w", - "llvm.mips.msubu" => "__builtin_mips_msubu", - "llvm.mips.msubv.b" => "__builtin_msa_msubv_b", - "llvm.mips.msubv.d" => "__builtin_msa_msubv_d", - "llvm.mips.msubv.h" => "__builtin_msa_msubv_h", - "llvm.mips.msubv.w" => "__builtin_msa_msubv_w", - "llvm.mips.mthlip" => "__builtin_mips_mthlip", - "llvm.mips.mul.ph" => "__builtin_mips_mul_ph", - "llvm.mips.mul.q.h" => "__builtin_msa_mul_q_h", - "llvm.mips.mul.q.w" => "__builtin_msa_mul_q_w", - "llvm.mips.mul.s.ph" => "__builtin_mips_mul_s_ph", - "llvm.mips.muleq.s.w.phl" => "__builtin_mips_muleq_s_w_phl", - "llvm.mips.muleq.s.w.phr" => "__builtin_mips_muleq_s_w_phr", - "llvm.mips.muleu.s.ph.qbl" => "__builtin_mips_muleu_s_ph_qbl", - "llvm.mips.muleu.s.ph.qbr" => "__builtin_mips_muleu_s_ph_qbr", - "llvm.mips.mulq.rs.ph" => "__builtin_mips_mulq_rs_ph", - "llvm.mips.mulq.rs.w" => "__builtin_mips_mulq_rs_w", - "llvm.mips.mulq.s.ph" => "__builtin_mips_mulq_s_ph", - "llvm.mips.mulq.s.w" => "__builtin_mips_mulq_s_w", - "llvm.mips.mulr.q.h" => "__builtin_msa_mulr_q_h", - 
"llvm.mips.mulr.q.w" => "__builtin_msa_mulr_q_w", - "llvm.mips.mulsa.w.ph" => "__builtin_mips_mulsa_w_ph", - "llvm.mips.mulsaq.s.w.ph" => "__builtin_mips_mulsaq_s_w_ph", - "llvm.mips.mult" => "__builtin_mips_mult", - "llvm.mips.multu" => "__builtin_mips_multu", - "llvm.mips.mulv.b" => "__builtin_msa_mulv_b", - "llvm.mips.mulv.d" => "__builtin_msa_mulv_d", - "llvm.mips.mulv.h" => "__builtin_msa_mulv_h", - "llvm.mips.mulv.w" => "__builtin_msa_mulv_w", - "llvm.mips.nloc.b" => "__builtin_msa_nloc_b", - "llvm.mips.nloc.d" => "__builtin_msa_nloc_d", - "llvm.mips.nloc.h" => "__builtin_msa_nloc_h", - "llvm.mips.nloc.w" => "__builtin_msa_nloc_w", - "llvm.mips.nlzc.b" => "__builtin_msa_nlzc_b", - "llvm.mips.nlzc.d" => "__builtin_msa_nlzc_d", - "llvm.mips.nlzc.h" => "__builtin_msa_nlzc_h", - "llvm.mips.nlzc.w" => "__builtin_msa_nlzc_w", - "llvm.mips.nor.v" => "__builtin_msa_nor_v", - "llvm.mips.nori.b" => "__builtin_msa_nori_b", - "llvm.mips.or.v" => "__builtin_msa_or_v", - "llvm.mips.ori.b" => "__builtin_msa_ori_b", - "llvm.mips.packrl.ph" => "__builtin_mips_packrl_ph", - "llvm.mips.pckev.b" => "__builtin_msa_pckev_b", - "llvm.mips.pckev.d" => "__builtin_msa_pckev_d", - "llvm.mips.pckev.h" => "__builtin_msa_pckev_h", - "llvm.mips.pckev.w" => "__builtin_msa_pckev_w", - "llvm.mips.pckod.b" => "__builtin_msa_pckod_b", - "llvm.mips.pckod.d" => "__builtin_msa_pckod_d", - "llvm.mips.pckod.h" => "__builtin_msa_pckod_h", - "llvm.mips.pckod.w" => "__builtin_msa_pckod_w", - "llvm.mips.pcnt.b" => "__builtin_msa_pcnt_b", - "llvm.mips.pcnt.d" => "__builtin_msa_pcnt_d", - "llvm.mips.pcnt.h" => "__builtin_msa_pcnt_h", - "llvm.mips.pcnt.w" => "__builtin_msa_pcnt_w", - "llvm.mips.pick.ph" => "__builtin_mips_pick_ph", - "llvm.mips.pick.qb" => "__builtin_mips_pick_qb", - "llvm.mips.preceq.w.phl" => "__builtin_mips_preceq_w_phl", - "llvm.mips.preceq.w.phr" => "__builtin_mips_preceq_w_phr", - "llvm.mips.precequ.ph.qbl" => "__builtin_mips_precequ_ph_qbl", - "llvm.mips.precequ.ph.qbla" => "__builtin_mips_precequ_ph_qbla", - "llvm.mips.precequ.ph.qbr" => "__builtin_mips_precequ_ph_qbr", - "llvm.mips.precequ.ph.qbra" => "__builtin_mips_precequ_ph_qbra", - "llvm.mips.preceu.ph.qbl" => "__builtin_mips_preceu_ph_qbl", - "llvm.mips.preceu.ph.qbla" => "__builtin_mips_preceu_ph_qbla", - "llvm.mips.preceu.ph.qbr" => "__builtin_mips_preceu_ph_qbr", - "llvm.mips.preceu.ph.qbra" => "__builtin_mips_preceu_ph_qbra", - "llvm.mips.precr.qb.ph" => "__builtin_mips_precr_qb_ph", - "llvm.mips.precr.sra.ph.w" => "__builtin_mips_precr_sra_ph_w", - "llvm.mips.precr.sra.r.ph.w" => "__builtin_mips_precr_sra_r_ph_w", - "llvm.mips.precrq.ph.w" => "__builtin_mips_precrq_ph_w", - "llvm.mips.precrq.qb.ph" => "__builtin_mips_precrq_qb_ph", - "llvm.mips.precrq.rs.ph.w" => "__builtin_mips_precrq_rs_ph_w", - "llvm.mips.precrqu.s.qb.ph" => "__builtin_mips_precrqu_s_qb_ph", - "llvm.mips.prepend" => "__builtin_mips_prepend", - "llvm.mips.raddu.w.qb" => "__builtin_mips_raddu_w_qb", - "llvm.mips.rddsp" => "__builtin_mips_rddsp", - "llvm.mips.repl.ph" => "__builtin_mips_repl_ph", - "llvm.mips.repl.qb" => "__builtin_mips_repl_qb", - "llvm.mips.sat.s.b" => "__builtin_msa_sat_s_b", - "llvm.mips.sat.s.d" => "__builtin_msa_sat_s_d", - "llvm.mips.sat.s.h" => "__builtin_msa_sat_s_h", - "llvm.mips.sat.s.w" => "__builtin_msa_sat_s_w", - "llvm.mips.sat.u.b" => "__builtin_msa_sat_u_b", - "llvm.mips.sat.u.d" => "__builtin_msa_sat_u_d", - "llvm.mips.sat.u.h" => "__builtin_msa_sat_u_h", - "llvm.mips.sat.u.w" => "__builtin_msa_sat_u_w", - "llvm.mips.shf.b" => 
"__builtin_msa_shf_b", - "llvm.mips.shf.h" => "__builtin_msa_shf_h", - "llvm.mips.shf.w" => "__builtin_msa_shf_w", - "llvm.mips.shilo" => "__builtin_mips_shilo", - "llvm.mips.shll.ph" => "__builtin_mips_shll_ph", - "llvm.mips.shll.qb" => "__builtin_mips_shll_qb", - "llvm.mips.shll.s.ph" => "__builtin_mips_shll_s_ph", - "llvm.mips.shll.s.w" => "__builtin_mips_shll_s_w", - "llvm.mips.shra.ph" => "__builtin_mips_shra_ph", - "llvm.mips.shra.qb" => "__builtin_mips_shra_qb", - "llvm.mips.shra.r.ph" => "__builtin_mips_shra_r_ph", - "llvm.mips.shra.r.qb" => "__builtin_mips_shra_r_qb", - "llvm.mips.shra.r.w" => "__builtin_mips_shra_r_w", - "llvm.mips.shrl.ph" => "__builtin_mips_shrl_ph", - "llvm.mips.shrl.qb" => "__builtin_mips_shrl_qb", - "llvm.mips.sld.b" => "__builtin_msa_sld_b", - "llvm.mips.sld.d" => "__builtin_msa_sld_d", - "llvm.mips.sld.h" => "__builtin_msa_sld_h", - "llvm.mips.sld.w" => "__builtin_msa_sld_w", - "llvm.mips.sldi.b" => "__builtin_msa_sldi_b", - "llvm.mips.sldi.d" => "__builtin_msa_sldi_d", - "llvm.mips.sldi.h" => "__builtin_msa_sldi_h", - "llvm.mips.sldi.w" => "__builtin_msa_sldi_w", - "llvm.mips.sll.b" => "__builtin_msa_sll_b", - "llvm.mips.sll.d" => "__builtin_msa_sll_d", - "llvm.mips.sll.h" => "__builtin_msa_sll_h", - "llvm.mips.sll.w" => "__builtin_msa_sll_w", - "llvm.mips.slli.b" => "__builtin_msa_slli_b", - "llvm.mips.slli.d" => "__builtin_msa_slli_d", - "llvm.mips.slli.h" => "__builtin_msa_slli_h", - "llvm.mips.slli.w" => "__builtin_msa_slli_w", - "llvm.mips.splat.b" => "__builtin_msa_splat_b", - "llvm.mips.splat.d" => "__builtin_msa_splat_d", - "llvm.mips.splat.h" => "__builtin_msa_splat_h", - "llvm.mips.splat.w" => "__builtin_msa_splat_w", - "llvm.mips.splati.b" => "__builtin_msa_splati_b", - "llvm.mips.splati.d" => "__builtin_msa_splati_d", - "llvm.mips.splati.h" => "__builtin_msa_splati_h", - "llvm.mips.splati.w" => "__builtin_msa_splati_w", - "llvm.mips.sra.b" => "__builtin_msa_sra_b", - "llvm.mips.sra.d" => "__builtin_msa_sra_d", - "llvm.mips.sra.h" => "__builtin_msa_sra_h", - "llvm.mips.sra.w" => "__builtin_msa_sra_w", - "llvm.mips.srai.b" => "__builtin_msa_srai_b", - "llvm.mips.srai.d" => "__builtin_msa_srai_d", - "llvm.mips.srai.h" => "__builtin_msa_srai_h", - "llvm.mips.srai.w" => "__builtin_msa_srai_w", - "llvm.mips.srar.b" => "__builtin_msa_srar_b", - "llvm.mips.srar.d" => "__builtin_msa_srar_d", - "llvm.mips.srar.h" => "__builtin_msa_srar_h", - "llvm.mips.srar.w" => "__builtin_msa_srar_w", - "llvm.mips.srari.b" => "__builtin_msa_srari_b", - "llvm.mips.srari.d" => "__builtin_msa_srari_d", - "llvm.mips.srari.h" => "__builtin_msa_srari_h", - "llvm.mips.srari.w" => "__builtin_msa_srari_w", - "llvm.mips.srl.b" => "__builtin_msa_srl_b", - "llvm.mips.srl.d" => "__builtin_msa_srl_d", - "llvm.mips.srl.h" => "__builtin_msa_srl_h", - "llvm.mips.srl.w" => "__builtin_msa_srl_w", - "llvm.mips.srli.b" => "__builtin_msa_srli_b", - "llvm.mips.srli.d" => "__builtin_msa_srli_d", - "llvm.mips.srli.h" => "__builtin_msa_srli_h", - "llvm.mips.srli.w" => "__builtin_msa_srli_w", - "llvm.mips.srlr.b" => "__builtin_msa_srlr_b", - "llvm.mips.srlr.d" => "__builtin_msa_srlr_d", - "llvm.mips.srlr.h" => "__builtin_msa_srlr_h", - "llvm.mips.srlr.w" => "__builtin_msa_srlr_w", - "llvm.mips.srlri.b" => "__builtin_msa_srlri_b", - "llvm.mips.srlri.d" => "__builtin_msa_srlri_d", - "llvm.mips.srlri.h" => "__builtin_msa_srlri_h", - "llvm.mips.srlri.w" => "__builtin_msa_srlri_w", - "llvm.mips.st.b" => "__builtin_msa_st_b", - "llvm.mips.st.d" => "__builtin_msa_st_d", - "llvm.mips.st.h" => 
"__builtin_msa_st_h", - "llvm.mips.st.w" => "__builtin_msa_st_w", - "llvm.mips.str.d" => "__builtin_msa_str_d", - "llvm.mips.str.w" => "__builtin_msa_str_w", - "llvm.mips.subq.ph" => "__builtin_mips_subq_ph", - "llvm.mips.subq.s.ph" => "__builtin_mips_subq_s_ph", - "llvm.mips.subq.s.w" => "__builtin_mips_subq_s_w", - "llvm.mips.subqh.ph" => "__builtin_mips_subqh_ph", - "llvm.mips.subqh.r.ph" => "__builtin_mips_subqh_r_ph", - "llvm.mips.subqh.r.w" => "__builtin_mips_subqh_r_w", - "llvm.mips.subqh.w" => "__builtin_mips_subqh_w", - "llvm.mips.subs.s.b" => "__builtin_msa_subs_s_b", - "llvm.mips.subs.s.d" => "__builtin_msa_subs_s_d", - "llvm.mips.subs.s.h" => "__builtin_msa_subs_s_h", - "llvm.mips.subs.s.w" => "__builtin_msa_subs_s_w", - "llvm.mips.subs.u.b" => "__builtin_msa_subs_u_b", - "llvm.mips.subs.u.d" => "__builtin_msa_subs_u_d", - "llvm.mips.subs.u.h" => "__builtin_msa_subs_u_h", - "llvm.mips.subs.u.w" => "__builtin_msa_subs_u_w", - "llvm.mips.subsus.u.b" => "__builtin_msa_subsus_u_b", - "llvm.mips.subsus.u.d" => "__builtin_msa_subsus_u_d", - "llvm.mips.subsus.u.h" => "__builtin_msa_subsus_u_h", - "llvm.mips.subsus.u.w" => "__builtin_msa_subsus_u_w", - "llvm.mips.subsuu.s.b" => "__builtin_msa_subsuu_s_b", - "llvm.mips.subsuu.s.d" => "__builtin_msa_subsuu_s_d", - "llvm.mips.subsuu.s.h" => "__builtin_msa_subsuu_s_h", - "llvm.mips.subsuu.s.w" => "__builtin_msa_subsuu_s_w", - "llvm.mips.subu.ph" => "__builtin_mips_subu_ph", - "llvm.mips.subu.qb" => "__builtin_mips_subu_qb", - "llvm.mips.subu.s.ph" => "__builtin_mips_subu_s_ph", - "llvm.mips.subu.s.qb" => "__builtin_mips_subu_s_qb", - "llvm.mips.subuh.qb" => "__builtin_mips_subuh_qb", - "llvm.mips.subuh.r.qb" => "__builtin_mips_subuh_r_qb", - "llvm.mips.subv.b" => "__builtin_msa_subv_b", - "llvm.mips.subv.d" => "__builtin_msa_subv_d", - "llvm.mips.subv.h" => "__builtin_msa_subv_h", - "llvm.mips.subv.w" => "__builtin_msa_subv_w", - "llvm.mips.subvi.b" => "__builtin_msa_subvi_b", - "llvm.mips.subvi.d" => "__builtin_msa_subvi_d", - "llvm.mips.subvi.h" => "__builtin_msa_subvi_h", - "llvm.mips.subvi.w" => "__builtin_msa_subvi_w", - "llvm.mips.vshf.b" => "__builtin_msa_vshf_b", - "llvm.mips.vshf.d" => "__builtin_msa_vshf_d", - "llvm.mips.vshf.h" => "__builtin_msa_vshf_h", - "llvm.mips.vshf.w" => "__builtin_msa_vshf_w", - "llvm.mips.wrdsp" => "__builtin_mips_wrdsp", - "llvm.mips.xor.v" => "__builtin_msa_xor_v", - "llvm.mips.xori.b" => "__builtin_msa_xori_b", - // nvvm - "llvm.nvvm.abs.i" => "__nvvm_abs_i", - "llvm.nvvm.abs.ll" => "__nvvm_abs_ll", - "llvm.nvvm.activemask" => "__nvvm_activemask", - "llvm.nvvm.add.rm.d" => "__nvvm_add_rm_d", - "llvm.nvvm.add.rm.f" => "__nvvm_add_rm_f", - "llvm.nvvm.add.rm.ftz.f" => "__nvvm_add_rm_ftz_f", - "llvm.nvvm.add.rn.d" => "__nvvm_add_rn_d", - "llvm.nvvm.add.rn.f" => "__nvvm_add_rn_f", - "llvm.nvvm.add.rn.ftz.f" => "__nvvm_add_rn_ftz_f", - "llvm.nvvm.add.rp.d" => "__nvvm_add_rp_d", - "llvm.nvvm.add.rp.f" => "__nvvm_add_rp_f", - "llvm.nvvm.add.rp.ftz.f" => "__nvvm_add_rp_ftz_f", - "llvm.nvvm.add.rz.d" => "__nvvm_add_rz_d", - "llvm.nvvm.add.rz.f" => "__nvvm_add_rz_f", - "llvm.nvvm.add.rz.ftz.f" => "__nvvm_add_rz_ftz_f", - "llvm.nvvm.bar.sync" => "__nvvm_bar_sync", - "llvm.nvvm.bar.warp.sync" => "__nvvm_bar_warp_sync", - "llvm.nvvm.barrier" => "__nvvm_bar", - "llvm.nvvm.barrier.n" => "__nvvm_bar_n", - "llvm.nvvm.barrier.sync" => "__nvvm_barrier_sync", - "llvm.nvvm.barrier.sync.cnt" => "__nvvm_barrier_sync_cnt", - "llvm.nvvm.barrier0" => "__syncthreads", - // [DUPLICATE]: "llvm.nvvm.barrier0" => "__nvvm_bar0", - 
"llvm.nvvm.barrier0.and" => "__nvvm_bar0_and", - "llvm.nvvm.barrier0.or" => "__nvvm_bar0_or", - "llvm.nvvm.barrier0.popc" => "__nvvm_bar0_popc", - "llvm.nvvm.bf16x2.to.ue8m0x2.rp" => "__nvvm_bf16x2_to_ue8m0x2_rp", - "llvm.nvvm.bf16x2.to.ue8m0x2.rp.satfinite" => "__nvvm_bf16x2_to_ue8m0x2_rp_satfinite", - "llvm.nvvm.bf16x2.to.ue8m0x2.rz" => "__nvvm_bf16x2_to_ue8m0x2_rz", - "llvm.nvvm.bf16x2.to.ue8m0x2.rz.satfinite" => "__nvvm_bf16x2_to_ue8m0x2_rz_satfinite", - "llvm.nvvm.bf2h.rn" => "__nvvm_bf2h_rn", - "llvm.nvvm.bf2h.rn.ftz" => "__nvvm_bf2h_rn_ftz", - "llvm.nvvm.bitcast.d2ll" => "__nvvm_bitcast_d2ll", - "llvm.nvvm.bitcast.f2i" => "__nvvm_bitcast_f2i", - "llvm.nvvm.bitcast.i2f" => "__nvvm_bitcast_i2f", - "llvm.nvvm.bitcast.ll2d" => "__nvvm_bitcast_ll2d", - "llvm.nvvm.brev32" => "__nvvm_brev32", - "llvm.nvvm.brev64" => "__nvvm_brev64", - "llvm.nvvm.ceil.d" => "__nvvm_ceil_d", - "llvm.nvvm.ceil.f" => "__nvvm_ceil_f", - "llvm.nvvm.ceil.ftz.f" => "__nvvm_ceil_ftz_f", - "llvm.nvvm.clz.i" => "__nvvm_clz_i", - "llvm.nvvm.clz.ll" => "__nvvm_clz_ll", - "llvm.nvvm.cos.approx.f" => "__nvvm_cos_approx_f", - "llvm.nvvm.cos.approx.ftz.f" => "__nvvm_cos_approx_ftz_f", - "llvm.nvvm.cp.async.commit.group" => "__nvvm_cp_async_commit_group", - "llvm.nvvm.cp.async.mbarrier.arrive" => "__nvvm_cp_async_mbarrier_arrive", - "llvm.nvvm.cp.async.mbarrier.arrive.noinc" => "__nvvm_cp_async_mbarrier_arrive_noinc", - "llvm.nvvm.cp.async.mbarrier.arrive.noinc.shared" => "__nvvm_cp_async_mbarrier_arrive_noinc_shared", - "llvm.nvvm.cp.async.mbarrier.arrive.shared" => "__nvvm_cp_async_mbarrier_arrive_shared", - "llvm.nvvm.cp.async.wait.all" => "__nvvm_cp_async_wait_all", - "llvm.nvvm.cp.async.wait.group" => "__nvvm_cp_async_wait_group", - "llvm.nvvm.d2f.rm" => "__nvvm_d2f_rm", - "llvm.nvvm.d2f.rm.ftz" => "__nvvm_d2f_rm_ftz", - "llvm.nvvm.d2f.rn" => "__nvvm_d2f_rn", - "llvm.nvvm.d2f.rn.ftz" => "__nvvm_d2f_rn_ftz", - "llvm.nvvm.d2f.rp" => "__nvvm_d2f_rp", - "llvm.nvvm.d2f.rp.ftz" => "__nvvm_d2f_rp_ftz", - "llvm.nvvm.d2f.rz" => "__nvvm_d2f_rz", - "llvm.nvvm.d2f.rz.ftz" => "__nvvm_d2f_rz_ftz", - "llvm.nvvm.d2i.hi" => "__nvvm_d2i_hi", - "llvm.nvvm.d2i.lo" => "__nvvm_d2i_lo", - "llvm.nvvm.d2i.rm" => "__nvvm_d2i_rm", - "llvm.nvvm.d2i.rn" => "__nvvm_d2i_rn", - "llvm.nvvm.d2i.rp" => "__nvvm_d2i_rp", - "llvm.nvvm.d2i.rz" => "__nvvm_d2i_rz", - "llvm.nvvm.d2ll.rm" => "__nvvm_d2ll_rm", - "llvm.nvvm.d2ll.rn" => "__nvvm_d2ll_rn", - "llvm.nvvm.d2ll.rp" => "__nvvm_d2ll_rp", - "llvm.nvvm.d2ll.rz" => "__nvvm_d2ll_rz", - "llvm.nvvm.d2ui.rm" => "__nvvm_d2ui_rm", - "llvm.nvvm.d2ui.rn" => "__nvvm_d2ui_rn", - "llvm.nvvm.d2ui.rp" => "__nvvm_d2ui_rp", - "llvm.nvvm.d2ui.rz" => "__nvvm_d2ui_rz", - "llvm.nvvm.d2ull.rm" => "__nvvm_d2ull_rm", - "llvm.nvvm.d2ull.rn" => "__nvvm_d2ull_rn", - "llvm.nvvm.d2ull.rp" => "__nvvm_d2ull_rp", - "llvm.nvvm.d2ull.rz" => "__nvvm_d2ull_rz", - "llvm.nvvm.div.approx.f" => "__nvvm_div_approx_f", - "llvm.nvvm.div.approx.ftz.f" => "__nvvm_div_approx_ftz_f", - "llvm.nvvm.div.full" => "__nvvm_div_full", - "llvm.nvvm.div.full.ftz" => "__nvvm_div_full_ftz", - "llvm.nvvm.div.rm.d" => "__nvvm_div_rm_d", - "llvm.nvvm.div.rm.f" => "__nvvm_div_rm_f", - "llvm.nvvm.div.rm.ftz.f" => "__nvvm_div_rm_ftz_f", - "llvm.nvvm.div.rn.d" => "__nvvm_div_rn_d", - "llvm.nvvm.div.rn.f" => "__nvvm_div_rn_f", - "llvm.nvvm.div.rn.ftz.f" => "__nvvm_div_rn_ftz_f", - "llvm.nvvm.div.rp.d" => "__nvvm_div_rp_d", - "llvm.nvvm.div.rp.f" => "__nvvm_div_rp_f", - "llvm.nvvm.div.rp.ftz.f" => "__nvvm_div_rp_ftz_f", - "llvm.nvvm.div.rz.d" => "__nvvm_div_rz_d", - 
"llvm.nvvm.div.rz.f" => "__nvvm_div_rz_f", - "llvm.nvvm.div.rz.ftz.f" => "__nvvm_div_rz_ftz_f", - "llvm.nvvm.e2m3x2.to.f16x2.rn" => "__nvvm_e2m3x2_to_f16x2_rn", - "llvm.nvvm.e2m3x2.to.f16x2.rn.relu" => "__nvvm_e2m3x2_to_f16x2_rn_relu", - "llvm.nvvm.e3m2x2.to.f16x2.rn" => "__nvvm_e3m2x2_to_f16x2_rn", - "llvm.nvvm.e3m2x2.to.f16x2.rn.relu" => "__nvvm_e3m2x2_to_f16x2_rn_relu", - "llvm.nvvm.e4m3x2.to.f16x2.rn" => "__nvvm_e4m3x2_to_f16x2_rn", - "llvm.nvvm.e4m3x2.to.f16x2.rn.relu" => "__nvvm_e4m3x2_to_f16x2_rn_relu", - "llvm.nvvm.e5m2x2.to.f16x2.rn" => "__nvvm_e5m2x2_to_f16x2_rn", - "llvm.nvvm.e5m2x2.to.f16x2.rn.relu" => "__nvvm_e5m2x2_to_f16x2_rn_relu", - "llvm.nvvm.ex2.approx.d" => "__nvvm_ex2_approx_d", - "llvm.nvvm.ex2.approx.f" => "__nvvm_ex2_approx_f", - "llvm.nvvm.ex2.approx.ftz.f" => "__nvvm_ex2_approx_ftz_f", - "llvm.nvvm.exit" => "__nvvm_exit", - "llvm.nvvm.f16x2.to.e4m3x2.rn" => "__nvvm_f16x2_to_e4m3x2_rn", - "llvm.nvvm.f16x2.to.e4m3x2.rn.relu" => "__nvvm_f16x2_to_e4m3x2_rn_relu", - "llvm.nvvm.f16x2.to.e5m2x2.rn" => "__nvvm_f16x2_to_e5m2x2_rn", - "llvm.nvvm.f16x2.to.e5m2x2.rn.relu" => "__nvvm_f16x2_to_e5m2x2_rn_relu", - "llvm.nvvm.f2bf16.rn" => "__nvvm_f2bf16_rn", - "llvm.nvvm.f2bf16.rn.relu" => "__nvvm_f2bf16_rn_relu", - "llvm.nvvm.f2bf16.rz" => "__nvvm_f2bf16_rz", - "llvm.nvvm.f2bf16.rz.relu" => "__nvvm_f2bf16_rz_relu", - "llvm.nvvm.f2h.rn" => "__nvvm_f2h_rn", - "llvm.nvvm.f2h.rn.ftz" => "__nvvm_f2h_rn_ftz", - "llvm.nvvm.f2i.rm" => "__nvvm_f2i_rm", - "llvm.nvvm.f2i.rm.ftz" => "__nvvm_f2i_rm_ftz", - "llvm.nvvm.f2i.rn" => "__nvvm_f2i_rn", - "llvm.nvvm.f2i.rn.ftz" => "__nvvm_f2i_rn_ftz", - "llvm.nvvm.f2i.rp" => "__nvvm_f2i_rp", - "llvm.nvvm.f2i.rp.ftz" => "__nvvm_f2i_rp_ftz", - "llvm.nvvm.f2i.rz" => "__nvvm_f2i_rz", - "llvm.nvvm.f2i.rz.ftz" => "__nvvm_f2i_rz_ftz", - "llvm.nvvm.f2ll.rm" => "__nvvm_f2ll_rm", - "llvm.nvvm.f2ll.rm.ftz" => "__nvvm_f2ll_rm_ftz", - "llvm.nvvm.f2ll.rn" => "__nvvm_f2ll_rn", - "llvm.nvvm.f2ll.rn.ftz" => "__nvvm_f2ll_rn_ftz", - "llvm.nvvm.f2ll.rp" => "__nvvm_f2ll_rp", - "llvm.nvvm.f2ll.rp.ftz" => "__nvvm_f2ll_rp_ftz", - "llvm.nvvm.f2ll.rz" => "__nvvm_f2ll_rz", - "llvm.nvvm.f2ll.rz.ftz" => "__nvvm_f2ll_rz_ftz", - "llvm.nvvm.f2tf32.rn" => "__nvvm_f2tf32_rn", - "llvm.nvvm.f2tf32.rn.relu" => "__nvvm_f2tf32_rn_relu", - "llvm.nvvm.f2tf32.rn.relu.satfinite" => "__nvvm_f2tf32_rn_relu_satfinite", - "llvm.nvvm.f2tf32.rn.satfinite" => "__nvvm_f2tf32_rn_satfinite", - "llvm.nvvm.f2tf32.rna" => "__nvvm_f2tf32_rna", - "llvm.nvvm.f2tf32.rna.satfinite" => "__nvvm_f2tf32_rna_satfinite", - "llvm.nvvm.f2tf32.rz" => "__nvvm_f2tf32_rz", - "llvm.nvvm.f2tf32.rz.relu" => "__nvvm_f2tf32_rz_relu", - "llvm.nvvm.f2tf32.rz.relu.satfinite" => "__nvvm_f2tf32_rz_relu_satfinite", - "llvm.nvvm.f2tf32.rz.satfinite" => "__nvvm_f2tf32_rz_satfinite", - "llvm.nvvm.f2ui.rm" => "__nvvm_f2ui_rm", - "llvm.nvvm.f2ui.rm.ftz" => "__nvvm_f2ui_rm_ftz", - "llvm.nvvm.f2ui.rn" => "__nvvm_f2ui_rn", - "llvm.nvvm.f2ui.rn.ftz" => "__nvvm_f2ui_rn_ftz", - "llvm.nvvm.f2ui.rp" => "__nvvm_f2ui_rp", - "llvm.nvvm.f2ui.rp.ftz" => "__nvvm_f2ui_rp_ftz", - "llvm.nvvm.f2ui.rz" => "__nvvm_f2ui_rz", - "llvm.nvvm.f2ui.rz.ftz" => "__nvvm_f2ui_rz_ftz", - "llvm.nvvm.f2ull.rm" => "__nvvm_f2ull_rm", - "llvm.nvvm.f2ull.rm.ftz" => "__nvvm_f2ull_rm_ftz", - "llvm.nvvm.f2ull.rn" => "__nvvm_f2ull_rn", - "llvm.nvvm.f2ull.rn.ftz" => "__nvvm_f2ull_rn_ftz", - "llvm.nvvm.f2ull.rp" => "__nvvm_f2ull_rp", - "llvm.nvvm.f2ull.rp.ftz" => "__nvvm_f2ull_rp_ftz", - "llvm.nvvm.f2ull.rz" => "__nvvm_f2ull_rz", - "llvm.nvvm.f2ull.rz.ftz" => 
"__nvvm_f2ull_rz_ftz", - "llvm.nvvm.fabs.d" => "__nvvm_fabs_d", - "llvm.nvvm.fabs.f" => "__nvvm_fabs_f", - "llvm.nvvm.fabs.ftz.f" => "__nvvm_fabs_ftz_f", - "llvm.nvvm.ff.to.e2m3x2.rn.relu.satfinite" => "__nvvm_ff_to_e2m3x2_rn_relu_satfinite", - "llvm.nvvm.ff.to.e2m3x2.rn.satfinite" => "__nvvm_ff_to_e2m3x2_rn_satfinite", - "llvm.nvvm.ff.to.e3m2x2.rn.relu.satfinite" => "__nvvm_ff_to_e3m2x2_rn_relu_satfinite", - "llvm.nvvm.ff.to.e3m2x2.rn.satfinite" => "__nvvm_ff_to_e3m2x2_rn_satfinite", - "llvm.nvvm.ff.to.e4m3x2.rn" => "__nvvm_ff_to_e4m3x2_rn", - "llvm.nvvm.ff.to.e4m3x2.rn.relu" => "__nvvm_ff_to_e4m3x2_rn_relu", - "llvm.nvvm.ff.to.e5m2x2.rn" => "__nvvm_ff_to_e5m2x2_rn", - "llvm.nvvm.ff.to.e5m2x2.rn.relu" => "__nvvm_ff_to_e5m2x2_rn_relu", - "llvm.nvvm.ff.to.ue8m0x2.rp" => "__nvvm_ff_to_ue8m0x2_rp", - "llvm.nvvm.ff.to.ue8m0x2.rp.satfinite" => "__nvvm_ff_to_ue8m0x2_rp_satfinite", - "llvm.nvvm.ff.to.ue8m0x2.rz" => "__nvvm_ff_to_ue8m0x2_rz", - "llvm.nvvm.ff.to.ue8m0x2.rz.satfinite" => "__nvvm_ff_to_ue8m0x2_rz_satfinite", - "llvm.nvvm.ff2bf16x2.rn" => "__nvvm_ff2bf16x2_rn", - "llvm.nvvm.ff2bf16x2.rn.relu" => "__nvvm_ff2bf16x2_rn_relu", - "llvm.nvvm.ff2bf16x2.rz" => "__nvvm_ff2bf16x2_rz", - "llvm.nvvm.ff2bf16x2.rz.relu" => "__nvvm_ff2bf16x2_rz_relu", - "llvm.nvvm.ff2f16x2.rn" => "__nvvm_ff2f16x2_rn", - "llvm.nvvm.ff2f16x2.rn.relu" => "__nvvm_ff2f16x2_rn_relu", - "llvm.nvvm.ff2f16x2.rz" => "__nvvm_ff2f16x2_rz", - "llvm.nvvm.ff2f16x2.rz.relu" => "__nvvm_ff2f16x2_rz_relu", - "llvm.nvvm.floor.d" => "__nvvm_floor_d", - "llvm.nvvm.floor.f" => "__nvvm_floor_f", - "llvm.nvvm.floor.ftz.f" => "__nvvm_floor_ftz_f", - "llvm.nvvm.fma.rm.d" => "__nvvm_fma_rm_d", - "llvm.nvvm.fma.rm.f" => "__nvvm_fma_rm_f", - "llvm.nvvm.fma.rm.ftz.f" => "__nvvm_fma_rm_ftz_f", - "llvm.nvvm.fma.rn.bf16" => "__nvvm_fma_rn_bf16", - "llvm.nvvm.fma.rn.bf16x2" => "__nvvm_fma_rn_bf16x2", - "llvm.nvvm.fma.rn.d" => "__nvvm_fma_rn_d", - "llvm.nvvm.fma.rn.f" => "__nvvm_fma_rn_f", - "llvm.nvvm.fma.rn.ftz.bf16" => "__nvvm_fma_rn_ftz_bf16", - "llvm.nvvm.fma.rn.ftz.bf16x2" => "__nvvm_fma_rn_ftz_bf16x2", - "llvm.nvvm.fma.rn.ftz.f" => "__nvvm_fma_rn_ftz_f", - "llvm.nvvm.fma.rn.ftz.relu.bf16" => "__nvvm_fma_rn_ftz_relu_bf16", - "llvm.nvvm.fma.rn.ftz.relu.bf16x2" => "__nvvm_fma_rn_ftz_relu_bf16x2", - "llvm.nvvm.fma.rn.ftz.sat.bf16" => "__nvvm_fma_rn_ftz_sat_bf16", - "llvm.nvvm.fma.rn.ftz.sat.bf16x2" => "__nvvm_fma_rn_ftz_sat_bf16x2", - "llvm.nvvm.fma.rn.relu.bf16" => "__nvvm_fma_rn_relu_bf16", - "llvm.nvvm.fma.rn.relu.bf16x2" => "__nvvm_fma_rn_relu_bf16x2", - "llvm.nvvm.fma.rn.sat.bf16" => "__nvvm_fma_rn_sat_bf16", - "llvm.nvvm.fma.rn.sat.bf16x2" => "__nvvm_fma_rn_sat_bf16x2", - "llvm.nvvm.fma.rp.d" => "__nvvm_fma_rp_d", - "llvm.nvvm.fma.rp.f" => "__nvvm_fma_rp_f", - "llvm.nvvm.fma.rp.ftz.f" => "__nvvm_fma_rp_ftz_f", - "llvm.nvvm.fma.rz.d" => "__nvvm_fma_rz_d", - "llvm.nvvm.fma.rz.f" => "__nvvm_fma_rz_f", - "llvm.nvvm.fma.rz.ftz.f" => "__nvvm_fma_rz_ftz_f", - "llvm.nvvm.fmax.bf16" => "__nvvm_fmax_bf16", - "llvm.nvvm.fmax.bf16x2" => "__nvvm_fmax_bf16x2", - "llvm.nvvm.fmax.d" => "__nvvm_fmax_d", - "llvm.nvvm.fmax.f" => "__nvvm_fmax_f", - "llvm.nvvm.fmax.ftz.bf16" => "__nvvm_fmax_ftz_bf16", - "llvm.nvvm.fmax.ftz.bf16x2" => "__nvvm_fmax_ftz_bf16x2", - "llvm.nvvm.fmax.ftz.f" => "__nvvm_fmax_ftz_f", - "llvm.nvvm.fmax.ftz.nan.bf16" => "__nvvm_fmax_ftz_nan_bf16", - "llvm.nvvm.fmax.ftz.nan.bf16x2" => "__nvvm_fmax_ftz_nan_bf16x2", - "llvm.nvvm.fmax.ftz.nan.f" => "__nvvm_fmax_ftz_nan_f", - "llvm.nvvm.fmax.ftz.nan.xorsign.abs.bf16" => 
"__nvvm_fmax_ftz_nan_xorsign_abs_bf16", - "llvm.nvvm.fmax.ftz.nan.xorsign.abs.bf16x2" => "__nvvm_fmax_ftz_nan_xorsign_abs_bf16x2", - "llvm.nvvm.fmax.ftz.nan.xorsign.abs.f" => "__nvvm_fmax_ftz_nan_xorsign_abs_f", - "llvm.nvvm.fmax.ftz.xorsign.abs.bf16" => "__nvvm_fmax_ftz_xorsign_abs_bf16", - "llvm.nvvm.fmax.ftz.xorsign.abs.bf16x2" => "__nvvm_fmax_ftz_xorsign_abs_bf16x2", - "llvm.nvvm.fmax.ftz.xorsign.abs.f" => "__nvvm_fmax_ftz_xorsign_abs_f", - "llvm.nvvm.fmax.nan.bf16" => "__nvvm_fmax_nan_bf16", - "llvm.nvvm.fmax.nan.bf16x2" => "__nvvm_fmax_nan_bf16x2", - "llvm.nvvm.fmax.nan.f" => "__nvvm_fmax_nan_f", - "llvm.nvvm.fmax.nan.xorsign.abs.bf16" => "__nvvm_fmax_nan_xorsign_abs_bf16", - "llvm.nvvm.fmax.nan.xorsign.abs.bf16x2" => "__nvvm_fmax_nan_xorsign_abs_bf16x2", - "llvm.nvvm.fmax.nan.xorsign.abs.f" => "__nvvm_fmax_nan_xorsign_abs_f", - "llvm.nvvm.fmax.xorsign.abs.bf16" => "__nvvm_fmax_xorsign_abs_bf16", - "llvm.nvvm.fmax.xorsign.abs.bf16x2" => "__nvvm_fmax_xorsign_abs_bf16x2", - "llvm.nvvm.fmax.xorsign.abs.f" => "__nvvm_fmax_xorsign_abs_f", - "llvm.nvvm.fmin.bf16" => "__nvvm_fmin_bf16", - "llvm.nvvm.fmin.bf16x2" => "__nvvm_fmin_bf16x2", - "llvm.nvvm.fmin.d" => "__nvvm_fmin_d", - "llvm.nvvm.fmin.f" => "__nvvm_fmin_f", - "llvm.nvvm.fmin.ftz.bf16" => "__nvvm_fmin_ftz_bf16", - "llvm.nvvm.fmin.ftz.bf16x2" => "__nvvm_fmin_ftz_bf16x2", - "llvm.nvvm.fmin.ftz.f" => "__nvvm_fmin_ftz_f", - "llvm.nvvm.fmin.ftz.nan.bf16" => "__nvvm_fmin_ftz_nan_bf16", - "llvm.nvvm.fmin.ftz.nan.bf16x2" => "__nvvm_fmin_ftz_nan_bf16x2", - "llvm.nvvm.fmin.ftz.nan.f" => "__nvvm_fmin_ftz_nan_f", - "llvm.nvvm.fmin.ftz.nan.xorsign.abs.bf16" => "__nvvm_fmin_ftz_nan_xorsign_abs_bf16", - "llvm.nvvm.fmin.ftz.nan.xorsign.abs.bf16x2" => "__nvvm_fmin_ftz_nan_xorsign_abs_bf16x2", - "llvm.nvvm.fmin.ftz.nan.xorsign.abs.f" => "__nvvm_fmin_ftz_nan_xorsign_abs_f", - "llvm.nvvm.fmin.ftz.xorsign.abs.bf16" => "__nvvm_fmin_ftz_xorsign_abs_bf16", - "llvm.nvvm.fmin.ftz.xorsign.abs.bf16x2" => "__nvvm_fmin_ftz_xorsign_abs_bf16x2", - "llvm.nvvm.fmin.ftz.xorsign.abs.f" => "__nvvm_fmin_ftz_xorsign_abs_f", - "llvm.nvvm.fmin.nan.bf16" => "__nvvm_fmin_nan_bf16", - "llvm.nvvm.fmin.nan.bf16x2" => "__nvvm_fmin_nan_bf16x2", - "llvm.nvvm.fmin.nan.f" => "__nvvm_fmin_nan_f", - "llvm.nvvm.fmin.nan.xorsign.abs.bf16" => "__nvvm_fmin_nan_xorsign_abs_bf16", - "llvm.nvvm.fmin.nan.xorsign.abs.bf16x2" => "__nvvm_fmin_nan_xorsign_abs_bf16x2", - "llvm.nvvm.fmin.nan.xorsign.abs.f" => "__nvvm_fmin_nan_xorsign_abs_f", - "llvm.nvvm.fmin.xorsign.abs.bf16" => "__nvvm_fmin_xorsign_abs_bf16", - "llvm.nvvm.fmin.xorsign.abs.bf16x2" => "__nvvm_fmin_xorsign_abs_bf16x2", - "llvm.nvvm.fmin.xorsign.abs.f" => "__nvvm_fmin_xorsign_abs_f", - "llvm.nvvm.fns" => "__nvvm_fns", - "llvm.nvvm.h2f" => "__nvvm_h2f", - "llvm.nvvm.i2d.rm" => "__nvvm_i2d_rm", - "llvm.nvvm.i2d.rn" => "__nvvm_i2d_rn", - "llvm.nvvm.i2d.rp" => "__nvvm_i2d_rp", - "llvm.nvvm.i2d.rz" => "__nvvm_i2d_rz", - "llvm.nvvm.i2f.rm" => "__nvvm_i2f_rm", - "llvm.nvvm.i2f.rn" => "__nvvm_i2f_rn", - "llvm.nvvm.i2f.rp" => "__nvvm_i2f_rp", - "llvm.nvvm.i2f.rz" => "__nvvm_i2f_rz", - "llvm.nvvm.isspacep.const" => "__nvvm_isspacep_const", - "llvm.nvvm.isspacep.global" => "__nvvm_isspacep_global", - "llvm.nvvm.isspacep.local" => "__nvvm_isspacep_local", - "llvm.nvvm.isspacep.shared" => "__nvvm_isspacep_shared", - "llvm.nvvm.istypep.sampler" => "__nvvm_istypep_sampler", - "llvm.nvvm.istypep.surface" => "__nvvm_istypep_surface", - "llvm.nvvm.istypep.texture" => "__nvvm_istypep_texture", - "llvm.nvvm.lg2.approx.d" => "__nvvm_lg2_approx_d", - 
"llvm.nvvm.lg2.approx.f" => "__nvvm_lg2_approx_f", - "llvm.nvvm.lg2.approx.ftz.f" => "__nvvm_lg2_approx_ftz_f", - "llvm.nvvm.ll2d.rm" => "__nvvm_ll2d_rm", - "llvm.nvvm.ll2d.rn" => "__nvvm_ll2d_rn", - "llvm.nvvm.ll2d.rp" => "__nvvm_ll2d_rp", - "llvm.nvvm.ll2d.rz" => "__nvvm_ll2d_rz", - "llvm.nvvm.ll2f.rm" => "__nvvm_ll2f_rm", - "llvm.nvvm.ll2f.rn" => "__nvvm_ll2f_rn", - "llvm.nvvm.ll2f.rp" => "__nvvm_ll2f_rp", - "llvm.nvvm.ll2f.rz" => "__nvvm_ll2f_rz", - "llvm.nvvm.lohi.i2d" => "__nvvm_lohi_i2d", - "llvm.nvvm.match.any.sync.i32" => "__nvvm_match_any_sync_i32", - "llvm.nvvm.match.any.sync.i64" => "__nvvm_match_any_sync_i64", - "llvm.nvvm.max.i" => "__nvvm_max_i", - "llvm.nvvm.max.ll" => "__nvvm_max_ll", - "llvm.nvvm.max.ui" => "__nvvm_max_ui", - "llvm.nvvm.max.ull" => "__nvvm_max_ull", - "llvm.nvvm.mbarrier.arrive" => "__nvvm_mbarrier_arrive", - "llvm.nvvm.mbarrier.arrive.drop" => "__nvvm_mbarrier_arrive_drop", - "llvm.nvvm.mbarrier.arrive.drop.noComplete" => "__nvvm_mbarrier_arrive_drop_noComplete", - "llvm.nvvm.mbarrier.arrive.drop.noComplete.shared" => "__nvvm_mbarrier_arrive_drop_noComplete_shared", - "llvm.nvvm.mbarrier.arrive.drop.shared" => "__nvvm_mbarrier_arrive_drop_shared", - "llvm.nvvm.mbarrier.arrive.noComplete" => "__nvvm_mbarrier_arrive_noComplete", - "llvm.nvvm.mbarrier.arrive.noComplete.shared" => "__nvvm_mbarrier_arrive_noComplete_shared", - "llvm.nvvm.mbarrier.arrive.shared" => "__nvvm_mbarrier_arrive_shared", - "llvm.nvvm.mbarrier.init" => "__nvvm_mbarrier_init", - "llvm.nvvm.mbarrier.init.shared" => "__nvvm_mbarrier_init_shared", - "llvm.nvvm.mbarrier.inval" => "__nvvm_mbarrier_inval", - "llvm.nvvm.mbarrier.inval.shared" => "__nvvm_mbarrier_inval_shared", - "llvm.nvvm.mbarrier.pending.count" => "__nvvm_mbarrier_pending_count", - "llvm.nvvm.mbarrier.test.wait" => "__nvvm_mbarrier_test_wait", - "llvm.nvvm.mbarrier.test.wait.shared" => "__nvvm_mbarrier_test_wait_shared", - "llvm.nvvm.membar.cta" => "__nvvm_membar_cta", - "llvm.nvvm.membar.gl" => "__nvvm_membar_gl", - "llvm.nvvm.membar.sys" => "__nvvm_membar_sys", - "llvm.nvvm.min.i" => "__nvvm_min_i", - "llvm.nvvm.min.ll" => "__nvvm_min_ll", - "llvm.nvvm.min.ui" => "__nvvm_min_ui", - "llvm.nvvm.min.ull" => "__nvvm_min_ull", - "llvm.nvvm.mul.rm.d" => "__nvvm_mul_rm_d", - "llvm.nvvm.mul.rm.f" => "__nvvm_mul_rm_f", - "llvm.nvvm.mul.rm.ftz.f" => "__nvvm_mul_rm_ftz_f", - "llvm.nvvm.mul.rn.d" => "__nvvm_mul_rn_d", - "llvm.nvvm.mul.rn.f" => "__nvvm_mul_rn_f", - "llvm.nvvm.mul.rn.ftz.f" => "__nvvm_mul_rn_ftz_f", - "llvm.nvvm.mul.rp.d" => "__nvvm_mul_rp_d", - "llvm.nvvm.mul.rp.f" => "__nvvm_mul_rp_f", - "llvm.nvvm.mul.rp.ftz.f" => "__nvvm_mul_rp_ftz_f", - "llvm.nvvm.mul.rz.d" => "__nvvm_mul_rz_d", - "llvm.nvvm.mul.rz.f" => "__nvvm_mul_rz_f", - "llvm.nvvm.mul.rz.ftz.f" => "__nvvm_mul_rz_ftz_f", - "llvm.nvvm.mul24.i" => "__nvvm_mul24_i", - "llvm.nvvm.mul24.ui" => "__nvvm_mul24_ui", - "llvm.nvvm.mulhi.i" => "__nvvm_mulhi_i", - "llvm.nvvm.mulhi.ll" => "__nvvm_mulhi_ll", - "llvm.nvvm.mulhi.s" => "__nvvm_mulhi_s", - "llvm.nvvm.mulhi.ui" => "__nvvm_mulhi_ui", - "llvm.nvvm.mulhi.ull" => "__nvvm_mulhi_ull", - "llvm.nvvm.mulhi.us" => "__nvvm_mulhi_us", - "llvm.nvvm.nanosleep" => "__nvvm_nanosleep", - "llvm.nvvm.neg.bf16" => "__nvvm_neg_bf16", - "llvm.nvvm.neg.bf16x2" => "__nvvm_neg_bf16x2", - "llvm.nvvm.popc.i" => "__nvvm_popc_i", - "llvm.nvvm.popc.ll" => "__nvvm_popc_ll", - "llvm.nvvm.prmt" => "__nvvm_prmt", - "llvm.nvvm.rcp.approx.ftz.d" => "__nvvm_rcp_approx_ftz_d", - "llvm.nvvm.rcp.approx.ftz.f" => "__nvvm_rcp_approx_ftz_f", - 
"llvm.nvvm.rcp.rm.d" => "__nvvm_rcp_rm_d", - "llvm.nvvm.rcp.rm.f" => "__nvvm_rcp_rm_f", - "llvm.nvvm.rcp.rm.ftz.f" => "__nvvm_rcp_rm_ftz_f", - "llvm.nvvm.rcp.rn.d" => "__nvvm_rcp_rn_d", - "llvm.nvvm.rcp.rn.f" => "__nvvm_rcp_rn_f", - "llvm.nvvm.rcp.rn.ftz.f" => "__nvvm_rcp_rn_ftz_f", - "llvm.nvvm.rcp.rp.d" => "__nvvm_rcp_rp_d", - "llvm.nvvm.rcp.rp.f" => "__nvvm_rcp_rp_f", - "llvm.nvvm.rcp.rp.ftz.f" => "__nvvm_rcp_rp_ftz_f", - "llvm.nvvm.rcp.rz.d" => "__nvvm_rcp_rz_d", - "llvm.nvvm.rcp.rz.f" => "__nvvm_rcp_rz_f", - "llvm.nvvm.rcp.rz.ftz.f" => "__nvvm_rcp_rz_ftz_f", - "llvm.nvvm.read.ptx.sreg.clock" => "__nvvm_read_ptx_sreg_clock", - // [DUPLICATE]: "llvm.nvvm.read.ptx.sreg.clock" => "__nvvm_read_ptx_sreg_", - "llvm.nvvm.read.ptx.sreg.clock64" => "__nvvm_read_ptx_sreg_clock64", - // [DUPLICATE]: "llvm.nvvm.read.ptx.sreg.clock64" => "__nvvm_read_ptx_sreg_", - "llvm.nvvm.read.ptx.sreg.ctaid.w" => "__nvvm_read_ptx_sreg_ctaid_w", - "llvm.nvvm.read.ptx.sreg.ctaid.x" => "__nvvm_read_ptx_sreg_ctaid_x", - "llvm.nvvm.read.ptx.sreg.ctaid.y" => "__nvvm_read_ptx_sreg_ctaid_y", - "llvm.nvvm.read.ptx.sreg.ctaid.z" => "__nvvm_read_ptx_sreg_ctaid_z", - "llvm.nvvm.read.ptx.sreg.envreg0" => "__nvvm_read_ptx_sreg_envreg0", - "llvm.nvvm.read.ptx.sreg.envreg1" => "__nvvm_read_ptx_sreg_envreg1", - "llvm.nvvm.read.ptx.sreg.envreg10" => "__nvvm_read_ptx_sreg_envreg10", - "llvm.nvvm.read.ptx.sreg.envreg11" => "__nvvm_read_ptx_sreg_envreg11", - "llvm.nvvm.read.ptx.sreg.envreg12" => "__nvvm_read_ptx_sreg_envreg12", - "llvm.nvvm.read.ptx.sreg.envreg13" => "__nvvm_read_ptx_sreg_envreg13", - "llvm.nvvm.read.ptx.sreg.envreg14" => "__nvvm_read_ptx_sreg_envreg14", - "llvm.nvvm.read.ptx.sreg.envreg15" => "__nvvm_read_ptx_sreg_envreg15", - "llvm.nvvm.read.ptx.sreg.envreg16" => "__nvvm_read_ptx_sreg_envreg16", - "llvm.nvvm.read.ptx.sreg.envreg17" => "__nvvm_read_ptx_sreg_envreg17", - "llvm.nvvm.read.ptx.sreg.envreg18" => "__nvvm_read_ptx_sreg_envreg18", - "llvm.nvvm.read.ptx.sreg.envreg19" => "__nvvm_read_ptx_sreg_envreg19", - "llvm.nvvm.read.ptx.sreg.envreg2" => "__nvvm_read_ptx_sreg_envreg2", - "llvm.nvvm.read.ptx.sreg.envreg20" => "__nvvm_read_ptx_sreg_envreg20", - "llvm.nvvm.read.ptx.sreg.envreg21" => "__nvvm_read_ptx_sreg_envreg21", - "llvm.nvvm.read.ptx.sreg.envreg22" => "__nvvm_read_ptx_sreg_envreg22", - "llvm.nvvm.read.ptx.sreg.envreg23" => "__nvvm_read_ptx_sreg_envreg23", - "llvm.nvvm.read.ptx.sreg.envreg24" => "__nvvm_read_ptx_sreg_envreg24", - "llvm.nvvm.read.ptx.sreg.envreg25" => "__nvvm_read_ptx_sreg_envreg25", - "llvm.nvvm.read.ptx.sreg.envreg26" => "__nvvm_read_ptx_sreg_envreg26", - "llvm.nvvm.read.ptx.sreg.envreg27" => "__nvvm_read_ptx_sreg_envreg27", - "llvm.nvvm.read.ptx.sreg.envreg28" => "__nvvm_read_ptx_sreg_envreg28", - "llvm.nvvm.read.ptx.sreg.envreg29" => "__nvvm_read_ptx_sreg_envreg29", - "llvm.nvvm.read.ptx.sreg.envreg3" => "__nvvm_read_ptx_sreg_envreg3", - "llvm.nvvm.read.ptx.sreg.envreg30" => "__nvvm_read_ptx_sreg_envreg30", - "llvm.nvvm.read.ptx.sreg.envreg31" => "__nvvm_read_ptx_sreg_envreg31", - "llvm.nvvm.read.ptx.sreg.envreg4" => "__nvvm_read_ptx_sreg_envreg4", - "llvm.nvvm.read.ptx.sreg.envreg5" => "__nvvm_read_ptx_sreg_envreg5", - "llvm.nvvm.read.ptx.sreg.envreg6" => "__nvvm_read_ptx_sreg_envreg6", - "llvm.nvvm.read.ptx.sreg.envreg7" => "__nvvm_read_ptx_sreg_envreg7", - "llvm.nvvm.read.ptx.sreg.envreg8" => "__nvvm_read_ptx_sreg_envreg8", - "llvm.nvvm.read.ptx.sreg.envreg9" => "__nvvm_read_ptx_sreg_envreg9", - "llvm.nvvm.read.ptx.sreg.globaltimer" => "__nvvm_read_ptx_sreg_globaltimer", - 
"llvm.nvvm.read.ptx.sreg.gridid" => "__nvvm_read_ptx_sreg_gridid", - // [DUPLICATE]: "llvm.nvvm.read.ptx.sreg.gridid" => "__nvvm_read_ptx_sreg_", - "llvm.nvvm.read.ptx.sreg.laneid" => "__nvvm_read_ptx_sreg_laneid", - // [DUPLICATE]: "llvm.nvvm.read.ptx.sreg.laneid" => "__nvvm_read_ptx_sreg_", - "llvm.nvvm.read.ptx.sreg.lanemask.eq" => "__nvvm_read_ptx_sreg_lanemask_eq", - // [DUPLICATE]: "llvm.nvvm.read.ptx.sreg.lanemask.eq" => "__nvvm_read_ptx_sreg_", - "llvm.nvvm.read.ptx.sreg.lanemask.ge" => "__nvvm_read_ptx_sreg_lanemask_ge", - // [DUPLICATE]: "llvm.nvvm.read.ptx.sreg.lanemask.ge" => "__nvvm_read_ptx_sreg_", - "llvm.nvvm.read.ptx.sreg.lanemask.gt" => "__nvvm_read_ptx_sreg_lanemask_gt", - // [DUPLICATE]: "llvm.nvvm.read.ptx.sreg.lanemask.gt" => "__nvvm_read_ptx_sreg_", - "llvm.nvvm.read.ptx.sreg.lanemask.le" => "__nvvm_read_ptx_sreg_lanemask_le", - // [DUPLICATE]: "llvm.nvvm.read.ptx.sreg.lanemask.le" => "__nvvm_read_ptx_sreg_", - "llvm.nvvm.read.ptx.sreg.lanemask.lt" => "__nvvm_read_ptx_sreg_lanemask_lt", - // [DUPLICATE]: "llvm.nvvm.read.ptx.sreg.lanemask.lt" => "__nvvm_read_ptx_sreg_", - "llvm.nvvm.read.ptx.sreg.nctaid.w" => "__nvvm_read_ptx_sreg_nctaid_w", - "llvm.nvvm.read.ptx.sreg.nctaid.x" => "__nvvm_read_ptx_sreg_nctaid_x", - "llvm.nvvm.read.ptx.sreg.nctaid.y" => "__nvvm_read_ptx_sreg_nctaid_y", - "llvm.nvvm.read.ptx.sreg.nctaid.z" => "__nvvm_read_ptx_sreg_nctaid_z", - "llvm.nvvm.read.ptx.sreg.nsmid" => "__nvvm_read_ptx_sreg_nsmid", - // [DUPLICATE]: "llvm.nvvm.read.ptx.sreg.nsmid" => "__nvvm_read_ptx_sreg_", - "llvm.nvvm.read.ptx.sreg.ntid.w" => "__nvvm_read_ptx_sreg_ntid_w", - "llvm.nvvm.read.ptx.sreg.ntid.x" => "__nvvm_read_ptx_sreg_ntid_x", - "llvm.nvvm.read.ptx.sreg.ntid.y" => "__nvvm_read_ptx_sreg_ntid_y", - "llvm.nvvm.read.ptx.sreg.ntid.z" => "__nvvm_read_ptx_sreg_ntid_z", - "llvm.nvvm.read.ptx.sreg.nwarpid" => "__nvvm_read_ptx_sreg_nwarpid", - // [DUPLICATE]: "llvm.nvvm.read.ptx.sreg.nwarpid" => "__nvvm_read_ptx_sreg_", - "llvm.nvvm.read.ptx.sreg.pm0" => "__nvvm_read_ptx_sreg_pm0", - // [DUPLICATE]: "llvm.nvvm.read.ptx.sreg.pm0" => "__nvvm_read_ptx_sreg_", - "llvm.nvvm.read.ptx.sreg.pm1" => "__nvvm_read_ptx_sreg_pm1", - // [DUPLICATE]: "llvm.nvvm.read.ptx.sreg.pm1" => "__nvvm_read_ptx_sreg_", - "llvm.nvvm.read.ptx.sreg.pm2" => "__nvvm_read_ptx_sreg_pm2", - // [DUPLICATE]: "llvm.nvvm.read.ptx.sreg.pm2" => "__nvvm_read_ptx_sreg_", - "llvm.nvvm.read.ptx.sreg.pm3" => "__nvvm_read_ptx_sreg_pm3", - // [DUPLICATE]: "llvm.nvvm.read.ptx.sreg.pm3" => "__nvvm_read_ptx_sreg_", - "llvm.nvvm.read.ptx.sreg.smid" => "__nvvm_read_ptx_sreg_smid", - // [DUPLICATE]: "llvm.nvvm.read.ptx.sreg.smid" => "__nvvm_read_ptx_sreg_", - "llvm.nvvm.read.ptx.sreg.tid.w" => "__nvvm_read_ptx_sreg_tid_w", - "llvm.nvvm.read.ptx.sreg.tid.x" => "__nvvm_read_ptx_sreg_tid_x", - "llvm.nvvm.read.ptx.sreg.tid.y" => "__nvvm_read_ptx_sreg_tid_y", - "llvm.nvvm.read.ptx.sreg.tid.z" => "__nvvm_read_ptx_sreg_tid_z", - "llvm.nvvm.read.ptx.sreg.warpid" => "__nvvm_read_ptx_sreg_warpid", - // [DUPLICATE]: "llvm.nvvm.read.ptx.sreg.warpid" => "__nvvm_read_ptx_sreg_", - "llvm.nvvm.read.ptx.sreg.warpsize" => "__nvvm_read_ptx_sreg_warpsize", - // [DUPLICATE]: "llvm.nvvm.read.ptx.sreg.warpsize" => "__nvvm_read_ptx_sreg_", - "llvm.nvvm.redux.sync.add" => "__nvvm_redux_sync_add", - "llvm.nvvm.redux.sync.and" => "__nvvm_redux_sync_and", - "llvm.nvvm.redux.sync.fmax" => "__nvvm_redux_sync_fmax", - "llvm.nvvm.redux.sync.fmax.NaN" => "__nvvm_redux_sync_fmax_NaN", - "llvm.nvvm.redux.sync.fmax.abs" => "__nvvm_redux_sync_fmax_abs", - 
"llvm.nvvm.redux.sync.fmax.abs.NaN" => "__nvvm_redux_sync_fmax_abs_NaN", - "llvm.nvvm.redux.sync.fmin" => "__nvvm_redux_sync_fmin", - "llvm.nvvm.redux.sync.fmin.NaN" => "__nvvm_redux_sync_fmin_NaN", - "llvm.nvvm.redux.sync.fmin.abs" => "__nvvm_redux_sync_fmin_abs", - "llvm.nvvm.redux.sync.fmin.abs.NaN" => "__nvvm_redux_sync_fmin_abs_NaN", - "llvm.nvvm.redux.sync.max" => "__nvvm_redux_sync_max", - "llvm.nvvm.redux.sync.min" => "__nvvm_redux_sync_min", - "llvm.nvvm.redux.sync.or" => "__nvvm_redux_sync_or", - "llvm.nvvm.redux.sync.umax" => "__nvvm_redux_sync_umax", - "llvm.nvvm.redux.sync.umin" => "__nvvm_redux_sync_umin", - "llvm.nvvm.redux.sync.xor" => "__nvvm_redux_sync_xor", - "llvm.nvvm.reflect" => "__nvvm_reflect", - "llvm.nvvm.rotate.b32" => "__nvvm_rotate_b32", - "llvm.nvvm.rotate.b64" => "__nvvm_rotate_b64", - "llvm.nvvm.rotate.right.b64" => "__nvvm_rotate_right_b64", - "llvm.nvvm.round.d" => "__nvvm_round_d", - "llvm.nvvm.round.f" => "__nvvm_round_f", - "llvm.nvvm.round.ftz.f" => "__nvvm_round_ftz_f", - "llvm.nvvm.rsqrt.approx.d" => "__nvvm_rsqrt_approx_d", - "llvm.nvvm.rsqrt.approx.f" => "__nvvm_rsqrt_approx_f", - "llvm.nvvm.rsqrt.approx.ftz.d" => "__nvvm_rsqrt_approx_ftz_d", - "llvm.nvvm.rsqrt.approx.ftz.f" => "__nvvm_rsqrt_approx_ftz_f", - "llvm.nvvm.sad.i" => "__nvvm_sad_i", - "llvm.nvvm.sad.ll" => "__nvvm_sad_ll", - "llvm.nvvm.sad.s" => "__nvvm_sad_s", - "llvm.nvvm.sad.ui" => "__nvvm_sad_ui", - "llvm.nvvm.sad.ull" => "__nvvm_sad_ull", - "llvm.nvvm.sad.us" => "__nvvm_sad_us", - "llvm.nvvm.saturate.d" => "__nvvm_saturate_d", - "llvm.nvvm.saturate.f" => "__nvvm_saturate_f", - "llvm.nvvm.saturate.ftz.f" => "__nvvm_saturate_ftz_f", - "llvm.nvvm.shfl.bfly.f32" => "__nvvm_shfl_bfly_f32", - "llvm.nvvm.shfl.bfly.i32" => "__nvvm_shfl_bfly_i32", - "llvm.nvvm.shfl.down.f32" => "__nvvm_shfl_down_f32", - "llvm.nvvm.shfl.down.i32" => "__nvvm_shfl_down_i32", - "llvm.nvvm.shfl.idx.f32" => "__nvvm_shfl_idx_f32", - "llvm.nvvm.shfl.idx.i32" => "__nvvm_shfl_idx_i32", - "llvm.nvvm.shfl.sync.bfly.f32" => "__nvvm_shfl_sync_bfly_f32", - "llvm.nvvm.shfl.sync.bfly.i32" => "__nvvm_shfl_sync_bfly_i32", - "llvm.nvvm.shfl.sync.down.f32" => "__nvvm_shfl_sync_down_f32", - "llvm.nvvm.shfl.sync.down.i32" => "__nvvm_shfl_sync_down_i32", - "llvm.nvvm.shfl.sync.idx.f32" => "__nvvm_shfl_sync_idx_f32", - "llvm.nvvm.shfl.sync.idx.i32" => "__nvvm_shfl_sync_idx_i32", - "llvm.nvvm.shfl.sync.up.f32" => "__nvvm_shfl_sync_up_f32", - "llvm.nvvm.shfl.sync.up.i32" => "__nvvm_shfl_sync_up_i32", - "llvm.nvvm.shfl.up.f32" => "__nvvm_shfl_up_f32", - "llvm.nvvm.shfl.up.i32" => "__nvvm_shfl_up_i32", - "llvm.nvvm.sin.approx.f" => "__nvvm_sin_approx_f", - "llvm.nvvm.sin.approx.ftz.f" => "__nvvm_sin_approx_ftz_f", - "llvm.nvvm.sqrt.approx.f" => "__nvvm_sqrt_approx_f", - "llvm.nvvm.sqrt.approx.ftz.f" => "__nvvm_sqrt_approx_ftz_f", - "llvm.nvvm.sqrt.f" => "__nvvm_sqrt_f", - "llvm.nvvm.sqrt.rm.d" => "__nvvm_sqrt_rm_d", - "llvm.nvvm.sqrt.rm.f" => "__nvvm_sqrt_rm_f", - "llvm.nvvm.sqrt.rm.ftz.f" => "__nvvm_sqrt_rm_ftz_f", - "llvm.nvvm.sqrt.rn.d" => "__nvvm_sqrt_rn_d", - "llvm.nvvm.sqrt.rn.f" => "__nvvm_sqrt_rn_f", - "llvm.nvvm.sqrt.rn.ftz.f" => "__nvvm_sqrt_rn_ftz_f", - "llvm.nvvm.sqrt.rp.d" => "__nvvm_sqrt_rp_d", - "llvm.nvvm.sqrt.rp.f" => "__nvvm_sqrt_rp_f", - "llvm.nvvm.sqrt.rp.ftz.f" => "__nvvm_sqrt_rp_ftz_f", - "llvm.nvvm.sqrt.rz.d" => "__nvvm_sqrt_rz_d", - "llvm.nvvm.sqrt.rz.f" => "__nvvm_sqrt_rz_f", - "llvm.nvvm.sqrt.rz.ftz.f" => "__nvvm_sqrt_rz_ftz_f", - "llvm.nvvm.suq.array.size" => "__nvvm_suq_array_size", - 
"llvm.nvvm.suq.channel.data.type" => "__nvvm_suq_channel_data_type", - "llvm.nvvm.suq.channel.order" => "__nvvm_suq_channel_order", - "llvm.nvvm.suq.depth" => "__nvvm_suq_depth", - "llvm.nvvm.suq.height" => "__nvvm_suq_height", - "llvm.nvvm.suq.width" => "__nvvm_suq_width", - "llvm.nvvm.sust.b.1d.array.i16.clamp" => "__nvvm_sust_b_1d_array_i16_clamp", - "llvm.nvvm.sust.b.1d.array.i16.trap" => "__nvvm_sust_b_1d_array_i16_trap", - "llvm.nvvm.sust.b.1d.array.i16.zero" => "__nvvm_sust_b_1d_array_i16_zero", - "llvm.nvvm.sust.b.1d.array.i32.clamp" => "__nvvm_sust_b_1d_array_i32_clamp", - "llvm.nvvm.sust.b.1d.array.i32.trap" => "__nvvm_sust_b_1d_array_i32_trap", - "llvm.nvvm.sust.b.1d.array.i32.zero" => "__nvvm_sust_b_1d_array_i32_zero", - "llvm.nvvm.sust.b.1d.array.i64.clamp" => "__nvvm_sust_b_1d_array_i64_clamp", - "llvm.nvvm.sust.b.1d.array.i64.trap" => "__nvvm_sust_b_1d_array_i64_trap", - "llvm.nvvm.sust.b.1d.array.i64.zero" => "__nvvm_sust_b_1d_array_i64_zero", - "llvm.nvvm.sust.b.1d.array.i8.clamp" => "__nvvm_sust_b_1d_array_i8_clamp", - "llvm.nvvm.sust.b.1d.array.i8.trap" => "__nvvm_sust_b_1d_array_i8_trap", - "llvm.nvvm.sust.b.1d.array.i8.zero" => "__nvvm_sust_b_1d_array_i8_zero", - "llvm.nvvm.sust.b.1d.array.v2i16.clamp" => "__nvvm_sust_b_1d_array_v2i16_clamp", - "llvm.nvvm.sust.b.1d.array.v2i16.trap" => "__nvvm_sust_b_1d_array_v2i16_trap", - "llvm.nvvm.sust.b.1d.array.v2i16.zero" => "__nvvm_sust_b_1d_array_v2i16_zero", - "llvm.nvvm.sust.b.1d.array.v2i32.clamp" => "__nvvm_sust_b_1d_array_v2i32_clamp", - "llvm.nvvm.sust.b.1d.array.v2i32.trap" => "__nvvm_sust_b_1d_array_v2i32_trap", - "llvm.nvvm.sust.b.1d.array.v2i32.zero" => "__nvvm_sust_b_1d_array_v2i32_zero", - "llvm.nvvm.sust.b.1d.array.v2i64.clamp" => "__nvvm_sust_b_1d_array_v2i64_clamp", - "llvm.nvvm.sust.b.1d.array.v2i64.trap" => "__nvvm_sust_b_1d_array_v2i64_trap", - "llvm.nvvm.sust.b.1d.array.v2i64.zero" => "__nvvm_sust_b_1d_array_v2i64_zero", - "llvm.nvvm.sust.b.1d.array.v2i8.clamp" => "__nvvm_sust_b_1d_array_v2i8_clamp", - "llvm.nvvm.sust.b.1d.array.v2i8.trap" => "__nvvm_sust_b_1d_array_v2i8_trap", - "llvm.nvvm.sust.b.1d.array.v2i8.zero" => "__nvvm_sust_b_1d_array_v2i8_zero", - "llvm.nvvm.sust.b.1d.array.v4i16.clamp" => "__nvvm_sust_b_1d_array_v4i16_clamp", - "llvm.nvvm.sust.b.1d.array.v4i16.trap" => "__nvvm_sust_b_1d_array_v4i16_trap", - "llvm.nvvm.sust.b.1d.array.v4i16.zero" => "__nvvm_sust_b_1d_array_v4i16_zero", - "llvm.nvvm.sust.b.1d.array.v4i32.clamp" => "__nvvm_sust_b_1d_array_v4i32_clamp", - "llvm.nvvm.sust.b.1d.array.v4i32.trap" => "__nvvm_sust_b_1d_array_v4i32_trap", - "llvm.nvvm.sust.b.1d.array.v4i32.zero" => "__nvvm_sust_b_1d_array_v4i32_zero", - "llvm.nvvm.sust.b.1d.array.v4i8.clamp" => "__nvvm_sust_b_1d_array_v4i8_clamp", - "llvm.nvvm.sust.b.1d.array.v4i8.trap" => "__nvvm_sust_b_1d_array_v4i8_trap", - "llvm.nvvm.sust.b.1d.array.v4i8.zero" => "__nvvm_sust_b_1d_array_v4i8_zero", - "llvm.nvvm.sust.b.1d.i16.clamp" => "__nvvm_sust_b_1d_i16_clamp", - "llvm.nvvm.sust.b.1d.i16.trap" => "__nvvm_sust_b_1d_i16_trap", - "llvm.nvvm.sust.b.1d.i16.zero" => "__nvvm_sust_b_1d_i16_zero", - "llvm.nvvm.sust.b.1d.i32.clamp" => "__nvvm_sust_b_1d_i32_clamp", - "llvm.nvvm.sust.b.1d.i32.trap" => "__nvvm_sust_b_1d_i32_trap", - "llvm.nvvm.sust.b.1d.i32.zero" => "__nvvm_sust_b_1d_i32_zero", - "llvm.nvvm.sust.b.1d.i64.clamp" => "__nvvm_sust_b_1d_i64_clamp", - "llvm.nvvm.sust.b.1d.i64.trap" => "__nvvm_sust_b_1d_i64_trap", - "llvm.nvvm.sust.b.1d.i64.zero" => "__nvvm_sust_b_1d_i64_zero", - "llvm.nvvm.sust.b.1d.i8.clamp" => 
"__nvvm_sust_b_1d_i8_clamp", - "llvm.nvvm.sust.b.1d.i8.trap" => "__nvvm_sust_b_1d_i8_trap", - "llvm.nvvm.sust.b.1d.i8.zero" => "__nvvm_sust_b_1d_i8_zero", - "llvm.nvvm.sust.b.1d.v2i16.clamp" => "__nvvm_sust_b_1d_v2i16_clamp", - "llvm.nvvm.sust.b.1d.v2i16.trap" => "__nvvm_sust_b_1d_v2i16_trap", - "llvm.nvvm.sust.b.1d.v2i16.zero" => "__nvvm_sust_b_1d_v2i16_zero", - "llvm.nvvm.sust.b.1d.v2i32.clamp" => "__nvvm_sust_b_1d_v2i32_clamp", - "llvm.nvvm.sust.b.1d.v2i32.trap" => "__nvvm_sust_b_1d_v2i32_trap", - "llvm.nvvm.sust.b.1d.v2i32.zero" => "__nvvm_sust_b_1d_v2i32_zero", - "llvm.nvvm.sust.b.1d.v2i64.clamp" => "__nvvm_sust_b_1d_v2i64_clamp", - "llvm.nvvm.sust.b.1d.v2i64.trap" => "__nvvm_sust_b_1d_v2i64_trap", - "llvm.nvvm.sust.b.1d.v2i64.zero" => "__nvvm_sust_b_1d_v2i64_zero", - "llvm.nvvm.sust.b.1d.v2i8.clamp" => "__nvvm_sust_b_1d_v2i8_clamp", - "llvm.nvvm.sust.b.1d.v2i8.trap" => "__nvvm_sust_b_1d_v2i8_trap", - "llvm.nvvm.sust.b.1d.v2i8.zero" => "__nvvm_sust_b_1d_v2i8_zero", - "llvm.nvvm.sust.b.1d.v4i16.clamp" => "__nvvm_sust_b_1d_v4i16_clamp", - "llvm.nvvm.sust.b.1d.v4i16.trap" => "__nvvm_sust_b_1d_v4i16_trap", - "llvm.nvvm.sust.b.1d.v4i16.zero" => "__nvvm_sust_b_1d_v4i16_zero", - "llvm.nvvm.sust.b.1d.v4i32.clamp" => "__nvvm_sust_b_1d_v4i32_clamp", - "llvm.nvvm.sust.b.1d.v4i32.trap" => "__nvvm_sust_b_1d_v4i32_trap", - "llvm.nvvm.sust.b.1d.v4i32.zero" => "__nvvm_sust_b_1d_v4i32_zero", - "llvm.nvvm.sust.b.1d.v4i8.clamp" => "__nvvm_sust_b_1d_v4i8_clamp", - "llvm.nvvm.sust.b.1d.v4i8.trap" => "__nvvm_sust_b_1d_v4i8_trap", - "llvm.nvvm.sust.b.1d.v4i8.zero" => "__nvvm_sust_b_1d_v4i8_zero", - "llvm.nvvm.sust.b.2d.array.i16.clamp" => "__nvvm_sust_b_2d_array_i16_clamp", - "llvm.nvvm.sust.b.2d.array.i16.trap" => "__nvvm_sust_b_2d_array_i16_trap", - "llvm.nvvm.sust.b.2d.array.i16.zero" => "__nvvm_sust_b_2d_array_i16_zero", - "llvm.nvvm.sust.b.2d.array.i32.clamp" => "__nvvm_sust_b_2d_array_i32_clamp", - "llvm.nvvm.sust.b.2d.array.i32.trap" => "__nvvm_sust_b_2d_array_i32_trap", - "llvm.nvvm.sust.b.2d.array.i32.zero" => "__nvvm_sust_b_2d_array_i32_zero", - "llvm.nvvm.sust.b.2d.array.i64.clamp" => "__nvvm_sust_b_2d_array_i64_clamp", - "llvm.nvvm.sust.b.2d.array.i64.trap" => "__nvvm_sust_b_2d_array_i64_trap", - "llvm.nvvm.sust.b.2d.array.i64.zero" => "__nvvm_sust_b_2d_array_i64_zero", - "llvm.nvvm.sust.b.2d.array.i8.clamp" => "__nvvm_sust_b_2d_array_i8_clamp", - "llvm.nvvm.sust.b.2d.array.i8.trap" => "__nvvm_sust_b_2d_array_i8_trap", - "llvm.nvvm.sust.b.2d.array.i8.zero" => "__nvvm_sust_b_2d_array_i8_zero", - "llvm.nvvm.sust.b.2d.array.v2i16.clamp" => "__nvvm_sust_b_2d_array_v2i16_clamp", - "llvm.nvvm.sust.b.2d.array.v2i16.trap" => "__nvvm_sust_b_2d_array_v2i16_trap", - "llvm.nvvm.sust.b.2d.array.v2i16.zero" => "__nvvm_sust_b_2d_array_v2i16_zero", - "llvm.nvvm.sust.b.2d.array.v2i32.clamp" => "__nvvm_sust_b_2d_array_v2i32_clamp", - "llvm.nvvm.sust.b.2d.array.v2i32.trap" => "__nvvm_sust_b_2d_array_v2i32_trap", - "llvm.nvvm.sust.b.2d.array.v2i32.zero" => "__nvvm_sust_b_2d_array_v2i32_zero", - "llvm.nvvm.sust.b.2d.array.v2i64.clamp" => "__nvvm_sust_b_2d_array_v2i64_clamp", - "llvm.nvvm.sust.b.2d.array.v2i64.trap" => "__nvvm_sust_b_2d_array_v2i64_trap", - "llvm.nvvm.sust.b.2d.array.v2i64.zero" => "__nvvm_sust_b_2d_array_v2i64_zero", - "llvm.nvvm.sust.b.2d.array.v2i8.clamp" => "__nvvm_sust_b_2d_array_v2i8_clamp", - "llvm.nvvm.sust.b.2d.array.v2i8.trap" => "__nvvm_sust_b_2d_array_v2i8_trap", - "llvm.nvvm.sust.b.2d.array.v2i8.zero" => "__nvvm_sust_b_2d_array_v2i8_zero", - "llvm.nvvm.sust.b.2d.array.v4i16.clamp" => 
"__nvvm_sust_b_2d_array_v4i16_clamp", - "llvm.nvvm.sust.b.2d.array.v4i16.trap" => "__nvvm_sust_b_2d_array_v4i16_trap", - "llvm.nvvm.sust.b.2d.array.v4i16.zero" => "__nvvm_sust_b_2d_array_v4i16_zero", - "llvm.nvvm.sust.b.2d.array.v4i32.clamp" => "__nvvm_sust_b_2d_array_v4i32_clamp", - "llvm.nvvm.sust.b.2d.array.v4i32.trap" => "__nvvm_sust_b_2d_array_v4i32_trap", - "llvm.nvvm.sust.b.2d.array.v4i32.zero" => "__nvvm_sust_b_2d_array_v4i32_zero", - "llvm.nvvm.sust.b.2d.array.v4i8.clamp" => "__nvvm_sust_b_2d_array_v4i8_clamp", - "llvm.nvvm.sust.b.2d.array.v4i8.trap" => "__nvvm_sust_b_2d_array_v4i8_trap", - "llvm.nvvm.sust.b.2d.array.v4i8.zero" => "__nvvm_sust_b_2d_array_v4i8_zero", - "llvm.nvvm.sust.b.2d.i16.clamp" => "__nvvm_sust_b_2d_i16_clamp", - "llvm.nvvm.sust.b.2d.i16.trap" => "__nvvm_sust_b_2d_i16_trap", - "llvm.nvvm.sust.b.2d.i16.zero" => "__nvvm_sust_b_2d_i16_zero", - "llvm.nvvm.sust.b.2d.i32.clamp" => "__nvvm_sust_b_2d_i32_clamp", - "llvm.nvvm.sust.b.2d.i32.trap" => "__nvvm_sust_b_2d_i32_trap", - "llvm.nvvm.sust.b.2d.i32.zero" => "__nvvm_sust_b_2d_i32_zero", - "llvm.nvvm.sust.b.2d.i64.clamp" => "__nvvm_sust_b_2d_i64_clamp", - "llvm.nvvm.sust.b.2d.i64.trap" => "__nvvm_sust_b_2d_i64_trap", - "llvm.nvvm.sust.b.2d.i64.zero" => "__nvvm_sust_b_2d_i64_zero", - "llvm.nvvm.sust.b.2d.i8.clamp" => "__nvvm_sust_b_2d_i8_clamp", - "llvm.nvvm.sust.b.2d.i8.trap" => "__nvvm_sust_b_2d_i8_trap", - "llvm.nvvm.sust.b.2d.i8.zero" => "__nvvm_sust_b_2d_i8_zero", - "llvm.nvvm.sust.b.2d.v2i16.clamp" => "__nvvm_sust_b_2d_v2i16_clamp", - "llvm.nvvm.sust.b.2d.v2i16.trap" => "__nvvm_sust_b_2d_v2i16_trap", - "llvm.nvvm.sust.b.2d.v2i16.zero" => "__nvvm_sust_b_2d_v2i16_zero", - "llvm.nvvm.sust.b.2d.v2i32.clamp" => "__nvvm_sust_b_2d_v2i32_clamp", - "llvm.nvvm.sust.b.2d.v2i32.trap" => "__nvvm_sust_b_2d_v2i32_trap", - "llvm.nvvm.sust.b.2d.v2i32.zero" => "__nvvm_sust_b_2d_v2i32_zero", - "llvm.nvvm.sust.b.2d.v2i64.clamp" => "__nvvm_sust_b_2d_v2i64_clamp", - "llvm.nvvm.sust.b.2d.v2i64.trap" => "__nvvm_sust_b_2d_v2i64_trap", - "llvm.nvvm.sust.b.2d.v2i64.zero" => "__nvvm_sust_b_2d_v2i64_zero", - "llvm.nvvm.sust.b.2d.v2i8.clamp" => "__nvvm_sust_b_2d_v2i8_clamp", - "llvm.nvvm.sust.b.2d.v2i8.trap" => "__nvvm_sust_b_2d_v2i8_trap", - "llvm.nvvm.sust.b.2d.v2i8.zero" => "__nvvm_sust_b_2d_v2i8_zero", - "llvm.nvvm.sust.b.2d.v4i16.clamp" => "__nvvm_sust_b_2d_v4i16_clamp", - "llvm.nvvm.sust.b.2d.v4i16.trap" => "__nvvm_sust_b_2d_v4i16_trap", - "llvm.nvvm.sust.b.2d.v4i16.zero" => "__nvvm_sust_b_2d_v4i16_zero", - "llvm.nvvm.sust.b.2d.v4i32.clamp" => "__nvvm_sust_b_2d_v4i32_clamp", - "llvm.nvvm.sust.b.2d.v4i32.trap" => "__nvvm_sust_b_2d_v4i32_trap", - "llvm.nvvm.sust.b.2d.v4i32.zero" => "__nvvm_sust_b_2d_v4i32_zero", - "llvm.nvvm.sust.b.2d.v4i8.clamp" => "__nvvm_sust_b_2d_v4i8_clamp", - "llvm.nvvm.sust.b.2d.v4i8.trap" => "__nvvm_sust_b_2d_v4i8_trap", - "llvm.nvvm.sust.b.2d.v4i8.zero" => "__nvvm_sust_b_2d_v4i8_zero", - "llvm.nvvm.sust.b.3d.i16.clamp" => "__nvvm_sust_b_3d_i16_clamp", - "llvm.nvvm.sust.b.3d.i16.trap" => "__nvvm_sust_b_3d_i16_trap", - "llvm.nvvm.sust.b.3d.i16.zero" => "__nvvm_sust_b_3d_i16_zero", - "llvm.nvvm.sust.b.3d.i32.clamp" => "__nvvm_sust_b_3d_i32_clamp", - "llvm.nvvm.sust.b.3d.i32.trap" => "__nvvm_sust_b_3d_i32_trap", - "llvm.nvvm.sust.b.3d.i32.zero" => "__nvvm_sust_b_3d_i32_zero", - "llvm.nvvm.sust.b.3d.i64.clamp" => "__nvvm_sust_b_3d_i64_clamp", - "llvm.nvvm.sust.b.3d.i64.trap" => "__nvvm_sust_b_3d_i64_trap", - "llvm.nvvm.sust.b.3d.i64.zero" => "__nvvm_sust_b_3d_i64_zero", - "llvm.nvvm.sust.b.3d.i8.clamp" => 
"__nvvm_sust_b_3d_i8_clamp", - "llvm.nvvm.sust.b.3d.i8.trap" => "__nvvm_sust_b_3d_i8_trap", - "llvm.nvvm.sust.b.3d.i8.zero" => "__nvvm_sust_b_3d_i8_zero", - "llvm.nvvm.sust.b.3d.v2i16.clamp" => "__nvvm_sust_b_3d_v2i16_clamp", - "llvm.nvvm.sust.b.3d.v2i16.trap" => "__nvvm_sust_b_3d_v2i16_trap", - "llvm.nvvm.sust.b.3d.v2i16.zero" => "__nvvm_sust_b_3d_v2i16_zero", - "llvm.nvvm.sust.b.3d.v2i32.clamp" => "__nvvm_sust_b_3d_v2i32_clamp", - "llvm.nvvm.sust.b.3d.v2i32.trap" => "__nvvm_sust_b_3d_v2i32_trap", - "llvm.nvvm.sust.b.3d.v2i32.zero" => "__nvvm_sust_b_3d_v2i32_zero", - "llvm.nvvm.sust.b.3d.v2i64.clamp" => "__nvvm_sust_b_3d_v2i64_clamp", - "llvm.nvvm.sust.b.3d.v2i64.trap" => "__nvvm_sust_b_3d_v2i64_trap", - "llvm.nvvm.sust.b.3d.v2i64.zero" => "__nvvm_sust_b_3d_v2i64_zero", - "llvm.nvvm.sust.b.3d.v2i8.clamp" => "__nvvm_sust_b_3d_v2i8_clamp", - "llvm.nvvm.sust.b.3d.v2i8.trap" => "__nvvm_sust_b_3d_v2i8_trap", - "llvm.nvvm.sust.b.3d.v2i8.zero" => "__nvvm_sust_b_3d_v2i8_zero", - "llvm.nvvm.sust.b.3d.v4i16.clamp" => "__nvvm_sust_b_3d_v4i16_clamp", - "llvm.nvvm.sust.b.3d.v4i16.trap" => "__nvvm_sust_b_3d_v4i16_trap", - "llvm.nvvm.sust.b.3d.v4i16.zero" => "__nvvm_sust_b_3d_v4i16_zero", - "llvm.nvvm.sust.b.3d.v4i32.clamp" => "__nvvm_sust_b_3d_v4i32_clamp", - "llvm.nvvm.sust.b.3d.v4i32.trap" => "__nvvm_sust_b_3d_v4i32_trap", - "llvm.nvvm.sust.b.3d.v4i32.zero" => "__nvvm_sust_b_3d_v4i32_zero", - "llvm.nvvm.sust.b.3d.v4i8.clamp" => "__nvvm_sust_b_3d_v4i8_clamp", - "llvm.nvvm.sust.b.3d.v4i8.trap" => "__nvvm_sust_b_3d_v4i8_trap", - "llvm.nvvm.sust.b.3d.v4i8.zero" => "__nvvm_sust_b_3d_v4i8_zero", - "llvm.nvvm.sust.p.1d.array.i16.trap" => "__nvvm_sust_p_1d_array_i16_trap", - "llvm.nvvm.sust.p.1d.array.i32.trap" => "__nvvm_sust_p_1d_array_i32_trap", - "llvm.nvvm.sust.p.1d.array.i8.trap" => "__nvvm_sust_p_1d_array_i8_trap", - "llvm.nvvm.sust.p.1d.array.v2i16.trap" => "__nvvm_sust_p_1d_array_v2i16_trap", - "llvm.nvvm.sust.p.1d.array.v2i32.trap" => "__nvvm_sust_p_1d_array_v2i32_trap", - "llvm.nvvm.sust.p.1d.array.v2i8.trap" => "__nvvm_sust_p_1d_array_v2i8_trap", - "llvm.nvvm.sust.p.1d.array.v4i16.trap" => "__nvvm_sust_p_1d_array_v4i16_trap", - "llvm.nvvm.sust.p.1d.array.v4i32.trap" => "__nvvm_sust_p_1d_array_v4i32_trap", - "llvm.nvvm.sust.p.1d.array.v4i8.trap" => "__nvvm_sust_p_1d_array_v4i8_trap", - "llvm.nvvm.sust.p.1d.i16.trap" => "__nvvm_sust_p_1d_i16_trap", - "llvm.nvvm.sust.p.1d.i32.trap" => "__nvvm_sust_p_1d_i32_trap", - "llvm.nvvm.sust.p.1d.i8.trap" => "__nvvm_sust_p_1d_i8_trap", - "llvm.nvvm.sust.p.1d.v2i16.trap" => "__nvvm_sust_p_1d_v2i16_trap", - "llvm.nvvm.sust.p.1d.v2i32.trap" => "__nvvm_sust_p_1d_v2i32_trap", - "llvm.nvvm.sust.p.1d.v2i8.trap" => "__nvvm_sust_p_1d_v2i8_trap", - "llvm.nvvm.sust.p.1d.v4i16.trap" => "__nvvm_sust_p_1d_v4i16_trap", - "llvm.nvvm.sust.p.1d.v4i32.trap" => "__nvvm_sust_p_1d_v4i32_trap", - "llvm.nvvm.sust.p.1d.v4i8.trap" => "__nvvm_sust_p_1d_v4i8_trap", - "llvm.nvvm.sust.p.2d.array.i16.trap" => "__nvvm_sust_p_2d_array_i16_trap", - "llvm.nvvm.sust.p.2d.array.i32.trap" => "__nvvm_sust_p_2d_array_i32_trap", - "llvm.nvvm.sust.p.2d.array.i8.trap" => "__nvvm_sust_p_2d_array_i8_trap", - "llvm.nvvm.sust.p.2d.array.v2i16.trap" => "__nvvm_sust_p_2d_array_v2i16_trap", - "llvm.nvvm.sust.p.2d.array.v2i32.trap" => "__nvvm_sust_p_2d_array_v2i32_trap", - "llvm.nvvm.sust.p.2d.array.v2i8.trap" => "__nvvm_sust_p_2d_array_v2i8_trap", - "llvm.nvvm.sust.p.2d.array.v4i16.trap" => "__nvvm_sust_p_2d_array_v4i16_trap", - "llvm.nvvm.sust.p.2d.array.v4i32.trap" => "__nvvm_sust_p_2d_array_v4i32_trap", - 
"llvm.nvvm.sust.p.2d.array.v4i8.trap" => "__nvvm_sust_p_2d_array_v4i8_trap", - "llvm.nvvm.sust.p.2d.i16.trap" => "__nvvm_sust_p_2d_i16_trap", - "llvm.nvvm.sust.p.2d.i32.trap" => "__nvvm_sust_p_2d_i32_trap", - "llvm.nvvm.sust.p.2d.i8.trap" => "__nvvm_sust_p_2d_i8_trap", - "llvm.nvvm.sust.p.2d.v2i16.trap" => "__nvvm_sust_p_2d_v2i16_trap", - "llvm.nvvm.sust.p.2d.v2i32.trap" => "__nvvm_sust_p_2d_v2i32_trap", - "llvm.nvvm.sust.p.2d.v2i8.trap" => "__nvvm_sust_p_2d_v2i8_trap", - "llvm.nvvm.sust.p.2d.v4i16.trap" => "__nvvm_sust_p_2d_v4i16_trap", - "llvm.nvvm.sust.p.2d.v4i32.trap" => "__nvvm_sust_p_2d_v4i32_trap", - "llvm.nvvm.sust.p.2d.v4i8.trap" => "__nvvm_sust_p_2d_v4i8_trap", - "llvm.nvvm.sust.p.3d.i16.trap" => "__nvvm_sust_p_3d_i16_trap", - "llvm.nvvm.sust.p.3d.i32.trap" => "__nvvm_sust_p_3d_i32_trap", - "llvm.nvvm.sust.p.3d.i8.trap" => "__nvvm_sust_p_3d_i8_trap", - "llvm.nvvm.sust.p.3d.v2i16.trap" => "__nvvm_sust_p_3d_v2i16_trap", - "llvm.nvvm.sust.p.3d.v2i32.trap" => "__nvvm_sust_p_3d_v2i32_trap", - "llvm.nvvm.sust.p.3d.v2i8.trap" => "__nvvm_sust_p_3d_v2i8_trap", - "llvm.nvvm.sust.p.3d.v4i16.trap" => "__nvvm_sust_p_3d_v4i16_trap", - "llvm.nvvm.sust.p.3d.v4i32.trap" => "__nvvm_sust_p_3d_v4i32_trap", - "llvm.nvvm.sust.p.3d.v4i8.trap" => "__nvvm_sust_p_3d_v4i8_trap", - "llvm.nvvm.swap.lo.hi.b64" => "__nvvm_swap_lo_hi_b64", - "llvm.nvvm.trunc.d" => "__nvvm_trunc_d", - "llvm.nvvm.trunc.f" => "__nvvm_trunc_f", - "llvm.nvvm.trunc.ftz.f" => "__nvvm_trunc_ftz_f", - "llvm.nvvm.txq.array.size" => "__nvvm_txq_array_size", - "llvm.nvvm.txq.channel.data.type" => "__nvvm_txq_channel_data_type", - "llvm.nvvm.txq.channel.order" => "__nvvm_txq_channel_order", - "llvm.nvvm.txq.depth" => "__nvvm_txq_depth", - "llvm.nvvm.txq.height" => "__nvvm_txq_height", - "llvm.nvvm.txq.num.mipmap.levels" => "__nvvm_txq_num_mipmap_levels", - "llvm.nvvm.txq.num.samples" => "__nvvm_txq_num_samples", - "llvm.nvvm.txq.width" => "__nvvm_txq_width", - "llvm.nvvm.ue8m0x2.to.bf16x2" => "__nvvm_ue8m0x2_to_bf16x2", - "llvm.nvvm.ui2d.rm" => "__nvvm_ui2d_rm", - "llvm.nvvm.ui2d.rn" => "__nvvm_ui2d_rn", - "llvm.nvvm.ui2d.rp" => "__nvvm_ui2d_rp", - "llvm.nvvm.ui2d.rz" => "__nvvm_ui2d_rz", - "llvm.nvvm.ui2f.rm" => "__nvvm_ui2f_rm", - "llvm.nvvm.ui2f.rn" => "__nvvm_ui2f_rn", - "llvm.nvvm.ui2f.rp" => "__nvvm_ui2f_rp", - "llvm.nvvm.ui2f.rz" => "__nvvm_ui2f_rz", - "llvm.nvvm.ull2d.rm" => "__nvvm_ull2d_rm", - "llvm.nvvm.ull2d.rn" => "__nvvm_ull2d_rn", - "llvm.nvvm.ull2d.rp" => "__nvvm_ull2d_rp", - "llvm.nvvm.ull2d.rz" => "__nvvm_ull2d_rz", - "llvm.nvvm.ull2f.rm" => "__nvvm_ull2f_rm", - "llvm.nvvm.ull2f.rn" => "__nvvm_ull2f_rn", - "llvm.nvvm.ull2f.rp" => "__nvvm_ull2f_rp", - "llvm.nvvm.ull2f.rz" => "__nvvm_ull2f_rz", - "llvm.nvvm.vote.all" => "__nvvm_vote_all", - "llvm.nvvm.vote.all.sync" => "__nvvm_vote_all_sync", - "llvm.nvvm.vote.any" => "__nvvm_vote_any", - "llvm.nvvm.vote.any.sync" => "__nvvm_vote_any_sync", - "llvm.nvvm.vote.ballot" => "__nvvm_vote_ballot", - "llvm.nvvm.vote.ballot.sync" => "__nvvm_vote_ballot_sync", - "llvm.nvvm.vote.uni" => "__nvvm_vote_uni", - "llvm.nvvm.vote.uni.sync" => "__nvvm_vote_uni_sync", - // ppc - "llvm.ppc.addex" => "__builtin_ppc_addex", - "llvm.ppc.addf128.round.to.odd" => "__builtin_addf128_round_to_odd", - "llvm.ppc.addg6s" => "__builtin_addg6s", - "llvm.ppc.addg6sd" => "__builtin_ppc_addg6s", - "llvm.ppc.altivec.crypto.vcipher" => "__builtin_altivec_crypto_vcipher", - "llvm.ppc.altivec.crypto.vcipherlast" => "__builtin_altivec_crypto_vcipherlast", - "llvm.ppc.altivec.crypto.vncipher" => 
"__builtin_altivec_crypto_vncipher", - "llvm.ppc.altivec.crypto.vncipherlast" => "__builtin_altivec_crypto_vncipherlast", - "llvm.ppc.altivec.crypto.vpermxor" => "__builtin_altivec_crypto_vpermxor", - "llvm.ppc.altivec.crypto.vpermxor.be" => "__builtin_altivec_crypto_vpermxor_be", - "llvm.ppc.altivec.crypto.vpmsumb" => "__builtin_altivec_crypto_vpmsumb", - "llvm.ppc.altivec.crypto.vpmsumd" => "__builtin_altivec_crypto_vpmsumd", - "llvm.ppc.altivec.crypto.vpmsumh" => "__builtin_altivec_crypto_vpmsumh", - "llvm.ppc.altivec.crypto.vpmsumw" => "__builtin_altivec_crypto_vpmsumw", - "llvm.ppc.altivec.crypto.vsbox" => "__builtin_altivec_crypto_vsbox", - "llvm.ppc.altivec.crypto.vshasigmad" => "__builtin_altivec_crypto_vshasigmad", - "llvm.ppc.altivec.crypto.vshasigmaw" => "__builtin_altivec_crypto_vshasigmaw", - "llvm.ppc.altivec.dss" => "__builtin_altivec_dss", - "llvm.ppc.altivec.dssall" => "__builtin_altivec_dssall", - "llvm.ppc.altivec.dst" => "__builtin_altivec_dst", - "llvm.ppc.altivec.dstst" => "__builtin_altivec_dstst", - "llvm.ppc.altivec.dststt" => "__builtin_altivec_dststt", - "llvm.ppc.altivec.dstt" => "__builtin_altivec_dstt", - "llvm.ppc.altivec.mfvscr" => "__builtin_altivec_mfvscr", - "llvm.ppc.altivec.mtvscr" => "__builtin_altivec_mtvscr", - "llvm.ppc.altivec.mtvsrbm" => "__builtin_altivec_mtvsrbm", - "llvm.ppc.altivec.mtvsrdm" => "__builtin_altivec_mtvsrdm", - "llvm.ppc.altivec.mtvsrhm" => "__builtin_altivec_mtvsrhm", - "llvm.ppc.altivec.mtvsrqm" => "__builtin_altivec_mtvsrqm", - "llvm.ppc.altivec.mtvsrwm" => "__builtin_altivec_mtvsrwm", - "llvm.ppc.altivec.vabsdub" => "__builtin_altivec_vabsdub", - "llvm.ppc.altivec.vabsduh" => "__builtin_altivec_vabsduh", - "llvm.ppc.altivec.vabsduw" => "__builtin_altivec_vabsduw", - "llvm.ppc.altivec.vaddcuq" => "__builtin_altivec_vaddcuq", - "llvm.ppc.altivec.vaddcuw" => "__builtin_altivec_vaddcuw", - "llvm.ppc.altivec.vaddecuq" => "__builtin_altivec_vaddecuq", - "llvm.ppc.altivec.vaddeuqm" => "__builtin_altivec_vaddeuqm", - "llvm.ppc.altivec.vaddsbs" => "__builtin_altivec_vaddsbs", - "llvm.ppc.altivec.vaddshs" => "__builtin_altivec_vaddshs", - "llvm.ppc.altivec.vaddsws" => "__builtin_altivec_vaddsws", - "llvm.ppc.altivec.vaddubs" => "__builtin_altivec_vaddubs", - "llvm.ppc.altivec.vadduhs" => "__builtin_altivec_vadduhs", - "llvm.ppc.altivec.vadduws" => "__builtin_altivec_vadduws", - "llvm.ppc.altivec.vavgsb" => "__builtin_altivec_vavgsb", - "llvm.ppc.altivec.vavgsh" => "__builtin_altivec_vavgsh", - "llvm.ppc.altivec.vavgsw" => "__builtin_altivec_vavgsw", - "llvm.ppc.altivec.vavgub" => "__builtin_altivec_vavgub", - "llvm.ppc.altivec.vavguh" => "__builtin_altivec_vavguh", - "llvm.ppc.altivec.vavguw" => "__builtin_altivec_vavguw", - "llvm.ppc.altivec.vbpermd" => "__builtin_altivec_vbpermd", - "llvm.ppc.altivec.vbpermq" => "__builtin_altivec_vbpermq", - "llvm.ppc.altivec.vcfsx" => "__builtin_altivec_vcfsx", - "llvm.ppc.altivec.vcfuged" => "__builtin_altivec_vcfuged", - "llvm.ppc.altivec.vcfux" => "__builtin_altivec_vcfux", - "llvm.ppc.altivec.vclrlb" => "__builtin_altivec_vclrlb", - "llvm.ppc.altivec.vclrrb" => "__builtin_altivec_vclrrb", - "llvm.ppc.altivec.vclzdm" => "__builtin_altivec_vclzdm", - "llvm.ppc.altivec.vclzlsbb" => "__builtin_altivec_vclzlsbb", - "llvm.ppc.altivec.vcmpbfp" => "__builtin_altivec_vcmpbfp", - "llvm.ppc.altivec.vcmpbfp.p" => "__builtin_altivec_vcmpbfp_p", - "llvm.ppc.altivec.vcmpeqfp" => "__builtin_altivec_vcmpeqfp", - "llvm.ppc.altivec.vcmpeqfp.p" => "__builtin_altivec_vcmpeqfp_p", - "llvm.ppc.altivec.vcmpequb" => 
"__builtin_altivec_vcmpequb", - "llvm.ppc.altivec.vcmpequb.p" => "__builtin_altivec_vcmpequb_p", - "llvm.ppc.altivec.vcmpequd" => "__builtin_altivec_vcmpequd", - "llvm.ppc.altivec.vcmpequd.p" => "__builtin_altivec_vcmpequd_p", - "llvm.ppc.altivec.vcmpequh" => "__builtin_altivec_vcmpequh", - "llvm.ppc.altivec.vcmpequh.p" => "__builtin_altivec_vcmpequh_p", - "llvm.ppc.altivec.vcmpequq" => "__builtin_altivec_vcmpequq", - "llvm.ppc.altivec.vcmpequq.p" => "__builtin_altivec_vcmpequq_p", - "llvm.ppc.altivec.vcmpequw" => "__builtin_altivec_vcmpequw", - "llvm.ppc.altivec.vcmpequw.p" => "__builtin_altivec_vcmpequw_p", - "llvm.ppc.altivec.vcmpgefp" => "__builtin_altivec_vcmpgefp", - "llvm.ppc.altivec.vcmpgefp.p" => "__builtin_altivec_vcmpgefp_p", - "llvm.ppc.altivec.vcmpgtfp" => "__builtin_altivec_vcmpgtfp", - "llvm.ppc.altivec.vcmpgtfp.p" => "__builtin_altivec_vcmpgtfp_p", - "llvm.ppc.altivec.vcmpgtsb" => "__builtin_altivec_vcmpgtsb", - "llvm.ppc.altivec.vcmpgtsb.p" => "__builtin_altivec_vcmpgtsb_p", - "llvm.ppc.altivec.vcmpgtsd" => "__builtin_altivec_vcmpgtsd", - "llvm.ppc.altivec.vcmpgtsd.p" => "__builtin_altivec_vcmpgtsd_p", - "llvm.ppc.altivec.vcmpgtsh" => "__builtin_altivec_vcmpgtsh", - "llvm.ppc.altivec.vcmpgtsh.p" => "__builtin_altivec_vcmpgtsh_p", - "llvm.ppc.altivec.vcmpgtsq" => "__builtin_altivec_vcmpgtsq", - "llvm.ppc.altivec.vcmpgtsq.p" => "__builtin_altivec_vcmpgtsq_p", - "llvm.ppc.altivec.vcmpgtsw" => "__builtin_altivec_vcmpgtsw", - "llvm.ppc.altivec.vcmpgtsw.p" => "__builtin_altivec_vcmpgtsw_p", - "llvm.ppc.altivec.vcmpgtub" => "__builtin_altivec_vcmpgtub", - "llvm.ppc.altivec.vcmpgtub.p" => "__builtin_altivec_vcmpgtub_p", - "llvm.ppc.altivec.vcmpgtud" => "__builtin_altivec_vcmpgtud", - "llvm.ppc.altivec.vcmpgtud.p" => "__builtin_altivec_vcmpgtud_p", - "llvm.ppc.altivec.vcmpgtuh" => "__builtin_altivec_vcmpgtuh", - "llvm.ppc.altivec.vcmpgtuh.p" => "__builtin_altivec_vcmpgtuh_p", - "llvm.ppc.altivec.vcmpgtuq" => "__builtin_altivec_vcmpgtuq", - "llvm.ppc.altivec.vcmpgtuq.p" => "__builtin_altivec_vcmpgtuq_p", - "llvm.ppc.altivec.vcmpgtuw" => "__builtin_altivec_vcmpgtuw", - "llvm.ppc.altivec.vcmpgtuw.p" => "__builtin_altivec_vcmpgtuw_p", - "llvm.ppc.altivec.vcmpneb" => "__builtin_altivec_vcmpneb", - "llvm.ppc.altivec.vcmpneb.p" => "__builtin_altivec_vcmpneb_p", - "llvm.ppc.altivec.vcmpneh" => "__builtin_altivec_vcmpneh", - "llvm.ppc.altivec.vcmpneh.p" => "__builtin_altivec_vcmpneh_p", - "llvm.ppc.altivec.vcmpnew" => "__builtin_altivec_vcmpnew", - "llvm.ppc.altivec.vcmpnew.p" => "__builtin_altivec_vcmpnew_p", - "llvm.ppc.altivec.vcmpnezb" => "__builtin_altivec_vcmpnezb", - "llvm.ppc.altivec.vcmpnezb.p" => "__builtin_altivec_vcmpnezb_p", - "llvm.ppc.altivec.vcmpnezh" => "__builtin_altivec_vcmpnezh", - "llvm.ppc.altivec.vcmpnezh.p" => "__builtin_altivec_vcmpnezh_p", - "llvm.ppc.altivec.vcmpnezw" => "__builtin_altivec_vcmpnezw", - "llvm.ppc.altivec.vcmpnezw.p" => "__builtin_altivec_vcmpnezw_p", - "llvm.ppc.altivec.vcntmbb" => "__builtin_altivec_vcntmbb", - "llvm.ppc.altivec.vcntmbd" => "__builtin_altivec_vcntmbd", - "llvm.ppc.altivec.vcntmbh" => "__builtin_altivec_vcntmbh", - "llvm.ppc.altivec.vcntmbw" => "__builtin_altivec_vcntmbw", - "llvm.ppc.altivec.vctsxs" => "__builtin_altivec_vctsxs", - "llvm.ppc.altivec.vctuxs" => "__builtin_altivec_vctuxs", - "llvm.ppc.altivec.vctzdm" => "__builtin_altivec_vctzdm", - "llvm.ppc.altivec.vctzlsbb" => "__builtin_altivec_vctzlsbb", - "llvm.ppc.altivec.vdivesd" => "__builtin_altivec_vdivesd", - "llvm.ppc.altivec.vdivesq" => "__builtin_altivec_vdivesq", - 
"llvm.ppc.altivec.vdivesw" => "__builtin_altivec_vdivesw", - "llvm.ppc.altivec.vdiveud" => "__builtin_altivec_vdiveud", - "llvm.ppc.altivec.vdiveuq" => "__builtin_altivec_vdiveuq", - "llvm.ppc.altivec.vdiveuw" => "__builtin_altivec_vdiveuw", - "llvm.ppc.altivec.vexpandbm" => "__builtin_altivec_vexpandbm", - "llvm.ppc.altivec.vexpanddm" => "__builtin_altivec_vexpanddm", - "llvm.ppc.altivec.vexpandhm" => "__builtin_altivec_vexpandhm", - "llvm.ppc.altivec.vexpandqm" => "__builtin_altivec_vexpandqm", - "llvm.ppc.altivec.vexpandwm" => "__builtin_altivec_vexpandwm", - "llvm.ppc.altivec.vexptefp" => "__builtin_altivec_vexptefp", - "llvm.ppc.altivec.vextddvlx" => "__builtin_altivec_vextddvlx", - "llvm.ppc.altivec.vextddvrx" => "__builtin_altivec_vextddvrx", - "llvm.ppc.altivec.vextdubvlx" => "__builtin_altivec_vextdubvlx", - "llvm.ppc.altivec.vextdubvrx" => "__builtin_altivec_vextdubvrx", - "llvm.ppc.altivec.vextduhvlx" => "__builtin_altivec_vextduhvlx", - "llvm.ppc.altivec.vextduhvrx" => "__builtin_altivec_vextduhvrx", - "llvm.ppc.altivec.vextduwvlx" => "__builtin_altivec_vextduwvlx", - "llvm.ppc.altivec.vextduwvrx" => "__builtin_altivec_vextduwvrx", - "llvm.ppc.altivec.vextractbm" => "__builtin_altivec_vextractbm", - "llvm.ppc.altivec.vextractdm" => "__builtin_altivec_vextractdm", - "llvm.ppc.altivec.vextracthm" => "__builtin_altivec_vextracthm", - "llvm.ppc.altivec.vextractqm" => "__builtin_altivec_vextractqm", - "llvm.ppc.altivec.vextractwm" => "__builtin_altivec_vextractwm", - "llvm.ppc.altivec.vextsb2d" => "__builtin_altivec_vextsb2d", - "llvm.ppc.altivec.vextsb2w" => "__builtin_altivec_vextsb2w", - "llvm.ppc.altivec.vextsd2q" => "__builtin_altivec_vextsd2q", - "llvm.ppc.altivec.vextsh2d" => "__builtin_altivec_vextsh2d", - "llvm.ppc.altivec.vextsh2w" => "__builtin_altivec_vextsh2w", - "llvm.ppc.altivec.vextsw2d" => "__builtin_altivec_vextsw2d", - "llvm.ppc.altivec.vgbbd" => "__builtin_altivec_vgbbd", - "llvm.ppc.altivec.vgnb" => "__builtin_altivec_vgnb", - "llvm.ppc.altivec.vinsblx" => "__builtin_altivec_vinsblx", - "llvm.ppc.altivec.vinsbrx" => "__builtin_altivec_vinsbrx", - "llvm.ppc.altivec.vinsbvlx" => "__builtin_altivec_vinsbvlx", - "llvm.ppc.altivec.vinsbvrx" => "__builtin_altivec_vinsbvrx", - "llvm.ppc.altivec.vinsdlx" => "__builtin_altivec_vinsdlx", - "llvm.ppc.altivec.vinsdrx" => "__builtin_altivec_vinsdrx", - "llvm.ppc.altivec.vinshlx" => "__builtin_altivec_vinshlx", - "llvm.ppc.altivec.vinshrx" => "__builtin_altivec_vinshrx", - "llvm.ppc.altivec.vinshvlx" => "__builtin_altivec_vinshvlx", - "llvm.ppc.altivec.vinshvrx" => "__builtin_altivec_vinshvrx", - "llvm.ppc.altivec.vinswlx" => "__builtin_altivec_vinswlx", - "llvm.ppc.altivec.vinswrx" => "__builtin_altivec_vinswrx", - "llvm.ppc.altivec.vinswvlx" => "__builtin_altivec_vinswvlx", - "llvm.ppc.altivec.vinswvrx" => "__builtin_altivec_vinswvrx", - "llvm.ppc.altivec.vlogefp" => "__builtin_altivec_vlogefp", - "llvm.ppc.altivec.vmaddfp" => "__builtin_altivec_vmaddfp", - "llvm.ppc.altivec.vmaxfp" => "__builtin_altivec_vmaxfp", - "llvm.ppc.altivec.vmaxsb" => "__builtin_altivec_vmaxsb", - "llvm.ppc.altivec.vmaxsd" => "__builtin_altivec_vmaxsd", - "llvm.ppc.altivec.vmaxsh" => "__builtin_altivec_vmaxsh", - "llvm.ppc.altivec.vmaxsw" => "__builtin_altivec_vmaxsw", - "llvm.ppc.altivec.vmaxub" => "__builtin_altivec_vmaxub", - "llvm.ppc.altivec.vmaxud" => "__builtin_altivec_vmaxud", - "llvm.ppc.altivec.vmaxuh" => "__builtin_altivec_vmaxuh", - "llvm.ppc.altivec.vmaxuw" => "__builtin_altivec_vmaxuw", - "llvm.ppc.altivec.vmhaddshs" => 
"__builtin_altivec_vmhaddshs", - "llvm.ppc.altivec.vmhraddshs" => "__builtin_altivec_vmhraddshs", - "llvm.ppc.altivec.vminfp" => "__builtin_altivec_vminfp", - "llvm.ppc.altivec.vminsb" => "__builtin_altivec_vminsb", - "llvm.ppc.altivec.vminsd" => "__builtin_altivec_vminsd", - "llvm.ppc.altivec.vminsh" => "__builtin_altivec_vminsh", - "llvm.ppc.altivec.vminsw" => "__builtin_altivec_vminsw", - "llvm.ppc.altivec.vminub" => "__builtin_altivec_vminub", - "llvm.ppc.altivec.vminud" => "__builtin_altivec_vminud", - "llvm.ppc.altivec.vminuh" => "__builtin_altivec_vminuh", - "llvm.ppc.altivec.vminuw" => "__builtin_altivec_vminuw", - "llvm.ppc.altivec.vmladduhm" => "__builtin_altivec_vmladduhm", - "llvm.ppc.altivec.vmsumcud" => "__builtin_altivec_vmsumcud", - "llvm.ppc.altivec.vmsummbm" => "__builtin_altivec_vmsummbm", - "llvm.ppc.altivec.vmsumshm" => "__builtin_altivec_vmsumshm", - "llvm.ppc.altivec.vmsumshs" => "__builtin_altivec_vmsumshs", - "llvm.ppc.altivec.vmsumubm" => "__builtin_altivec_vmsumubm", - "llvm.ppc.altivec.vmsumudm" => "__builtin_altivec_vmsumudm", - "llvm.ppc.altivec.vmsumuhm" => "__builtin_altivec_vmsumuhm", - "llvm.ppc.altivec.vmsumuhs" => "__builtin_altivec_vmsumuhs", - "llvm.ppc.altivec.vmulesb" => "__builtin_altivec_vmulesb", - "llvm.ppc.altivec.vmulesd" => "__builtin_altivec_vmulesd", - "llvm.ppc.altivec.vmulesh" => "__builtin_altivec_vmulesh", - "llvm.ppc.altivec.vmulesw" => "__builtin_altivec_vmulesw", - "llvm.ppc.altivec.vmuleub" => "__builtin_altivec_vmuleub", - "llvm.ppc.altivec.vmuleud" => "__builtin_altivec_vmuleud", - "llvm.ppc.altivec.vmuleuh" => "__builtin_altivec_vmuleuh", - "llvm.ppc.altivec.vmuleuw" => "__builtin_altivec_vmuleuw", - "llvm.ppc.altivec.vmulhsd" => "__builtin_altivec_vmulhsd", - "llvm.ppc.altivec.vmulhsw" => "__builtin_altivec_vmulhsw", - "llvm.ppc.altivec.vmulhud" => "__builtin_altivec_vmulhud", - "llvm.ppc.altivec.vmulhuw" => "__builtin_altivec_vmulhuw", - "llvm.ppc.altivec.vmulosb" => "__builtin_altivec_vmulosb", - "llvm.ppc.altivec.vmulosd" => "__builtin_altivec_vmulosd", - "llvm.ppc.altivec.vmulosh" => "__builtin_altivec_vmulosh", - "llvm.ppc.altivec.vmulosw" => "__builtin_altivec_vmulosw", - "llvm.ppc.altivec.vmuloub" => "__builtin_altivec_vmuloub", - "llvm.ppc.altivec.vmuloud" => "__builtin_altivec_vmuloud", - "llvm.ppc.altivec.vmulouh" => "__builtin_altivec_vmulouh", - "llvm.ppc.altivec.vmulouw" => "__builtin_altivec_vmulouw", - "llvm.ppc.altivec.vnmsubfp" => "__builtin_altivec_vnmsubfp", - "llvm.ppc.altivec.vpdepd" => "__builtin_altivec_vpdepd", - "llvm.ppc.altivec.vperm" => "__builtin_altivec_vperm_4si", - "llvm.ppc.altivec.vpextd" => "__builtin_altivec_vpextd", - "llvm.ppc.altivec.vpkpx" => "__builtin_altivec_vpkpx", - "llvm.ppc.altivec.vpksdss" => "__builtin_altivec_vpksdss", - "llvm.ppc.altivec.vpksdus" => "__builtin_altivec_vpksdus", - "llvm.ppc.altivec.vpkshss" => "__builtin_altivec_vpkshss", - "llvm.ppc.altivec.vpkshus" => "__builtin_altivec_vpkshus", - "llvm.ppc.altivec.vpkswss" => "__builtin_altivec_vpkswss", - "llvm.ppc.altivec.vpkswus" => "__builtin_altivec_vpkswus", - "llvm.ppc.altivec.vpkudus" => "__builtin_altivec_vpkudus", - "llvm.ppc.altivec.vpkuhus" => "__builtin_altivec_vpkuhus", - "llvm.ppc.altivec.vpkuwus" => "__builtin_altivec_vpkuwus", - "llvm.ppc.altivec.vprtybd" => "__builtin_altivec_vprtybd", - "llvm.ppc.altivec.vprtybq" => "__builtin_altivec_vprtybq", - "llvm.ppc.altivec.vprtybw" => "__builtin_altivec_vprtybw", - "llvm.ppc.altivec.vrefp" => "__builtin_altivec_vrefp", - "llvm.ppc.altivec.vrfim" => 
"__builtin_altivec_vrfim", - "llvm.ppc.altivec.vrfin" => "__builtin_altivec_vrfin", - "llvm.ppc.altivec.vrfip" => "__builtin_altivec_vrfip", - "llvm.ppc.altivec.vrfiz" => "__builtin_altivec_vrfiz", - "llvm.ppc.altivec.vrlb" => "__builtin_altivec_vrlb", - "llvm.ppc.altivec.vrld" => "__builtin_altivec_vrld", - "llvm.ppc.altivec.vrldmi" => "__builtin_altivec_vrldmi", - "llvm.ppc.altivec.vrldnm" => "__builtin_altivec_vrldnm", - "llvm.ppc.altivec.vrlh" => "__builtin_altivec_vrlh", - "llvm.ppc.altivec.vrlqmi" => "__builtin_altivec_vrlqmi", - "llvm.ppc.altivec.vrlqnm" => "__builtin_altivec_vrlqnm", - "llvm.ppc.altivec.vrlw" => "__builtin_altivec_vrlw", - "llvm.ppc.altivec.vrlwmi" => "__builtin_altivec_vrlwmi", - "llvm.ppc.altivec.vrlwnm" => "__builtin_altivec_vrlwnm", - "llvm.ppc.altivec.vrsqrtefp" => "__builtin_altivec_vrsqrtefp", - "llvm.ppc.altivec.vsel" => "__builtin_altivec_vsel_4si", - "llvm.ppc.altivec.vsl" => "__builtin_altivec_vsl", - "llvm.ppc.altivec.vslb" => "__builtin_altivec_vslb", - "llvm.ppc.altivec.vsldbi" => "__builtin_altivec_vsldbi", - "llvm.ppc.altivec.vslh" => "__builtin_altivec_vslh", - "llvm.ppc.altivec.vslo" => "__builtin_altivec_vslo", - "llvm.ppc.altivec.vslv" => "__builtin_altivec_vslv", - "llvm.ppc.altivec.vslw" => "__builtin_altivec_vslw", - "llvm.ppc.altivec.vsr" => "__builtin_altivec_vsr", - "llvm.ppc.altivec.vsrab" => "__builtin_altivec_vsrab", - "llvm.ppc.altivec.vsrah" => "__builtin_altivec_vsrah", - "llvm.ppc.altivec.vsraw" => "__builtin_altivec_vsraw", - "llvm.ppc.altivec.vsrb" => "__builtin_altivec_vsrb", - "llvm.ppc.altivec.vsrdbi" => "__builtin_altivec_vsrdbi", - "llvm.ppc.altivec.vsrh" => "__builtin_altivec_vsrh", - "llvm.ppc.altivec.vsro" => "__builtin_altivec_vsro", - "llvm.ppc.altivec.vsrv" => "__builtin_altivec_vsrv", - "llvm.ppc.altivec.vsrw" => "__builtin_altivec_vsrw", - "llvm.ppc.altivec.vstribl" => "__builtin_altivec_vstribl", - "llvm.ppc.altivec.vstribl.p" => "__builtin_altivec_vstribl_p", - "llvm.ppc.altivec.vstribr" => "__builtin_altivec_vstribr", - "llvm.ppc.altivec.vstribr.p" => "__builtin_altivec_vstribr_p", - "llvm.ppc.altivec.vstrihl" => "__builtin_altivec_vstrihl", - "llvm.ppc.altivec.vstrihl.p" => "__builtin_altivec_vstrihl_p", - "llvm.ppc.altivec.vstrihr" => "__builtin_altivec_vstrihr", - "llvm.ppc.altivec.vstrihr.p" => "__builtin_altivec_vstrihr_p", - "llvm.ppc.altivec.vsubcuq" => "__builtin_altivec_vsubcuq", - "llvm.ppc.altivec.vsubcuw" => "__builtin_altivec_vsubcuw", - "llvm.ppc.altivec.vsubecuq" => "__builtin_altivec_vsubecuq", - "llvm.ppc.altivec.vsubeuqm" => "__builtin_altivec_vsubeuqm", - "llvm.ppc.altivec.vsubsbs" => "__builtin_altivec_vsubsbs", - "llvm.ppc.altivec.vsubshs" => "__builtin_altivec_vsubshs", - "llvm.ppc.altivec.vsubsws" => "__builtin_altivec_vsubsws", - "llvm.ppc.altivec.vsububs" => "__builtin_altivec_vsububs", - "llvm.ppc.altivec.vsubuhs" => "__builtin_altivec_vsubuhs", - "llvm.ppc.altivec.vsubuws" => "__builtin_altivec_vsubuws", - "llvm.ppc.altivec.vsum2sws" => "__builtin_altivec_vsum2sws", - "llvm.ppc.altivec.vsum4sbs" => "__builtin_altivec_vsum4sbs", - "llvm.ppc.altivec.vsum4shs" => "__builtin_altivec_vsum4shs", - "llvm.ppc.altivec.vsum4ubs" => "__builtin_altivec_vsum4ubs", - "llvm.ppc.altivec.vsumsws" => "__builtin_altivec_vsumsws", - "llvm.ppc.altivec.vupkhpx" => "__builtin_altivec_vupkhpx", - "llvm.ppc.altivec.vupkhsb" => "__builtin_altivec_vupkhsb", - "llvm.ppc.altivec.vupkhsh" => "__builtin_altivec_vupkhsh", - "llvm.ppc.altivec.vupkhsw" => "__builtin_altivec_vupkhsw", - "llvm.ppc.altivec.vupklpx" => 
"__builtin_altivec_vupklpx", - "llvm.ppc.altivec.vupklsb" => "__builtin_altivec_vupklsb", - "llvm.ppc.altivec.vupklsh" => "__builtin_altivec_vupklsh", - "llvm.ppc.altivec.vupklsw" => "__builtin_altivec_vupklsw", - "llvm.ppc.bcdadd" => "__builtin_ppc_bcdadd", - "llvm.ppc.bcdadd.p" => "__builtin_ppc_bcdadd_p", - "llvm.ppc.bcdsub" => "__builtin_ppc_bcdsub", - "llvm.ppc.bcdsub.p" => "__builtin_ppc_bcdsub_p", - "llvm.ppc.bpermd" => "__builtin_bpermd", - "llvm.ppc.cbcdtd" => "__builtin_cbcdtd", - "llvm.ppc.cbcdtdd" => "__builtin_ppc_cbcdtd", - "llvm.ppc.cdtbcd" => "__builtin_cdtbcd", - "llvm.ppc.cdtbcdd" => "__builtin_ppc_cdtbcd", - "llvm.ppc.cfuged" => "__builtin_cfuged", - "llvm.ppc.cmpeqb" => "__builtin_ppc_cmpeqb", - "llvm.ppc.cmprb" => "__builtin_ppc_cmprb", - "llvm.ppc.cntlzdm" => "__builtin_cntlzdm", - "llvm.ppc.cnttzdm" => "__builtin_cnttzdm", - "llvm.ppc.compare.exp.eq" => "__builtin_ppc_compare_exp_eq", - "llvm.ppc.compare.exp.gt" => "__builtin_ppc_compare_exp_gt", - "llvm.ppc.compare.exp.lt" => "__builtin_ppc_compare_exp_lt", - "llvm.ppc.compare.exp.uo" => "__builtin_ppc_compare_exp_uo", - "llvm.ppc.darn" => "__builtin_darn", - "llvm.ppc.darn32" => "__builtin_darn_32", - "llvm.ppc.darnraw" => "__builtin_darn_raw", - "llvm.ppc.dcbf" => "__builtin_dcbf", - "llvm.ppc.dcbfl" => "__builtin_ppc_dcbfl", - "llvm.ppc.dcbflp" => "__builtin_ppc_dcbflp", - "llvm.ppc.dcbst" => "__builtin_ppc_dcbst", - "llvm.ppc.dcbt" => "__builtin_ppc_dcbt", - "llvm.ppc.dcbtst" => "__builtin_ppc_dcbtst", - "llvm.ppc.dcbtstt" => "__builtin_ppc_dcbtstt", - "llvm.ppc.dcbtt" => "__builtin_ppc_dcbtt", - "llvm.ppc.dcbz" => "__builtin_ppc_dcbz", - "llvm.ppc.divde" => "__builtin_divde", - "llvm.ppc.divdeu" => "__builtin_divdeu", - "llvm.ppc.divf128.round.to.odd" => "__builtin_divf128_round_to_odd", - "llvm.ppc.divwe" => "__builtin_divwe", - "llvm.ppc.divweu" => "__builtin_divweu", - "llvm.ppc.eieio" => "__builtin_ppc_eieio", - "llvm.ppc.extract.exp" => "__builtin_ppc_extract_exp", - "llvm.ppc.extract.sig" => "__builtin_ppc_extract_sig", - "llvm.ppc.fcfid" => "__builtin_ppc_fcfid", - "llvm.ppc.fcfud" => "__builtin_ppc_fcfud", - "llvm.ppc.fctid" => "__builtin_ppc_fctid", - "llvm.ppc.fctidz" => "__builtin_ppc_fctidz", - "llvm.ppc.fctiw" => "__builtin_ppc_fctiw", - "llvm.ppc.fctiwz" => "__builtin_ppc_fctiwz", - "llvm.ppc.fctudz" => "__builtin_ppc_fctudz", - "llvm.ppc.fctuwz" => "__builtin_ppc_fctuwz", - "llvm.ppc.fence" => "__builtin_ppc_fence", - "llvm.ppc.fmaf128.round.to.odd" => "__builtin_fmaf128_round_to_odd", - "llvm.ppc.fmsub" => "__builtin_ppc_fmsub", - "llvm.ppc.fmsubs" => "__builtin_ppc_fmsubs", - "llvm.ppc.fnabs" => "__builtin_ppc_fnabs", - "llvm.ppc.fnabss" => "__builtin_ppc_fnabss", - "llvm.ppc.fnmadd" => "__builtin_ppc_fnmadd", - "llvm.ppc.fnmadds" => "__builtin_ppc_fnmadds", - "llvm.ppc.fre" => "__builtin_ppc_fre", - "llvm.ppc.fres" => "__builtin_ppc_fres", - "llvm.ppc.frsqrte" => "__builtin_ppc_frsqrte", - "llvm.ppc.frsqrtes" => "__builtin_ppc_frsqrtes", - "llvm.ppc.fsel" => "__builtin_ppc_fsel", - "llvm.ppc.fsels" => "__builtin_ppc_fsels", - "llvm.ppc.get.texasr" => "__builtin_get_texasr", - "llvm.ppc.get.texasru" => "__builtin_get_texasru", - "llvm.ppc.get.tfhar" => "__builtin_get_tfhar", - "llvm.ppc.get.tfiar" => "__builtin_get_tfiar", - "llvm.ppc.icbt" => "__builtin_ppc_icbt", - "llvm.ppc.insert.exp" => "__builtin_ppc_insert_exp", - "llvm.ppc.iospace.eieio" => "__builtin_ppc_iospace_eieio", - "llvm.ppc.iospace.lwsync" => "__builtin_ppc_iospace_lwsync", - "llvm.ppc.iospace.sync" => 
"__builtin_ppc_iospace_sync", - "llvm.ppc.isync" => "__builtin_ppc_isync", - "llvm.ppc.load4r" => "__builtin_ppc_load4r", - "llvm.ppc.load8r" => "__builtin_ppc_load8r", - "llvm.ppc.lwsync" => "__builtin_ppc_lwsync", - "llvm.ppc.maddhd" => "__builtin_ppc_maddhd", - "llvm.ppc.maddhdu" => "__builtin_ppc_maddhdu", - "llvm.ppc.maddld" => "__builtin_ppc_maddld", - "llvm.ppc.mffsl" => "__builtin_ppc_mffsl", - "llvm.ppc.mfmsr" => "__builtin_ppc_mfmsr", - "llvm.ppc.mftbu" => "__builtin_ppc_mftbu", - "llvm.ppc.mtfsb0" => "__builtin_ppc_mtfsb0", - "llvm.ppc.mtfsb1" => "__builtin_ppc_mtfsb1", - "llvm.ppc.mtfsfi" => "__builtin_ppc_mtfsfi", - "llvm.ppc.mtmsr" => "__builtin_ppc_mtmsr", - "llvm.ppc.mulf128.round.to.odd" => "__builtin_mulf128_round_to_odd", - "llvm.ppc.mulhd" => "__builtin_ppc_mulhd", - "llvm.ppc.mulhdu" => "__builtin_ppc_mulhdu", - "llvm.ppc.mulhw" => "__builtin_ppc_mulhw", - "llvm.ppc.mulhwu" => "__builtin_ppc_mulhwu", - "llvm.ppc.pack.longdouble" => "__builtin_pack_longdouble", - "llvm.ppc.pdepd" => "__builtin_pdepd", - "llvm.ppc.pextd" => "__builtin_pextd", - "llvm.ppc.qpx.qvfabs" => "__builtin_qpx_qvfabs", - "llvm.ppc.qpx.qvfadd" => "__builtin_qpx_qvfadd", - "llvm.ppc.qpx.qvfadds" => "__builtin_qpx_qvfadds", - "llvm.ppc.qpx.qvfcfid" => "__builtin_qpx_qvfcfid", - "llvm.ppc.qpx.qvfcfids" => "__builtin_qpx_qvfcfids", - "llvm.ppc.qpx.qvfcfidu" => "__builtin_qpx_qvfcfidu", - "llvm.ppc.qpx.qvfcfidus" => "__builtin_qpx_qvfcfidus", - "llvm.ppc.qpx.qvfcmpeq" => "__builtin_qpx_qvfcmpeq", - "llvm.ppc.qpx.qvfcmpgt" => "__builtin_qpx_qvfcmpgt", - "llvm.ppc.qpx.qvfcmplt" => "__builtin_qpx_qvfcmplt", - "llvm.ppc.qpx.qvfcpsgn" => "__builtin_qpx_qvfcpsgn", - "llvm.ppc.qpx.qvfctid" => "__builtin_qpx_qvfctid", - "llvm.ppc.qpx.qvfctidu" => "__builtin_qpx_qvfctidu", - "llvm.ppc.qpx.qvfctiduz" => "__builtin_qpx_qvfctiduz", - "llvm.ppc.qpx.qvfctidz" => "__builtin_qpx_qvfctidz", - "llvm.ppc.qpx.qvfctiw" => "__builtin_qpx_qvfctiw", - "llvm.ppc.qpx.qvfctiwu" => "__builtin_qpx_qvfctiwu", - "llvm.ppc.qpx.qvfctiwuz" => "__builtin_qpx_qvfctiwuz", - "llvm.ppc.qpx.qvfctiwz" => "__builtin_qpx_qvfctiwz", - "llvm.ppc.qpx.qvflogical" => "__builtin_qpx_qvflogical", - "llvm.ppc.qpx.qvfmadd" => "__builtin_qpx_qvfmadd", - "llvm.ppc.qpx.qvfmadds" => "__builtin_qpx_qvfmadds", - "llvm.ppc.qpx.qvfmsub" => "__builtin_qpx_qvfmsub", - "llvm.ppc.qpx.qvfmsubs" => "__builtin_qpx_qvfmsubs", - "llvm.ppc.qpx.qvfmul" => "__builtin_qpx_qvfmul", - "llvm.ppc.qpx.qvfmuls" => "__builtin_qpx_qvfmuls", - "llvm.ppc.qpx.qvfnabs" => "__builtin_qpx_qvfnabs", - "llvm.ppc.qpx.qvfneg" => "__builtin_qpx_qvfneg", - "llvm.ppc.qpx.qvfnmadd" => "__builtin_qpx_qvfnmadd", - "llvm.ppc.qpx.qvfnmadds" => "__builtin_qpx_qvfnmadds", - "llvm.ppc.qpx.qvfnmsub" => "__builtin_qpx_qvfnmsub", - "llvm.ppc.qpx.qvfnmsubs" => "__builtin_qpx_qvfnmsubs", - "llvm.ppc.qpx.qvfperm" => "__builtin_qpx_qvfperm", - "llvm.ppc.qpx.qvfre" => "__builtin_qpx_qvfre", - "llvm.ppc.qpx.qvfres" => "__builtin_qpx_qvfres", - "llvm.ppc.qpx.qvfrim" => "__builtin_qpx_qvfrim", - "llvm.ppc.qpx.qvfrin" => "__builtin_qpx_qvfrin", - "llvm.ppc.qpx.qvfrip" => "__builtin_qpx_qvfrip", - "llvm.ppc.qpx.qvfriz" => "__builtin_qpx_qvfriz", - "llvm.ppc.qpx.qvfrsp" => "__builtin_qpx_qvfrsp", - "llvm.ppc.qpx.qvfrsqrte" => "__builtin_qpx_qvfrsqrte", - "llvm.ppc.qpx.qvfrsqrtes" => "__builtin_qpx_qvfrsqrtes", - "llvm.ppc.qpx.qvfsel" => "__builtin_qpx_qvfsel", - "llvm.ppc.qpx.qvfsub" => "__builtin_qpx_qvfsub", - "llvm.ppc.qpx.qvfsubs" => "__builtin_qpx_qvfsubs", - "llvm.ppc.qpx.qvftstnan" => 
"__builtin_qpx_qvftstnan", - "llvm.ppc.qpx.qvfxmadd" => "__builtin_qpx_qvfxmadd", - "llvm.ppc.qpx.qvfxmadds" => "__builtin_qpx_qvfxmadds", - "llvm.ppc.qpx.qvfxmul" => "__builtin_qpx_qvfxmul", - "llvm.ppc.qpx.qvfxmuls" => "__builtin_qpx_qvfxmuls", - "llvm.ppc.qpx.qvfxxcpnmadd" => "__builtin_qpx_qvfxxcpnmadd", - "llvm.ppc.qpx.qvfxxcpnmadds" => "__builtin_qpx_qvfxxcpnmadds", - "llvm.ppc.qpx.qvfxxmadd" => "__builtin_qpx_qvfxxmadd", - "llvm.ppc.qpx.qvfxxmadds" => "__builtin_qpx_qvfxxmadds", - "llvm.ppc.qpx.qvfxxnpmadd" => "__builtin_qpx_qvfxxnpmadd", - "llvm.ppc.qpx.qvfxxnpmadds" => "__builtin_qpx_qvfxxnpmadds", - "llvm.ppc.qpx.qvgpci" => "__builtin_qpx_qvgpci", - "llvm.ppc.qpx.qvlfcd" => "__builtin_qpx_qvlfcd", - "llvm.ppc.qpx.qvlfcda" => "__builtin_qpx_qvlfcda", - "llvm.ppc.qpx.qvlfcs" => "__builtin_qpx_qvlfcs", - "llvm.ppc.qpx.qvlfcsa" => "__builtin_qpx_qvlfcsa", - "llvm.ppc.qpx.qvlfd" => "__builtin_qpx_qvlfd", - "llvm.ppc.qpx.qvlfda" => "__builtin_qpx_qvlfda", - "llvm.ppc.qpx.qvlfiwa" => "__builtin_qpx_qvlfiwa", - "llvm.ppc.qpx.qvlfiwaa" => "__builtin_qpx_qvlfiwaa", - "llvm.ppc.qpx.qvlfiwz" => "__builtin_qpx_qvlfiwz", - "llvm.ppc.qpx.qvlfiwza" => "__builtin_qpx_qvlfiwza", - "llvm.ppc.qpx.qvlfs" => "__builtin_qpx_qvlfs", - "llvm.ppc.qpx.qvlfsa" => "__builtin_qpx_qvlfsa", - "llvm.ppc.qpx.qvlpcld" => "__builtin_qpx_qvlpcld", - "llvm.ppc.qpx.qvlpcls" => "__builtin_qpx_qvlpcls", - "llvm.ppc.qpx.qvlpcrd" => "__builtin_qpx_qvlpcrd", - "llvm.ppc.qpx.qvlpcrs" => "__builtin_qpx_qvlpcrs", - "llvm.ppc.qpx.qvstfcd" => "__builtin_qpx_qvstfcd", - "llvm.ppc.qpx.qvstfcda" => "__builtin_qpx_qvstfcda", - "llvm.ppc.qpx.qvstfcs" => "__builtin_qpx_qvstfcs", - "llvm.ppc.qpx.qvstfcsa" => "__builtin_qpx_qvstfcsa", - "llvm.ppc.qpx.qvstfd" => "__builtin_qpx_qvstfd", - "llvm.ppc.qpx.qvstfda" => "__builtin_qpx_qvstfda", - "llvm.ppc.qpx.qvstfiw" => "__builtin_qpx_qvstfiw", - "llvm.ppc.qpx.qvstfiwa" => "__builtin_qpx_qvstfiwa", - "llvm.ppc.qpx.qvstfs" => "__builtin_qpx_qvstfs", - "llvm.ppc.qpx.qvstfsa" => "__builtin_qpx_qvstfsa", - "llvm.ppc.readflm" => "__builtin_readflm", - "llvm.ppc.rlwimi" => "__builtin_ppc_rlwimi", - "llvm.ppc.rlwnm" => "__builtin_ppc_rlwnm", - "llvm.ppc.scalar.extract.expq" => "__builtin_vsx_scalar_extract_expq", - "llvm.ppc.scalar.insert.exp.qp" => "__builtin_vsx_scalar_insert_exp_qp", - "llvm.ppc.set.texasr" => "__builtin_set_texasr", - "llvm.ppc.set.texasru" => "__builtin_set_texasru", - "llvm.ppc.set.tfhar" => "__builtin_set_tfhar", - "llvm.ppc.set.tfiar" => "__builtin_set_tfiar", - "llvm.ppc.setb" => "__builtin_ppc_setb", - "llvm.ppc.setflm" => "__builtin_setflm", - "llvm.ppc.setrnd" => "__builtin_setrnd", - "llvm.ppc.sqrtf128.round.to.odd" => "__builtin_sqrtf128_round_to_odd", - "llvm.ppc.stbcx" => "__builtin_ppc_stbcx", - "llvm.ppc.stdcx" => "__builtin_ppc_stdcx", - "llvm.ppc.stfiw" => "__builtin_ppc_stfiw", - "llvm.ppc.store2r" => "__builtin_ppc_store2r", - "llvm.ppc.store4r" => "__builtin_ppc_store4r", - "llvm.ppc.store8r" => "__builtin_ppc_store8r", - "llvm.ppc.stwcx" => "__builtin_ppc_stwcx", - "llvm.ppc.subf128.round.to.odd" => "__builtin_subf128_round_to_odd", - "llvm.ppc.sync" => "__builtin_ppc_sync", - "llvm.ppc.tabort" => "__builtin_tabort", - "llvm.ppc.tabortdc" => "__builtin_tabortdc", - "llvm.ppc.tabortdci" => "__builtin_tabortdci", - "llvm.ppc.tabortwc" => "__builtin_tabortwc", - "llvm.ppc.tabortwci" => "__builtin_tabortwci", - "llvm.ppc.tbegin" => "__builtin_tbegin", - "llvm.ppc.tcheck" => "__builtin_tcheck", - "llvm.ppc.tdw" => "__builtin_ppc_tdw", - "llvm.ppc.tend" => 
"__builtin_tend", - "llvm.ppc.tendall" => "__builtin_tendall", - "llvm.ppc.trap" => "__builtin_ppc_trap", - "llvm.ppc.trapd" => "__builtin_ppc_trapd", - "llvm.ppc.trechkpt" => "__builtin_trechkpt", - "llvm.ppc.treclaim" => "__builtin_treclaim", - "llvm.ppc.tresume" => "__builtin_tresume", - "llvm.ppc.truncf128.round.to.odd" => "__builtin_truncf128_round_to_odd", - "llvm.ppc.tsr" => "__builtin_tsr", - "llvm.ppc.tsuspend" => "__builtin_tsuspend", - "llvm.ppc.ttest" => "__builtin_ttest", - "llvm.ppc.tw" => "__builtin_ppc_tw", - "llvm.ppc.unpack.longdouble" => "__builtin_unpack_longdouble", - "llvm.ppc.vsx.xsmaxdp" => "__builtin_vsx_xsmaxdp", - "llvm.ppc.vsx.xsmindp" => "__builtin_vsx_xsmindp", - "llvm.ppc.vsx.xvcmpeqdp" => "__builtin_vsx_xvcmpeqdp", - "llvm.ppc.vsx.xvcmpeqdp.p" => "__builtin_vsx_xvcmpeqdp_p", - "llvm.ppc.vsx.xvcmpeqsp" => "__builtin_vsx_xvcmpeqsp", - "llvm.ppc.vsx.xvcmpeqsp.p" => "__builtin_vsx_xvcmpeqsp_p", - "llvm.ppc.vsx.xvcmpgedp" => "__builtin_vsx_xvcmpgedp", - "llvm.ppc.vsx.xvcmpgedp.p" => "__builtin_vsx_xvcmpgedp_p", - "llvm.ppc.vsx.xvcmpgesp" => "__builtin_vsx_xvcmpgesp", - "llvm.ppc.vsx.xvcmpgesp.p" => "__builtin_vsx_xvcmpgesp_p", - "llvm.ppc.vsx.xvcmpgtdp" => "__builtin_vsx_xvcmpgtdp", - "llvm.ppc.vsx.xvcmpgtdp.p" => "__builtin_vsx_xvcmpgtdp_p", - "llvm.ppc.vsx.xvcmpgtsp" => "__builtin_vsx_xvcmpgtsp", - "llvm.ppc.vsx.xvcmpgtsp.p" => "__builtin_vsx_xvcmpgtsp_p", - "llvm.ppc.vsx.xvcvbf16spn" => "__builtin_vsx_xvcvbf16spn", - "llvm.ppc.vsx.xvcvdpsp" => "__builtin_vsx_xvcvdpsp", - "llvm.ppc.vsx.xvcvdpsxws" => "__builtin_vsx_xvcvdpsxws", - "llvm.ppc.vsx.xvcvdpuxws" => "__builtin_vsx_xvcvdpuxws", - "llvm.ppc.vsx.xvcvhpsp" => "__builtin_vsx_xvcvhpsp", - "llvm.ppc.vsx.xvcvspbf16" => "__builtin_vsx_xvcvspbf16", - "llvm.ppc.vsx.xvcvspdp" => "__builtin_vsx_xvcvspdp", - "llvm.ppc.vsx.xvcvsphp" => "__builtin_vsx_xvcvsphp", - "llvm.ppc.vsx.xvcvspsxds" => "__builtin_vsx_xvcvspsxds", - "llvm.ppc.vsx.xvcvspuxds" => "__builtin_vsx_xvcvspuxds", - "llvm.ppc.vsx.xvcvsxdsp" => "__builtin_vsx_xvcvsxdsp", - "llvm.ppc.vsx.xvcvsxwdp" => "__builtin_vsx_xvcvsxwdp", - "llvm.ppc.vsx.xvcvuxdsp" => "__builtin_vsx_xvcvuxdsp", - "llvm.ppc.vsx.xvcvuxwdp" => "__builtin_vsx_xvcvuxwdp", - "llvm.ppc.vsx.xvdivdp" => "__builtin_vsx_xvdivdp", - "llvm.ppc.vsx.xvdivsp" => "__builtin_vsx_xvdivsp", - "llvm.ppc.vsx.xviexpdp" => "__builtin_vsx_xviexpdp", - "llvm.ppc.vsx.xviexpsp" => "__builtin_vsx_xviexpsp", - "llvm.ppc.vsx.xvmaxdp" => "__builtin_vsx_xvmaxdp", - "llvm.ppc.vsx.xvmaxsp" => "__builtin_vsx_xvmaxsp", - "llvm.ppc.vsx.xvmindp" => "__builtin_vsx_xvmindp", - "llvm.ppc.vsx.xvminsp" => "__builtin_vsx_xvminsp", - "llvm.ppc.vsx.xvredp" => "__builtin_vsx_xvredp", - "llvm.ppc.vsx.xvresp" => "__builtin_vsx_xvresp", - "llvm.ppc.vsx.xvrsqrtedp" => "__builtin_vsx_xvrsqrtedp", - "llvm.ppc.vsx.xvrsqrtesp" => "__builtin_vsx_xvrsqrtesp", - "llvm.ppc.vsx.xvtdivdp" => "__builtin_vsx_xvtdivdp", - "llvm.ppc.vsx.xvtdivsp" => "__builtin_vsx_xvtdivsp", - "llvm.ppc.vsx.xvtlsbb" => "__builtin_vsx_xvtlsbb", - "llvm.ppc.vsx.xvtsqrtdp" => "__builtin_vsx_xvtsqrtdp", - "llvm.ppc.vsx.xvtsqrtsp" => "__builtin_vsx_xvtsqrtsp", - "llvm.ppc.vsx.xvtstdcdp" => "__builtin_vsx_xvtstdcdp", - "llvm.ppc.vsx.xvtstdcsp" => "__builtin_vsx_xvtstdcsp", - "llvm.ppc.vsx.xvxexpdp" => "__builtin_vsx_xvxexpdp", - "llvm.ppc.vsx.xvxexpsp" => "__builtin_vsx_xvxexpsp", - "llvm.ppc.vsx.xvxsigdp" => "__builtin_vsx_xvxsigdp", - "llvm.ppc.vsx.xvxsigsp" => "__builtin_vsx_xvxsigsp", - "llvm.ppc.vsx.xxblendvb" => "__builtin_vsx_xxblendvb", - "llvm.ppc.vsx.xxblendvd" 
=> "__builtin_vsx_xxblendvd", - "llvm.ppc.vsx.xxblendvh" => "__builtin_vsx_xxblendvh", - "llvm.ppc.vsx.xxblendvw" => "__builtin_vsx_xxblendvw", - "llvm.ppc.vsx.xxeval" => "__builtin_vsx_xxeval", - "llvm.ppc.vsx.xxextractuw" => "__builtin_vsx_xxextractuw", - "llvm.ppc.vsx.xxgenpcvbm" => "__builtin_vsx_xxgenpcvbm", - "llvm.ppc.vsx.xxgenpcvdm" => "__builtin_vsx_xxgenpcvdm", - "llvm.ppc.vsx.xxgenpcvhm" => "__builtin_vsx_xxgenpcvhm", - "llvm.ppc.vsx.xxgenpcvwm" => "__builtin_vsx_xxgenpcvwm", - "llvm.ppc.vsx.xxinsertw" => "__builtin_vsx_xxinsertw", - "llvm.ppc.vsx.xxleqv" => "__builtin_vsx_xxleqv", - "llvm.ppc.vsx.xxpermx" => "__builtin_vsx_xxpermx", - // ptx - "llvm.ptx.bar.sync" => "__builtin_ptx_bar_sync", - "llvm.ptx.read.clock" => "__builtin_ptx_read_clock", - "llvm.ptx.read.clock64" => "__builtin_ptx_read_clock64", - "llvm.ptx.read.gridid" => "__builtin_ptx_read_gridid", - "llvm.ptx.read.laneid" => "__builtin_ptx_read_laneid", - "llvm.ptx.read.lanemask.eq" => "__builtin_ptx_read_lanemask_eq", - "llvm.ptx.read.lanemask.ge" => "__builtin_ptx_read_lanemask_ge", - "llvm.ptx.read.lanemask.gt" => "__builtin_ptx_read_lanemask_gt", - "llvm.ptx.read.lanemask.le" => "__builtin_ptx_read_lanemask_le", - "llvm.ptx.read.lanemask.lt" => "__builtin_ptx_read_lanemask_lt", - "llvm.ptx.read.nsmid" => "__builtin_ptx_read_nsmid", - "llvm.ptx.read.nwarpid" => "__builtin_ptx_read_nwarpid", - "llvm.ptx.read.pm0" => "__builtin_ptx_read_pm0", - "llvm.ptx.read.pm1" => "__builtin_ptx_read_pm1", - "llvm.ptx.read.pm2" => "__builtin_ptx_read_pm2", - "llvm.ptx.read.pm3" => "__builtin_ptx_read_pm3", - "llvm.ptx.read.smid" => "__builtin_ptx_read_smid", - "llvm.ptx.read.warpid" => "__builtin_ptx_read_warpid", - // r600 - "llvm.r600.group.barrier" => "__builtin_r600_group_barrier", - "llvm.r600.implicitarg.ptr" => "__builtin_r600_implicitarg_ptr", - "llvm.r600.rat.store.typed" => "__builtin_r600_rat_store_typed", - "llvm.r600.read.global.size.x" => "__builtin_r600_read_global_size_x", - "llvm.r600.read.global.size.y" => "__builtin_r600_read_global_size_y", - "llvm.r600.read.global.size.z" => "__builtin_r600_read_global_size_z", - "llvm.r600.read.ngroups.x" => "__builtin_r600_read_ngroups_x", - "llvm.r600.read.ngroups.y" => "__builtin_r600_read_ngroups_y", - "llvm.r600.read.ngroups.z" => "__builtin_r600_read_ngroups_z", - "llvm.r600.read.tgid.x" => "__builtin_r600_read_tgid_x", - "llvm.r600.read.tgid.y" => "__builtin_r600_read_tgid_y", - "llvm.r600.read.tgid.z" => "__builtin_r600_read_tgid_z", - "llvm.r600.read.tidig.x" => "__builtin_r600_read_tidig_x", - "llvm.r600.read.tidig.y" => "__builtin_r600_read_tidig_y", - "llvm.r600.read.tidig.z" => "__builtin_r600_read_tidig_z", - // riscv - "llvm.riscv.aes32dsi" => "__builtin_riscv_aes32dsi", - "llvm.riscv.aes32dsmi" => "__builtin_riscv_aes32dsmi", - "llvm.riscv.aes32esi" => "__builtin_riscv_aes32esi", - "llvm.riscv.aes32esmi" => "__builtin_riscv_aes32esmi", - "llvm.riscv.aes64ds" => "__builtin_riscv_aes64ds", - "llvm.riscv.aes64dsm" => "__builtin_riscv_aes64dsm", - "llvm.riscv.aes64es" => "__builtin_riscv_aes64es", - "llvm.riscv.aes64esm" => "__builtin_riscv_aes64esm", - "llvm.riscv.aes64im" => "__builtin_riscv_aes64im", - "llvm.riscv.aes64ks1i" => "__builtin_riscv_aes64ks1i", - "llvm.riscv.aes64ks2" => "__builtin_riscv_aes64ks2", - "llvm.riscv.sha512sig0" => "__builtin_riscv_sha512sig0", - "llvm.riscv.sha512sig0h" => "__builtin_riscv_sha512sig0h", - "llvm.riscv.sha512sig0l" => "__builtin_riscv_sha512sig0l", - "llvm.riscv.sha512sig1" => "__builtin_riscv_sha512sig1", - 
"llvm.riscv.sha512sig1h" => "__builtin_riscv_sha512sig1h", - "llvm.riscv.sha512sig1l" => "__builtin_riscv_sha512sig1l", - "llvm.riscv.sha512sum0" => "__builtin_riscv_sha512sum0", - "llvm.riscv.sha512sum0r" => "__builtin_riscv_sha512sum0r", - "llvm.riscv.sha512sum1" => "__builtin_riscv_sha512sum1", - "llvm.riscv.sha512sum1r" => "__builtin_riscv_sha512sum1r", - // s390 - "llvm.s390.bdepg" => "__builtin_s390_bdepg", - "llvm.s390.bextg" => "__builtin_s390_bextg", - "llvm.s390.efpc" => "__builtin_s390_efpc", - "llvm.s390.etnd" => "__builtin_tx_nesting_depth", - "llvm.s390.lcbb" => "__builtin_s390_lcbb", - "llvm.s390.ppa.txassist" => "__builtin_tx_assist", - "llvm.s390.sfpc" => "__builtin_s390_sfpc", - "llvm.s390.tend" => "__builtin_tend", - "llvm.s390.vaccb" => "__builtin_s390_vaccb", - "llvm.s390.vacccq" => "__builtin_s390_vacccq", - "llvm.s390.vaccf" => "__builtin_s390_vaccf", - "llvm.s390.vaccg" => "__builtin_s390_vaccg", - "llvm.s390.vacch" => "__builtin_s390_vacch", - "llvm.s390.vaccq" => "__builtin_s390_vaccq", - "llvm.s390.vacq" => "__builtin_s390_vacq", - "llvm.s390.vaq" => "__builtin_s390_vaq", - "llvm.s390.vavgb" => "__builtin_s390_vavgb", - "llvm.s390.vavgf" => "__builtin_s390_vavgf", - "llvm.s390.vavgg" => "__builtin_s390_vavgg", - "llvm.s390.vavgh" => "__builtin_s390_vavgh", - "llvm.s390.vavglb" => "__builtin_s390_vavglb", - "llvm.s390.vavglf" => "__builtin_s390_vavglf", - "llvm.s390.vavglg" => "__builtin_s390_vavglg", - "llvm.s390.vavglh" => "__builtin_s390_vavglh", - "llvm.s390.vavglq" => "__builtin_s390_vavglq", - "llvm.s390.vavgq" => "__builtin_s390_vavgq", - "llvm.s390.vbperm" => "__builtin_s390_vbperm", - "llvm.s390.vcfn" => "__builtin_s390_vcfn", - "llvm.s390.vcksm" => "__builtin_s390_vcksm", - "llvm.s390.vclfnhs" => "__builtin_s390_vclfnhs", - "llvm.s390.vclfnls" => "__builtin_s390_vclfnls", - "llvm.s390.vcnf" => "__builtin_s390_vcnf", - "llvm.s390.vcrnfs" => "__builtin_s390_vcrnfs", - "llvm.s390.verimb" => "__builtin_s390_verimb", - "llvm.s390.verimf" => "__builtin_s390_verimf", - "llvm.s390.verimg" => "__builtin_s390_verimg", - "llvm.s390.verimh" => "__builtin_s390_verimh", - "llvm.s390.veval" => "__builtin_s390_veval", - "llvm.s390.vfaeb" => "__builtin_s390_vfaeb", - "llvm.s390.vfaef" => "__builtin_s390_vfaef", - "llvm.s390.vfaeh" => "__builtin_s390_vfaeh", - "llvm.s390.vfaezb" => "__builtin_s390_vfaezb", - "llvm.s390.vfaezf" => "__builtin_s390_vfaezf", - "llvm.s390.vfaezh" => "__builtin_s390_vfaezh", - "llvm.s390.vfeeb" => "__builtin_s390_vfeeb", - "llvm.s390.vfeef" => "__builtin_s390_vfeef", - "llvm.s390.vfeeh" => "__builtin_s390_vfeeh", - "llvm.s390.vfeezb" => "__builtin_s390_vfeezb", - "llvm.s390.vfeezf" => "__builtin_s390_vfeezf", - "llvm.s390.vfeezh" => "__builtin_s390_vfeezh", - "llvm.s390.vfeneb" => "__builtin_s390_vfeneb", - "llvm.s390.vfenef" => "__builtin_s390_vfenef", - "llvm.s390.vfeneh" => "__builtin_s390_vfeneh", - "llvm.s390.vfenezb" => "__builtin_s390_vfenezb", - "llvm.s390.vfenezf" => "__builtin_s390_vfenezf", - "llvm.s390.vfenezh" => "__builtin_s390_vfenezh", - "llvm.s390.vgemb" => "__builtin_s390_vgemb", - "llvm.s390.vgemf" => "__builtin_s390_vgemf", - "llvm.s390.vgemg" => "__builtin_s390_vgemg", - "llvm.s390.vgemh" => "__builtin_s390_vgemh", - "llvm.s390.vgemq" => "__builtin_s390_vgemq", - "llvm.s390.vgfmab" => "__builtin_s390_vgfmab", - "llvm.s390.vgfmaf" => "__builtin_s390_vgfmaf", - "llvm.s390.vgfmag" => "__builtin_s390_vgfmag", - "llvm.s390.vgfmah" => "__builtin_s390_vgfmah", - "llvm.s390.vgfmb" => "__builtin_s390_vgfmb", - "llvm.s390.vgfmf" => 
"__builtin_s390_vgfmf", - "llvm.s390.vgfmg" => "__builtin_s390_vgfmg", - "llvm.s390.vgfmh" => "__builtin_s390_vgfmh", - "llvm.s390.vistrb" => "__builtin_s390_vistrb", - "llvm.s390.vistrf" => "__builtin_s390_vistrf", - "llvm.s390.vistrh" => "__builtin_s390_vistrh", - "llvm.s390.vlbb" => "__builtin_s390_vlbb", - "llvm.s390.vll" => "__builtin_s390_vll", - "llvm.s390.vlrl" => "__builtin_s390_vlrlr", - "llvm.s390.vmaeb" => "__builtin_s390_vmaeb", - "llvm.s390.vmaef" => "__builtin_s390_vmaef", - "llvm.s390.vmaeg" => "__builtin_s390_vmaeg", - "llvm.s390.vmaeh" => "__builtin_s390_vmaeh", - "llvm.s390.vmahb" => "__builtin_s390_vmahb", - "llvm.s390.vmahf" => "__builtin_s390_vmahf", - "llvm.s390.vmahg" => "__builtin_s390_vmahg", - "llvm.s390.vmahh" => "__builtin_s390_vmahh", - "llvm.s390.vmahq" => "__builtin_s390_vmahq", - "llvm.s390.vmaleb" => "__builtin_s390_vmaleb", - "llvm.s390.vmalef" => "__builtin_s390_vmalef", - "llvm.s390.vmaleg" => "__builtin_s390_vmaleg", - "llvm.s390.vmaleh" => "__builtin_s390_vmaleh", - "llvm.s390.vmalhb" => "__builtin_s390_vmalhb", - "llvm.s390.vmalhf" => "__builtin_s390_vmalhf", - "llvm.s390.vmalhg" => "__builtin_s390_vmalhg", - "llvm.s390.vmalhh" => "__builtin_s390_vmalhh", - "llvm.s390.vmalhq" => "__builtin_s390_vmalhq", - "llvm.s390.vmalob" => "__builtin_s390_vmalob", - "llvm.s390.vmalof" => "__builtin_s390_vmalof", - "llvm.s390.vmalog" => "__builtin_s390_vmalog", - "llvm.s390.vmaloh" => "__builtin_s390_vmaloh", - "llvm.s390.vmaob" => "__builtin_s390_vmaob", - "llvm.s390.vmaof" => "__builtin_s390_vmaof", - "llvm.s390.vmaog" => "__builtin_s390_vmaog", - "llvm.s390.vmaoh" => "__builtin_s390_vmaoh", - "llvm.s390.vmeb" => "__builtin_s390_vmeb", - "llvm.s390.vmef" => "__builtin_s390_vmef", - "llvm.s390.vmeg" => "__builtin_s390_vmeg", - "llvm.s390.vmeh" => "__builtin_s390_vmeh", - "llvm.s390.vmhb" => "__builtin_s390_vmhb", - "llvm.s390.vmhf" => "__builtin_s390_vmhf", - "llvm.s390.vmhg" => "__builtin_s390_vmhg", - "llvm.s390.vmhh" => "__builtin_s390_vmhh", - "llvm.s390.vmhq" => "__builtin_s390_vmhq", - "llvm.s390.vmleb" => "__builtin_s390_vmleb", - "llvm.s390.vmlef" => "__builtin_s390_vmlef", - "llvm.s390.vmleg" => "__builtin_s390_vmleg", - "llvm.s390.vmleh" => "__builtin_s390_vmleh", - "llvm.s390.vmlhb" => "__builtin_s390_vmlhb", - "llvm.s390.vmlhf" => "__builtin_s390_vmlhf", - "llvm.s390.vmlhg" => "__builtin_s390_vmlhg", - "llvm.s390.vmlhh" => "__builtin_s390_vmlhh", - "llvm.s390.vmlhq" => "__builtin_s390_vmlhq", - "llvm.s390.vmlob" => "__builtin_s390_vmlob", - "llvm.s390.vmlof" => "__builtin_s390_vmlof", - "llvm.s390.vmlog" => "__builtin_s390_vmlog", - "llvm.s390.vmloh" => "__builtin_s390_vmloh", - "llvm.s390.vmob" => "__builtin_s390_vmob", - "llvm.s390.vmof" => "__builtin_s390_vmof", - "llvm.s390.vmog" => "__builtin_s390_vmog", - "llvm.s390.vmoh" => "__builtin_s390_vmoh", - "llvm.s390.vmslg" => "__builtin_s390_vmslg", - "llvm.s390.vpdi" => "__builtin_s390_vpdi", - "llvm.s390.vperm" => "__builtin_s390_vperm", - "llvm.s390.vpklsf" => "__builtin_s390_vpklsf", - "llvm.s390.vpklsg" => "__builtin_s390_vpklsg", - "llvm.s390.vpklsh" => "__builtin_s390_vpklsh", - "llvm.s390.vpksf" => "__builtin_s390_vpksf", - "llvm.s390.vpksg" => "__builtin_s390_vpksg", - "llvm.s390.vpksh" => "__builtin_s390_vpksh", - "llvm.s390.vsbcbiq" => "__builtin_s390_vsbcbiq", - "llvm.s390.vsbiq" => "__builtin_s390_vsbiq", - "llvm.s390.vscbib" => "__builtin_s390_vscbib", - "llvm.s390.vscbif" => "__builtin_s390_vscbif", - "llvm.s390.vscbig" => "__builtin_s390_vscbig", - "llvm.s390.vscbih" => 
"__builtin_s390_vscbih", - "llvm.s390.vscbiq" => "__builtin_s390_vscbiq", - "llvm.s390.vsl" => "__builtin_s390_vsl", - "llvm.s390.vslb" => "__builtin_s390_vslb", - "llvm.s390.vsld" => "__builtin_s390_vsld", - "llvm.s390.vsldb" => "__builtin_s390_vsldb", - "llvm.s390.vsq" => "__builtin_s390_vsq", - "llvm.s390.vsra" => "__builtin_s390_vsra", - "llvm.s390.vsrab" => "__builtin_s390_vsrab", - "llvm.s390.vsrd" => "__builtin_s390_vsrd", - "llvm.s390.vsrl" => "__builtin_s390_vsrl", - "llvm.s390.vsrlb" => "__builtin_s390_vsrlb", - "llvm.s390.vstl" => "__builtin_s390_vstl", - "llvm.s390.vstrcb" => "__builtin_s390_vstrcb", - "llvm.s390.vstrcf" => "__builtin_s390_vstrcf", - "llvm.s390.vstrch" => "__builtin_s390_vstrch", - "llvm.s390.vstrczb" => "__builtin_s390_vstrczb", - "llvm.s390.vstrczf" => "__builtin_s390_vstrczf", - "llvm.s390.vstrczh" => "__builtin_s390_vstrczh", - "llvm.s390.vstrl" => "__builtin_s390_vstrlr", - "llvm.s390.vsumb" => "__builtin_s390_vsumb", - "llvm.s390.vsumgf" => "__builtin_s390_vsumgf", - "llvm.s390.vsumgh" => "__builtin_s390_vsumgh", - "llvm.s390.vsumh" => "__builtin_s390_vsumh", - "llvm.s390.vsumqf" => "__builtin_s390_vsumqf", - "llvm.s390.vsumqg" => "__builtin_s390_vsumqg", - "llvm.s390.vtm" => "__builtin_s390_vtm", - "llvm.s390.vuphb" => "__builtin_s390_vuphb", - "llvm.s390.vuphf" => "__builtin_s390_vuphf", - "llvm.s390.vuphg" => "__builtin_s390_vuphg", - "llvm.s390.vuphh" => "__builtin_s390_vuphh", - "llvm.s390.vuplb" => "__builtin_s390_vuplb", - "llvm.s390.vuplf" => "__builtin_s390_vuplf", - "llvm.s390.vuplg" => "__builtin_s390_vuplg", - "llvm.s390.vuplhb" => "__builtin_s390_vuplhb", - "llvm.s390.vuplhf" => "__builtin_s390_vuplhf", - "llvm.s390.vuplhg" => "__builtin_s390_vuplhg", - "llvm.s390.vuplhh" => "__builtin_s390_vuplhh", - "llvm.s390.vuplhw" => "__builtin_s390_vuplhw", - "llvm.s390.vupllb" => "__builtin_s390_vupllb", - "llvm.s390.vupllf" => "__builtin_s390_vupllf", - "llvm.s390.vupllg" => "__builtin_s390_vupllg", - "llvm.s390.vupllh" => "__builtin_s390_vupllh", - // ve - "llvm.ve.vl.andm.MMM" => "__builtin_ve_vl_andm_MMM", - "llvm.ve.vl.andm.mmm" => "__builtin_ve_vl_andm_mmm", - "llvm.ve.vl.eqvm.MMM" => "__builtin_ve_vl_eqvm_MMM", - "llvm.ve.vl.eqvm.mmm" => "__builtin_ve_vl_eqvm_mmm", - "llvm.ve.vl.extract.vm512l" => "__builtin_ve_vl_extract_vm512l", - "llvm.ve.vl.extract.vm512u" => "__builtin_ve_vl_extract_vm512u", - "llvm.ve.vl.fencec.s" => "__builtin_ve_vl_fencec_s", - "llvm.ve.vl.fencei" => "__builtin_ve_vl_fencei", - "llvm.ve.vl.fencem.s" => "__builtin_ve_vl_fencem_s", - "llvm.ve.vl.fidcr.sss" => "__builtin_ve_vl_fidcr_sss", - "llvm.ve.vl.insert.vm512l" => "__builtin_ve_vl_insert_vm512l", - "llvm.ve.vl.insert.vm512u" => "__builtin_ve_vl_insert_vm512u", - "llvm.ve.vl.lcr.sss" => "__builtin_ve_vl_lcr_sss", - "llvm.ve.vl.lsv.vvss" => "__builtin_ve_vl_lsv_vvss", - "llvm.ve.vl.lvm.MMss" => "__builtin_ve_vl_lvm_MMss", - "llvm.ve.vl.lvm.mmss" => "__builtin_ve_vl_lvm_mmss", - "llvm.ve.vl.lvsd.svs" => "__builtin_ve_vl_lvsd_svs", - "llvm.ve.vl.lvsl.svs" => "__builtin_ve_vl_lvsl_svs", - "llvm.ve.vl.lvss.svs" => "__builtin_ve_vl_lvss_svs", - "llvm.ve.vl.lzvm.sml" => "__builtin_ve_vl_lzvm_sml", - "llvm.ve.vl.negm.MM" => "__builtin_ve_vl_negm_MM", - "llvm.ve.vl.negm.mm" => "__builtin_ve_vl_negm_mm", - "llvm.ve.vl.nndm.MMM" => "__builtin_ve_vl_nndm_MMM", - "llvm.ve.vl.nndm.mmm" => "__builtin_ve_vl_nndm_mmm", - "llvm.ve.vl.orm.MMM" => "__builtin_ve_vl_orm_MMM", - "llvm.ve.vl.orm.mmm" => "__builtin_ve_vl_orm_mmm", - "llvm.ve.vl.pack.f32a" => "__builtin_ve_vl_pack_f32a", - 
"llvm.ve.vl.pack.f32p" => "__builtin_ve_vl_pack_f32p", - "llvm.ve.vl.pcvm.sml" => "__builtin_ve_vl_pcvm_sml", - "llvm.ve.vl.pfchv.ssl" => "__builtin_ve_vl_pfchv_ssl", - "llvm.ve.vl.pfchvnc.ssl" => "__builtin_ve_vl_pfchvnc_ssl", - "llvm.ve.vl.pvadds.vsvMvl" => "__builtin_ve_vl_pvadds_vsvMvl", - "llvm.ve.vl.pvadds.vsvl" => "__builtin_ve_vl_pvadds_vsvl", - "llvm.ve.vl.pvadds.vsvvl" => "__builtin_ve_vl_pvadds_vsvvl", - "llvm.ve.vl.pvadds.vvvMvl" => "__builtin_ve_vl_pvadds_vvvMvl", - "llvm.ve.vl.pvadds.vvvl" => "__builtin_ve_vl_pvadds_vvvl", - "llvm.ve.vl.pvadds.vvvvl" => "__builtin_ve_vl_pvadds_vvvvl", - "llvm.ve.vl.pvaddu.vsvMvl" => "__builtin_ve_vl_pvaddu_vsvMvl", - "llvm.ve.vl.pvaddu.vsvl" => "__builtin_ve_vl_pvaddu_vsvl", - "llvm.ve.vl.pvaddu.vsvvl" => "__builtin_ve_vl_pvaddu_vsvvl", - "llvm.ve.vl.pvaddu.vvvMvl" => "__builtin_ve_vl_pvaddu_vvvMvl", - "llvm.ve.vl.pvaddu.vvvl" => "__builtin_ve_vl_pvaddu_vvvl", - "llvm.ve.vl.pvaddu.vvvvl" => "__builtin_ve_vl_pvaddu_vvvvl", - "llvm.ve.vl.pvand.vsvMvl" => "__builtin_ve_vl_pvand_vsvMvl", - "llvm.ve.vl.pvand.vsvl" => "__builtin_ve_vl_pvand_vsvl", - "llvm.ve.vl.pvand.vsvvl" => "__builtin_ve_vl_pvand_vsvvl", - "llvm.ve.vl.pvand.vvvMvl" => "__builtin_ve_vl_pvand_vvvMvl", - "llvm.ve.vl.pvand.vvvl" => "__builtin_ve_vl_pvand_vvvl", - "llvm.ve.vl.pvand.vvvvl" => "__builtin_ve_vl_pvand_vvvvl", - "llvm.ve.vl.pvbrd.vsMvl" => "__builtin_ve_vl_pvbrd_vsMvl", - "llvm.ve.vl.pvbrd.vsl" => "__builtin_ve_vl_pvbrd_vsl", - "llvm.ve.vl.pvbrd.vsvl" => "__builtin_ve_vl_pvbrd_vsvl", - "llvm.ve.vl.pvbrv.vvMvl" => "__builtin_ve_vl_pvbrv_vvMvl", - "llvm.ve.vl.pvbrv.vvl" => "__builtin_ve_vl_pvbrv_vvl", - "llvm.ve.vl.pvbrv.vvvl" => "__builtin_ve_vl_pvbrv_vvvl", - "llvm.ve.vl.pvbrvlo.vvl" => "__builtin_ve_vl_pvbrvlo_vvl", - "llvm.ve.vl.pvbrvlo.vvmvl" => "__builtin_ve_vl_pvbrvlo_vvmvl", - "llvm.ve.vl.pvbrvlo.vvvl" => "__builtin_ve_vl_pvbrvlo_vvvl", - "llvm.ve.vl.pvbrvup.vvl" => "__builtin_ve_vl_pvbrvup_vvl", - "llvm.ve.vl.pvbrvup.vvmvl" => "__builtin_ve_vl_pvbrvup_vvmvl", - "llvm.ve.vl.pvbrvup.vvvl" => "__builtin_ve_vl_pvbrvup_vvvl", - "llvm.ve.vl.pvcmps.vsvMvl" => "__builtin_ve_vl_pvcmps_vsvMvl", - "llvm.ve.vl.pvcmps.vsvl" => "__builtin_ve_vl_pvcmps_vsvl", - "llvm.ve.vl.pvcmps.vsvvl" => "__builtin_ve_vl_pvcmps_vsvvl", - "llvm.ve.vl.pvcmps.vvvMvl" => "__builtin_ve_vl_pvcmps_vvvMvl", - "llvm.ve.vl.pvcmps.vvvl" => "__builtin_ve_vl_pvcmps_vvvl", - "llvm.ve.vl.pvcmps.vvvvl" => "__builtin_ve_vl_pvcmps_vvvvl", - "llvm.ve.vl.pvcmpu.vsvMvl" => "__builtin_ve_vl_pvcmpu_vsvMvl", - "llvm.ve.vl.pvcmpu.vsvl" => "__builtin_ve_vl_pvcmpu_vsvl", - "llvm.ve.vl.pvcmpu.vsvvl" => "__builtin_ve_vl_pvcmpu_vsvvl", - "llvm.ve.vl.pvcmpu.vvvMvl" => "__builtin_ve_vl_pvcmpu_vvvMvl", - "llvm.ve.vl.pvcmpu.vvvl" => "__builtin_ve_vl_pvcmpu_vvvl", - "llvm.ve.vl.pvcmpu.vvvvl" => "__builtin_ve_vl_pvcmpu_vvvvl", - "llvm.ve.vl.pvcvtsw.vvl" => "__builtin_ve_vl_pvcvtsw_vvl", - "llvm.ve.vl.pvcvtsw.vvvl" => "__builtin_ve_vl_pvcvtsw_vvvl", - "llvm.ve.vl.pvcvtws.vvMvl" => "__builtin_ve_vl_pvcvtws_vvMvl", - "llvm.ve.vl.pvcvtws.vvl" => "__builtin_ve_vl_pvcvtws_vvl", - "llvm.ve.vl.pvcvtws.vvvl" => "__builtin_ve_vl_pvcvtws_vvvl", - "llvm.ve.vl.pvcvtwsrz.vvMvl" => "__builtin_ve_vl_pvcvtwsrz_vvMvl", - "llvm.ve.vl.pvcvtwsrz.vvl" => "__builtin_ve_vl_pvcvtwsrz_vvl", - "llvm.ve.vl.pvcvtwsrz.vvvl" => "__builtin_ve_vl_pvcvtwsrz_vvvl", - "llvm.ve.vl.pveqv.vsvMvl" => "__builtin_ve_vl_pveqv_vsvMvl", - "llvm.ve.vl.pveqv.vsvl" => "__builtin_ve_vl_pveqv_vsvl", - "llvm.ve.vl.pveqv.vsvvl" => "__builtin_ve_vl_pveqv_vsvvl", - 
"llvm.ve.vl.pveqv.vvvMvl" => "__builtin_ve_vl_pveqv_vvvMvl", - "llvm.ve.vl.pveqv.vvvl" => "__builtin_ve_vl_pveqv_vvvl", - "llvm.ve.vl.pveqv.vvvvl" => "__builtin_ve_vl_pveqv_vvvvl", - "llvm.ve.vl.pvfadd.vsvMvl" => "__builtin_ve_vl_pvfadd_vsvMvl", - "llvm.ve.vl.pvfadd.vsvl" => "__builtin_ve_vl_pvfadd_vsvl", - "llvm.ve.vl.pvfadd.vsvvl" => "__builtin_ve_vl_pvfadd_vsvvl", - "llvm.ve.vl.pvfadd.vvvMvl" => "__builtin_ve_vl_pvfadd_vvvMvl", - "llvm.ve.vl.pvfadd.vvvl" => "__builtin_ve_vl_pvfadd_vvvl", - "llvm.ve.vl.pvfadd.vvvvl" => "__builtin_ve_vl_pvfadd_vvvvl", - "llvm.ve.vl.pvfcmp.vsvMvl" => "__builtin_ve_vl_pvfcmp_vsvMvl", - "llvm.ve.vl.pvfcmp.vsvl" => "__builtin_ve_vl_pvfcmp_vsvl", - "llvm.ve.vl.pvfcmp.vsvvl" => "__builtin_ve_vl_pvfcmp_vsvvl", - "llvm.ve.vl.pvfcmp.vvvMvl" => "__builtin_ve_vl_pvfcmp_vvvMvl", - "llvm.ve.vl.pvfcmp.vvvl" => "__builtin_ve_vl_pvfcmp_vvvl", - "llvm.ve.vl.pvfcmp.vvvvl" => "__builtin_ve_vl_pvfcmp_vvvvl", - "llvm.ve.vl.pvfmad.vsvvMvl" => "__builtin_ve_vl_pvfmad_vsvvMvl", - "llvm.ve.vl.pvfmad.vsvvl" => "__builtin_ve_vl_pvfmad_vsvvl", - "llvm.ve.vl.pvfmad.vsvvvl" => "__builtin_ve_vl_pvfmad_vsvvvl", - "llvm.ve.vl.pvfmad.vvsvMvl" => "__builtin_ve_vl_pvfmad_vvsvMvl", - "llvm.ve.vl.pvfmad.vvsvl" => "__builtin_ve_vl_pvfmad_vvsvl", - "llvm.ve.vl.pvfmad.vvsvvl" => "__builtin_ve_vl_pvfmad_vvsvvl", - "llvm.ve.vl.pvfmad.vvvvMvl" => "__builtin_ve_vl_pvfmad_vvvvMvl", - "llvm.ve.vl.pvfmad.vvvvl" => "__builtin_ve_vl_pvfmad_vvvvl", - "llvm.ve.vl.pvfmad.vvvvvl" => "__builtin_ve_vl_pvfmad_vvvvvl", - "llvm.ve.vl.pvfmax.vsvMvl" => "__builtin_ve_vl_pvfmax_vsvMvl", - "llvm.ve.vl.pvfmax.vsvl" => "__builtin_ve_vl_pvfmax_vsvl", - "llvm.ve.vl.pvfmax.vsvvl" => "__builtin_ve_vl_pvfmax_vsvvl", - "llvm.ve.vl.pvfmax.vvvMvl" => "__builtin_ve_vl_pvfmax_vvvMvl", - "llvm.ve.vl.pvfmax.vvvl" => "__builtin_ve_vl_pvfmax_vvvl", - "llvm.ve.vl.pvfmax.vvvvl" => "__builtin_ve_vl_pvfmax_vvvvl", - "llvm.ve.vl.pvfmin.vsvMvl" => "__builtin_ve_vl_pvfmin_vsvMvl", - "llvm.ve.vl.pvfmin.vsvl" => "__builtin_ve_vl_pvfmin_vsvl", - "llvm.ve.vl.pvfmin.vsvvl" => "__builtin_ve_vl_pvfmin_vsvvl", - "llvm.ve.vl.pvfmin.vvvMvl" => "__builtin_ve_vl_pvfmin_vvvMvl", - "llvm.ve.vl.pvfmin.vvvl" => "__builtin_ve_vl_pvfmin_vvvl", - "llvm.ve.vl.pvfmin.vvvvl" => "__builtin_ve_vl_pvfmin_vvvvl", - "llvm.ve.vl.pvfmkaf.Ml" => "__builtin_ve_vl_pvfmkaf_Ml", - "llvm.ve.vl.pvfmkat.Ml" => "__builtin_ve_vl_pvfmkat_Ml", - "llvm.ve.vl.pvfmkseq.MvMl" => "__builtin_ve_vl_pvfmkseq_MvMl", - "llvm.ve.vl.pvfmkseq.Mvl" => "__builtin_ve_vl_pvfmkseq_Mvl", - "llvm.ve.vl.pvfmkseqnan.MvMl" => "__builtin_ve_vl_pvfmkseqnan_MvMl", - "llvm.ve.vl.pvfmkseqnan.Mvl" => "__builtin_ve_vl_pvfmkseqnan_Mvl", - "llvm.ve.vl.pvfmksge.MvMl" => "__builtin_ve_vl_pvfmksge_MvMl", - "llvm.ve.vl.pvfmksge.Mvl" => "__builtin_ve_vl_pvfmksge_Mvl", - "llvm.ve.vl.pvfmksgenan.MvMl" => "__builtin_ve_vl_pvfmksgenan_MvMl", - "llvm.ve.vl.pvfmksgenan.Mvl" => "__builtin_ve_vl_pvfmksgenan_Mvl", - "llvm.ve.vl.pvfmksgt.MvMl" => "__builtin_ve_vl_pvfmksgt_MvMl", - "llvm.ve.vl.pvfmksgt.Mvl" => "__builtin_ve_vl_pvfmksgt_Mvl", - "llvm.ve.vl.pvfmksgtnan.MvMl" => "__builtin_ve_vl_pvfmksgtnan_MvMl", - "llvm.ve.vl.pvfmksgtnan.Mvl" => "__builtin_ve_vl_pvfmksgtnan_Mvl", - "llvm.ve.vl.pvfmksle.MvMl" => "__builtin_ve_vl_pvfmksle_MvMl", - "llvm.ve.vl.pvfmksle.Mvl" => "__builtin_ve_vl_pvfmksle_Mvl", - "llvm.ve.vl.pvfmkslenan.MvMl" => "__builtin_ve_vl_pvfmkslenan_MvMl", - "llvm.ve.vl.pvfmkslenan.Mvl" => "__builtin_ve_vl_pvfmkslenan_Mvl", - "llvm.ve.vl.pvfmksloeq.mvl" => "__builtin_ve_vl_pvfmksloeq_mvl", - 
"llvm.ve.vl.pvfmksloeq.mvml" => "__builtin_ve_vl_pvfmksloeq_mvml", - "llvm.ve.vl.pvfmksloeqnan.mvl" => "__builtin_ve_vl_pvfmksloeqnan_mvl", - "llvm.ve.vl.pvfmksloeqnan.mvml" => "__builtin_ve_vl_pvfmksloeqnan_mvml", - "llvm.ve.vl.pvfmksloge.mvl" => "__builtin_ve_vl_pvfmksloge_mvl", - "llvm.ve.vl.pvfmksloge.mvml" => "__builtin_ve_vl_pvfmksloge_mvml", - "llvm.ve.vl.pvfmkslogenan.mvl" => "__builtin_ve_vl_pvfmkslogenan_mvl", - "llvm.ve.vl.pvfmkslogenan.mvml" => "__builtin_ve_vl_pvfmkslogenan_mvml", - "llvm.ve.vl.pvfmkslogt.mvl" => "__builtin_ve_vl_pvfmkslogt_mvl", - "llvm.ve.vl.pvfmkslogt.mvml" => "__builtin_ve_vl_pvfmkslogt_mvml", - "llvm.ve.vl.pvfmkslogtnan.mvl" => "__builtin_ve_vl_pvfmkslogtnan_mvl", - "llvm.ve.vl.pvfmkslogtnan.mvml" => "__builtin_ve_vl_pvfmkslogtnan_mvml", - "llvm.ve.vl.pvfmkslole.mvl" => "__builtin_ve_vl_pvfmkslole_mvl", - "llvm.ve.vl.pvfmkslole.mvml" => "__builtin_ve_vl_pvfmkslole_mvml", - "llvm.ve.vl.pvfmkslolenan.mvl" => "__builtin_ve_vl_pvfmkslolenan_mvl", - "llvm.ve.vl.pvfmkslolenan.mvml" => "__builtin_ve_vl_pvfmkslolenan_mvml", - "llvm.ve.vl.pvfmkslolt.mvl" => "__builtin_ve_vl_pvfmkslolt_mvl", - "llvm.ve.vl.pvfmkslolt.mvml" => "__builtin_ve_vl_pvfmkslolt_mvml", - "llvm.ve.vl.pvfmksloltnan.mvl" => "__builtin_ve_vl_pvfmksloltnan_mvl", - "llvm.ve.vl.pvfmksloltnan.mvml" => "__builtin_ve_vl_pvfmksloltnan_mvml", - "llvm.ve.vl.pvfmkslonan.mvl" => "__builtin_ve_vl_pvfmkslonan_mvl", - "llvm.ve.vl.pvfmkslonan.mvml" => "__builtin_ve_vl_pvfmkslonan_mvml", - "llvm.ve.vl.pvfmkslone.mvl" => "__builtin_ve_vl_pvfmkslone_mvl", - "llvm.ve.vl.pvfmkslone.mvml" => "__builtin_ve_vl_pvfmkslone_mvml", - "llvm.ve.vl.pvfmkslonenan.mvl" => "__builtin_ve_vl_pvfmkslonenan_mvl", - "llvm.ve.vl.pvfmkslonenan.mvml" => "__builtin_ve_vl_pvfmkslonenan_mvml", - "llvm.ve.vl.pvfmkslonum.mvl" => "__builtin_ve_vl_pvfmkslonum_mvl", - "llvm.ve.vl.pvfmkslonum.mvml" => "__builtin_ve_vl_pvfmkslonum_mvml", - "llvm.ve.vl.pvfmkslt.MvMl" => "__builtin_ve_vl_pvfmkslt_MvMl", - "llvm.ve.vl.pvfmkslt.Mvl" => "__builtin_ve_vl_pvfmkslt_Mvl", - "llvm.ve.vl.pvfmksltnan.MvMl" => "__builtin_ve_vl_pvfmksltnan_MvMl", - "llvm.ve.vl.pvfmksltnan.Mvl" => "__builtin_ve_vl_pvfmksltnan_Mvl", - "llvm.ve.vl.pvfmksnan.MvMl" => "__builtin_ve_vl_pvfmksnan_MvMl", - "llvm.ve.vl.pvfmksnan.Mvl" => "__builtin_ve_vl_pvfmksnan_Mvl", - "llvm.ve.vl.pvfmksne.MvMl" => "__builtin_ve_vl_pvfmksne_MvMl", - "llvm.ve.vl.pvfmksne.Mvl" => "__builtin_ve_vl_pvfmksne_Mvl", - "llvm.ve.vl.pvfmksnenan.MvMl" => "__builtin_ve_vl_pvfmksnenan_MvMl", - "llvm.ve.vl.pvfmksnenan.Mvl" => "__builtin_ve_vl_pvfmksnenan_Mvl", - "llvm.ve.vl.pvfmksnum.MvMl" => "__builtin_ve_vl_pvfmksnum_MvMl", - "llvm.ve.vl.pvfmksnum.Mvl" => "__builtin_ve_vl_pvfmksnum_Mvl", - "llvm.ve.vl.pvfmksupeq.mvl" => "__builtin_ve_vl_pvfmksupeq_mvl", - "llvm.ve.vl.pvfmksupeq.mvml" => "__builtin_ve_vl_pvfmksupeq_mvml", - "llvm.ve.vl.pvfmksupeqnan.mvl" => "__builtin_ve_vl_pvfmksupeqnan_mvl", - "llvm.ve.vl.pvfmksupeqnan.mvml" => "__builtin_ve_vl_pvfmksupeqnan_mvml", - "llvm.ve.vl.pvfmksupge.mvl" => "__builtin_ve_vl_pvfmksupge_mvl", - "llvm.ve.vl.pvfmksupge.mvml" => "__builtin_ve_vl_pvfmksupge_mvml", - "llvm.ve.vl.pvfmksupgenan.mvl" => "__builtin_ve_vl_pvfmksupgenan_mvl", - "llvm.ve.vl.pvfmksupgenan.mvml" => "__builtin_ve_vl_pvfmksupgenan_mvml", - "llvm.ve.vl.pvfmksupgt.mvl" => "__builtin_ve_vl_pvfmksupgt_mvl", - "llvm.ve.vl.pvfmksupgt.mvml" => "__builtin_ve_vl_pvfmksupgt_mvml", - "llvm.ve.vl.pvfmksupgtnan.mvl" => "__builtin_ve_vl_pvfmksupgtnan_mvl", - "llvm.ve.vl.pvfmksupgtnan.mvml" => 
"__builtin_ve_vl_pvfmksupgtnan_mvml", - "llvm.ve.vl.pvfmksuple.mvl" => "__builtin_ve_vl_pvfmksuple_mvl", - "llvm.ve.vl.pvfmksuple.mvml" => "__builtin_ve_vl_pvfmksuple_mvml", - "llvm.ve.vl.pvfmksuplenan.mvl" => "__builtin_ve_vl_pvfmksuplenan_mvl", - "llvm.ve.vl.pvfmksuplenan.mvml" => "__builtin_ve_vl_pvfmksuplenan_mvml", - "llvm.ve.vl.pvfmksuplt.mvl" => "__builtin_ve_vl_pvfmksuplt_mvl", - "llvm.ve.vl.pvfmksuplt.mvml" => "__builtin_ve_vl_pvfmksuplt_mvml", - "llvm.ve.vl.pvfmksupltnan.mvl" => "__builtin_ve_vl_pvfmksupltnan_mvl", - "llvm.ve.vl.pvfmksupltnan.mvml" => "__builtin_ve_vl_pvfmksupltnan_mvml", - "llvm.ve.vl.pvfmksupnan.mvl" => "__builtin_ve_vl_pvfmksupnan_mvl", - "llvm.ve.vl.pvfmksupnan.mvml" => "__builtin_ve_vl_pvfmksupnan_mvml", - "llvm.ve.vl.pvfmksupne.mvl" => "__builtin_ve_vl_pvfmksupne_mvl", - "llvm.ve.vl.pvfmksupne.mvml" => "__builtin_ve_vl_pvfmksupne_mvml", - "llvm.ve.vl.pvfmksupnenan.mvl" => "__builtin_ve_vl_pvfmksupnenan_mvl", - "llvm.ve.vl.pvfmksupnenan.mvml" => "__builtin_ve_vl_pvfmksupnenan_mvml", - "llvm.ve.vl.pvfmksupnum.mvl" => "__builtin_ve_vl_pvfmksupnum_mvl", - "llvm.ve.vl.pvfmksupnum.mvml" => "__builtin_ve_vl_pvfmksupnum_mvml", - "llvm.ve.vl.pvfmkweq.MvMl" => "__builtin_ve_vl_pvfmkweq_MvMl", - "llvm.ve.vl.pvfmkweq.Mvl" => "__builtin_ve_vl_pvfmkweq_Mvl", - "llvm.ve.vl.pvfmkweqnan.MvMl" => "__builtin_ve_vl_pvfmkweqnan_MvMl", - "llvm.ve.vl.pvfmkweqnan.Mvl" => "__builtin_ve_vl_pvfmkweqnan_Mvl", - "llvm.ve.vl.pvfmkwge.MvMl" => "__builtin_ve_vl_pvfmkwge_MvMl", - "llvm.ve.vl.pvfmkwge.Mvl" => "__builtin_ve_vl_pvfmkwge_Mvl", - "llvm.ve.vl.pvfmkwgenan.MvMl" => "__builtin_ve_vl_pvfmkwgenan_MvMl", - "llvm.ve.vl.pvfmkwgenan.Mvl" => "__builtin_ve_vl_pvfmkwgenan_Mvl", - "llvm.ve.vl.pvfmkwgt.MvMl" => "__builtin_ve_vl_pvfmkwgt_MvMl", - "llvm.ve.vl.pvfmkwgt.Mvl" => "__builtin_ve_vl_pvfmkwgt_Mvl", - "llvm.ve.vl.pvfmkwgtnan.MvMl" => "__builtin_ve_vl_pvfmkwgtnan_MvMl", - "llvm.ve.vl.pvfmkwgtnan.Mvl" => "__builtin_ve_vl_pvfmkwgtnan_Mvl", - "llvm.ve.vl.pvfmkwle.MvMl" => "__builtin_ve_vl_pvfmkwle_MvMl", - "llvm.ve.vl.pvfmkwle.Mvl" => "__builtin_ve_vl_pvfmkwle_Mvl", - "llvm.ve.vl.pvfmkwlenan.MvMl" => "__builtin_ve_vl_pvfmkwlenan_MvMl", - "llvm.ve.vl.pvfmkwlenan.Mvl" => "__builtin_ve_vl_pvfmkwlenan_Mvl", - "llvm.ve.vl.pvfmkwloeq.mvl" => "__builtin_ve_vl_pvfmkwloeq_mvl", - "llvm.ve.vl.pvfmkwloeq.mvml" => "__builtin_ve_vl_pvfmkwloeq_mvml", - "llvm.ve.vl.pvfmkwloeqnan.mvl" => "__builtin_ve_vl_pvfmkwloeqnan_mvl", - "llvm.ve.vl.pvfmkwloeqnan.mvml" => "__builtin_ve_vl_pvfmkwloeqnan_mvml", - "llvm.ve.vl.pvfmkwloge.mvl" => "__builtin_ve_vl_pvfmkwloge_mvl", - "llvm.ve.vl.pvfmkwloge.mvml" => "__builtin_ve_vl_pvfmkwloge_mvml", - "llvm.ve.vl.pvfmkwlogenan.mvl" => "__builtin_ve_vl_pvfmkwlogenan_mvl", - "llvm.ve.vl.pvfmkwlogenan.mvml" => "__builtin_ve_vl_pvfmkwlogenan_mvml", - "llvm.ve.vl.pvfmkwlogt.mvl" => "__builtin_ve_vl_pvfmkwlogt_mvl", - "llvm.ve.vl.pvfmkwlogt.mvml" => "__builtin_ve_vl_pvfmkwlogt_mvml", - "llvm.ve.vl.pvfmkwlogtnan.mvl" => "__builtin_ve_vl_pvfmkwlogtnan_mvl", - "llvm.ve.vl.pvfmkwlogtnan.mvml" => "__builtin_ve_vl_pvfmkwlogtnan_mvml", - "llvm.ve.vl.pvfmkwlole.mvl" => "__builtin_ve_vl_pvfmkwlole_mvl", - "llvm.ve.vl.pvfmkwlole.mvml" => "__builtin_ve_vl_pvfmkwlole_mvml", - "llvm.ve.vl.pvfmkwlolenan.mvl" => "__builtin_ve_vl_pvfmkwlolenan_mvl", - "llvm.ve.vl.pvfmkwlolenan.mvml" => "__builtin_ve_vl_pvfmkwlolenan_mvml", - "llvm.ve.vl.pvfmkwlolt.mvl" => "__builtin_ve_vl_pvfmkwlolt_mvl", - "llvm.ve.vl.pvfmkwlolt.mvml" => "__builtin_ve_vl_pvfmkwlolt_mvml", - "llvm.ve.vl.pvfmkwloltnan.mvl" => 
"__builtin_ve_vl_pvfmkwloltnan_mvl", - "llvm.ve.vl.pvfmkwloltnan.mvml" => "__builtin_ve_vl_pvfmkwloltnan_mvml", - "llvm.ve.vl.pvfmkwlonan.mvl" => "__builtin_ve_vl_pvfmkwlonan_mvl", - "llvm.ve.vl.pvfmkwlonan.mvml" => "__builtin_ve_vl_pvfmkwlonan_mvml", - "llvm.ve.vl.pvfmkwlone.mvl" => "__builtin_ve_vl_pvfmkwlone_mvl", - "llvm.ve.vl.pvfmkwlone.mvml" => "__builtin_ve_vl_pvfmkwlone_mvml", - "llvm.ve.vl.pvfmkwlonenan.mvl" => "__builtin_ve_vl_pvfmkwlonenan_mvl", - "llvm.ve.vl.pvfmkwlonenan.mvml" => "__builtin_ve_vl_pvfmkwlonenan_mvml", - "llvm.ve.vl.pvfmkwlonum.mvl" => "__builtin_ve_vl_pvfmkwlonum_mvl", - "llvm.ve.vl.pvfmkwlonum.mvml" => "__builtin_ve_vl_pvfmkwlonum_mvml", - "llvm.ve.vl.pvfmkwlt.MvMl" => "__builtin_ve_vl_pvfmkwlt_MvMl", - "llvm.ve.vl.pvfmkwlt.Mvl" => "__builtin_ve_vl_pvfmkwlt_Mvl", - "llvm.ve.vl.pvfmkwltnan.MvMl" => "__builtin_ve_vl_pvfmkwltnan_MvMl", - "llvm.ve.vl.pvfmkwltnan.Mvl" => "__builtin_ve_vl_pvfmkwltnan_Mvl", - "llvm.ve.vl.pvfmkwnan.MvMl" => "__builtin_ve_vl_pvfmkwnan_MvMl", - "llvm.ve.vl.pvfmkwnan.Mvl" => "__builtin_ve_vl_pvfmkwnan_Mvl", - "llvm.ve.vl.pvfmkwne.MvMl" => "__builtin_ve_vl_pvfmkwne_MvMl", - "llvm.ve.vl.pvfmkwne.Mvl" => "__builtin_ve_vl_pvfmkwne_Mvl", - "llvm.ve.vl.pvfmkwnenan.MvMl" => "__builtin_ve_vl_pvfmkwnenan_MvMl", - "llvm.ve.vl.pvfmkwnenan.Mvl" => "__builtin_ve_vl_pvfmkwnenan_Mvl", - "llvm.ve.vl.pvfmkwnum.MvMl" => "__builtin_ve_vl_pvfmkwnum_MvMl", - "llvm.ve.vl.pvfmkwnum.Mvl" => "__builtin_ve_vl_pvfmkwnum_Mvl", - "llvm.ve.vl.pvfmkwupeq.mvl" => "__builtin_ve_vl_pvfmkwupeq_mvl", - "llvm.ve.vl.pvfmkwupeq.mvml" => "__builtin_ve_vl_pvfmkwupeq_mvml", - "llvm.ve.vl.pvfmkwupeqnan.mvl" => "__builtin_ve_vl_pvfmkwupeqnan_mvl", - "llvm.ve.vl.pvfmkwupeqnan.mvml" => "__builtin_ve_vl_pvfmkwupeqnan_mvml", - "llvm.ve.vl.pvfmkwupge.mvl" => "__builtin_ve_vl_pvfmkwupge_mvl", - "llvm.ve.vl.pvfmkwupge.mvml" => "__builtin_ve_vl_pvfmkwupge_mvml", - "llvm.ve.vl.pvfmkwupgenan.mvl" => "__builtin_ve_vl_pvfmkwupgenan_mvl", - "llvm.ve.vl.pvfmkwupgenan.mvml" => "__builtin_ve_vl_pvfmkwupgenan_mvml", - "llvm.ve.vl.pvfmkwupgt.mvl" => "__builtin_ve_vl_pvfmkwupgt_mvl", - "llvm.ve.vl.pvfmkwupgt.mvml" => "__builtin_ve_vl_pvfmkwupgt_mvml", - "llvm.ve.vl.pvfmkwupgtnan.mvl" => "__builtin_ve_vl_pvfmkwupgtnan_mvl", - "llvm.ve.vl.pvfmkwupgtnan.mvml" => "__builtin_ve_vl_pvfmkwupgtnan_mvml", - "llvm.ve.vl.pvfmkwuple.mvl" => "__builtin_ve_vl_pvfmkwuple_mvl", - "llvm.ve.vl.pvfmkwuple.mvml" => "__builtin_ve_vl_pvfmkwuple_mvml", - "llvm.ve.vl.pvfmkwuplenan.mvl" => "__builtin_ve_vl_pvfmkwuplenan_mvl", - "llvm.ve.vl.pvfmkwuplenan.mvml" => "__builtin_ve_vl_pvfmkwuplenan_mvml", - "llvm.ve.vl.pvfmkwuplt.mvl" => "__builtin_ve_vl_pvfmkwuplt_mvl", - "llvm.ve.vl.pvfmkwuplt.mvml" => "__builtin_ve_vl_pvfmkwuplt_mvml", - "llvm.ve.vl.pvfmkwupltnan.mvl" => "__builtin_ve_vl_pvfmkwupltnan_mvl", - "llvm.ve.vl.pvfmkwupltnan.mvml" => "__builtin_ve_vl_pvfmkwupltnan_mvml", - "llvm.ve.vl.pvfmkwupnan.mvl" => "__builtin_ve_vl_pvfmkwupnan_mvl", - "llvm.ve.vl.pvfmkwupnan.mvml" => "__builtin_ve_vl_pvfmkwupnan_mvml", - "llvm.ve.vl.pvfmkwupne.mvl" => "__builtin_ve_vl_pvfmkwupne_mvl", - "llvm.ve.vl.pvfmkwupne.mvml" => "__builtin_ve_vl_pvfmkwupne_mvml", - "llvm.ve.vl.pvfmkwupnenan.mvl" => "__builtin_ve_vl_pvfmkwupnenan_mvl", - "llvm.ve.vl.pvfmkwupnenan.mvml" => "__builtin_ve_vl_pvfmkwupnenan_mvml", - "llvm.ve.vl.pvfmkwupnum.mvl" => "__builtin_ve_vl_pvfmkwupnum_mvl", - "llvm.ve.vl.pvfmkwupnum.mvml" => "__builtin_ve_vl_pvfmkwupnum_mvml", - "llvm.ve.vl.pvfmsb.vsvvMvl" => "__builtin_ve_vl_pvfmsb_vsvvMvl", - "llvm.ve.vl.pvfmsb.vsvvl" 
=> "__builtin_ve_vl_pvfmsb_vsvvl", - "llvm.ve.vl.pvfmsb.vsvvvl" => "__builtin_ve_vl_pvfmsb_vsvvvl", - "llvm.ve.vl.pvfmsb.vvsvMvl" => "__builtin_ve_vl_pvfmsb_vvsvMvl", - "llvm.ve.vl.pvfmsb.vvsvl" => "__builtin_ve_vl_pvfmsb_vvsvl", - "llvm.ve.vl.pvfmsb.vvsvvl" => "__builtin_ve_vl_pvfmsb_vvsvvl", - "llvm.ve.vl.pvfmsb.vvvvMvl" => "__builtin_ve_vl_pvfmsb_vvvvMvl", - "llvm.ve.vl.pvfmsb.vvvvl" => "__builtin_ve_vl_pvfmsb_vvvvl", - "llvm.ve.vl.pvfmsb.vvvvvl" => "__builtin_ve_vl_pvfmsb_vvvvvl", - "llvm.ve.vl.pvfmul.vsvMvl" => "__builtin_ve_vl_pvfmul_vsvMvl", - "llvm.ve.vl.pvfmul.vsvl" => "__builtin_ve_vl_pvfmul_vsvl", - "llvm.ve.vl.pvfmul.vsvvl" => "__builtin_ve_vl_pvfmul_vsvvl", - "llvm.ve.vl.pvfmul.vvvMvl" => "__builtin_ve_vl_pvfmul_vvvMvl", - "llvm.ve.vl.pvfmul.vvvl" => "__builtin_ve_vl_pvfmul_vvvl", - "llvm.ve.vl.pvfmul.vvvvl" => "__builtin_ve_vl_pvfmul_vvvvl", - "llvm.ve.vl.pvfnmad.vsvvMvl" => "__builtin_ve_vl_pvfnmad_vsvvMvl", - "llvm.ve.vl.pvfnmad.vsvvl" => "__builtin_ve_vl_pvfnmad_vsvvl", - "llvm.ve.vl.pvfnmad.vsvvvl" => "__builtin_ve_vl_pvfnmad_vsvvvl", - "llvm.ve.vl.pvfnmad.vvsvMvl" => "__builtin_ve_vl_pvfnmad_vvsvMvl", - "llvm.ve.vl.pvfnmad.vvsvl" => "__builtin_ve_vl_pvfnmad_vvsvl", - "llvm.ve.vl.pvfnmad.vvsvvl" => "__builtin_ve_vl_pvfnmad_vvsvvl", - "llvm.ve.vl.pvfnmad.vvvvMvl" => "__builtin_ve_vl_pvfnmad_vvvvMvl", - "llvm.ve.vl.pvfnmad.vvvvl" => "__builtin_ve_vl_pvfnmad_vvvvl", - "llvm.ve.vl.pvfnmad.vvvvvl" => "__builtin_ve_vl_pvfnmad_vvvvvl", - "llvm.ve.vl.pvfnmsb.vsvvMvl" => "__builtin_ve_vl_pvfnmsb_vsvvMvl", - "llvm.ve.vl.pvfnmsb.vsvvl" => "__builtin_ve_vl_pvfnmsb_vsvvl", - "llvm.ve.vl.pvfnmsb.vsvvvl" => "__builtin_ve_vl_pvfnmsb_vsvvvl", - "llvm.ve.vl.pvfnmsb.vvsvMvl" => "__builtin_ve_vl_pvfnmsb_vvsvMvl", - "llvm.ve.vl.pvfnmsb.vvsvl" => "__builtin_ve_vl_pvfnmsb_vvsvl", - "llvm.ve.vl.pvfnmsb.vvsvvl" => "__builtin_ve_vl_pvfnmsb_vvsvvl", - "llvm.ve.vl.pvfnmsb.vvvvMvl" => "__builtin_ve_vl_pvfnmsb_vvvvMvl", - "llvm.ve.vl.pvfnmsb.vvvvl" => "__builtin_ve_vl_pvfnmsb_vvvvl", - "llvm.ve.vl.pvfnmsb.vvvvvl" => "__builtin_ve_vl_pvfnmsb_vvvvvl", - "llvm.ve.vl.pvfsub.vsvMvl" => "__builtin_ve_vl_pvfsub_vsvMvl", - "llvm.ve.vl.pvfsub.vsvl" => "__builtin_ve_vl_pvfsub_vsvl", - "llvm.ve.vl.pvfsub.vsvvl" => "__builtin_ve_vl_pvfsub_vsvvl", - "llvm.ve.vl.pvfsub.vvvMvl" => "__builtin_ve_vl_pvfsub_vvvMvl", - "llvm.ve.vl.pvfsub.vvvl" => "__builtin_ve_vl_pvfsub_vvvl", - "llvm.ve.vl.pvfsub.vvvvl" => "__builtin_ve_vl_pvfsub_vvvvl", - "llvm.ve.vl.pvldz.vvMvl" => "__builtin_ve_vl_pvldz_vvMvl", - "llvm.ve.vl.pvldz.vvl" => "__builtin_ve_vl_pvldz_vvl", - "llvm.ve.vl.pvldz.vvvl" => "__builtin_ve_vl_pvldz_vvvl", - "llvm.ve.vl.pvldzlo.vvl" => "__builtin_ve_vl_pvldzlo_vvl", - "llvm.ve.vl.pvldzlo.vvmvl" => "__builtin_ve_vl_pvldzlo_vvmvl", - "llvm.ve.vl.pvldzlo.vvvl" => "__builtin_ve_vl_pvldzlo_vvvl", - "llvm.ve.vl.pvldzup.vvl" => "__builtin_ve_vl_pvldzup_vvl", - "llvm.ve.vl.pvldzup.vvmvl" => "__builtin_ve_vl_pvldzup_vvmvl", - "llvm.ve.vl.pvldzup.vvvl" => "__builtin_ve_vl_pvldzup_vvvl", - "llvm.ve.vl.pvmaxs.vsvMvl" => "__builtin_ve_vl_pvmaxs_vsvMvl", - "llvm.ve.vl.pvmaxs.vsvl" => "__builtin_ve_vl_pvmaxs_vsvl", - "llvm.ve.vl.pvmaxs.vsvvl" => "__builtin_ve_vl_pvmaxs_vsvvl", - "llvm.ve.vl.pvmaxs.vvvMvl" => "__builtin_ve_vl_pvmaxs_vvvMvl", - "llvm.ve.vl.pvmaxs.vvvl" => "__builtin_ve_vl_pvmaxs_vvvl", - "llvm.ve.vl.pvmaxs.vvvvl" => "__builtin_ve_vl_pvmaxs_vvvvl", - "llvm.ve.vl.pvmins.vsvMvl" => "__builtin_ve_vl_pvmins_vsvMvl", - "llvm.ve.vl.pvmins.vsvl" => "__builtin_ve_vl_pvmins_vsvl", - "llvm.ve.vl.pvmins.vsvvl" => 
"__builtin_ve_vl_pvmins_vsvvl", - "llvm.ve.vl.pvmins.vvvMvl" => "__builtin_ve_vl_pvmins_vvvMvl", - "llvm.ve.vl.pvmins.vvvl" => "__builtin_ve_vl_pvmins_vvvl", - "llvm.ve.vl.pvmins.vvvvl" => "__builtin_ve_vl_pvmins_vvvvl", - "llvm.ve.vl.pvor.vsvMvl" => "__builtin_ve_vl_pvor_vsvMvl", - "llvm.ve.vl.pvor.vsvl" => "__builtin_ve_vl_pvor_vsvl", - "llvm.ve.vl.pvor.vsvvl" => "__builtin_ve_vl_pvor_vsvvl", - "llvm.ve.vl.pvor.vvvMvl" => "__builtin_ve_vl_pvor_vvvMvl", - "llvm.ve.vl.pvor.vvvl" => "__builtin_ve_vl_pvor_vvvl", - "llvm.ve.vl.pvor.vvvvl" => "__builtin_ve_vl_pvor_vvvvl", - "llvm.ve.vl.pvpcnt.vvMvl" => "__builtin_ve_vl_pvpcnt_vvMvl", - "llvm.ve.vl.pvpcnt.vvl" => "__builtin_ve_vl_pvpcnt_vvl", - "llvm.ve.vl.pvpcnt.vvvl" => "__builtin_ve_vl_pvpcnt_vvvl", - "llvm.ve.vl.pvpcntlo.vvl" => "__builtin_ve_vl_pvpcntlo_vvl", - "llvm.ve.vl.pvpcntlo.vvmvl" => "__builtin_ve_vl_pvpcntlo_vvmvl", - "llvm.ve.vl.pvpcntlo.vvvl" => "__builtin_ve_vl_pvpcntlo_vvvl", - "llvm.ve.vl.pvpcntup.vvl" => "__builtin_ve_vl_pvpcntup_vvl", - "llvm.ve.vl.pvpcntup.vvmvl" => "__builtin_ve_vl_pvpcntup_vvmvl", - "llvm.ve.vl.pvpcntup.vvvl" => "__builtin_ve_vl_pvpcntup_vvvl", - "llvm.ve.vl.pvrcp.vvl" => "__builtin_ve_vl_pvrcp_vvl", - "llvm.ve.vl.pvrcp.vvvl" => "__builtin_ve_vl_pvrcp_vvvl", - "llvm.ve.vl.pvrsqrt.vvl" => "__builtin_ve_vl_pvrsqrt_vvl", - "llvm.ve.vl.pvrsqrt.vvvl" => "__builtin_ve_vl_pvrsqrt_vvvl", - "llvm.ve.vl.pvrsqrtnex.vvl" => "__builtin_ve_vl_pvrsqrtnex_vvl", - "llvm.ve.vl.pvrsqrtnex.vvvl" => "__builtin_ve_vl_pvrsqrtnex_vvvl", - "llvm.ve.vl.pvseq.vl" => "__builtin_ve_vl_pvseq_vl", - "llvm.ve.vl.pvseq.vvl" => "__builtin_ve_vl_pvseq_vvl", - "llvm.ve.vl.pvseqlo.vl" => "__builtin_ve_vl_pvseqlo_vl", - "llvm.ve.vl.pvseqlo.vvl" => "__builtin_ve_vl_pvseqlo_vvl", - "llvm.ve.vl.pvsequp.vl" => "__builtin_ve_vl_pvsequp_vl", - "llvm.ve.vl.pvsequp.vvl" => "__builtin_ve_vl_pvsequp_vvl", - "llvm.ve.vl.pvsla.vvsMvl" => "__builtin_ve_vl_pvsla_vvsMvl", - "llvm.ve.vl.pvsla.vvsl" => "__builtin_ve_vl_pvsla_vvsl", - "llvm.ve.vl.pvsla.vvsvl" => "__builtin_ve_vl_pvsla_vvsvl", - "llvm.ve.vl.pvsla.vvvMvl" => "__builtin_ve_vl_pvsla_vvvMvl", - "llvm.ve.vl.pvsla.vvvl" => "__builtin_ve_vl_pvsla_vvvl", - "llvm.ve.vl.pvsla.vvvvl" => "__builtin_ve_vl_pvsla_vvvvl", - "llvm.ve.vl.pvsll.vvsMvl" => "__builtin_ve_vl_pvsll_vvsMvl", - "llvm.ve.vl.pvsll.vvsl" => "__builtin_ve_vl_pvsll_vvsl", - "llvm.ve.vl.pvsll.vvsvl" => "__builtin_ve_vl_pvsll_vvsvl", - "llvm.ve.vl.pvsll.vvvMvl" => "__builtin_ve_vl_pvsll_vvvMvl", - "llvm.ve.vl.pvsll.vvvl" => "__builtin_ve_vl_pvsll_vvvl", - "llvm.ve.vl.pvsll.vvvvl" => "__builtin_ve_vl_pvsll_vvvvl", - "llvm.ve.vl.pvsra.vvsMvl" => "__builtin_ve_vl_pvsra_vvsMvl", - "llvm.ve.vl.pvsra.vvsl" => "__builtin_ve_vl_pvsra_vvsl", - "llvm.ve.vl.pvsra.vvsvl" => "__builtin_ve_vl_pvsra_vvsvl", - "llvm.ve.vl.pvsra.vvvMvl" => "__builtin_ve_vl_pvsra_vvvMvl", - "llvm.ve.vl.pvsra.vvvl" => "__builtin_ve_vl_pvsra_vvvl", - "llvm.ve.vl.pvsra.vvvvl" => "__builtin_ve_vl_pvsra_vvvvl", - "llvm.ve.vl.pvsrl.vvsMvl" => "__builtin_ve_vl_pvsrl_vvsMvl", - "llvm.ve.vl.pvsrl.vvsl" => "__builtin_ve_vl_pvsrl_vvsl", - "llvm.ve.vl.pvsrl.vvsvl" => "__builtin_ve_vl_pvsrl_vvsvl", - "llvm.ve.vl.pvsrl.vvvMvl" => "__builtin_ve_vl_pvsrl_vvvMvl", - "llvm.ve.vl.pvsrl.vvvl" => "__builtin_ve_vl_pvsrl_vvvl", - "llvm.ve.vl.pvsrl.vvvvl" => "__builtin_ve_vl_pvsrl_vvvvl", - "llvm.ve.vl.pvsubs.vsvMvl" => "__builtin_ve_vl_pvsubs_vsvMvl", - "llvm.ve.vl.pvsubs.vsvl" => "__builtin_ve_vl_pvsubs_vsvl", - "llvm.ve.vl.pvsubs.vsvvl" => "__builtin_ve_vl_pvsubs_vsvvl", - 
"llvm.ve.vl.pvsubs.vvvMvl" => "__builtin_ve_vl_pvsubs_vvvMvl", - "llvm.ve.vl.pvsubs.vvvl" => "__builtin_ve_vl_pvsubs_vvvl", - "llvm.ve.vl.pvsubs.vvvvl" => "__builtin_ve_vl_pvsubs_vvvvl", - "llvm.ve.vl.pvsubu.vsvMvl" => "__builtin_ve_vl_pvsubu_vsvMvl", - "llvm.ve.vl.pvsubu.vsvl" => "__builtin_ve_vl_pvsubu_vsvl", - "llvm.ve.vl.pvsubu.vsvvl" => "__builtin_ve_vl_pvsubu_vsvvl", - "llvm.ve.vl.pvsubu.vvvMvl" => "__builtin_ve_vl_pvsubu_vvvMvl", - "llvm.ve.vl.pvsubu.vvvl" => "__builtin_ve_vl_pvsubu_vvvl", - "llvm.ve.vl.pvsubu.vvvvl" => "__builtin_ve_vl_pvsubu_vvvvl", - "llvm.ve.vl.pvxor.vsvMvl" => "__builtin_ve_vl_pvxor_vsvMvl", - "llvm.ve.vl.pvxor.vsvl" => "__builtin_ve_vl_pvxor_vsvl", - "llvm.ve.vl.pvxor.vsvvl" => "__builtin_ve_vl_pvxor_vsvvl", - "llvm.ve.vl.pvxor.vvvMvl" => "__builtin_ve_vl_pvxor_vvvMvl", - "llvm.ve.vl.pvxor.vvvl" => "__builtin_ve_vl_pvxor_vvvl", - "llvm.ve.vl.pvxor.vvvvl" => "__builtin_ve_vl_pvxor_vvvvl", - "llvm.ve.vl.scr.sss" => "__builtin_ve_vl_scr_sss", - "llvm.ve.vl.svm.sMs" => "__builtin_ve_vl_svm_sMs", - "llvm.ve.vl.svm.sms" => "__builtin_ve_vl_svm_sms", - "llvm.ve.vl.svob" => "__builtin_ve_vl_svob", - "llvm.ve.vl.tovm.sml" => "__builtin_ve_vl_tovm_sml", - "llvm.ve.vl.tscr.ssss" => "__builtin_ve_vl_tscr_ssss", - "llvm.ve.vl.vaddsl.vsvl" => "__builtin_ve_vl_vaddsl_vsvl", - "llvm.ve.vl.vaddsl.vsvmvl" => "__builtin_ve_vl_vaddsl_vsvmvl", - "llvm.ve.vl.vaddsl.vsvvl" => "__builtin_ve_vl_vaddsl_vsvvl", - "llvm.ve.vl.vaddsl.vvvl" => "__builtin_ve_vl_vaddsl_vvvl", - "llvm.ve.vl.vaddsl.vvvmvl" => "__builtin_ve_vl_vaddsl_vvvmvl", - "llvm.ve.vl.vaddsl.vvvvl" => "__builtin_ve_vl_vaddsl_vvvvl", - "llvm.ve.vl.vaddswsx.vsvl" => "__builtin_ve_vl_vaddswsx_vsvl", - "llvm.ve.vl.vaddswsx.vsvmvl" => "__builtin_ve_vl_vaddswsx_vsvmvl", - "llvm.ve.vl.vaddswsx.vsvvl" => "__builtin_ve_vl_vaddswsx_vsvvl", - "llvm.ve.vl.vaddswsx.vvvl" => "__builtin_ve_vl_vaddswsx_vvvl", - "llvm.ve.vl.vaddswsx.vvvmvl" => "__builtin_ve_vl_vaddswsx_vvvmvl", - "llvm.ve.vl.vaddswsx.vvvvl" => "__builtin_ve_vl_vaddswsx_vvvvl", - "llvm.ve.vl.vaddswzx.vsvl" => "__builtin_ve_vl_vaddswzx_vsvl", - "llvm.ve.vl.vaddswzx.vsvmvl" => "__builtin_ve_vl_vaddswzx_vsvmvl", - "llvm.ve.vl.vaddswzx.vsvvl" => "__builtin_ve_vl_vaddswzx_vsvvl", - "llvm.ve.vl.vaddswzx.vvvl" => "__builtin_ve_vl_vaddswzx_vvvl", - "llvm.ve.vl.vaddswzx.vvvmvl" => "__builtin_ve_vl_vaddswzx_vvvmvl", - "llvm.ve.vl.vaddswzx.vvvvl" => "__builtin_ve_vl_vaddswzx_vvvvl", - "llvm.ve.vl.vaddul.vsvl" => "__builtin_ve_vl_vaddul_vsvl", - "llvm.ve.vl.vaddul.vsvmvl" => "__builtin_ve_vl_vaddul_vsvmvl", - "llvm.ve.vl.vaddul.vsvvl" => "__builtin_ve_vl_vaddul_vsvvl", - "llvm.ve.vl.vaddul.vvvl" => "__builtin_ve_vl_vaddul_vvvl", - "llvm.ve.vl.vaddul.vvvmvl" => "__builtin_ve_vl_vaddul_vvvmvl", - "llvm.ve.vl.vaddul.vvvvl" => "__builtin_ve_vl_vaddul_vvvvl", - "llvm.ve.vl.vadduw.vsvl" => "__builtin_ve_vl_vadduw_vsvl", - "llvm.ve.vl.vadduw.vsvmvl" => "__builtin_ve_vl_vadduw_vsvmvl", - "llvm.ve.vl.vadduw.vsvvl" => "__builtin_ve_vl_vadduw_vsvvl", - "llvm.ve.vl.vadduw.vvvl" => "__builtin_ve_vl_vadduw_vvvl", - "llvm.ve.vl.vadduw.vvvmvl" => "__builtin_ve_vl_vadduw_vvvmvl", - "llvm.ve.vl.vadduw.vvvvl" => "__builtin_ve_vl_vadduw_vvvvl", - "llvm.ve.vl.vand.vsvl" => "__builtin_ve_vl_vand_vsvl", - "llvm.ve.vl.vand.vsvmvl" => "__builtin_ve_vl_vand_vsvmvl", - "llvm.ve.vl.vand.vsvvl" => "__builtin_ve_vl_vand_vsvvl", - "llvm.ve.vl.vand.vvvl" => "__builtin_ve_vl_vand_vvvl", - "llvm.ve.vl.vand.vvvmvl" => "__builtin_ve_vl_vand_vvvmvl", - "llvm.ve.vl.vand.vvvvl" => "__builtin_ve_vl_vand_vvvvl", - 
"llvm.ve.vl.vbrdd.vsl" => "__builtin_ve_vl_vbrdd_vsl", - "llvm.ve.vl.vbrdd.vsmvl" => "__builtin_ve_vl_vbrdd_vsmvl", - "llvm.ve.vl.vbrdd.vsvl" => "__builtin_ve_vl_vbrdd_vsvl", - "llvm.ve.vl.vbrdl.vsl" => "__builtin_ve_vl_vbrdl_vsl", - "llvm.ve.vl.vbrdl.vsmvl" => "__builtin_ve_vl_vbrdl_vsmvl", - "llvm.ve.vl.vbrdl.vsvl" => "__builtin_ve_vl_vbrdl_vsvl", - "llvm.ve.vl.vbrds.vsl" => "__builtin_ve_vl_vbrds_vsl", - "llvm.ve.vl.vbrds.vsmvl" => "__builtin_ve_vl_vbrds_vsmvl", - "llvm.ve.vl.vbrds.vsvl" => "__builtin_ve_vl_vbrds_vsvl", - "llvm.ve.vl.vbrdw.vsl" => "__builtin_ve_vl_vbrdw_vsl", - "llvm.ve.vl.vbrdw.vsmvl" => "__builtin_ve_vl_vbrdw_vsmvl", - "llvm.ve.vl.vbrdw.vsvl" => "__builtin_ve_vl_vbrdw_vsvl", - "llvm.ve.vl.vbrv.vvl" => "__builtin_ve_vl_vbrv_vvl", - "llvm.ve.vl.vbrv.vvmvl" => "__builtin_ve_vl_vbrv_vvmvl", - "llvm.ve.vl.vbrv.vvvl" => "__builtin_ve_vl_vbrv_vvvl", - "llvm.ve.vl.vcmpsl.vsvl" => "__builtin_ve_vl_vcmpsl_vsvl", - "llvm.ve.vl.vcmpsl.vsvmvl" => "__builtin_ve_vl_vcmpsl_vsvmvl", - "llvm.ve.vl.vcmpsl.vsvvl" => "__builtin_ve_vl_vcmpsl_vsvvl", - "llvm.ve.vl.vcmpsl.vvvl" => "__builtin_ve_vl_vcmpsl_vvvl", - "llvm.ve.vl.vcmpsl.vvvmvl" => "__builtin_ve_vl_vcmpsl_vvvmvl", - "llvm.ve.vl.vcmpsl.vvvvl" => "__builtin_ve_vl_vcmpsl_vvvvl", - "llvm.ve.vl.vcmpswsx.vsvl" => "__builtin_ve_vl_vcmpswsx_vsvl", - "llvm.ve.vl.vcmpswsx.vsvmvl" => "__builtin_ve_vl_vcmpswsx_vsvmvl", - "llvm.ve.vl.vcmpswsx.vsvvl" => "__builtin_ve_vl_vcmpswsx_vsvvl", - "llvm.ve.vl.vcmpswsx.vvvl" => "__builtin_ve_vl_vcmpswsx_vvvl", - "llvm.ve.vl.vcmpswsx.vvvmvl" => "__builtin_ve_vl_vcmpswsx_vvvmvl", - "llvm.ve.vl.vcmpswsx.vvvvl" => "__builtin_ve_vl_vcmpswsx_vvvvl", - "llvm.ve.vl.vcmpswzx.vsvl" => "__builtin_ve_vl_vcmpswzx_vsvl", - "llvm.ve.vl.vcmpswzx.vsvmvl" => "__builtin_ve_vl_vcmpswzx_vsvmvl", - "llvm.ve.vl.vcmpswzx.vsvvl" => "__builtin_ve_vl_vcmpswzx_vsvvl", - "llvm.ve.vl.vcmpswzx.vvvl" => "__builtin_ve_vl_vcmpswzx_vvvl", - "llvm.ve.vl.vcmpswzx.vvvmvl" => "__builtin_ve_vl_vcmpswzx_vvvmvl", - "llvm.ve.vl.vcmpswzx.vvvvl" => "__builtin_ve_vl_vcmpswzx_vvvvl", - "llvm.ve.vl.vcmpul.vsvl" => "__builtin_ve_vl_vcmpul_vsvl", - "llvm.ve.vl.vcmpul.vsvmvl" => "__builtin_ve_vl_vcmpul_vsvmvl", - "llvm.ve.vl.vcmpul.vsvvl" => "__builtin_ve_vl_vcmpul_vsvvl", - "llvm.ve.vl.vcmpul.vvvl" => "__builtin_ve_vl_vcmpul_vvvl", - "llvm.ve.vl.vcmpul.vvvmvl" => "__builtin_ve_vl_vcmpul_vvvmvl", - "llvm.ve.vl.vcmpul.vvvvl" => "__builtin_ve_vl_vcmpul_vvvvl", - "llvm.ve.vl.vcmpuw.vsvl" => "__builtin_ve_vl_vcmpuw_vsvl", - "llvm.ve.vl.vcmpuw.vsvmvl" => "__builtin_ve_vl_vcmpuw_vsvmvl", - "llvm.ve.vl.vcmpuw.vsvvl" => "__builtin_ve_vl_vcmpuw_vsvvl", - "llvm.ve.vl.vcmpuw.vvvl" => "__builtin_ve_vl_vcmpuw_vvvl", - "llvm.ve.vl.vcmpuw.vvvmvl" => "__builtin_ve_vl_vcmpuw_vvvmvl", - "llvm.ve.vl.vcmpuw.vvvvl" => "__builtin_ve_vl_vcmpuw_vvvvl", - "llvm.ve.vl.vcp.vvmvl" => "__builtin_ve_vl_vcp_vvmvl", - "llvm.ve.vl.vcvtdl.vvl" => "__builtin_ve_vl_vcvtdl_vvl", - "llvm.ve.vl.vcvtdl.vvvl" => "__builtin_ve_vl_vcvtdl_vvvl", - "llvm.ve.vl.vcvtds.vvl" => "__builtin_ve_vl_vcvtds_vvl", - "llvm.ve.vl.vcvtds.vvvl" => "__builtin_ve_vl_vcvtds_vvvl", - "llvm.ve.vl.vcvtdw.vvl" => "__builtin_ve_vl_vcvtdw_vvl", - "llvm.ve.vl.vcvtdw.vvvl" => "__builtin_ve_vl_vcvtdw_vvvl", - "llvm.ve.vl.vcvtld.vvl" => "__builtin_ve_vl_vcvtld_vvl", - "llvm.ve.vl.vcvtld.vvmvl" => "__builtin_ve_vl_vcvtld_vvmvl", - "llvm.ve.vl.vcvtld.vvvl" => "__builtin_ve_vl_vcvtld_vvvl", - "llvm.ve.vl.vcvtldrz.vvl" => "__builtin_ve_vl_vcvtldrz_vvl", - "llvm.ve.vl.vcvtldrz.vvmvl" => "__builtin_ve_vl_vcvtldrz_vvmvl", - 
"llvm.ve.vl.vcvtldrz.vvvl" => "__builtin_ve_vl_vcvtldrz_vvvl", - "llvm.ve.vl.vcvtsd.vvl" => "__builtin_ve_vl_vcvtsd_vvl", - "llvm.ve.vl.vcvtsd.vvvl" => "__builtin_ve_vl_vcvtsd_vvvl", - "llvm.ve.vl.vcvtsw.vvl" => "__builtin_ve_vl_vcvtsw_vvl", - "llvm.ve.vl.vcvtsw.vvvl" => "__builtin_ve_vl_vcvtsw_vvvl", - "llvm.ve.vl.vcvtwdsx.vvl" => "__builtin_ve_vl_vcvtwdsx_vvl", - "llvm.ve.vl.vcvtwdsx.vvmvl" => "__builtin_ve_vl_vcvtwdsx_vvmvl", - "llvm.ve.vl.vcvtwdsx.vvvl" => "__builtin_ve_vl_vcvtwdsx_vvvl", - "llvm.ve.vl.vcvtwdsxrz.vvl" => "__builtin_ve_vl_vcvtwdsxrz_vvl", - "llvm.ve.vl.vcvtwdsxrz.vvmvl" => "__builtin_ve_vl_vcvtwdsxrz_vvmvl", - "llvm.ve.vl.vcvtwdsxrz.vvvl" => "__builtin_ve_vl_vcvtwdsxrz_vvvl", - "llvm.ve.vl.vcvtwdzx.vvl" => "__builtin_ve_vl_vcvtwdzx_vvl", - "llvm.ve.vl.vcvtwdzx.vvmvl" => "__builtin_ve_vl_vcvtwdzx_vvmvl", - "llvm.ve.vl.vcvtwdzx.vvvl" => "__builtin_ve_vl_vcvtwdzx_vvvl", - "llvm.ve.vl.vcvtwdzxrz.vvl" => "__builtin_ve_vl_vcvtwdzxrz_vvl", - "llvm.ve.vl.vcvtwdzxrz.vvmvl" => "__builtin_ve_vl_vcvtwdzxrz_vvmvl", - "llvm.ve.vl.vcvtwdzxrz.vvvl" => "__builtin_ve_vl_vcvtwdzxrz_vvvl", - "llvm.ve.vl.vcvtwssx.vvl" => "__builtin_ve_vl_vcvtwssx_vvl", - "llvm.ve.vl.vcvtwssx.vvmvl" => "__builtin_ve_vl_vcvtwssx_vvmvl", - "llvm.ve.vl.vcvtwssx.vvvl" => "__builtin_ve_vl_vcvtwssx_vvvl", - "llvm.ve.vl.vcvtwssxrz.vvl" => "__builtin_ve_vl_vcvtwssxrz_vvl", - "llvm.ve.vl.vcvtwssxrz.vvmvl" => "__builtin_ve_vl_vcvtwssxrz_vvmvl", - "llvm.ve.vl.vcvtwssxrz.vvvl" => "__builtin_ve_vl_vcvtwssxrz_vvvl", - "llvm.ve.vl.vcvtwszx.vvl" => "__builtin_ve_vl_vcvtwszx_vvl", - "llvm.ve.vl.vcvtwszx.vvmvl" => "__builtin_ve_vl_vcvtwszx_vvmvl", - "llvm.ve.vl.vcvtwszx.vvvl" => "__builtin_ve_vl_vcvtwszx_vvvl", - "llvm.ve.vl.vcvtwszxrz.vvl" => "__builtin_ve_vl_vcvtwszxrz_vvl", - "llvm.ve.vl.vcvtwszxrz.vvmvl" => "__builtin_ve_vl_vcvtwszxrz_vvmvl", - "llvm.ve.vl.vcvtwszxrz.vvvl" => "__builtin_ve_vl_vcvtwszxrz_vvvl", - "llvm.ve.vl.vdivsl.vsvl" => "__builtin_ve_vl_vdivsl_vsvl", - "llvm.ve.vl.vdivsl.vsvmvl" => "__builtin_ve_vl_vdivsl_vsvmvl", - "llvm.ve.vl.vdivsl.vsvvl" => "__builtin_ve_vl_vdivsl_vsvvl", - "llvm.ve.vl.vdivsl.vvsl" => "__builtin_ve_vl_vdivsl_vvsl", - "llvm.ve.vl.vdivsl.vvsmvl" => "__builtin_ve_vl_vdivsl_vvsmvl", - "llvm.ve.vl.vdivsl.vvsvl" => "__builtin_ve_vl_vdivsl_vvsvl", - "llvm.ve.vl.vdivsl.vvvl" => "__builtin_ve_vl_vdivsl_vvvl", - "llvm.ve.vl.vdivsl.vvvmvl" => "__builtin_ve_vl_vdivsl_vvvmvl", - "llvm.ve.vl.vdivsl.vvvvl" => "__builtin_ve_vl_vdivsl_vvvvl", - "llvm.ve.vl.vdivswsx.vsvl" => "__builtin_ve_vl_vdivswsx_vsvl", - "llvm.ve.vl.vdivswsx.vsvmvl" => "__builtin_ve_vl_vdivswsx_vsvmvl", - "llvm.ve.vl.vdivswsx.vsvvl" => "__builtin_ve_vl_vdivswsx_vsvvl", - "llvm.ve.vl.vdivswsx.vvsl" => "__builtin_ve_vl_vdivswsx_vvsl", - "llvm.ve.vl.vdivswsx.vvsmvl" => "__builtin_ve_vl_vdivswsx_vvsmvl", - "llvm.ve.vl.vdivswsx.vvsvl" => "__builtin_ve_vl_vdivswsx_vvsvl", - "llvm.ve.vl.vdivswsx.vvvl" => "__builtin_ve_vl_vdivswsx_vvvl", - "llvm.ve.vl.vdivswsx.vvvmvl" => "__builtin_ve_vl_vdivswsx_vvvmvl", - "llvm.ve.vl.vdivswsx.vvvvl" => "__builtin_ve_vl_vdivswsx_vvvvl", - "llvm.ve.vl.vdivswzx.vsvl" => "__builtin_ve_vl_vdivswzx_vsvl", - "llvm.ve.vl.vdivswzx.vsvmvl" => "__builtin_ve_vl_vdivswzx_vsvmvl", - "llvm.ve.vl.vdivswzx.vsvvl" => "__builtin_ve_vl_vdivswzx_vsvvl", - "llvm.ve.vl.vdivswzx.vvsl" => "__builtin_ve_vl_vdivswzx_vvsl", - "llvm.ve.vl.vdivswzx.vvsmvl" => "__builtin_ve_vl_vdivswzx_vvsmvl", - "llvm.ve.vl.vdivswzx.vvsvl" => "__builtin_ve_vl_vdivswzx_vvsvl", - "llvm.ve.vl.vdivswzx.vvvl" => "__builtin_ve_vl_vdivswzx_vvvl", - 
"llvm.ve.vl.vdivswzx.vvvmvl" => "__builtin_ve_vl_vdivswzx_vvvmvl", - "llvm.ve.vl.vdivswzx.vvvvl" => "__builtin_ve_vl_vdivswzx_vvvvl", - "llvm.ve.vl.vdivul.vsvl" => "__builtin_ve_vl_vdivul_vsvl", - "llvm.ve.vl.vdivul.vsvmvl" => "__builtin_ve_vl_vdivul_vsvmvl", - "llvm.ve.vl.vdivul.vsvvl" => "__builtin_ve_vl_vdivul_vsvvl", - "llvm.ve.vl.vdivul.vvsl" => "__builtin_ve_vl_vdivul_vvsl", - "llvm.ve.vl.vdivul.vvsmvl" => "__builtin_ve_vl_vdivul_vvsmvl", - "llvm.ve.vl.vdivul.vvsvl" => "__builtin_ve_vl_vdivul_vvsvl", - "llvm.ve.vl.vdivul.vvvl" => "__builtin_ve_vl_vdivul_vvvl", - "llvm.ve.vl.vdivul.vvvmvl" => "__builtin_ve_vl_vdivul_vvvmvl", - "llvm.ve.vl.vdivul.vvvvl" => "__builtin_ve_vl_vdivul_vvvvl", - "llvm.ve.vl.vdivuw.vsvl" => "__builtin_ve_vl_vdivuw_vsvl", - "llvm.ve.vl.vdivuw.vsvmvl" => "__builtin_ve_vl_vdivuw_vsvmvl", - "llvm.ve.vl.vdivuw.vsvvl" => "__builtin_ve_vl_vdivuw_vsvvl", - "llvm.ve.vl.vdivuw.vvsl" => "__builtin_ve_vl_vdivuw_vvsl", - "llvm.ve.vl.vdivuw.vvsmvl" => "__builtin_ve_vl_vdivuw_vvsmvl", - "llvm.ve.vl.vdivuw.vvsvl" => "__builtin_ve_vl_vdivuw_vvsvl", - "llvm.ve.vl.vdivuw.vvvl" => "__builtin_ve_vl_vdivuw_vvvl", - "llvm.ve.vl.vdivuw.vvvmvl" => "__builtin_ve_vl_vdivuw_vvvmvl", - "llvm.ve.vl.vdivuw.vvvvl" => "__builtin_ve_vl_vdivuw_vvvvl", - "llvm.ve.vl.veqv.vsvl" => "__builtin_ve_vl_veqv_vsvl", - "llvm.ve.vl.veqv.vsvmvl" => "__builtin_ve_vl_veqv_vsvmvl", - "llvm.ve.vl.veqv.vsvvl" => "__builtin_ve_vl_veqv_vsvvl", - "llvm.ve.vl.veqv.vvvl" => "__builtin_ve_vl_veqv_vvvl", - "llvm.ve.vl.veqv.vvvmvl" => "__builtin_ve_vl_veqv_vvvmvl", - "llvm.ve.vl.veqv.vvvvl" => "__builtin_ve_vl_veqv_vvvvl", - "llvm.ve.vl.vex.vvmvl" => "__builtin_ve_vl_vex_vvmvl", - "llvm.ve.vl.vfaddd.vsvl" => "__builtin_ve_vl_vfaddd_vsvl", - "llvm.ve.vl.vfaddd.vsvmvl" => "__builtin_ve_vl_vfaddd_vsvmvl", - "llvm.ve.vl.vfaddd.vsvvl" => "__builtin_ve_vl_vfaddd_vsvvl", - "llvm.ve.vl.vfaddd.vvvl" => "__builtin_ve_vl_vfaddd_vvvl", - "llvm.ve.vl.vfaddd.vvvmvl" => "__builtin_ve_vl_vfaddd_vvvmvl", - "llvm.ve.vl.vfaddd.vvvvl" => "__builtin_ve_vl_vfaddd_vvvvl", - "llvm.ve.vl.vfadds.vsvl" => "__builtin_ve_vl_vfadds_vsvl", - "llvm.ve.vl.vfadds.vsvmvl" => "__builtin_ve_vl_vfadds_vsvmvl", - "llvm.ve.vl.vfadds.vsvvl" => "__builtin_ve_vl_vfadds_vsvvl", - "llvm.ve.vl.vfadds.vvvl" => "__builtin_ve_vl_vfadds_vvvl", - "llvm.ve.vl.vfadds.vvvmvl" => "__builtin_ve_vl_vfadds_vvvmvl", - "llvm.ve.vl.vfadds.vvvvl" => "__builtin_ve_vl_vfadds_vvvvl", - "llvm.ve.vl.vfcmpd.vsvl" => "__builtin_ve_vl_vfcmpd_vsvl", - "llvm.ve.vl.vfcmpd.vsvmvl" => "__builtin_ve_vl_vfcmpd_vsvmvl", - "llvm.ve.vl.vfcmpd.vsvvl" => "__builtin_ve_vl_vfcmpd_vsvvl", - "llvm.ve.vl.vfcmpd.vvvl" => "__builtin_ve_vl_vfcmpd_vvvl", - "llvm.ve.vl.vfcmpd.vvvmvl" => "__builtin_ve_vl_vfcmpd_vvvmvl", - "llvm.ve.vl.vfcmpd.vvvvl" => "__builtin_ve_vl_vfcmpd_vvvvl", - "llvm.ve.vl.vfcmps.vsvl" => "__builtin_ve_vl_vfcmps_vsvl", - "llvm.ve.vl.vfcmps.vsvmvl" => "__builtin_ve_vl_vfcmps_vsvmvl", - "llvm.ve.vl.vfcmps.vsvvl" => "__builtin_ve_vl_vfcmps_vsvvl", - "llvm.ve.vl.vfcmps.vvvl" => "__builtin_ve_vl_vfcmps_vvvl", - "llvm.ve.vl.vfcmps.vvvmvl" => "__builtin_ve_vl_vfcmps_vvvmvl", - "llvm.ve.vl.vfcmps.vvvvl" => "__builtin_ve_vl_vfcmps_vvvvl", - "llvm.ve.vl.vfdivd.vsvl" => "__builtin_ve_vl_vfdivd_vsvl", - "llvm.ve.vl.vfdivd.vsvmvl" => "__builtin_ve_vl_vfdivd_vsvmvl", - "llvm.ve.vl.vfdivd.vsvvl" => "__builtin_ve_vl_vfdivd_vsvvl", - "llvm.ve.vl.vfdivd.vvvl" => "__builtin_ve_vl_vfdivd_vvvl", - "llvm.ve.vl.vfdivd.vvvmvl" => "__builtin_ve_vl_vfdivd_vvvmvl", - "llvm.ve.vl.vfdivd.vvvvl" => 
"__builtin_ve_vl_vfdivd_vvvvl", - "llvm.ve.vl.vfdivs.vsvl" => "__builtin_ve_vl_vfdivs_vsvl", - "llvm.ve.vl.vfdivs.vsvmvl" => "__builtin_ve_vl_vfdivs_vsvmvl", - "llvm.ve.vl.vfdivs.vsvvl" => "__builtin_ve_vl_vfdivs_vsvvl", - "llvm.ve.vl.vfdivs.vvvl" => "__builtin_ve_vl_vfdivs_vvvl", - "llvm.ve.vl.vfdivs.vvvmvl" => "__builtin_ve_vl_vfdivs_vvvmvl", - "llvm.ve.vl.vfdivs.vvvvl" => "__builtin_ve_vl_vfdivs_vvvvl", - "llvm.ve.vl.vfmadd.vsvvl" => "__builtin_ve_vl_vfmadd_vsvvl", - "llvm.ve.vl.vfmadd.vsvvmvl" => "__builtin_ve_vl_vfmadd_vsvvmvl", - "llvm.ve.vl.vfmadd.vsvvvl" => "__builtin_ve_vl_vfmadd_vsvvvl", - "llvm.ve.vl.vfmadd.vvsvl" => "__builtin_ve_vl_vfmadd_vvsvl", - "llvm.ve.vl.vfmadd.vvsvmvl" => "__builtin_ve_vl_vfmadd_vvsvmvl", - "llvm.ve.vl.vfmadd.vvsvvl" => "__builtin_ve_vl_vfmadd_vvsvvl", - "llvm.ve.vl.vfmadd.vvvvl" => "__builtin_ve_vl_vfmadd_vvvvl", - "llvm.ve.vl.vfmadd.vvvvmvl" => "__builtin_ve_vl_vfmadd_vvvvmvl", - "llvm.ve.vl.vfmadd.vvvvvl" => "__builtin_ve_vl_vfmadd_vvvvvl", - "llvm.ve.vl.vfmads.vsvvl" => "__builtin_ve_vl_vfmads_vsvvl", - "llvm.ve.vl.vfmads.vsvvmvl" => "__builtin_ve_vl_vfmads_vsvvmvl", - "llvm.ve.vl.vfmads.vsvvvl" => "__builtin_ve_vl_vfmads_vsvvvl", - "llvm.ve.vl.vfmads.vvsvl" => "__builtin_ve_vl_vfmads_vvsvl", - "llvm.ve.vl.vfmads.vvsvmvl" => "__builtin_ve_vl_vfmads_vvsvmvl", - "llvm.ve.vl.vfmads.vvsvvl" => "__builtin_ve_vl_vfmads_vvsvvl", - "llvm.ve.vl.vfmads.vvvvl" => "__builtin_ve_vl_vfmads_vvvvl", - "llvm.ve.vl.vfmads.vvvvmvl" => "__builtin_ve_vl_vfmads_vvvvmvl", - "llvm.ve.vl.vfmads.vvvvvl" => "__builtin_ve_vl_vfmads_vvvvvl", - "llvm.ve.vl.vfmaxd.vsvl" => "__builtin_ve_vl_vfmaxd_vsvl", - "llvm.ve.vl.vfmaxd.vsvmvl" => "__builtin_ve_vl_vfmaxd_vsvmvl", - "llvm.ve.vl.vfmaxd.vsvvl" => "__builtin_ve_vl_vfmaxd_vsvvl", - "llvm.ve.vl.vfmaxd.vvvl" => "__builtin_ve_vl_vfmaxd_vvvl", - "llvm.ve.vl.vfmaxd.vvvmvl" => "__builtin_ve_vl_vfmaxd_vvvmvl", - "llvm.ve.vl.vfmaxd.vvvvl" => "__builtin_ve_vl_vfmaxd_vvvvl", - "llvm.ve.vl.vfmaxs.vsvl" => "__builtin_ve_vl_vfmaxs_vsvl", - "llvm.ve.vl.vfmaxs.vsvmvl" => "__builtin_ve_vl_vfmaxs_vsvmvl", - "llvm.ve.vl.vfmaxs.vsvvl" => "__builtin_ve_vl_vfmaxs_vsvvl", - "llvm.ve.vl.vfmaxs.vvvl" => "__builtin_ve_vl_vfmaxs_vvvl", - "llvm.ve.vl.vfmaxs.vvvmvl" => "__builtin_ve_vl_vfmaxs_vvvmvl", - "llvm.ve.vl.vfmaxs.vvvvl" => "__builtin_ve_vl_vfmaxs_vvvvl", - "llvm.ve.vl.vfmind.vsvl" => "__builtin_ve_vl_vfmind_vsvl", - "llvm.ve.vl.vfmind.vsvmvl" => "__builtin_ve_vl_vfmind_vsvmvl", - "llvm.ve.vl.vfmind.vsvvl" => "__builtin_ve_vl_vfmind_vsvvl", - "llvm.ve.vl.vfmind.vvvl" => "__builtin_ve_vl_vfmind_vvvl", - "llvm.ve.vl.vfmind.vvvmvl" => "__builtin_ve_vl_vfmind_vvvmvl", - "llvm.ve.vl.vfmind.vvvvl" => "__builtin_ve_vl_vfmind_vvvvl", - "llvm.ve.vl.vfmins.vsvl" => "__builtin_ve_vl_vfmins_vsvl", - "llvm.ve.vl.vfmins.vsvmvl" => "__builtin_ve_vl_vfmins_vsvmvl", - "llvm.ve.vl.vfmins.vsvvl" => "__builtin_ve_vl_vfmins_vsvvl", - "llvm.ve.vl.vfmins.vvvl" => "__builtin_ve_vl_vfmins_vvvl", - "llvm.ve.vl.vfmins.vvvmvl" => "__builtin_ve_vl_vfmins_vvvmvl", - "llvm.ve.vl.vfmins.vvvvl" => "__builtin_ve_vl_vfmins_vvvvl", - "llvm.ve.vl.vfmkdeq.mvl" => "__builtin_ve_vl_vfmkdeq_mvl", - "llvm.ve.vl.vfmkdeq.mvml" => "__builtin_ve_vl_vfmkdeq_mvml", - "llvm.ve.vl.vfmkdeqnan.mvl" => "__builtin_ve_vl_vfmkdeqnan_mvl", - "llvm.ve.vl.vfmkdeqnan.mvml" => "__builtin_ve_vl_vfmkdeqnan_mvml", - "llvm.ve.vl.vfmkdge.mvl" => "__builtin_ve_vl_vfmkdge_mvl", - "llvm.ve.vl.vfmkdge.mvml" => "__builtin_ve_vl_vfmkdge_mvml", - "llvm.ve.vl.vfmkdgenan.mvl" => "__builtin_ve_vl_vfmkdgenan_mvl", - 
"llvm.ve.vl.vfmkdgenan.mvml" => "__builtin_ve_vl_vfmkdgenan_mvml", - "llvm.ve.vl.vfmkdgt.mvl" => "__builtin_ve_vl_vfmkdgt_mvl", - "llvm.ve.vl.vfmkdgt.mvml" => "__builtin_ve_vl_vfmkdgt_mvml", - "llvm.ve.vl.vfmkdgtnan.mvl" => "__builtin_ve_vl_vfmkdgtnan_mvl", - "llvm.ve.vl.vfmkdgtnan.mvml" => "__builtin_ve_vl_vfmkdgtnan_mvml", - "llvm.ve.vl.vfmkdle.mvl" => "__builtin_ve_vl_vfmkdle_mvl", - "llvm.ve.vl.vfmkdle.mvml" => "__builtin_ve_vl_vfmkdle_mvml", - "llvm.ve.vl.vfmkdlenan.mvl" => "__builtin_ve_vl_vfmkdlenan_mvl", - "llvm.ve.vl.vfmkdlenan.mvml" => "__builtin_ve_vl_vfmkdlenan_mvml", - "llvm.ve.vl.vfmkdlt.mvl" => "__builtin_ve_vl_vfmkdlt_mvl", - "llvm.ve.vl.vfmkdlt.mvml" => "__builtin_ve_vl_vfmkdlt_mvml", - "llvm.ve.vl.vfmkdltnan.mvl" => "__builtin_ve_vl_vfmkdltnan_mvl", - "llvm.ve.vl.vfmkdltnan.mvml" => "__builtin_ve_vl_vfmkdltnan_mvml", - "llvm.ve.vl.vfmkdnan.mvl" => "__builtin_ve_vl_vfmkdnan_mvl", - "llvm.ve.vl.vfmkdnan.mvml" => "__builtin_ve_vl_vfmkdnan_mvml", - "llvm.ve.vl.vfmkdne.mvl" => "__builtin_ve_vl_vfmkdne_mvl", - "llvm.ve.vl.vfmkdne.mvml" => "__builtin_ve_vl_vfmkdne_mvml", - "llvm.ve.vl.vfmkdnenan.mvl" => "__builtin_ve_vl_vfmkdnenan_mvl", - "llvm.ve.vl.vfmkdnenan.mvml" => "__builtin_ve_vl_vfmkdnenan_mvml", - "llvm.ve.vl.vfmkdnum.mvl" => "__builtin_ve_vl_vfmkdnum_mvl", - "llvm.ve.vl.vfmkdnum.mvml" => "__builtin_ve_vl_vfmkdnum_mvml", - "llvm.ve.vl.vfmklaf.ml" => "__builtin_ve_vl_vfmklaf_ml", - "llvm.ve.vl.vfmklat.ml" => "__builtin_ve_vl_vfmklat_ml", - "llvm.ve.vl.vfmkleq.mvl" => "__builtin_ve_vl_vfmkleq_mvl", - "llvm.ve.vl.vfmkleq.mvml" => "__builtin_ve_vl_vfmkleq_mvml", - "llvm.ve.vl.vfmkleqnan.mvl" => "__builtin_ve_vl_vfmkleqnan_mvl", - "llvm.ve.vl.vfmkleqnan.mvml" => "__builtin_ve_vl_vfmkleqnan_mvml", - "llvm.ve.vl.vfmklge.mvl" => "__builtin_ve_vl_vfmklge_mvl", - "llvm.ve.vl.vfmklge.mvml" => "__builtin_ve_vl_vfmklge_mvml", - "llvm.ve.vl.vfmklgenan.mvl" => "__builtin_ve_vl_vfmklgenan_mvl", - "llvm.ve.vl.vfmklgenan.mvml" => "__builtin_ve_vl_vfmklgenan_mvml", - "llvm.ve.vl.vfmklgt.mvl" => "__builtin_ve_vl_vfmklgt_mvl", - "llvm.ve.vl.vfmklgt.mvml" => "__builtin_ve_vl_vfmklgt_mvml", - "llvm.ve.vl.vfmklgtnan.mvl" => "__builtin_ve_vl_vfmklgtnan_mvl", - "llvm.ve.vl.vfmklgtnan.mvml" => "__builtin_ve_vl_vfmklgtnan_mvml", - "llvm.ve.vl.vfmklle.mvl" => "__builtin_ve_vl_vfmklle_mvl", - "llvm.ve.vl.vfmklle.mvml" => "__builtin_ve_vl_vfmklle_mvml", - "llvm.ve.vl.vfmkllenan.mvl" => "__builtin_ve_vl_vfmkllenan_mvl", - "llvm.ve.vl.vfmkllenan.mvml" => "__builtin_ve_vl_vfmkllenan_mvml", - "llvm.ve.vl.vfmkllt.mvl" => "__builtin_ve_vl_vfmkllt_mvl", - "llvm.ve.vl.vfmkllt.mvml" => "__builtin_ve_vl_vfmkllt_mvml", - "llvm.ve.vl.vfmklltnan.mvl" => "__builtin_ve_vl_vfmklltnan_mvl", - "llvm.ve.vl.vfmklltnan.mvml" => "__builtin_ve_vl_vfmklltnan_mvml", - "llvm.ve.vl.vfmklnan.mvl" => "__builtin_ve_vl_vfmklnan_mvl", - "llvm.ve.vl.vfmklnan.mvml" => "__builtin_ve_vl_vfmklnan_mvml", - "llvm.ve.vl.vfmklne.mvl" => "__builtin_ve_vl_vfmklne_mvl", - "llvm.ve.vl.vfmklne.mvml" => "__builtin_ve_vl_vfmklne_mvml", - "llvm.ve.vl.vfmklnenan.mvl" => "__builtin_ve_vl_vfmklnenan_mvl", - "llvm.ve.vl.vfmklnenan.mvml" => "__builtin_ve_vl_vfmklnenan_mvml", - "llvm.ve.vl.vfmklnum.mvl" => "__builtin_ve_vl_vfmklnum_mvl", - "llvm.ve.vl.vfmklnum.mvml" => "__builtin_ve_vl_vfmklnum_mvml", - "llvm.ve.vl.vfmkseq.mvl" => "__builtin_ve_vl_vfmkseq_mvl", - "llvm.ve.vl.vfmkseq.mvml" => "__builtin_ve_vl_vfmkseq_mvml", - "llvm.ve.vl.vfmkseqnan.mvl" => "__builtin_ve_vl_vfmkseqnan_mvl", - "llvm.ve.vl.vfmkseqnan.mvml" => 
"__builtin_ve_vl_vfmkseqnan_mvml", - "llvm.ve.vl.vfmksge.mvl" => "__builtin_ve_vl_vfmksge_mvl", - "llvm.ve.vl.vfmksge.mvml" => "__builtin_ve_vl_vfmksge_mvml", - "llvm.ve.vl.vfmksgenan.mvl" => "__builtin_ve_vl_vfmksgenan_mvl", - "llvm.ve.vl.vfmksgenan.mvml" => "__builtin_ve_vl_vfmksgenan_mvml", - "llvm.ve.vl.vfmksgt.mvl" => "__builtin_ve_vl_vfmksgt_mvl", - "llvm.ve.vl.vfmksgt.mvml" => "__builtin_ve_vl_vfmksgt_mvml", - "llvm.ve.vl.vfmksgtnan.mvl" => "__builtin_ve_vl_vfmksgtnan_mvl", - "llvm.ve.vl.vfmksgtnan.mvml" => "__builtin_ve_vl_vfmksgtnan_mvml", - "llvm.ve.vl.vfmksle.mvl" => "__builtin_ve_vl_vfmksle_mvl", - "llvm.ve.vl.vfmksle.mvml" => "__builtin_ve_vl_vfmksle_mvml", - "llvm.ve.vl.vfmkslenan.mvl" => "__builtin_ve_vl_vfmkslenan_mvl", - "llvm.ve.vl.vfmkslenan.mvml" => "__builtin_ve_vl_vfmkslenan_mvml", - "llvm.ve.vl.vfmkslt.mvl" => "__builtin_ve_vl_vfmkslt_mvl", - "llvm.ve.vl.vfmkslt.mvml" => "__builtin_ve_vl_vfmkslt_mvml", - "llvm.ve.vl.vfmksltnan.mvl" => "__builtin_ve_vl_vfmksltnan_mvl", - "llvm.ve.vl.vfmksltnan.mvml" => "__builtin_ve_vl_vfmksltnan_mvml", - "llvm.ve.vl.vfmksnan.mvl" => "__builtin_ve_vl_vfmksnan_mvl", - "llvm.ve.vl.vfmksnan.mvml" => "__builtin_ve_vl_vfmksnan_mvml", - "llvm.ve.vl.vfmksne.mvl" => "__builtin_ve_vl_vfmksne_mvl", - "llvm.ve.vl.vfmksne.mvml" => "__builtin_ve_vl_vfmksne_mvml", - "llvm.ve.vl.vfmksnenan.mvl" => "__builtin_ve_vl_vfmksnenan_mvl", - "llvm.ve.vl.vfmksnenan.mvml" => "__builtin_ve_vl_vfmksnenan_mvml", - "llvm.ve.vl.vfmksnum.mvl" => "__builtin_ve_vl_vfmksnum_mvl", - "llvm.ve.vl.vfmksnum.mvml" => "__builtin_ve_vl_vfmksnum_mvml", - "llvm.ve.vl.vfmkweq.mvl" => "__builtin_ve_vl_vfmkweq_mvl", - "llvm.ve.vl.vfmkweq.mvml" => "__builtin_ve_vl_vfmkweq_mvml", - "llvm.ve.vl.vfmkweqnan.mvl" => "__builtin_ve_vl_vfmkweqnan_mvl", - "llvm.ve.vl.vfmkweqnan.mvml" => "__builtin_ve_vl_vfmkweqnan_mvml", - "llvm.ve.vl.vfmkwge.mvl" => "__builtin_ve_vl_vfmkwge_mvl", - "llvm.ve.vl.vfmkwge.mvml" => "__builtin_ve_vl_vfmkwge_mvml", - "llvm.ve.vl.vfmkwgenan.mvl" => "__builtin_ve_vl_vfmkwgenan_mvl", - "llvm.ve.vl.vfmkwgenan.mvml" => "__builtin_ve_vl_vfmkwgenan_mvml", - "llvm.ve.vl.vfmkwgt.mvl" => "__builtin_ve_vl_vfmkwgt_mvl", - "llvm.ve.vl.vfmkwgt.mvml" => "__builtin_ve_vl_vfmkwgt_mvml", - "llvm.ve.vl.vfmkwgtnan.mvl" => "__builtin_ve_vl_vfmkwgtnan_mvl", - "llvm.ve.vl.vfmkwgtnan.mvml" => "__builtin_ve_vl_vfmkwgtnan_mvml", - "llvm.ve.vl.vfmkwle.mvl" => "__builtin_ve_vl_vfmkwle_mvl", - "llvm.ve.vl.vfmkwle.mvml" => "__builtin_ve_vl_vfmkwle_mvml", - "llvm.ve.vl.vfmkwlenan.mvl" => "__builtin_ve_vl_vfmkwlenan_mvl", - "llvm.ve.vl.vfmkwlenan.mvml" => "__builtin_ve_vl_vfmkwlenan_mvml", - "llvm.ve.vl.vfmkwlt.mvl" => "__builtin_ve_vl_vfmkwlt_mvl", - "llvm.ve.vl.vfmkwlt.mvml" => "__builtin_ve_vl_vfmkwlt_mvml", - "llvm.ve.vl.vfmkwltnan.mvl" => "__builtin_ve_vl_vfmkwltnan_mvl", - "llvm.ve.vl.vfmkwltnan.mvml" => "__builtin_ve_vl_vfmkwltnan_mvml", - "llvm.ve.vl.vfmkwnan.mvl" => "__builtin_ve_vl_vfmkwnan_mvl", - "llvm.ve.vl.vfmkwnan.mvml" => "__builtin_ve_vl_vfmkwnan_mvml", - "llvm.ve.vl.vfmkwne.mvl" => "__builtin_ve_vl_vfmkwne_mvl", - "llvm.ve.vl.vfmkwne.mvml" => "__builtin_ve_vl_vfmkwne_mvml", - "llvm.ve.vl.vfmkwnenan.mvl" => "__builtin_ve_vl_vfmkwnenan_mvl", - "llvm.ve.vl.vfmkwnenan.mvml" => "__builtin_ve_vl_vfmkwnenan_mvml", - "llvm.ve.vl.vfmkwnum.mvl" => "__builtin_ve_vl_vfmkwnum_mvl", - "llvm.ve.vl.vfmkwnum.mvml" => "__builtin_ve_vl_vfmkwnum_mvml", - "llvm.ve.vl.vfmsbd.vsvvl" => "__builtin_ve_vl_vfmsbd_vsvvl", - "llvm.ve.vl.vfmsbd.vsvvmvl" => "__builtin_ve_vl_vfmsbd_vsvvmvl", - 
"llvm.ve.vl.vfmsbd.vsvvvl" => "__builtin_ve_vl_vfmsbd_vsvvvl", - "llvm.ve.vl.vfmsbd.vvsvl" => "__builtin_ve_vl_vfmsbd_vvsvl", - "llvm.ve.vl.vfmsbd.vvsvmvl" => "__builtin_ve_vl_vfmsbd_vvsvmvl", - "llvm.ve.vl.vfmsbd.vvsvvl" => "__builtin_ve_vl_vfmsbd_vvsvvl", - "llvm.ve.vl.vfmsbd.vvvvl" => "__builtin_ve_vl_vfmsbd_vvvvl", - "llvm.ve.vl.vfmsbd.vvvvmvl" => "__builtin_ve_vl_vfmsbd_vvvvmvl", - "llvm.ve.vl.vfmsbd.vvvvvl" => "__builtin_ve_vl_vfmsbd_vvvvvl", - "llvm.ve.vl.vfmsbs.vsvvl" => "__builtin_ve_vl_vfmsbs_vsvvl", - "llvm.ve.vl.vfmsbs.vsvvmvl" => "__builtin_ve_vl_vfmsbs_vsvvmvl", - "llvm.ve.vl.vfmsbs.vsvvvl" => "__builtin_ve_vl_vfmsbs_vsvvvl", - "llvm.ve.vl.vfmsbs.vvsvl" => "__builtin_ve_vl_vfmsbs_vvsvl", - "llvm.ve.vl.vfmsbs.vvsvmvl" => "__builtin_ve_vl_vfmsbs_vvsvmvl", - "llvm.ve.vl.vfmsbs.vvsvvl" => "__builtin_ve_vl_vfmsbs_vvsvvl", - "llvm.ve.vl.vfmsbs.vvvvl" => "__builtin_ve_vl_vfmsbs_vvvvl", - "llvm.ve.vl.vfmsbs.vvvvmvl" => "__builtin_ve_vl_vfmsbs_vvvvmvl", - "llvm.ve.vl.vfmsbs.vvvvvl" => "__builtin_ve_vl_vfmsbs_vvvvvl", - "llvm.ve.vl.vfmuld.vsvl" => "__builtin_ve_vl_vfmuld_vsvl", - "llvm.ve.vl.vfmuld.vsvmvl" => "__builtin_ve_vl_vfmuld_vsvmvl", - "llvm.ve.vl.vfmuld.vsvvl" => "__builtin_ve_vl_vfmuld_vsvvl", - "llvm.ve.vl.vfmuld.vvvl" => "__builtin_ve_vl_vfmuld_vvvl", - "llvm.ve.vl.vfmuld.vvvmvl" => "__builtin_ve_vl_vfmuld_vvvmvl", - "llvm.ve.vl.vfmuld.vvvvl" => "__builtin_ve_vl_vfmuld_vvvvl", - "llvm.ve.vl.vfmuls.vsvl" => "__builtin_ve_vl_vfmuls_vsvl", - "llvm.ve.vl.vfmuls.vsvmvl" => "__builtin_ve_vl_vfmuls_vsvmvl", - "llvm.ve.vl.vfmuls.vsvvl" => "__builtin_ve_vl_vfmuls_vsvvl", - "llvm.ve.vl.vfmuls.vvvl" => "__builtin_ve_vl_vfmuls_vvvl", - "llvm.ve.vl.vfmuls.vvvmvl" => "__builtin_ve_vl_vfmuls_vvvmvl", - "llvm.ve.vl.vfmuls.vvvvl" => "__builtin_ve_vl_vfmuls_vvvvl", - "llvm.ve.vl.vfnmadd.vsvvl" => "__builtin_ve_vl_vfnmadd_vsvvl", - "llvm.ve.vl.vfnmadd.vsvvmvl" => "__builtin_ve_vl_vfnmadd_vsvvmvl", - "llvm.ve.vl.vfnmadd.vsvvvl" => "__builtin_ve_vl_vfnmadd_vsvvvl", - "llvm.ve.vl.vfnmadd.vvsvl" => "__builtin_ve_vl_vfnmadd_vvsvl", - "llvm.ve.vl.vfnmadd.vvsvmvl" => "__builtin_ve_vl_vfnmadd_vvsvmvl", - "llvm.ve.vl.vfnmadd.vvsvvl" => "__builtin_ve_vl_vfnmadd_vvsvvl", - "llvm.ve.vl.vfnmadd.vvvvl" => "__builtin_ve_vl_vfnmadd_vvvvl", - "llvm.ve.vl.vfnmadd.vvvvmvl" => "__builtin_ve_vl_vfnmadd_vvvvmvl", - "llvm.ve.vl.vfnmadd.vvvvvl" => "__builtin_ve_vl_vfnmadd_vvvvvl", - "llvm.ve.vl.vfnmads.vsvvl" => "__builtin_ve_vl_vfnmads_vsvvl", - "llvm.ve.vl.vfnmads.vsvvmvl" => "__builtin_ve_vl_vfnmads_vsvvmvl", - "llvm.ve.vl.vfnmads.vsvvvl" => "__builtin_ve_vl_vfnmads_vsvvvl", - "llvm.ve.vl.vfnmads.vvsvl" => "__builtin_ve_vl_vfnmads_vvsvl", - "llvm.ve.vl.vfnmads.vvsvmvl" => "__builtin_ve_vl_vfnmads_vvsvmvl", - "llvm.ve.vl.vfnmads.vvsvvl" => "__builtin_ve_vl_vfnmads_vvsvvl", - "llvm.ve.vl.vfnmads.vvvvl" => "__builtin_ve_vl_vfnmads_vvvvl", - "llvm.ve.vl.vfnmads.vvvvmvl" => "__builtin_ve_vl_vfnmads_vvvvmvl", - "llvm.ve.vl.vfnmads.vvvvvl" => "__builtin_ve_vl_vfnmads_vvvvvl", - "llvm.ve.vl.vfnmsbd.vsvvl" => "__builtin_ve_vl_vfnmsbd_vsvvl", - "llvm.ve.vl.vfnmsbd.vsvvmvl" => "__builtin_ve_vl_vfnmsbd_vsvvmvl", - "llvm.ve.vl.vfnmsbd.vsvvvl" => "__builtin_ve_vl_vfnmsbd_vsvvvl", - "llvm.ve.vl.vfnmsbd.vvsvl" => "__builtin_ve_vl_vfnmsbd_vvsvl", - "llvm.ve.vl.vfnmsbd.vvsvmvl" => "__builtin_ve_vl_vfnmsbd_vvsvmvl", - "llvm.ve.vl.vfnmsbd.vvsvvl" => "__builtin_ve_vl_vfnmsbd_vvsvvl", - "llvm.ve.vl.vfnmsbd.vvvvl" => "__builtin_ve_vl_vfnmsbd_vvvvl", - "llvm.ve.vl.vfnmsbd.vvvvmvl" => "__builtin_ve_vl_vfnmsbd_vvvvmvl", - 
"llvm.ve.vl.vfnmsbd.vvvvvl" => "__builtin_ve_vl_vfnmsbd_vvvvvl", - "llvm.ve.vl.vfnmsbs.vsvvl" => "__builtin_ve_vl_vfnmsbs_vsvvl", - "llvm.ve.vl.vfnmsbs.vsvvmvl" => "__builtin_ve_vl_vfnmsbs_vsvvmvl", - "llvm.ve.vl.vfnmsbs.vsvvvl" => "__builtin_ve_vl_vfnmsbs_vsvvvl", - "llvm.ve.vl.vfnmsbs.vvsvl" => "__builtin_ve_vl_vfnmsbs_vvsvl", - "llvm.ve.vl.vfnmsbs.vvsvmvl" => "__builtin_ve_vl_vfnmsbs_vvsvmvl", - "llvm.ve.vl.vfnmsbs.vvsvvl" => "__builtin_ve_vl_vfnmsbs_vvsvvl", - "llvm.ve.vl.vfnmsbs.vvvvl" => "__builtin_ve_vl_vfnmsbs_vvvvl", - "llvm.ve.vl.vfnmsbs.vvvvmvl" => "__builtin_ve_vl_vfnmsbs_vvvvmvl", - "llvm.ve.vl.vfnmsbs.vvvvvl" => "__builtin_ve_vl_vfnmsbs_vvvvvl", - "llvm.ve.vl.vfrmaxdfst.vvl" => "__builtin_ve_vl_vfrmaxdfst_vvl", - "llvm.ve.vl.vfrmaxdfst.vvvl" => "__builtin_ve_vl_vfrmaxdfst_vvvl", - "llvm.ve.vl.vfrmaxdlst.vvl" => "__builtin_ve_vl_vfrmaxdlst_vvl", - "llvm.ve.vl.vfrmaxdlst.vvvl" => "__builtin_ve_vl_vfrmaxdlst_vvvl", - "llvm.ve.vl.vfrmaxsfst.vvl" => "__builtin_ve_vl_vfrmaxsfst_vvl", - "llvm.ve.vl.vfrmaxsfst.vvvl" => "__builtin_ve_vl_vfrmaxsfst_vvvl", - "llvm.ve.vl.vfrmaxslst.vvl" => "__builtin_ve_vl_vfrmaxslst_vvl", - "llvm.ve.vl.vfrmaxslst.vvvl" => "__builtin_ve_vl_vfrmaxslst_vvvl", - "llvm.ve.vl.vfrmindfst.vvl" => "__builtin_ve_vl_vfrmindfst_vvl", - "llvm.ve.vl.vfrmindfst.vvvl" => "__builtin_ve_vl_vfrmindfst_vvvl", - "llvm.ve.vl.vfrmindlst.vvl" => "__builtin_ve_vl_vfrmindlst_vvl", - "llvm.ve.vl.vfrmindlst.vvvl" => "__builtin_ve_vl_vfrmindlst_vvvl", - "llvm.ve.vl.vfrminsfst.vvl" => "__builtin_ve_vl_vfrminsfst_vvl", - "llvm.ve.vl.vfrminsfst.vvvl" => "__builtin_ve_vl_vfrminsfst_vvvl", - "llvm.ve.vl.vfrminslst.vvl" => "__builtin_ve_vl_vfrminslst_vvl", - "llvm.ve.vl.vfrminslst.vvvl" => "__builtin_ve_vl_vfrminslst_vvvl", - "llvm.ve.vl.vfsqrtd.vvl" => "__builtin_ve_vl_vfsqrtd_vvl", - "llvm.ve.vl.vfsqrtd.vvvl" => "__builtin_ve_vl_vfsqrtd_vvvl", - "llvm.ve.vl.vfsqrts.vvl" => "__builtin_ve_vl_vfsqrts_vvl", - "llvm.ve.vl.vfsqrts.vvvl" => "__builtin_ve_vl_vfsqrts_vvvl", - "llvm.ve.vl.vfsubd.vsvl" => "__builtin_ve_vl_vfsubd_vsvl", - "llvm.ve.vl.vfsubd.vsvmvl" => "__builtin_ve_vl_vfsubd_vsvmvl", - "llvm.ve.vl.vfsubd.vsvvl" => "__builtin_ve_vl_vfsubd_vsvvl", - "llvm.ve.vl.vfsubd.vvvl" => "__builtin_ve_vl_vfsubd_vvvl", - "llvm.ve.vl.vfsubd.vvvmvl" => "__builtin_ve_vl_vfsubd_vvvmvl", - "llvm.ve.vl.vfsubd.vvvvl" => "__builtin_ve_vl_vfsubd_vvvvl", - "llvm.ve.vl.vfsubs.vsvl" => "__builtin_ve_vl_vfsubs_vsvl", - "llvm.ve.vl.vfsubs.vsvmvl" => "__builtin_ve_vl_vfsubs_vsvmvl", - "llvm.ve.vl.vfsubs.vsvvl" => "__builtin_ve_vl_vfsubs_vsvvl", - "llvm.ve.vl.vfsubs.vvvl" => "__builtin_ve_vl_vfsubs_vvvl", - "llvm.ve.vl.vfsubs.vvvmvl" => "__builtin_ve_vl_vfsubs_vvvmvl", - "llvm.ve.vl.vfsubs.vvvvl" => "__builtin_ve_vl_vfsubs_vvvvl", - "llvm.ve.vl.vfsumd.vvl" => "__builtin_ve_vl_vfsumd_vvl", - "llvm.ve.vl.vfsumd.vvml" => "__builtin_ve_vl_vfsumd_vvml", - "llvm.ve.vl.vfsums.vvl" => "__builtin_ve_vl_vfsums_vvl", - "llvm.ve.vl.vfsums.vvml" => "__builtin_ve_vl_vfsums_vvml", - "llvm.ve.vl.vgt.vvssl" => "__builtin_ve_vl_vgt_vvssl", - "llvm.ve.vl.vgt.vvssml" => "__builtin_ve_vl_vgt_vvssml", - "llvm.ve.vl.vgt.vvssmvl" => "__builtin_ve_vl_vgt_vvssmvl", - "llvm.ve.vl.vgt.vvssvl" => "__builtin_ve_vl_vgt_vvssvl", - "llvm.ve.vl.vgtlsx.vvssl" => "__builtin_ve_vl_vgtlsx_vvssl", - "llvm.ve.vl.vgtlsx.vvssml" => "__builtin_ve_vl_vgtlsx_vvssml", - "llvm.ve.vl.vgtlsx.vvssmvl" => "__builtin_ve_vl_vgtlsx_vvssmvl", - "llvm.ve.vl.vgtlsx.vvssvl" => "__builtin_ve_vl_vgtlsx_vvssvl", - "llvm.ve.vl.vgtlsxnc.vvssl" => 
"__builtin_ve_vl_vgtlsxnc_vvssl", - "llvm.ve.vl.vgtlsxnc.vvssml" => "__builtin_ve_vl_vgtlsxnc_vvssml", - "llvm.ve.vl.vgtlsxnc.vvssmvl" => "__builtin_ve_vl_vgtlsxnc_vvssmvl", - "llvm.ve.vl.vgtlsxnc.vvssvl" => "__builtin_ve_vl_vgtlsxnc_vvssvl", - "llvm.ve.vl.vgtlzx.vvssl" => "__builtin_ve_vl_vgtlzx_vvssl", - "llvm.ve.vl.vgtlzx.vvssml" => "__builtin_ve_vl_vgtlzx_vvssml", - "llvm.ve.vl.vgtlzx.vvssmvl" => "__builtin_ve_vl_vgtlzx_vvssmvl", - "llvm.ve.vl.vgtlzx.vvssvl" => "__builtin_ve_vl_vgtlzx_vvssvl", - "llvm.ve.vl.vgtlzxnc.vvssl" => "__builtin_ve_vl_vgtlzxnc_vvssl", - "llvm.ve.vl.vgtlzxnc.vvssml" => "__builtin_ve_vl_vgtlzxnc_vvssml", - "llvm.ve.vl.vgtlzxnc.vvssmvl" => "__builtin_ve_vl_vgtlzxnc_vvssmvl", - "llvm.ve.vl.vgtlzxnc.vvssvl" => "__builtin_ve_vl_vgtlzxnc_vvssvl", - "llvm.ve.vl.vgtnc.vvssl" => "__builtin_ve_vl_vgtnc_vvssl", - "llvm.ve.vl.vgtnc.vvssml" => "__builtin_ve_vl_vgtnc_vvssml", - "llvm.ve.vl.vgtnc.vvssmvl" => "__builtin_ve_vl_vgtnc_vvssmvl", - "llvm.ve.vl.vgtnc.vvssvl" => "__builtin_ve_vl_vgtnc_vvssvl", - "llvm.ve.vl.vgtu.vvssl" => "__builtin_ve_vl_vgtu_vvssl", - "llvm.ve.vl.vgtu.vvssml" => "__builtin_ve_vl_vgtu_vvssml", - "llvm.ve.vl.vgtu.vvssmvl" => "__builtin_ve_vl_vgtu_vvssmvl", - "llvm.ve.vl.vgtu.vvssvl" => "__builtin_ve_vl_vgtu_vvssvl", - "llvm.ve.vl.vgtunc.vvssl" => "__builtin_ve_vl_vgtunc_vvssl", - "llvm.ve.vl.vgtunc.vvssml" => "__builtin_ve_vl_vgtunc_vvssml", - "llvm.ve.vl.vgtunc.vvssmvl" => "__builtin_ve_vl_vgtunc_vvssmvl", - "llvm.ve.vl.vgtunc.vvssvl" => "__builtin_ve_vl_vgtunc_vvssvl", - "llvm.ve.vl.vld.vssl" => "__builtin_ve_vl_vld_vssl", - "llvm.ve.vl.vld.vssvl" => "__builtin_ve_vl_vld_vssvl", - "llvm.ve.vl.vld2d.vssl" => "__builtin_ve_vl_vld2d_vssl", - "llvm.ve.vl.vld2d.vssvl" => "__builtin_ve_vl_vld2d_vssvl", - "llvm.ve.vl.vld2dnc.vssl" => "__builtin_ve_vl_vld2dnc_vssl", - "llvm.ve.vl.vld2dnc.vssvl" => "__builtin_ve_vl_vld2dnc_vssvl", - "llvm.ve.vl.vldl2dsx.vssl" => "__builtin_ve_vl_vldl2dsx_vssl", - "llvm.ve.vl.vldl2dsx.vssvl" => "__builtin_ve_vl_vldl2dsx_vssvl", - "llvm.ve.vl.vldl2dsxnc.vssl" => "__builtin_ve_vl_vldl2dsxnc_vssl", - "llvm.ve.vl.vldl2dsxnc.vssvl" => "__builtin_ve_vl_vldl2dsxnc_vssvl", - "llvm.ve.vl.vldl2dzx.vssl" => "__builtin_ve_vl_vldl2dzx_vssl", - "llvm.ve.vl.vldl2dzx.vssvl" => "__builtin_ve_vl_vldl2dzx_vssvl", - "llvm.ve.vl.vldl2dzxnc.vssl" => "__builtin_ve_vl_vldl2dzxnc_vssl", - "llvm.ve.vl.vldl2dzxnc.vssvl" => "__builtin_ve_vl_vldl2dzxnc_vssvl", - "llvm.ve.vl.vldlsx.vssl" => "__builtin_ve_vl_vldlsx_vssl", - "llvm.ve.vl.vldlsx.vssvl" => "__builtin_ve_vl_vldlsx_vssvl", - "llvm.ve.vl.vldlsxnc.vssl" => "__builtin_ve_vl_vldlsxnc_vssl", - "llvm.ve.vl.vldlsxnc.vssvl" => "__builtin_ve_vl_vldlsxnc_vssvl", - "llvm.ve.vl.vldlzx.vssl" => "__builtin_ve_vl_vldlzx_vssl", - "llvm.ve.vl.vldlzx.vssvl" => "__builtin_ve_vl_vldlzx_vssvl", - "llvm.ve.vl.vldlzxnc.vssl" => "__builtin_ve_vl_vldlzxnc_vssl", - "llvm.ve.vl.vldlzxnc.vssvl" => "__builtin_ve_vl_vldlzxnc_vssvl", - "llvm.ve.vl.vldnc.vssl" => "__builtin_ve_vl_vldnc_vssl", - "llvm.ve.vl.vldnc.vssvl" => "__builtin_ve_vl_vldnc_vssvl", - "llvm.ve.vl.vldu.vssl" => "__builtin_ve_vl_vldu_vssl", - "llvm.ve.vl.vldu.vssvl" => "__builtin_ve_vl_vldu_vssvl", - "llvm.ve.vl.vldu2d.vssl" => "__builtin_ve_vl_vldu2d_vssl", - "llvm.ve.vl.vldu2d.vssvl" => "__builtin_ve_vl_vldu2d_vssvl", - "llvm.ve.vl.vldu2dnc.vssl" => "__builtin_ve_vl_vldu2dnc_vssl", - "llvm.ve.vl.vldu2dnc.vssvl" => "__builtin_ve_vl_vldu2dnc_vssvl", - "llvm.ve.vl.vldunc.vssl" => "__builtin_ve_vl_vldunc_vssl", - "llvm.ve.vl.vldunc.vssvl" => 
"__builtin_ve_vl_vldunc_vssvl", - "llvm.ve.vl.vldz.vvl" => "__builtin_ve_vl_vldz_vvl", - "llvm.ve.vl.vldz.vvmvl" => "__builtin_ve_vl_vldz_vvmvl", - "llvm.ve.vl.vldz.vvvl" => "__builtin_ve_vl_vldz_vvvl", - "llvm.ve.vl.vmaxsl.vsvl" => "__builtin_ve_vl_vmaxsl_vsvl", - "llvm.ve.vl.vmaxsl.vsvmvl" => "__builtin_ve_vl_vmaxsl_vsvmvl", - "llvm.ve.vl.vmaxsl.vsvvl" => "__builtin_ve_vl_vmaxsl_vsvvl", - "llvm.ve.vl.vmaxsl.vvvl" => "__builtin_ve_vl_vmaxsl_vvvl", - "llvm.ve.vl.vmaxsl.vvvmvl" => "__builtin_ve_vl_vmaxsl_vvvmvl", - "llvm.ve.vl.vmaxsl.vvvvl" => "__builtin_ve_vl_vmaxsl_vvvvl", - "llvm.ve.vl.vmaxswsx.vsvl" => "__builtin_ve_vl_vmaxswsx_vsvl", - "llvm.ve.vl.vmaxswsx.vsvmvl" => "__builtin_ve_vl_vmaxswsx_vsvmvl", - "llvm.ve.vl.vmaxswsx.vsvvl" => "__builtin_ve_vl_vmaxswsx_vsvvl", - "llvm.ve.vl.vmaxswsx.vvvl" => "__builtin_ve_vl_vmaxswsx_vvvl", - "llvm.ve.vl.vmaxswsx.vvvmvl" => "__builtin_ve_vl_vmaxswsx_vvvmvl", - "llvm.ve.vl.vmaxswsx.vvvvl" => "__builtin_ve_vl_vmaxswsx_vvvvl", - "llvm.ve.vl.vmaxswzx.vsvl" => "__builtin_ve_vl_vmaxswzx_vsvl", - "llvm.ve.vl.vmaxswzx.vsvmvl" => "__builtin_ve_vl_vmaxswzx_vsvmvl", - "llvm.ve.vl.vmaxswzx.vsvvl" => "__builtin_ve_vl_vmaxswzx_vsvvl", - "llvm.ve.vl.vmaxswzx.vvvl" => "__builtin_ve_vl_vmaxswzx_vvvl", - "llvm.ve.vl.vmaxswzx.vvvmvl" => "__builtin_ve_vl_vmaxswzx_vvvmvl", - "llvm.ve.vl.vmaxswzx.vvvvl" => "__builtin_ve_vl_vmaxswzx_vvvvl", - "llvm.ve.vl.vminsl.vsvl" => "__builtin_ve_vl_vminsl_vsvl", - "llvm.ve.vl.vminsl.vsvmvl" => "__builtin_ve_vl_vminsl_vsvmvl", - "llvm.ve.vl.vminsl.vsvvl" => "__builtin_ve_vl_vminsl_vsvvl", - "llvm.ve.vl.vminsl.vvvl" => "__builtin_ve_vl_vminsl_vvvl", - "llvm.ve.vl.vminsl.vvvmvl" => "__builtin_ve_vl_vminsl_vvvmvl", - "llvm.ve.vl.vminsl.vvvvl" => "__builtin_ve_vl_vminsl_vvvvl", - "llvm.ve.vl.vminswsx.vsvl" => "__builtin_ve_vl_vminswsx_vsvl", - "llvm.ve.vl.vminswsx.vsvmvl" => "__builtin_ve_vl_vminswsx_vsvmvl", - "llvm.ve.vl.vminswsx.vsvvl" => "__builtin_ve_vl_vminswsx_vsvvl", - "llvm.ve.vl.vminswsx.vvvl" => "__builtin_ve_vl_vminswsx_vvvl", - "llvm.ve.vl.vminswsx.vvvmvl" => "__builtin_ve_vl_vminswsx_vvvmvl", - "llvm.ve.vl.vminswsx.vvvvl" => "__builtin_ve_vl_vminswsx_vvvvl", - "llvm.ve.vl.vminswzx.vsvl" => "__builtin_ve_vl_vminswzx_vsvl", - "llvm.ve.vl.vminswzx.vsvmvl" => "__builtin_ve_vl_vminswzx_vsvmvl", - "llvm.ve.vl.vminswzx.vsvvl" => "__builtin_ve_vl_vminswzx_vsvvl", - "llvm.ve.vl.vminswzx.vvvl" => "__builtin_ve_vl_vminswzx_vvvl", - "llvm.ve.vl.vminswzx.vvvmvl" => "__builtin_ve_vl_vminswzx_vvvmvl", - "llvm.ve.vl.vminswzx.vvvvl" => "__builtin_ve_vl_vminswzx_vvvvl", - "llvm.ve.vl.vmrg.vsvml" => "__builtin_ve_vl_vmrg_vsvml", - "llvm.ve.vl.vmrg.vsvmvl" => "__builtin_ve_vl_vmrg_vsvmvl", - "llvm.ve.vl.vmrg.vvvml" => "__builtin_ve_vl_vmrg_vvvml", - "llvm.ve.vl.vmrg.vvvmvl" => "__builtin_ve_vl_vmrg_vvvmvl", - "llvm.ve.vl.vmrgw.vsvMl" => "__builtin_ve_vl_vmrgw_vsvMl", - "llvm.ve.vl.vmrgw.vsvMvl" => "__builtin_ve_vl_vmrgw_vsvMvl", - "llvm.ve.vl.vmrgw.vvvMl" => "__builtin_ve_vl_vmrgw_vvvMl", - "llvm.ve.vl.vmrgw.vvvMvl" => "__builtin_ve_vl_vmrgw_vvvMvl", - "llvm.ve.vl.vmulsl.vsvl" => "__builtin_ve_vl_vmulsl_vsvl", - "llvm.ve.vl.vmulsl.vsvmvl" => "__builtin_ve_vl_vmulsl_vsvmvl", - "llvm.ve.vl.vmulsl.vsvvl" => "__builtin_ve_vl_vmulsl_vsvvl", - "llvm.ve.vl.vmulsl.vvvl" => "__builtin_ve_vl_vmulsl_vvvl", - "llvm.ve.vl.vmulsl.vvvmvl" => "__builtin_ve_vl_vmulsl_vvvmvl", - "llvm.ve.vl.vmulsl.vvvvl" => "__builtin_ve_vl_vmulsl_vvvvl", - "llvm.ve.vl.vmulslw.vsvl" => "__builtin_ve_vl_vmulslw_vsvl", - "llvm.ve.vl.vmulslw.vsvvl" => 
"__builtin_ve_vl_vmulslw_vsvvl", - "llvm.ve.vl.vmulslw.vvvl" => "__builtin_ve_vl_vmulslw_vvvl", - "llvm.ve.vl.vmulslw.vvvvl" => "__builtin_ve_vl_vmulslw_vvvvl", - "llvm.ve.vl.vmulswsx.vsvl" => "__builtin_ve_vl_vmulswsx_vsvl", - "llvm.ve.vl.vmulswsx.vsvmvl" => "__builtin_ve_vl_vmulswsx_vsvmvl", - "llvm.ve.vl.vmulswsx.vsvvl" => "__builtin_ve_vl_vmulswsx_vsvvl", - "llvm.ve.vl.vmulswsx.vvvl" => "__builtin_ve_vl_vmulswsx_vvvl", - "llvm.ve.vl.vmulswsx.vvvmvl" => "__builtin_ve_vl_vmulswsx_vvvmvl", - "llvm.ve.vl.vmulswsx.vvvvl" => "__builtin_ve_vl_vmulswsx_vvvvl", - "llvm.ve.vl.vmulswzx.vsvl" => "__builtin_ve_vl_vmulswzx_vsvl", - "llvm.ve.vl.vmulswzx.vsvmvl" => "__builtin_ve_vl_vmulswzx_vsvmvl", - "llvm.ve.vl.vmulswzx.vsvvl" => "__builtin_ve_vl_vmulswzx_vsvvl", - "llvm.ve.vl.vmulswzx.vvvl" => "__builtin_ve_vl_vmulswzx_vvvl", - "llvm.ve.vl.vmulswzx.vvvmvl" => "__builtin_ve_vl_vmulswzx_vvvmvl", - "llvm.ve.vl.vmulswzx.vvvvl" => "__builtin_ve_vl_vmulswzx_vvvvl", - "llvm.ve.vl.vmulul.vsvl" => "__builtin_ve_vl_vmulul_vsvl", - "llvm.ve.vl.vmulul.vsvmvl" => "__builtin_ve_vl_vmulul_vsvmvl", - "llvm.ve.vl.vmulul.vsvvl" => "__builtin_ve_vl_vmulul_vsvvl", - "llvm.ve.vl.vmulul.vvvl" => "__builtin_ve_vl_vmulul_vvvl", - "llvm.ve.vl.vmulul.vvvmvl" => "__builtin_ve_vl_vmulul_vvvmvl", - "llvm.ve.vl.vmulul.vvvvl" => "__builtin_ve_vl_vmulul_vvvvl", - "llvm.ve.vl.vmuluw.vsvl" => "__builtin_ve_vl_vmuluw_vsvl", - "llvm.ve.vl.vmuluw.vsvmvl" => "__builtin_ve_vl_vmuluw_vsvmvl", - "llvm.ve.vl.vmuluw.vsvvl" => "__builtin_ve_vl_vmuluw_vsvvl", - "llvm.ve.vl.vmuluw.vvvl" => "__builtin_ve_vl_vmuluw_vvvl", - "llvm.ve.vl.vmuluw.vvvmvl" => "__builtin_ve_vl_vmuluw_vvvmvl", - "llvm.ve.vl.vmuluw.vvvvl" => "__builtin_ve_vl_vmuluw_vvvvl", - "llvm.ve.vl.vmv.vsvl" => "__builtin_ve_vl_vmv_vsvl", - "llvm.ve.vl.vmv.vsvmvl" => "__builtin_ve_vl_vmv_vsvmvl", - "llvm.ve.vl.vmv.vsvvl" => "__builtin_ve_vl_vmv_vsvvl", - "llvm.ve.vl.vor.vsvl" => "__builtin_ve_vl_vor_vsvl", - "llvm.ve.vl.vor.vsvmvl" => "__builtin_ve_vl_vor_vsvmvl", - "llvm.ve.vl.vor.vsvvl" => "__builtin_ve_vl_vor_vsvvl", - "llvm.ve.vl.vor.vvvl" => "__builtin_ve_vl_vor_vvvl", - "llvm.ve.vl.vor.vvvmvl" => "__builtin_ve_vl_vor_vvvmvl", - "llvm.ve.vl.vor.vvvvl" => "__builtin_ve_vl_vor_vvvvl", - "llvm.ve.vl.vpcnt.vvl" => "__builtin_ve_vl_vpcnt_vvl", - "llvm.ve.vl.vpcnt.vvmvl" => "__builtin_ve_vl_vpcnt_vvmvl", - "llvm.ve.vl.vpcnt.vvvl" => "__builtin_ve_vl_vpcnt_vvvl", - "llvm.ve.vl.vrand.vvl" => "__builtin_ve_vl_vrand_vvl", - "llvm.ve.vl.vrand.vvml" => "__builtin_ve_vl_vrand_vvml", - "llvm.ve.vl.vrcpd.vvl" => "__builtin_ve_vl_vrcpd_vvl", - "llvm.ve.vl.vrcpd.vvvl" => "__builtin_ve_vl_vrcpd_vvvl", - "llvm.ve.vl.vrcps.vvl" => "__builtin_ve_vl_vrcps_vvl", - "llvm.ve.vl.vrcps.vvvl" => "__builtin_ve_vl_vrcps_vvvl", - "llvm.ve.vl.vrmaxslfst.vvl" => "__builtin_ve_vl_vrmaxslfst_vvl", - "llvm.ve.vl.vrmaxslfst.vvvl" => "__builtin_ve_vl_vrmaxslfst_vvvl", - "llvm.ve.vl.vrmaxsllst.vvl" => "__builtin_ve_vl_vrmaxsllst_vvl", - "llvm.ve.vl.vrmaxsllst.vvvl" => "__builtin_ve_vl_vrmaxsllst_vvvl", - "llvm.ve.vl.vrmaxswfstsx.vvl" => "__builtin_ve_vl_vrmaxswfstsx_vvl", - "llvm.ve.vl.vrmaxswfstsx.vvvl" => "__builtin_ve_vl_vrmaxswfstsx_vvvl", - "llvm.ve.vl.vrmaxswfstzx.vvl" => "__builtin_ve_vl_vrmaxswfstzx_vvl", - "llvm.ve.vl.vrmaxswfstzx.vvvl" => "__builtin_ve_vl_vrmaxswfstzx_vvvl", - "llvm.ve.vl.vrmaxswlstsx.vvl" => "__builtin_ve_vl_vrmaxswlstsx_vvl", - "llvm.ve.vl.vrmaxswlstsx.vvvl" => "__builtin_ve_vl_vrmaxswlstsx_vvvl", - "llvm.ve.vl.vrmaxswlstzx.vvl" => "__builtin_ve_vl_vrmaxswlstzx_vvl", - 
"llvm.ve.vl.vrmaxswlstzx.vvvl" => "__builtin_ve_vl_vrmaxswlstzx_vvvl", - "llvm.ve.vl.vrminslfst.vvl" => "__builtin_ve_vl_vrminslfst_vvl", - "llvm.ve.vl.vrminslfst.vvvl" => "__builtin_ve_vl_vrminslfst_vvvl", - "llvm.ve.vl.vrminsllst.vvl" => "__builtin_ve_vl_vrminsllst_vvl", - "llvm.ve.vl.vrminsllst.vvvl" => "__builtin_ve_vl_vrminsllst_vvvl", - "llvm.ve.vl.vrminswfstsx.vvl" => "__builtin_ve_vl_vrminswfstsx_vvl", - "llvm.ve.vl.vrminswfstsx.vvvl" => "__builtin_ve_vl_vrminswfstsx_vvvl", - "llvm.ve.vl.vrminswfstzx.vvl" => "__builtin_ve_vl_vrminswfstzx_vvl", - "llvm.ve.vl.vrminswfstzx.vvvl" => "__builtin_ve_vl_vrminswfstzx_vvvl", - "llvm.ve.vl.vrminswlstsx.vvl" => "__builtin_ve_vl_vrminswlstsx_vvl", - "llvm.ve.vl.vrminswlstsx.vvvl" => "__builtin_ve_vl_vrminswlstsx_vvvl", - "llvm.ve.vl.vrminswlstzx.vvl" => "__builtin_ve_vl_vrminswlstzx_vvl", - "llvm.ve.vl.vrminswlstzx.vvvl" => "__builtin_ve_vl_vrminswlstzx_vvvl", - "llvm.ve.vl.vror.vvl" => "__builtin_ve_vl_vror_vvl", - "llvm.ve.vl.vror.vvml" => "__builtin_ve_vl_vror_vvml", - "llvm.ve.vl.vrsqrtd.vvl" => "__builtin_ve_vl_vrsqrtd_vvl", - "llvm.ve.vl.vrsqrtd.vvvl" => "__builtin_ve_vl_vrsqrtd_vvvl", - "llvm.ve.vl.vrsqrtdnex.vvl" => "__builtin_ve_vl_vrsqrtdnex_vvl", - "llvm.ve.vl.vrsqrtdnex.vvvl" => "__builtin_ve_vl_vrsqrtdnex_vvvl", - "llvm.ve.vl.vrsqrts.vvl" => "__builtin_ve_vl_vrsqrts_vvl", - "llvm.ve.vl.vrsqrts.vvvl" => "__builtin_ve_vl_vrsqrts_vvvl", - "llvm.ve.vl.vrsqrtsnex.vvl" => "__builtin_ve_vl_vrsqrtsnex_vvl", - "llvm.ve.vl.vrsqrtsnex.vvvl" => "__builtin_ve_vl_vrsqrtsnex_vvvl", - "llvm.ve.vl.vrxor.vvl" => "__builtin_ve_vl_vrxor_vvl", - "llvm.ve.vl.vrxor.vvml" => "__builtin_ve_vl_vrxor_vvml", - "llvm.ve.vl.vsc.vvssl" => "__builtin_ve_vl_vsc_vvssl", - "llvm.ve.vl.vsc.vvssml" => "__builtin_ve_vl_vsc_vvssml", - "llvm.ve.vl.vscl.vvssl" => "__builtin_ve_vl_vscl_vvssl", - "llvm.ve.vl.vscl.vvssml" => "__builtin_ve_vl_vscl_vvssml", - "llvm.ve.vl.vsclnc.vvssl" => "__builtin_ve_vl_vsclnc_vvssl", - "llvm.ve.vl.vsclnc.vvssml" => "__builtin_ve_vl_vsclnc_vvssml", - "llvm.ve.vl.vsclncot.vvssl" => "__builtin_ve_vl_vsclncot_vvssl", - "llvm.ve.vl.vsclncot.vvssml" => "__builtin_ve_vl_vsclncot_vvssml", - "llvm.ve.vl.vsclot.vvssl" => "__builtin_ve_vl_vsclot_vvssl", - "llvm.ve.vl.vsclot.vvssml" => "__builtin_ve_vl_vsclot_vvssml", - "llvm.ve.vl.vscnc.vvssl" => "__builtin_ve_vl_vscnc_vvssl", - "llvm.ve.vl.vscnc.vvssml" => "__builtin_ve_vl_vscnc_vvssml", - "llvm.ve.vl.vscncot.vvssl" => "__builtin_ve_vl_vscncot_vvssl", - "llvm.ve.vl.vscncot.vvssml" => "__builtin_ve_vl_vscncot_vvssml", - "llvm.ve.vl.vscot.vvssl" => "__builtin_ve_vl_vscot_vvssl", - "llvm.ve.vl.vscot.vvssml" => "__builtin_ve_vl_vscot_vvssml", - "llvm.ve.vl.vscu.vvssl" => "__builtin_ve_vl_vscu_vvssl", - "llvm.ve.vl.vscu.vvssml" => "__builtin_ve_vl_vscu_vvssml", - "llvm.ve.vl.vscunc.vvssl" => "__builtin_ve_vl_vscunc_vvssl", - "llvm.ve.vl.vscunc.vvssml" => "__builtin_ve_vl_vscunc_vvssml", - "llvm.ve.vl.vscuncot.vvssl" => "__builtin_ve_vl_vscuncot_vvssl", - "llvm.ve.vl.vscuncot.vvssml" => "__builtin_ve_vl_vscuncot_vvssml", - "llvm.ve.vl.vscuot.vvssl" => "__builtin_ve_vl_vscuot_vvssl", - "llvm.ve.vl.vscuot.vvssml" => "__builtin_ve_vl_vscuot_vvssml", - "llvm.ve.vl.vseq.vl" => "__builtin_ve_vl_vseq_vl", - "llvm.ve.vl.vseq.vvl" => "__builtin_ve_vl_vseq_vvl", - "llvm.ve.vl.vsfa.vvssl" => "__builtin_ve_vl_vsfa_vvssl", - "llvm.ve.vl.vsfa.vvssmvl" => "__builtin_ve_vl_vsfa_vvssmvl", - "llvm.ve.vl.vsfa.vvssvl" => "__builtin_ve_vl_vsfa_vvssvl", - "llvm.ve.vl.vshf.vvvsl" => "__builtin_ve_vl_vshf_vvvsl", - 
"llvm.ve.vl.vshf.vvvsvl" => "__builtin_ve_vl_vshf_vvvsvl", - "llvm.ve.vl.vslal.vvsl" => "__builtin_ve_vl_vslal_vvsl", - "llvm.ve.vl.vslal.vvsmvl" => "__builtin_ve_vl_vslal_vvsmvl", - "llvm.ve.vl.vslal.vvsvl" => "__builtin_ve_vl_vslal_vvsvl", - "llvm.ve.vl.vslal.vvvl" => "__builtin_ve_vl_vslal_vvvl", - "llvm.ve.vl.vslal.vvvmvl" => "__builtin_ve_vl_vslal_vvvmvl", - "llvm.ve.vl.vslal.vvvvl" => "__builtin_ve_vl_vslal_vvvvl", - "llvm.ve.vl.vslawsx.vvsl" => "__builtin_ve_vl_vslawsx_vvsl", - "llvm.ve.vl.vslawsx.vvsmvl" => "__builtin_ve_vl_vslawsx_vvsmvl", - "llvm.ve.vl.vslawsx.vvsvl" => "__builtin_ve_vl_vslawsx_vvsvl", - "llvm.ve.vl.vslawsx.vvvl" => "__builtin_ve_vl_vslawsx_vvvl", - "llvm.ve.vl.vslawsx.vvvmvl" => "__builtin_ve_vl_vslawsx_vvvmvl", - "llvm.ve.vl.vslawsx.vvvvl" => "__builtin_ve_vl_vslawsx_vvvvl", - "llvm.ve.vl.vslawzx.vvsl" => "__builtin_ve_vl_vslawzx_vvsl", - "llvm.ve.vl.vslawzx.vvsmvl" => "__builtin_ve_vl_vslawzx_vvsmvl", - "llvm.ve.vl.vslawzx.vvsvl" => "__builtin_ve_vl_vslawzx_vvsvl", - "llvm.ve.vl.vslawzx.vvvl" => "__builtin_ve_vl_vslawzx_vvvl", - "llvm.ve.vl.vslawzx.vvvmvl" => "__builtin_ve_vl_vslawzx_vvvmvl", - "llvm.ve.vl.vslawzx.vvvvl" => "__builtin_ve_vl_vslawzx_vvvvl", - "llvm.ve.vl.vsll.vvsl" => "__builtin_ve_vl_vsll_vvsl", - "llvm.ve.vl.vsll.vvsmvl" => "__builtin_ve_vl_vsll_vvsmvl", - "llvm.ve.vl.vsll.vvsvl" => "__builtin_ve_vl_vsll_vvsvl", - "llvm.ve.vl.vsll.vvvl" => "__builtin_ve_vl_vsll_vvvl", - "llvm.ve.vl.vsll.vvvmvl" => "__builtin_ve_vl_vsll_vvvmvl", - "llvm.ve.vl.vsll.vvvvl" => "__builtin_ve_vl_vsll_vvvvl", - "llvm.ve.vl.vsral.vvsl" => "__builtin_ve_vl_vsral_vvsl", - "llvm.ve.vl.vsral.vvsmvl" => "__builtin_ve_vl_vsral_vvsmvl", - "llvm.ve.vl.vsral.vvsvl" => "__builtin_ve_vl_vsral_vvsvl", - "llvm.ve.vl.vsral.vvvl" => "__builtin_ve_vl_vsral_vvvl", - "llvm.ve.vl.vsral.vvvmvl" => "__builtin_ve_vl_vsral_vvvmvl", - "llvm.ve.vl.vsral.vvvvl" => "__builtin_ve_vl_vsral_vvvvl", - "llvm.ve.vl.vsrawsx.vvsl" => "__builtin_ve_vl_vsrawsx_vvsl", - "llvm.ve.vl.vsrawsx.vvsmvl" => "__builtin_ve_vl_vsrawsx_vvsmvl", - "llvm.ve.vl.vsrawsx.vvsvl" => "__builtin_ve_vl_vsrawsx_vvsvl", - "llvm.ve.vl.vsrawsx.vvvl" => "__builtin_ve_vl_vsrawsx_vvvl", - "llvm.ve.vl.vsrawsx.vvvmvl" => "__builtin_ve_vl_vsrawsx_vvvmvl", - "llvm.ve.vl.vsrawsx.vvvvl" => "__builtin_ve_vl_vsrawsx_vvvvl", - "llvm.ve.vl.vsrawzx.vvsl" => "__builtin_ve_vl_vsrawzx_vvsl", - "llvm.ve.vl.vsrawzx.vvsmvl" => "__builtin_ve_vl_vsrawzx_vvsmvl", - "llvm.ve.vl.vsrawzx.vvsvl" => "__builtin_ve_vl_vsrawzx_vvsvl", - "llvm.ve.vl.vsrawzx.vvvl" => "__builtin_ve_vl_vsrawzx_vvvl", - "llvm.ve.vl.vsrawzx.vvvmvl" => "__builtin_ve_vl_vsrawzx_vvvmvl", - "llvm.ve.vl.vsrawzx.vvvvl" => "__builtin_ve_vl_vsrawzx_vvvvl", - "llvm.ve.vl.vsrl.vvsl" => "__builtin_ve_vl_vsrl_vvsl", - "llvm.ve.vl.vsrl.vvsmvl" => "__builtin_ve_vl_vsrl_vvsmvl", - "llvm.ve.vl.vsrl.vvsvl" => "__builtin_ve_vl_vsrl_vvsvl", - "llvm.ve.vl.vsrl.vvvl" => "__builtin_ve_vl_vsrl_vvvl", - "llvm.ve.vl.vsrl.vvvmvl" => "__builtin_ve_vl_vsrl_vvvmvl", - "llvm.ve.vl.vsrl.vvvvl" => "__builtin_ve_vl_vsrl_vvvvl", - "llvm.ve.vl.vst.vssl" => "__builtin_ve_vl_vst_vssl", - "llvm.ve.vl.vst.vssml" => "__builtin_ve_vl_vst_vssml", - "llvm.ve.vl.vst2d.vssl" => "__builtin_ve_vl_vst2d_vssl", - "llvm.ve.vl.vst2d.vssml" => "__builtin_ve_vl_vst2d_vssml", - "llvm.ve.vl.vst2dnc.vssl" => "__builtin_ve_vl_vst2dnc_vssl", - "llvm.ve.vl.vst2dnc.vssml" => "__builtin_ve_vl_vst2dnc_vssml", - "llvm.ve.vl.vst2dncot.vssl" => "__builtin_ve_vl_vst2dncot_vssl", - "llvm.ve.vl.vst2dncot.vssml" => "__builtin_ve_vl_vst2dncot_vssml", 
- "llvm.ve.vl.vst2dot.vssl" => "__builtin_ve_vl_vst2dot_vssl", - "llvm.ve.vl.vst2dot.vssml" => "__builtin_ve_vl_vst2dot_vssml", - "llvm.ve.vl.vstl.vssl" => "__builtin_ve_vl_vstl_vssl", - "llvm.ve.vl.vstl.vssml" => "__builtin_ve_vl_vstl_vssml", - "llvm.ve.vl.vstl2d.vssl" => "__builtin_ve_vl_vstl2d_vssl", - "llvm.ve.vl.vstl2d.vssml" => "__builtin_ve_vl_vstl2d_vssml", - "llvm.ve.vl.vstl2dnc.vssl" => "__builtin_ve_vl_vstl2dnc_vssl", - "llvm.ve.vl.vstl2dnc.vssml" => "__builtin_ve_vl_vstl2dnc_vssml", - "llvm.ve.vl.vstl2dncot.vssl" => "__builtin_ve_vl_vstl2dncot_vssl", - "llvm.ve.vl.vstl2dncot.vssml" => "__builtin_ve_vl_vstl2dncot_vssml", - "llvm.ve.vl.vstl2dot.vssl" => "__builtin_ve_vl_vstl2dot_vssl", - "llvm.ve.vl.vstl2dot.vssml" => "__builtin_ve_vl_vstl2dot_vssml", - "llvm.ve.vl.vstlnc.vssl" => "__builtin_ve_vl_vstlnc_vssl", - "llvm.ve.vl.vstlnc.vssml" => "__builtin_ve_vl_vstlnc_vssml", - "llvm.ve.vl.vstlncot.vssl" => "__builtin_ve_vl_vstlncot_vssl", - "llvm.ve.vl.vstlncot.vssml" => "__builtin_ve_vl_vstlncot_vssml", - "llvm.ve.vl.vstlot.vssl" => "__builtin_ve_vl_vstlot_vssl", - "llvm.ve.vl.vstlot.vssml" => "__builtin_ve_vl_vstlot_vssml", - "llvm.ve.vl.vstnc.vssl" => "__builtin_ve_vl_vstnc_vssl", - "llvm.ve.vl.vstnc.vssml" => "__builtin_ve_vl_vstnc_vssml", - "llvm.ve.vl.vstncot.vssl" => "__builtin_ve_vl_vstncot_vssl", - "llvm.ve.vl.vstncot.vssml" => "__builtin_ve_vl_vstncot_vssml", - "llvm.ve.vl.vstot.vssl" => "__builtin_ve_vl_vstot_vssl", - "llvm.ve.vl.vstot.vssml" => "__builtin_ve_vl_vstot_vssml", - "llvm.ve.vl.vstu.vssl" => "__builtin_ve_vl_vstu_vssl", - "llvm.ve.vl.vstu.vssml" => "__builtin_ve_vl_vstu_vssml", - "llvm.ve.vl.vstu2d.vssl" => "__builtin_ve_vl_vstu2d_vssl", - "llvm.ve.vl.vstu2d.vssml" => "__builtin_ve_vl_vstu2d_vssml", - "llvm.ve.vl.vstu2dnc.vssl" => "__builtin_ve_vl_vstu2dnc_vssl", - "llvm.ve.vl.vstu2dnc.vssml" => "__builtin_ve_vl_vstu2dnc_vssml", - "llvm.ve.vl.vstu2dncot.vssl" => "__builtin_ve_vl_vstu2dncot_vssl", - "llvm.ve.vl.vstu2dncot.vssml" => "__builtin_ve_vl_vstu2dncot_vssml", - "llvm.ve.vl.vstu2dot.vssl" => "__builtin_ve_vl_vstu2dot_vssl", - "llvm.ve.vl.vstu2dot.vssml" => "__builtin_ve_vl_vstu2dot_vssml", - "llvm.ve.vl.vstunc.vssl" => "__builtin_ve_vl_vstunc_vssl", - "llvm.ve.vl.vstunc.vssml" => "__builtin_ve_vl_vstunc_vssml", - "llvm.ve.vl.vstuncot.vssl" => "__builtin_ve_vl_vstuncot_vssl", - "llvm.ve.vl.vstuncot.vssml" => "__builtin_ve_vl_vstuncot_vssml", - "llvm.ve.vl.vstuot.vssl" => "__builtin_ve_vl_vstuot_vssl", - "llvm.ve.vl.vstuot.vssml" => "__builtin_ve_vl_vstuot_vssml", - "llvm.ve.vl.vsubsl.vsvl" => "__builtin_ve_vl_vsubsl_vsvl", - "llvm.ve.vl.vsubsl.vsvmvl" => "__builtin_ve_vl_vsubsl_vsvmvl", - "llvm.ve.vl.vsubsl.vsvvl" => "__builtin_ve_vl_vsubsl_vsvvl", - "llvm.ve.vl.vsubsl.vvvl" => "__builtin_ve_vl_vsubsl_vvvl", - "llvm.ve.vl.vsubsl.vvvmvl" => "__builtin_ve_vl_vsubsl_vvvmvl", - "llvm.ve.vl.vsubsl.vvvvl" => "__builtin_ve_vl_vsubsl_vvvvl", - "llvm.ve.vl.vsubswsx.vsvl" => "__builtin_ve_vl_vsubswsx_vsvl", - "llvm.ve.vl.vsubswsx.vsvmvl" => "__builtin_ve_vl_vsubswsx_vsvmvl", - "llvm.ve.vl.vsubswsx.vsvvl" => "__builtin_ve_vl_vsubswsx_vsvvl", - "llvm.ve.vl.vsubswsx.vvvl" => "__builtin_ve_vl_vsubswsx_vvvl", - "llvm.ve.vl.vsubswsx.vvvmvl" => "__builtin_ve_vl_vsubswsx_vvvmvl", - "llvm.ve.vl.vsubswsx.vvvvl" => "__builtin_ve_vl_vsubswsx_vvvvl", - "llvm.ve.vl.vsubswzx.vsvl" => "__builtin_ve_vl_vsubswzx_vsvl", - "llvm.ve.vl.vsubswzx.vsvmvl" => "__builtin_ve_vl_vsubswzx_vsvmvl", - "llvm.ve.vl.vsubswzx.vsvvl" => "__builtin_ve_vl_vsubswzx_vsvvl", - "llvm.ve.vl.vsubswzx.vvvl" 
=> "__builtin_ve_vl_vsubswzx_vvvl", - "llvm.ve.vl.vsubswzx.vvvmvl" => "__builtin_ve_vl_vsubswzx_vvvmvl", - "llvm.ve.vl.vsubswzx.vvvvl" => "__builtin_ve_vl_vsubswzx_vvvvl", - "llvm.ve.vl.vsubul.vsvl" => "__builtin_ve_vl_vsubul_vsvl", - "llvm.ve.vl.vsubul.vsvmvl" => "__builtin_ve_vl_vsubul_vsvmvl", - "llvm.ve.vl.vsubul.vsvvl" => "__builtin_ve_vl_vsubul_vsvvl", - "llvm.ve.vl.vsubul.vvvl" => "__builtin_ve_vl_vsubul_vvvl", - "llvm.ve.vl.vsubul.vvvmvl" => "__builtin_ve_vl_vsubul_vvvmvl", - "llvm.ve.vl.vsubul.vvvvl" => "__builtin_ve_vl_vsubul_vvvvl", - "llvm.ve.vl.vsubuw.vsvl" => "__builtin_ve_vl_vsubuw_vsvl", - "llvm.ve.vl.vsubuw.vsvmvl" => "__builtin_ve_vl_vsubuw_vsvmvl", - "llvm.ve.vl.vsubuw.vsvvl" => "__builtin_ve_vl_vsubuw_vsvvl", - "llvm.ve.vl.vsubuw.vvvl" => "__builtin_ve_vl_vsubuw_vvvl", - "llvm.ve.vl.vsubuw.vvvmvl" => "__builtin_ve_vl_vsubuw_vvvmvl", - "llvm.ve.vl.vsubuw.vvvvl" => "__builtin_ve_vl_vsubuw_vvvvl", - "llvm.ve.vl.vsuml.vvl" => "__builtin_ve_vl_vsuml_vvl", - "llvm.ve.vl.vsuml.vvml" => "__builtin_ve_vl_vsuml_vvml", - "llvm.ve.vl.vsumwsx.vvl" => "__builtin_ve_vl_vsumwsx_vvl", - "llvm.ve.vl.vsumwsx.vvml" => "__builtin_ve_vl_vsumwsx_vvml", - "llvm.ve.vl.vsumwzx.vvl" => "__builtin_ve_vl_vsumwzx_vvl", - "llvm.ve.vl.vsumwzx.vvml" => "__builtin_ve_vl_vsumwzx_vvml", - "llvm.ve.vl.vxor.vsvl" => "__builtin_ve_vl_vxor_vsvl", - "llvm.ve.vl.vxor.vsvmvl" => "__builtin_ve_vl_vxor_vsvmvl", - "llvm.ve.vl.vxor.vsvvl" => "__builtin_ve_vl_vxor_vsvvl", - "llvm.ve.vl.vxor.vvvl" => "__builtin_ve_vl_vxor_vvvl", - "llvm.ve.vl.vxor.vvvmvl" => "__builtin_ve_vl_vxor_vvvmvl", - "llvm.ve.vl.vxor.vvvvl" => "__builtin_ve_vl_vxor_vvvvl", - "llvm.ve.vl.xorm.MMM" => "__builtin_ve_vl_xorm_MMM", - "llvm.ve.vl.xorm.mmm" => "__builtin_ve_vl_xorm_mmm", - // x86 - "llvm.x86.aadd32" => "__builtin_ia32_aadd32", - "llvm.x86.aadd64" => "__builtin_ia32_aadd64", - "llvm.x86.aand32" => "__builtin_ia32_aand32", - "llvm.x86.aand64" => "__builtin_ia32_aand64", - "llvm.x86.addcarry.u32" => "__builtin_ia32_addcarry_u32", - "llvm.x86.addcarry.u64" => "__builtin_ia32_addcarry_u64", - "llvm.x86.addcarryx.u32" => "__builtin_ia32_addcarryx_u32", - "llvm.x86.addcarryx.u64" => "__builtin_ia32_addcarryx_u64", - "llvm.x86.aesni.aesdec" => "__builtin_ia32_aesdec128", - "llvm.x86.aesni.aesdec.256" => "__builtin_ia32_aesdec256", - "llvm.x86.aesni.aesdec.512" => "__builtin_ia32_aesdec512", - "llvm.x86.aesni.aesdeclast" => "__builtin_ia32_aesdeclast128", - "llvm.x86.aesni.aesdeclast.256" => "__builtin_ia32_aesdeclast256", - "llvm.x86.aesni.aesdeclast.512" => "__builtin_ia32_aesdeclast512", - "llvm.x86.aesni.aesenc" => "__builtin_ia32_aesenc128", - "llvm.x86.aesni.aesenc.256" => "__builtin_ia32_aesenc256", - "llvm.x86.aesni.aesenc.512" => "__builtin_ia32_aesenc512", - "llvm.x86.aesni.aesenclast" => "__builtin_ia32_aesenclast128", - "llvm.x86.aesni.aesenclast.256" => "__builtin_ia32_aesenclast256", - "llvm.x86.aesni.aesenclast.512" => "__builtin_ia32_aesenclast512", - "llvm.x86.aesni.aesimc" => "__builtin_ia32_aesimc128", - "llvm.x86.aesni.aeskeygenassist" => "__builtin_ia32_aeskeygenassist128", - "llvm.x86.aor32" => "__builtin_ia32_aor32", - "llvm.x86.aor64" => "__builtin_ia32_aor64", - "llvm.x86.avx.addsub.pd.256" => "__builtin_ia32_addsubpd256", - "llvm.x86.avx.addsub.ps.256" => "__builtin_ia32_addsubps256", - "llvm.x86.avx.blend.pd.256" => "__builtin_ia32_blendpd256", - "llvm.x86.avx.blend.ps.256" => "__builtin_ia32_blendps256", - "llvm.x86.avx.blendv.pd.256" => "__builtin_ia32_blendvpd256", - "llvm.x86.avx.blendv.ps.256" => 
"__builtin_ia32_blendvps256", - "llvm.x86.avx.cmp.pd.256" => "__builtin_ia32_cmppd256", - "llvm.x86.avx.cmp.ps.256" => "__builtin_ia32_cmpps256", - "llvm.x86.avx.cvt.pd2.ps.256" => "__builtin_ia32_cvtpd2ps256", - "llvm.x86.avx.cvt.pd2dq.256" => "__builtin_ia32_cvtpd2dq256", - "llvm.x86.avx.cvt.ps2.pd.256" => "__builtin_ia32_cvtps2pd256", - "llvm.x86.avx.cvt.ps2dq.256" => "__builtin_ia32_cvtps2dq256", - "llvm.x86.avx.cvtdq2.pd.256" => "__builtin_ia32_cvtdq2pd256", - "llvm.x86.avx.cvtdq2.ps.256" => "__builtin_ia32_cvtdq2ps256", - "llvm.x86.avx.cvtt.pd2dq.256" => "__builtin_ia32_cvttpd2dq256", - "llvm.x86.avx.cvtt.ps2dq.256" => "__builtin_ia32_cvttps2dq256", - "llvm.x86.avx.dp.ps.256" => "__builtin_ia32_dpps256", - "llvm.x86.avx.hadd.pd.256" => "__builtin_ia32_haddpd256", - "llvm.x86.avx.hadd.ps.256" => "__builtin_ia32_haddps256", - "llvm.x86.avx.hsub.pd.256" => "__builtin_ia32_hsubpd256", - "llvm.x86.avx.hsub.ps.256" => "__builtin_ia32_hsubps256", - "llvm.x86.avx.ldu.dq.256" => "__builtin_ia32_lddqu256", - "llvm.x86.avx.maskload.pd" => "__builtin_ia32_maskloadpd", - "llvm.x86.avx.maskload.pd.256" => "__builtin_ia32_maskloadpd256", - "llvm.x86.avx.maskload.ps" => "__builtin_ia32_maskloadps", - "llvm.x86.avx.maskload.ps.256" => "__builtin_ia32_maskloadps256", - "llvm.x86.avx.maskstore.pd" => "__builtin_ia32_maskstorepd", - "llvm.x86.avx.maskstore.pd.256" => "__builtin_ia32_maskstorepd256", - "llvm.x86.avx.maskstore.ps" => "__builtin_ia32_maskstoreps", - "llvm.x86.avx.maskstore.ps.256" => "__builtin_ia32_maskstoreps256", - "llvm.x86.avx.max.pd.256" => "__builtin_ia32_maxpd256", - "llvm.x86.avx.max.ps.256" => "__builtin_ia32_maxps256", - "llvm.x86.avx.min.pd.256" => "__builtin_ia32_minpd256", - "llvm.x86.avx.min.ps.256" => "__builtin_ia32_minps256", - "llvm.x86.avx.movmsk.pd.256" => "__builtin_ia32_movmskpd256", - "llvm.x86.avx.movmsk.ps.256" => "__builtin_ia32_movmskps256", - "llvm.x86.avx.ptestc.256" => "__builtin_ia32_ptestc256", - "llvm.x86.avx.ptestnzc.256" => "__builtin_ia32_ptestnzc256", - "llvm.x86.avx.ptestz.256" => "__builtin_ia32_ptestz256", - "llvm.x86.avx.rcp.ps.256" => "__builtin_ia32_rcpps256", - "llvm.x86.avx.round.pd.256" => "__builtin_ia32_roundpd256", - "llvm.x86.avx.round.ps.256" => "__builtin_ia32_roundps256", - "llvm.x86.avx.rsqrt.ps.256" => "__builtin_ia32_rsqrtps256", - "llvm.x86.avx.sqrt.pd.256" => "__builtin_ia32_sqrtpd256", - "llvm.x86.avx.sqrt.ps.256" => "__builtin_ia32_sqrtps256", - "llvm.x86.avx.storeu.dq.256" => "__builtin_ia32_storedqu256", - "llvm.x86.avx.storeu.pd.256" => "__builtin_ia32_storeupd256", - "llvm.x86.avx.storeu.ps.256" => "__builtin_ia32_storeups256", - "llvm.x86.avx.vbroadcastf128.pd.256" => "__builtin_ia32_vbroadcastf128_pd256", - "llvm.x86.avx.vbroadcastf128.ps.256" => "__builtin_ia32_vbroadcastf128_ps256", - "llvm.x86.avx.vextractf128.pd.256" => "__builtin_ia32_vextractf128_pd256", - "llvm.x86.avx.vextractf128.ps.256" => "__builtin_ia32_vextractf128_ps256", - "llvm.x86.avx.vextractf128.si.256" => "__builtin_ia32_vextractf128_si256", - "llvm.x86.avx.vinsertf128.pd.256" => "__builtin_ia32_vinsertf128_pd256", - "llvm.x86.avx.vinsertf128.ps.256" => "__builtin_ia32_vinsertf128_ps256", - "llvm.x86.avx.vinsertf128.si.256" => "__builtin_ia32_vinsertf128_si256", - "llvm.x86.avx.vperm2f128.pd.256" => "__builtin_ia32_vperm2f128_pd256", - "llvm.x86.avx.vperm2f128.ps.256" => "__builtin_ia32_vperm2f128_ps256", - "llvm.x86.avx.vperm2f128.si.256" => "__builtin_ia32_vperm2f128_si256", - "llvm.x86.avx.vpermilvar.pd" => "__builtin_ia32_vpermilvarpd", - 
"llvm.x86.avx.vpermilvar.pd.256" => "__builtin_ia32_vpermilvarpd256", - "llvm.x86.avx.vpermilvar.ps" => "__builtin_ia32_vpermilvarps", - "llvm.x86.avx.vpermilvar.ps.256" => "__builtin_ia32_vpermilvarps256", - "llvm.x86.avx.vtestc.pd" => "__builtin_ia32_vtestcpd", - "llvm.x86.avx.vtestc.pd.256" => "__builtin_ia32_vtestcpd256", - "llvm.x86.avx.vtestc.ps" => "__builtin_ia32_vtestcps", - "llvm.x86.avx.vtestc.ps.256" => "__builtin_ia32_vtestcps256", - "llvm.x86.avx.vtestnzc.pd" => "__builtin_ia32_vtestnzcpd", - "llvm.x86.avx.vtestnzc.pd.256" => "__builtin_ia32_vtestnzcpd256", - "llvm.x86.avx.vtestnzc.ps" => "__builtin_ia32_vtestnzcps", - "llvm.x86.avx.vtestnzc.ps.256" => "__builtin_ia32_vtestnzcps256", - "llvm.x86.avx.vtestz.pd" => "__builtin_ia32_vtestzpd", - "llvm.x86.avx.vtestz.pd.256" => "__builtin_ia32_vtestzpd256", - "llvm.x86.avx.vtestz.ps" => "__builtin_ia32_vtestzps", - "llvm.x86.avx.vtestz.ps.256" => "__builtin_ia32_vtestzps256", - "llvm.x86.avx.vzeroall" => "__builtin_ia32_vzeroall", - "llvm.x86.avx.vzeroupper" => "__builtin_ia32_vzeroupper", - "llvm.x86.avx10.mask.getexp.bf16.128" => "__builtin_ia32_vgetexpbf16128_mask", - "llvm.x86.avx10.mask.getexp.bf16.256" => "__builtin_ia32_vgetexpbf16256_mask", - "llvm.x86.avx10.mask.getexp.bf16.512" => "__builtin_ia32_vgetexpbf16512_mask", - "llvm.x86.avx10.mask.getmant.bf16.128" => "__builtin_ia32_vgetmantbf16128_mask", - "llvm.x86.avx10.mask.getmant.bf16.256" => "__builtin_ia32_vgetmantbf16256_mask", - "llvm.x86.avx10.mask.getmant.bf16.512" => "__builtin_ia32_vgetmantbf16512_mask", - "llvm.x86.avx10.mask.rcp.bf16.128" => "__builtin_ia32_vrcpbf16128_mask", - "llvm.x86.avx10.mask.rcp.bf16.256" => "__builtin_ia32_vrcpbf16256_mask", - "llvm.x86.avx10.mask.rcp.bf16.512" => "__builtin_ia32_vrcpbf16512_mask", - "llvm.x86.avx10.mask.reduce.bf16.128" => "__builtin_ia32_vreducebf16128_mask", - "llvm.x86.avx10.mask.reduce.bf16.256" => "__builtin_ia32_vreducebf16256_mask", - "llvm.x86.avx10.mask.reduce.bf16.512" => "__builtin_ia32_vreducebf16512_mask", - "llvm.x86.avx10.mask.rndscale.bf16.128" => "__builtin_ia32_vrndscalebf16_128_mask", - "llvm.x86.avx10.mask.rndscale.bf16.256" => "__builtin_ia32_vrndscalebf16_256_mask", - "llvm.x86.avx10.mask.rndscale.bf16.512" => "__builtin_ia32_vrndscalebf16_mask", - "llvm.x86.avx10.mask.rsqrt.bf16.128" => "__builtin_ia32_vrsqrtbf16128_mask", - "llvm.x86.avx10.mask.rsqrt.bf16.256" => "__builtin_ia32_vrsqrtbf16256_mask", - "llvm.x86.avx10.mask.rsqrt.bf16.512" => "__builtin_ia32_vrsqrtbf16512_mask", - "llvm.x86.avx10.mask.scalef.bf16.128" => "__builtin_ia32_vscalefbf16128_mask", - "llvm.x86.avx10.mask.scalef.bf16.256" => "__builtin_ia32_vscalefbf16256_mask", - "llvm.x86.avx10.mask.scalef.bf16.512" => "__builtin_ia32_vscalefbf16512_mask", - "llvm.x86.avx10.mask.vcvt2ps2phx.128" => "__builtin_ia32_vcvt2ps2phx128_mask", - "llvm.x86.avx10.mask.vcvt2ps2phx.256" => "__builtin_ia32_vcvt2ps2phx256_mask", - "llvm.x86.avx10.mask.vcvt2ps2phx.512" => "__builtin_ia32_vcvt2ps2phx512_mask", - "llvm.x86.avx10.mask.vcvtbiasph2bf8128" => "__builtin_ia32_vcvtbiasph2bf8_128_mask", - "llvm.x86.avx10.mask.vcvtbiasph2bf8256" => "__builtin_ia32_vcvtbiasph2bf8_256_mask", - "llvm.x86.avx10.mask.vcvtbiasph2bf8512" => "__builtin_ia32_vcvtbiasph2bf8_512_mask", - "llvm.x86.avx10.mask.vcvtbiasph2bf8s128" => "__builtin_ia32_vcvtbiasph2bf8s_128_mask", - "llvm.x86.avx10.mask.vcvtbiasph2bf8s256" => "__builtin_ia32_vcvtbiasph2bf8s_256_mask", - "llvm.x86.avx10.mask.vcvtbiasph2bf8s512" => "__builtin_ia32_vcvtbiasph2bf8s_512_mask", - 
"llvm.x86.avx10.mask.vcvtbiasph2hf8128" => "__builtin_ia32_vcvtbiasph2hf8_128_mask", - "llvm.x86.avx10.mask.vcvtbiasph2hf8256" => "__builtin_ia32_vcvtbiasph2hf8_256_mask", - "llvm.x86.avx10.mask.vcvtbiasph2hf8512" => "__builtin_ia32_vcvtbiasph2hf8_512_mask", - "llvm.x86.avx10.mask.vcvtbiasph2hf8s128" => "__builtin_ia32_vcvtbiasph2hf8s_128_mask", - "llvm.x86.avx10.mask.vcvtbiasph2hf8s256" => "__builtin_ia32_vcvtbiasph2hf8s_256_mask", - "llvm.x86.avx10.mask.vcvtbiasph2hf8s512" => "__builtin_ia32_vcvtbiasph2hf8s_512_mask", - "llvm.x86.avx10.mask.vcvthf82ph128" => "__builtin_ia32_vcvthf8_2ph128_mask", - "llvm.x86.avx10.mask.vcvthf82ph256" => "__builtin_ia32_vcvthf8_2ph256_mask", - "llvm.x86.avx10.mask.vcvthf82ph512" => "__builtin_ia32_vcvthf8_2ph512_mask", - "llvm.x86.avx10.mask.vcvtph2bf8128" => "__builtin_ia32_vcvtph2bf8_128_mask", - "llvm.x86.avx10.mask.vcvtph2bf8256" => "__builtin_ia32_vcvtph2bf8_256_mask", - "llvm.x86.avx10.mask.vcvtph2bf8512" => "__builtin_ia32_vcvtph2bf8_512_mask", - "llvm.x86.avx10.mask.vcvtph2bf8s128" => "__builtin_ia32_vcvtph2bf8s_128_mask", - "llvm.x86.avx10.mask.vcvtph2bf8s256" => "__builtin_ia32_vcvtph2bf8s_256_mask", - "llvm.x86.avx10.mask.vcvtph2bf8s512" => "__builtin_ia32_vcvtph2bf8s_512_mask", - "llvm.x86.avx10.mask.vcvtph2hf8128" => "__builtin_ia32_vcvtph2hf8_128_mask", - "llvm.x86.avx10.mask.vcvtph2hf8256" => "__builtin_ia32_vcvtph2hf8_256_mask", - "llvm.x86.avx10.mask.vcvtph2hf8512" => "__builtin_ia32_vcvtph2hf8_512_mask", - "llvm.x86.avx10.mask.vcvtph2hf8s128" => "__builtin_ia32_vcvtph2hf8s_128_mask", - "llvm.x86.avx10.mask.vcvtph2hf8s256" => "__builtin_ia32_vcvtph2hf8s_256_mask", - "llvm.x86.avx10.mask.vcvtph2hf8s512" => "__builtin_ia32_vcvtph2hf8s_512_mask", - "llvm.x86.avx10.mask.vcvtph2ibs128" => "__builtin_ia32_vcvtph2ibs128_mask", - "llvm.x86.avx10.mask.vcvtph2ibs256" => "__builtin_ia32_vcvtph2ibs256_mask", - "llvm.x86.avx10.mask.vcvtph2ibs512" => "__builtin_ia32_vcvtph2ibs512_mask", - "llvm.x86.avx10.mask.vcvtph2iubs128" => "__builtin_ia32_vcvtph2iubs128_mask", - "llvm.x86.avx10.mask.vcvtph2iubs256" => "__builtin_ia32_vcvtph2iubs256_mask", - "llvm.x86.avx10.mask.vcvtph2iubs512" => "__builtin_ia32_vcvtph2iubs512_mask", - "llvm.x86.avx10.mask.vcvtps2ibs128" => "__builtin_ia32_vcvtps2ibs128_mask", - "llvm.x86.avx10.mask.vcvtps2ibs256" => "__builtin_ia32_vcvtps2ibs256_mask", - "llvm.x86.avx10.mask.vcvtps2ibs512" => "__builtin_ia32_vcvtps2ibs512_mask", - "llvm.x86.avx10.mask.vcvtps2iubs128" => "__builtin_ia32_vcvtps2iubs128_mask", - "llvm.x86.avx10.mask.vcvtps2iubs256" => "__builtin_ia32_vcvtps2iubs256_mask", - "llvm.x86.avx10.mask.vcvtps2iubs512" => "__builtin_ia32_vcvtps2iubs512_mask", - "llvm.x86.avx10.mask.vcvttpd2dqs.128" => "__builtin_ia32_vcvttpd2dqs128_mask", - "llvm.x86.avx10.mask.vcvttpd2dqs.256" => "__builtin_ia32_vcvttpd2dqs256_mask", - // [INVALID CONVERSION]: "llvm.x86.avx10.mask.vcvttpd2dqs.round.512" => "__builtin_ia32_vcvttpd2dqs512_round_mask", - "llvm.x86.avx10.mask.vcvttpd2qqs.128" => "__builtin_ia32_vcvttpd2qqs128_mask", - "llvm.x86.avx10.mask.vcvttpd2qqs.256" => "__builtin_ia32_vcvttpd2qqs256_mask", - // [INVALID CONVERSION]: "llvm.x86.avx10.mask.vcvttpd2qqs.round.512" => "__builtin_ia32_vcvttpd2qqs512_round_mask", - "llvm.x86.avx10.mask.vcvttpd2udqs.128" => "__builtin_ia32_vcvttpd2udqs128_mask", - "llvm.x86.avx10.mask.vcvttpd2udqs.256" => "__builtin_ia32_vcvttpd2udqs256_mask", - // [INVALID CONVERSION]: "llvm.x86.avx10.mask.vcvttpd2udqs.round.512" => "__builtin_ia32_vcvttpd2udqs512_round_mask", - 
"llvm.x86.avx10.mask.vcvttpd2uqqs.128" => "__builtin_ia32_vcvttpd2uqqs128_mask", - "llvm.x86.avx10.mask.vcvttpd2uqqs.256" => "__builtin_ia32_vcvttpd2uqqs256_mask", - // [INVALID CONVERSION]: "llvm.x86.avx10.mask.vcvttpd2uqqs.round.512" => "__builtin_ia32_vcvttpd2uqqs512_round_mask", - "llvm.x86.avx10.mask.vcvttph2ibs128" => "__builtin_ia32_vcvttph2ibs128_mask", - "llvm.x86.avx10.mask.vcvttph2ibs256" => "__builtin_ia32_vcvttph2ibs256_mask", - "llvm.x86.avx10.mask.vcvttph2ibs512" => "__builtin_ia32_vcvttph2ibs512_mask", - "llvm.x86.avx10.mask.vcvttph2iubs128" => "__builtin_ia32_vcvttph2iubs128_mask", - "llvm.x86.avx10.mask.vcvttph2iubs256" => "__builtin_ia32_vcvttph2iubs256_mask", - "llvm.x86.avx10.mask.vcvttph2iubs512" => "__builtin_ia32_vcvttph2iubs512_mask", - "llvm.x86.avx10.mask.vcvttps2dqs.128" => "__builtin_ia32_vcvttps2dqs128_mask", - "llvm.x86.avx10.mask.vcvttps2dqs.256" => "__builtin_ia32_vcvttps2dqs256_mask", - // [INVALID CONVERSION]: "llvm.x86.avx10.mask.vcvttps2dqs.round.512" => "__builtin_ia32_vcvttps2dqs512_round_mask", - "llvm.x86.avx10.mask.vcvttps2ibs128" => "__builtin_ia32_vcvttps2ibs128_mask", - "llvm.x86.avx10.mask.vcvttps2ibs256" => "__builtin_ia32_vcvttps2ibs256_mask", - "llvm.x86.avx10.mask.vcvttps2ibs512" => "__builtin_ia32_vcvttps2ibs512_mask", - "llvm.x86.avx10.mask.vcvttps2iubs128" => "__builtin_ia32_vcvttps2iubs128_mask", - "llvm.x86.avx10.mask.vcvttps2iubs256" => "__builtin_ia32_vcvttps2iubs256_mask", - "llvm.x86.avx10.mask.vcvttps2iubs512" => "__builtin_ia32_vcvttps2iubs512_mask", - "llvm.x86.avx10.mask.vcvttps2qqs.128" => "__builtin_ia32_vcvttps2qqs128_mask", - "llvm.x86.avx10.mask.vcvttps2qqs.256" => "__builtin_ia32_vcvttps2qqs256_mask", - // [INVALID CONVERSION]: "llvm.x86.avx10.mask.vcvttps2qqs.round.512" => "__builtin_ia32_vcvttps2qqs512_round_mask", - "llvm.x86.avx10.mask.vcvttps2udqs.128" => "__builtin_ia32_vcvttps2udqs128_mask", - "llvm.x86.avx10.mask.vcvttps2udqs.256" => "__builtin_ia32_vcvttps2udqs256_mask", - // [INVALID CONVERSION]: "llvm.x86.avx10.mask.vcvttps2udqs.round.512" => "__builtin_ia32_vcvttps2udqs512_round_mask", - "llvm.x86.avx10.mask.vcvttps2uqqs.128" => "__builtin_ia32_vcvttps2uqqs128_mask", - "llvm.x86.avx10.mask.vcvttps2uqqs.256" => "__builtin_ia32_vcvttps2uqqs256_mask", - // [INVALID CONVERSION]: "llvm.x86.avx10.mask.vcvttps2uqqs.round.512" => "__builtin_ia32_vcvttps2uqqs512_round_mask", - // [INVALID CONVERSION]: "llvm.x86.avx10.mask.vminmaxpd.round" => "__builtin_ia32_vminmaxpd512_round_mask", - "llvm.x86.avx10.mask.vminmaxpd128" => "__builtin_ia32_vminmaxpd128_mask", - "llvm.x86.avx10.mask.vminmaxpd256" => "__builtin_ia32_vminmaxpd256_mask", - // [INVALID CONVERSION]: "llvm.x86.avx10.mask.vminmaxph.round" => "__builtin_ia32_vminmaxph512_round_mask", - "llvm.x86.avx10.mask.vminmaxph128" => "__builtin_ia32_vminmaxph128_mask", - "llvm.x86.avx10.mask.vminmaxph256" => "__builtin_ia32_vminmaxph256_mask", - // [INVALID CONVERSION]: "llvm.x86.avx10.mask.vminmaxps.round" => "__builtin_ia32_vminmaxps512_round_mask", - "llvm.x86.avx10.mask.vminmaxps128" => "__builtin_ia32_vminmaxps128_mask", - "llvm.x86.avx10.mask.vminmaxps256" => "__builtin_ia32_vminmaxps256_mask", - // [INVALID CONVERSION]: "llvm.x86.avx10.mask.vminmaxsd.round" => "__builtin_ia32_vminmaxsd_round_mask", - // [INVALID CONVERSION]: "llvm.x86.avx10.mask.vminmaxsh.round" => "__builtin_ia32_vminmaxsh_round_mask", - // [INVALID CONVERSION]: "llvm.x86.avx10.mask.vminmaxss.round" => "__builtin_ia32_vminmaxss_round_mask", - "llvm.x86.avx10.vaddbf16128" => 
"__builtin_ia32_vaddbf16128", - "llvm.x86.avx10.vaddbf16256" => "__builtin_ia32_vaddbf16256", - "llvm.x86.avx10.vaddbf16512" => "__builtin_ia32_vaddbf16512", - "llvm.x86.avx10.vaddpd256" => "__builtin_ia32_vaddpd256_round", - "llvm.x86.avx10.vaddph256" => "__builtin_ia32_vaddph256_round", - "llvm.x86.avx10.vaddps256" => "__builtin_ia32_vaddps256_round", - "llvm.x86.avx10.vcomisbf16eq" => "__builtin_ia32_vcomisbf16eq", - "llvm.x86.avx10.vcomisbf16ge" => "__builtin_ia32_vcomisbf16ge", - "llvm.x86.avx10.vcomisbf16gt" => "__builtin_ia32_vcomisbf16gt", - "llvm.x86.avx10.vcomisbf16le" => "__builtin_ia32_vcomisbf16le", - "llvm.x86.avx10.vcomisbf16lt" => "__builtin_ia32_vcomisbf16lt", - "llvm.x86.avx10.vcomisbf16neq" => "__builtin_ia32_vcomisbf16neq", - "llvm.x86.avx10.vcvt2ph2bf8128" => "__builtin_ia32_vcvt2ph2bf8_128", - "llvm.x86.avx10.vcvt2ph2bf8256" => "__builtin_ia32_vcvt2ph2bf8_256", - "llvm.x86.avx10.vcvt2ph2bf8512" => "__builtin_ia32_vcvt2ph2bf8_512", - "llvm.x86.avx10.vcvt2ph2bf8s128" => "__builtin_ia32_vcvt2ph2bf8s_128", - "llvm.x86.avx10.vcvt2ph2bf8s256" => "__builtin_ia32_vcvt2ph2bf8s_256", - "llvm.x86.avx10.vcvt2ph2bf8s512" => "__builtin_ia32_vcvt2ph2bf8s_512", - "llvm.x86.avx10.vcvt2ph2hf8128" => "__builtin_ia32_vcvt2ph2hf8_128", - "llvm.x86.avx10.vcvt2ph2hf8256" => "__builtin_ia32_vcvt2ph2hf8_256", - "llvm.x86.avx10.vcvt2ph2hf8512" => "__builtin_ia32_vcvt2ph2hf8_512", - "llvm.x86.avx10.vcvt2ph2hf8s128" => "__builtin_ia32_vcvt2ph2hf8s_128", - "llvm.x86.avx10.vcvt2ph2hf8s256" => "__builtin_ia32_vcvt2ph2hf8s_256", - "llvm.x86.avx10.vcvt2ph2hf8s512" => "__builtin_ia32_vcvt2ph2hf8s_512", - "llvm.x86.avx10.vcvtbf162ibs128" => "__builtin_ia32_vcvtbf162ibs128", - "llvm.x86.avx10.vcvtbf162ibs256" => "__builtin_ia32_vcvtbf162ibs256", - "llvm.x86.avx10.vcvtbf162ibs512" => "__builtin_ia32_vcvtbf162ibs512", - "llvm.x86.avx10.vcvtbf162iubs128" => "__builtin_ia32_vcvtbf162iubs128", - "llvm.x86.avx10.vcvtbf162iubs256" => "__builtin_ia32_vcvtbf162iubs256", - "llvm.x86.avx10.vcvtbf162iubs512" => "__builtin_ia32_vcvtbf162iubs512", - "llvm.x86.avx10.vcvttbf162ibs128" => "__builtin_ia32_vcvttbf162ibs128", - "llvm.x86.avx10.vcvttbf162ibs256" => "__builtin_ia32_vcvttbf162ibs256", - "llvm.x86.avx10.vcvttbf162ibs512" => "__builtin_ia32_vcvttbf162ibs512", - "llvm.x86.avx10.vcvttbf162iubs128" => "__builtin_ia32_vcvttbf162iubs128", - "llvm.x86.avx10.vcvttbf162iubs256" => "__builtin_ia32_vcvttbf162iubs256", - "llvm.x86.avx10.vcvttbf162iubs512" => "__builtin_ia32_vcvttbf162iubs512", - "llvm.x86.avx10.vcvttsd2sis" => "__builtin_ia32_vcvttsd2sis32", - "llvm.x86.avx10.vcvttsd2sis64" => "__builtin_ia32_vcvttsd2sis64", - "llvm.x86.avx10.vcvttsd2usis" => "__builtin_ia32_vcvttsd2usis32", - "llvm.x86.avx10.vcvttsd2usis64" => "__builtin_ia32_vcvttsd2usis64", - "llvm.x86.avx10.vcvttss2sis" => "__builtin_ia32_vcvttss2sis32", - "llvm.x86.avx10.vcvttss2sis64" => "__builtin_ia32_vcvttss2sis64", - "llvm.x86.avx10.vcvttss2usis" => "__builtin_ia32_vcvttss2usis32", - "llvm.x86.avx10.vcvttss2usis64" => "__builtin_ia32_vcvttss2usis64", - "llvm.x86.avx10.vdivbf16128" => "__builtin_ia32_vdivbf16128", - "llvm.x86.avx10.vdivbf16256" => "__builtin_ia32_vdivbf16256", - "llvm.x86.avx10.vdivbf16512" => "__builtin_ia32_vdivbf16512", - "llvm.x86.avx10.vdpphps.128" => "__builtin_ia32_vdpphps128", - "llvm.x86.avx10.vdpphps.256" => "__builtin_ia32_vdpphps256", - "llvm.x86.avx10.vdpphps.512" => "__builtin_ia32_vdpphps512", - "llvm.x86.avx10.vfmadd132bf16128" => "__builtin_ia32_vfmadd132bf16128", - "llvm.x86.avx10.vfmadd132bf16256" => 
"__builtin_ia32_vfmadd132bf16256", - "llvm.x86.avx10.vfmadd132bf16512" => "__builtin_ia32_vfmadd132bf16512", - "llvm.x86.avx10.vfmadd213bf16128" => "__builtin_ia32_vfmadd213bf16128", - "llvm.x86.avx10.vfmadd213bf16256" => "__builtin_ia32_vfmadd213bf16256", - "llvm.x86.avx10.vfmadd231bf16128" => "__builtin_ia32_vfmadd231bf16128", - "llvm.x86.avx10.vfmadd231bf16256" => "__builtin_ia32_vfmadd231bf16256", - "llvm.x86.avx10.vfmadd231bf16512" => "__builtin_ia32_vfmadd231bf16512", - "llvm.x86.avx10.vfmsub132bf16128" => "__builtin_ia32_vfmsub132bf16128", - "llvm.x86.avx10.vfmsub132bf16256" => "__builtin_ia32_vfmsub132bf16256", - "llvm.x86.avx10.vfmsub132bf16512" => "__builtin_ia32_vfmsub132bf16512", - "llvm.x86.avx10.vfmsub213bf16128" => "__builtin_ia32_vfmsub213bf16128", - "llvm.x86.avx10.vfmsub213bf16256" => "__builtin_ia32_vfmsub213bf16256", - "llvm.x86.avx10.vfmsub213bf16512" => "__builtin_ia32_vfmsub213bf16512", - "llvm.x86.avx10.vfmsub231bf16128" => "__builtin_ia32_vfmsub231bf16128", - "llvm.x86.avx10.vfmsub231bf16256" => "__builtin_ia32_vfmsub231bf16256", - "llvm.x86.avx10.vfmsub231bf16512" => "__builtin_ia32_vfmsub231bf16512", - "llvm.x86.avx10.vfnmadd132bf16128" => "__builtin_ia32_vfnmadd132bf16128", - "llvm.x86.avx10.vfnmadd132bf16256" => "__builtin_ia32_vfnmadd132bf16256", - "llvm.x86.avx10.vfnmadd132bf16512" => "__builtin_ia32_vfnmadd132bf16512", - "llvm.x86.avx10.vfnmadd213bf16128" => "__builtin_ia32_vfnmadd213bf16128", - "llvm.x86.avx10.vfnmadd213bf16256" => "__builtin_ia32_vfnmadd213bf16256", - "llvm.x86.avx10.vfnmadd213bf16512" => "__builtin_ia32_vfnmadd213bf16512", - "llvm.x86.avx10.vfnmadd231bf16128" => "__builtin_ia32_vfnmadd231bf16128", - "llvm.x86.avx10.vfnmadd231bf16256" => "__builtin_ia32_vfnmadd231bf16256", - "llvm.x86.avx10.vfnmadd231bf16512" => "__builtin_ia32_vfnmadd231bf16512", - "llvm.x86.avx10.vfnmsub132bf16128" => "__builtin_ia32_vfnmsub132bf16128", - "llvm.x86.avx10.vfnmsub132bf16256" => "__builtin_ia32_vfnmsub132bf16256", - "llvm.x86.avx10.vfnmsub132bf16512" => "__builtin_ia32_vfnmsub132bf16512", - "llvm.x86.avx10.vfnmsub213bf16128" => "__builtin_ia32_vfnmsub213bf16128", - "llvm.x86.avx10.vfnmsub213bf16256" => "__builtin_ia32_vfnmsub213bf16256", - "llvm.x86.avx10.vfnmsub213bf16512" => "__builtin_ia32_vfnmsub213bf16512", - "llvm.x86.avx10.vfnmsub231bf16128" => "__builtin_ia32_vfnmsub231bf16128", - "llvm.x86.avx10.vfnmsub231bf16256" => "__builtin_ia32_vfnmsub231bf16256", - "llvm.x86.avx10.vfnmsub231bf16512" => "__builtin_ia32_vfnmsub231bf16512", - "llvm.x86.avx10.vmaxbf16128" => "__builtin_ia32_vmaxbf16128", - "llvm.x86.avx10.vmaxbf16256" => "__builtin_ia32_vmaxbf16256", - "llvm.x86.avx10.vmaxbf16512" => "__builtin_ia32_vmaxbf16512", - "llvm.x86.avx10.vminbf16128" => "__builtin_ia32_vminbf16128", - "llvm.x86.avx10.vminbf16256" => "__builtin_ia32_vminbf16256", - "llvm.x86.avx10.vminbf16512" => "__builtin_ia32_vminbf16512", - "llvm.x86.avx10.vminmaxbf16128" => "__builtin_ia32_vminmaxbf16128", - "llvm.x86.avx10.vminmaxbf16256" => "__builtin_ia32_vminmaxbf16256", - "llvm.x86.avx10.vminmaxbf16512" => "__builtin_ia32_vminmaxbf16512", - "llvm.x86.avx10.vminmaxpd128" => "__builtin_ia32_vminmaxpd128", - "llvm.x86.avx10.vminmaxpd256" => "__builtin_ia32_vminmaxpd256", - "llvm.x86.avx10.vminmaxph128" => "__builtin_ia32_vminmaxph128", - "llvm.x86.avx10.vminmaxph256" => "__builtin_ia32_vminmaxph256", - "llvm.x86.avx10.vminmaxps128" => "__builtin_ia32_vminmaxps128", - "llvm.x86.avx10.vminmaxps256" => "__builtin_ia32_vminmaxps256", - "llvm.x86.avx10.vmovrsb128" => 
"__builtin_ia32_vmovrsb128", - "llvm.x86.avx10.vmovrsb256" => "__builtin_ia32_vmovrsb256", - "llvm.x86.avx10.vmovrsb512" => "__builtin_ia32_vmovrsb512", - "llvm.x86.avx10.vmovrsd128" => "__builtin_ia32_vmovrsd128", - "llvm.x86.avx10.vmovrsd256" => "__builtin_ia32_vmovrsd256", - "llvm.x86.avx10.vmovrsd512" => "__builtin_ia32_vmovrsd512", - "llvm.x86.avx10.vmovrsq128" => "__builtin_ia32_vmovrsq128", - "llvm.x86.avx10.vmovrsq256" => "__builtin_ia32_vmovrsq256", - "llvm.x86.avx10.vmovrsq512" => "__builtin_ia32_vmovrsq512", - "llvm.x86.avx10.vmovrsw128" => "__builtin_ia32_vmovrsw128", - "llvm.x86.avx10.vmovrsw256" => "__builtin_ia32_vmovrsw256", - "llvm.x86.avx10.vmovrsw512" => "__builtin_ia32_vmovrsw512", - "llvm.x86.avx10.vmpsadbw.512" => "__builtin_ia32_mpsadbw512", - "llvm.x86.avx10.vmulbf16128" => "__builtin_ia32_vmulbf16128", - "llvm.x86.avx10.vmulbf16256" => "__builtin_ia32_vmulbf16256", - "llvm.x86.avx10.vmulbf16512" => "__builtin_ia32_vmulbf16512", - "llvm.x86.avx10.vpdpbssd.512" => "__builtin_ia32_vpdpbssd512", - "llvm.x86.avx10.vpdpbssds.512" => "__builtin_ia32_vpdpbssds512", - "llvm.x86.avx10.vpdpbsud.512" => "__builtin_ia32_vpdpbsud512", - "llvm.x86.avx10.vpdpbsuds.512" => "__builtin_ia32_vpdpbsuds512", - "llvm.x86.avx10.vpdpbuud.512" => "__builtin_ia32_vpdpbuud512", - "llvm.x86.avx10.vpdpbuuds.512" => "__builtin_ia32_vpdpbuuds512", - "llvm.x86.avx10.vpdpwsud.512" => "__builtin_ia32_vpdpwsud512", - "llvm.x86.avx10.vpdpwsuds.512" => "__builtin_ia32_vpdpwsuds512", - "llvm.x86.avx10.vpdpwusd.512" => "__builtin_ia32_vpdpwusd512", - "llvm.x86.avx10.vpdpwusds.512" => "__builtin_ia32_vpdpwusds512", - "llvm.x86.avx10.vpdpwuud.512" => "__builtin_ia32_vpdpwuud512", - "llvm.x86.avx10.vpdpwuuds.512" => "__builtin_ia32_vpdpwuuds512", - "llvm.x86.avx10.vsubbf16128" => "__builtin_ia32_vsubbf16128", - "llvm.x86.avx10.vsubbf16256" => "__builtin_ia32_vsubbf16256", - "llvm.x86.avx10.vsubbf16512" => "__builtin_ia32_vsubbf16512", - "llvm.x86.avx2.gather.d.d" => "__builtin_ia32_gatherd_d", - "llvm.x86.avx2.gather.d.d.256" => "__builtin_ia32_gatherd_d256", - "llvm.x86.avx2.gather.d.pd" => "__builtin_ia32_gatherd_pd", - "llvm.x86.avx2.gather.d.pd.256" => "__builtin_ia32_gatherd_pd256", - "llvm.x86.avx2.gather.d.ps" => "__builtin_ia32_gatherd_ps", - "llvm.x86.avx2.gather.d.ps.256" => "__builtin_ia32_gatherd_ps256", - "llvm.x86.avx2.gather.d.q" => "__builtin_ia32_gatherd_q", - "llvm.x86.avx2.gather.d.q.256" => "__builtin_ia32_gatherd_q256", - "llvm.x86.avx2.gather.q.d" => "__builtin_ia32_gatherq_d", - "llvm.x86.avx2.gather.q.d.256" => "__builtin_ia32_gatherq_d256", - "llvm.x86.avx2.gather.q.pd" => "__builtin_ia32_gatherq_pd", - "llvm.x86.avx2.gather.q.pd.256" => "__builtin_ia32_gatherq_pd256", - "llvm.x86.avx2.gather.q.ps" => "__builtin_ia32_gatherq_ps", - "llvm.x86.avx2.gather.q.ps.256" => "__builtin_ia32_gatherq_ps256", - "llvm.x86.avx2.gather.q.q" => "__builtin_ia32_gatherq_q", - "llvm.x86.avx2.gather.q.q.256" => "__builtin_ia32_gatherq_q256", - "llvm.x86.avx2.maskload.d" => "__builtin_ia32_maskloadd", - "llvm.x86.avx2.maskload.d.256" => "__builtin_ia32_maskloadd256", - "llvm.x86.avx2.maskload.q" => "__builtin_ia32_maskloadq", - "llvm.x86.avx2.maskload.q.256" => "__builtin_ia32_maskloadq256", - "llvm.x86.avx2.maskstore.d" => "__builtin_ia32_maskstored", - "llvm.x86.avx2.maskstore.d.256" => "__builtin_ia32_maskstored256", - "llvm.x86.avx2.maskstore.q" => "__builtin_ia32_maskstoreq", - "llvm.x86.avx2.maskstore.q.256" => "__builtin_ia32_maskstoreq256", - "llvm.x86.avx2.movntdqa" => 
"__builtin_ia32_movntdqa256", - "llvm.x86.avx2.mpsadbw" => "__builtin_ia32_mpsadbw256", - "llvm.x86.avx2.pabs.b" => "__builtin_ia32_pabsb256", - "llvm.x86.avx2.pabs.d" => "__builtin_ia32_pabsd256", - "llvm.x86.avx2.pabs.w" => "__builtin_ia32_pabsw256", - "llvm.x86.avx2.packssdw" => "__builtin_ia32_packssdw256", - "llvm.x86.avx2.packsswb" => "__builtin_ia32_packsswb256", - "llvm.x86.avx2.packusdw" => "__builtin_ia32_packusdw256", - "llvm.x86.avx2.packuswb" => "__builtin_ia32_packuswb256", - "llvm.x86.avx2.padds.b" => "__builtin_ia32_paddsb256", - "llvm.x86.avx2.padds.w" => "__builtin_ia32_paddsw256", - "llvm.x86.avx2.paddus.b" => "__builtin_ia32_paddusb256", - "llvm.x86.avx2.paddus.w" => "__builtin_ia32_paddusw256", - "llvm.x86.avx2.pavg.b" => "__builtin_ia32_pavgb256", - "llvm.x86.avx2.pavg.w" => "__builtin_ia32_pavgw256", - "llvm.x86.avx2.pblendd.128" => "__builtin_ia32_pblendd128", - "llvm.x86.avx2.pblendd.256" => "__builtin_ia32_pblendd256", - "llvm.x86.avx2.pblendvb" => "__builtin_ia32_pblendvb256", - "llvm.x86.avx2.pblendw" => "__builtin_ia32_pblendw256", - "llvm.x86.avx2.pbroadcastb.128" => "__builtin_ia32_pbroadcastb128", - "llvm.x86.avx2.pbroadcastb.256" => "__builtin_ia32_pbroadcastb256", - "llvm.x86.avx2.pbroadcastd.128" => "__builtin_ia32_pbroadcastd128", - "llvm.x86.avx2.pbroadcastd.256" => "__builtin_ia32_pbroadcastd256", - "llvm.x86.avx2.pbroadcastq.128" => "__builtin_ia32_pbroadcastq128", - "llvm.x86.avx2.pbroadcastq.256" => "__builtin_ia32_pbroadcastq256", - "llvm.x86.avx2.pbroadcastw.128" => "__builtin_ia32_pbroadcastw128", - "llvm.x86.avx2.pbroadcastw.256" => "__builtin_ia32_pbroadcastw256", - "llvm.x86.avx2.permd" => "__builtin_ia32_permvarsi256", - "llvm.x86.avx2.permps" => "__builtin_ia32_permvarsf256", - "llvm.x86.avx2.phadd.d" => "__builtin_ia32_phaddd256", - "llvm.x86.avx2.phadd.sw" => "__builtin_ia32_phaddsw256", - "llvm.x86.avx2.phadd.w" => "__builtin_ia32_phaddw256", - "llvm.x86.avx2.phsub.d" => "__builtin_ia32_phsubd256", - "llvm.x86.avx2.phsub.sw" => "__builtin_ia32_phsubsw256", - "llvm.x86.avx2.phsub.w" => "__builtin_ia32_phsubw256", - "llvm.x86.avx2.pmadd.ub.sw" => "__builtin_ia32_pmaddubsw256", - "llvm.x86.avx2.pmadd.wd" => "__builtin_ia32_pmaddwd256", - "llvm.x86.avx2.pmaxs.b" => "__builtin_ia32_pmaxsb256", - "llvm.x86.avx2.pmaxs.d" => "__builtin_ia32_pmaxsd256", - "llvm.x86.avx2.pmaxs.w" => "__builtin_ia32_pmaxsw256", - "llvm.x86.avx2.pmaxu.b" => "__builtin_ia32_pmaxub256", - "llvm.x86.avx2.pmaxu.d" => "__builtin_ia32_pmaxud256", - "llvm.x86.avx2.pmaxu.w" => "__builtin_ia32_pmaxuw256", - "llvm.x86.avx2.pmins.b" => "__builtin_ia32_pminsb256", - "llvm.x86.avx2.pmins.d" => "__builtin_ia32_pminsd256", - "llvm.x86.avx2.pmins.w" => "__builtin_ia32_pminsw256", - "llvm.x86.avx2.pminu.b" => "__builtin_ia32_pminub256", - "llvm.x86.avx2.pminu.d" => "__builtin_ia32_pminud256", - "llvm.x86.avx2.pminu.w" => "__builtin_ia32_pminuw256", - "llvm.x86.avx2.pmovmskb" => "__builtin_ia32_pmovmskb256", - "llvm.x86.avx2.pmovsxbd" => "__builtin_ia32_pmovsxbd256", - "llvm.x86.avx2.pmovsxbq" => "__builtin_ia32_pmovsxbq256", - "llvm.x86.avx2.pmovsxbw" => "__builtin_ia32_pmovsxbw256", - "llvm.x86.avx2.pmovsxdq" => "__builtin_ia32_pmovsxdq256", - "llvm.x86.avx2.pmovsxwd" => "__builtin_ia32_pmovsxwd256", - "llvm.x86.avx2.pmovsxwq" => "__builtin_ia32_pmovsxwq256", - "llvm.x86.avx2.pmovzxbd" => "__builtin_ia32_pmovzxbd256", - "llvm.x86.avx2.pmovzxbq" => "__builtin_ia32_pmovzxbq256", - "llvm.x86.avx2.pmovzxbw" => "__builtin_ia32_pmovzxbw256", - "llvm.x86.avx2.pmovzxdq" => 
"__builtin_ia32_pmovzxdq256", - "llvm.x86.avx2.pmovzxwd" => "__builtin_ia32_pmovzxwd256", - "llvm.x86.avx2.pmovzxwq" => "__builtin_ia32_pmovzxwq256", - "llvm.x86.avx2.pmul.dq" => "__builtin_ia32_pmuldq256", - "llvm.x86.avx2.pmul.hr.sw" => "__builtin_ia32_pmulhrsw256", - "llvm.x86.avx2.pmulh.w" => "__builtin_ia32_pmulhw256", - "llvm.x86.avx2.pmulhu.w" => "__builtin_ia32_pmulhuw256", - "llvm.x86.avx2.pmulu.dq" => "__builtin_ia32_pmuludq256", - "llvm.x86.avx2.psad.bw" => "__builtin_ia32_psadbw256", - "llvm.x86.avx2.pshuf.b" => "__builtin_ia32_pshufb256", - "llvm.x86.avx2.psign.b" => "__builtin_ia32_psignb256", - "llvm.x86.avx2.psign.d" => "__builtin_ia32_psignd256", - "llvm.x86.avx2.psign.w" => "__builtin_ia32_psignw256", - "llvm.x86.avx2.psll.d" => "__builtin_ia32_pslld256", - "llvm.x86.avx2.psll.dq" => "__builtin_ia32_pslldqi256", - "llvm.x86.avx2.psll.dq.bs" => "__builtin_ia32_pslldqi256_byteshift", - "llvm.x86.avx2.psll.q" => "__builtin_ia32_psllq256", - "llvm.x86.avx2.psll.w" => "__builtin_ia32_psllw256", - "llvm.x86.avx2.pslli.d" => "__builtin_ia32_pslldi256", - "llvm.x86.avx2.pslli.q" => "__builtin_ia32_psllqi256", - "llvm.x86.avx2.pslli.w" => "__builtin_ia32_psllwi256", - "llvm.x86.avx2.psllv.d" => "__builtin_ia32_psllv4si", - "llvm.x86.avx2.psllv.d.256" => "__builtin_ia32_psllv8si", - "llvm.x86.avx2.psllv.q" => "__builtin_ia32_psllv2di", - "llvm.x86.avx2.psllv.q.256" => "__builtin_ia32_psllv4di", - "llvm.x86.avx2.psra.d" => "__builtin_ia32_psrad256", - "llvm.x86.avx2.psra.w" => "__builtin_ia32_psraw256", - "llvm.x86.avx2.psrai.d" => "__builtin_ia32_psradi256", - "llvm.x86.avx2.psrai.w" => "__builtin_ia32_psrawi256", - "llvm.x86.avx2.psrav.d" => "__builtin_ia32_psrav4si", - "llvm.x86.avx2.psrav.d.256" => "__builtin_ia32_psrav8si", - "llvm.x86.avx2.psrl.d" => "__builtin_ia32_psrld256", - "llvm.x86.avx2.psrl.dq" => "__builtin_ia32_psrldqi256", - "llvm.x86.avx2.psrl.dq.bs" => "__builtin_ia32_psrldqi256_byteshift", - "llvm.x86.avx2.psrl.q" => "__builtin_ia32_psrlq256", - "llvm.x86.avx2.psrl.w" => "__builtin_ia32_psrlw256", - "llvm.x86.avx2.psrli.d" => "__builtin_ia32_psrldi256", - "llvm.x86.avx2.psrli.q" => "__builtin_ia32_psrlqi256", - "llvm.x86.avx2.psrli.w" => "__builtin_ia32_psrlwi256", - "llvm.x86.avx2.psrlv.d" => "__builtin_ia32_psrlv4si", - "llvm.x86.avx2.psrlv.d.256" => "__builtin_ia32_psrlv8si", - "llvm.x86.avx2.psrlv.q" => "__builtin_ia32_psrlv2di", - "llvm.x86.avx2.psrlv.q.256" => "__builtin_ia32_psrlv4di", - "llvm.x86.avx2.psubs.b" => "__builtin_ia32_psubsb256", - "llvm.x86.avx2.psubs.w" => "__builtin_ia32_psubsw256", - "llvm.x86.avx2.psubus.b" => "__builtin_ia32_psubusb256", - "llvm.x86.avx2.psubus.w" => "__builtin_ia32_psubusw256", - "llvm.x86.avx2.vbroadcast.sd.pd.256" => "__builtin_ia32_vbroadcastsd_pd256", - "llvm.x86.avx2.vbroadcast.ss.ps" => "__builtin_ia32_vbroadcastss_ps", - "llvm.x86.avx2.vbroadcast.ss.ps.256" => "__builtin_ia32_vbroadcastss_ps256", - "llvm.x86.avx2.vextracti128" => "__builtin_ia32_extract128i256", - "llvm.x86.avx2.vinserti128" => "__builtin_ia32_insert128i256", - "llvm.x86.avx2.vpdpbssd.128" => "__builtin_ia32_vpdpbssd128", - "llvm.x86.avx2.vpdpbssd.256" => "__builtin_ia32_vpdpbssd256", - "llvm.x86.avx2.vpdpbssds.128" => "__builtin_ia32_vpdpbssds128", - "llvm.x86.avx2.vpdpbssds.256" => "__builtin_ia32_vpdpbssds256", - "llvm.x86.avx2.vpdpbsud.128" => "__builtin_ia32_vpdpbsud128", - "llvm.x86.avx2.vpdpbsud.256" => "__builtin_ia32_vpdpbsud256", - "llvm.x86.avx2.vpdpbsuds.128" => "__builtin_ia32_vpdpbsuds128", - "llvm.x86.avx2.vpdpbsuds.256" => 
"__builtin_ia32_vpdpbsuds256", - "llvm.x86.avx2.vpdpbuud.128" => "__builtin_ia32_vpdpbuud128", - "llvm.x86.avx2.vpdpbuud.256" => "__builtin_ia32_vpdpbuud256", - "llvm.x86.avx2.vpdpbuuds.128" => "__builtin_ia32_vpdpbuuds128", - "llvm.x86.avx2.vpdpbuuds.256" => "__builtin_ia32_vpdpbuuds256", - "llvm.x86.avx2.vpdpwsud.128" => "__builtin_ia32_vpdpwsud128", - "llvm.x86.avx2.vpdpwsud.256" => "__builtin_ia32_vpdpwsud256", - "llvm.x86.avx2.vpdpwsuds.128" => "__builtin_ia32_vpdpwsuds128", - "llvm.x86.avx2.vpdpwsuds.256" => "__builtin_ia32_vpdpwsuds256", - "llvm.x86.avx2.vpdpwusd.128" => "__builtin_ia32_vpdpwusd128", - "llvm.x86.avx2.vpdpwusd.256" => "__builtin_ia32_vpdpwusd256", - "llvm.x86.avx2.vpdpwusds.128" => "__builtin_ia32_vpdpwusds128", - "llvm.x86.avx2.vpdpwusds.256" => "__builtin_ia32_vpdpwusds256", - "llvm.x86.avx2.vpdpwuud.128" => "__builtin_ia32_vpdpwuud128", - "llvm.x86.avx2.vpdpwuud.256" => "__builtin_ia32_vpdpwuud256", - "llvm.x86.avx2.vpdpwuuds.128" => "__builtin_ia32_vpdpwuuds128", - "llvm.x86.avx2.vpdpwuuds.256" => "__builtin_ia32_vpdpwuuds256", - "llvm.x86.avx2.vperm2i128" => "__builtin_ia32_permti256", - "llvm.x86.avx512.add.pd.512" => "__builtin_ia32_addpd512", - "llvm.x86.avx512.add.ps.512" => "__builtin_ia32_addps512", - "llvm.x86.avx512.broadcastmb.128" => "__builtin_ia32_broadcastmb128", - "llvm.x86.avx512.broadcastmb.256" => "__builtin_ia32_broadcastmb256", - "llvm.x86.avx512.broadcastmb.512" => "__builtin_ia32_broadcastmb512", - "llvm.x86.avx512.broadcastmw.128" => "__builtin_ia32_broadcastmw128", - "llvm.x86.avx512.broadcastmw.256" => "__builtin_ia32_broadcastmw256", - "llvm.x86.avx512.broadcastmw.512" => "__builtin_ia32_broadcastmw512", - "llvm.x86.avx512.conflict.d.128" => "__builtin_ia32_vpconflictsi_128", - "llvm.x86.avx512.conflict.d.256" => "__builtin_ia32_vpconflictsi_256", - "llvm.x86.avx512.conflict.d.512" => "__builtin_ia32_vpconflictsi_512", - "llvm.x86.avx512.conflict.q.128" => "__builtin_ia32_vpconflictdi_128", - "llvm.x86.avx512.conflict.q.256" => "__builtin_ia32_vpconflictdi_256", - "llvm.x86.avx512.conflict.q.512" => "__builtin_ia32_vpconflictdi_512", - "llvm.x86.avx512.cvtb2mask.128" => "__builtin_ia32_cvtb2mask128", - "llvm.x86.avx512.cvtb2mask.256" => "__builtin_ia32_cvtb2mask256", - "llvm.x86.avx512.cvtb2mask.512" => "__builtin_ia32_cvtb2mask512", - "llvm.x86.avx512.cvtd2mask.128" => "__builtin_ia32_cvtd2mask128", - "llvm.x86.avx512.cvtd2mask.256" => "__builtin_ia32_cvtd2mask256", - "llvm.x86.avx512.cvtd2mask.512" => "__builtin_ia32_cvtd2mask512", - "llvm.x86.avx512.cvtmask2b.128" => "__builtin_ia32_cvtmask2b128", - "llvm.x86.avx512.cvtmask2b.256" => "__builtin_ia32_cvtmask2b256", - "llvm.x86.avx512.cvtmask2b.512" => "__builtin_ia32_cvtmask2b512", - "llvm.x86.avx512.cvtmask2d.128" => "__builtin_ia32_cvtmask2d128", - "llvm.x86.avx512.cvtmask2d.256" => "__builtin_ia32_cvtmask2d256", - "llvm.x86.avx512.cvtmask2d.512" => "__builtin_ia32_cvtmask2d512", - "llvm.x86.avx512.cvtmask2q.128" => "__builtin_ia32_cvtmask2q128", - "llvm.x86.avx512.cvtmask2q.256" => "__builtin_ia32_cvtmask2q256", - "llvm.x86.avx512.cvtmask2q.512" => "__builtin_ia32_cvtmask2q512", - "llvm.x86.avx512.cvtmask2w.128" => "__builtin_ia32_cvtmask2w128", - "llvm.x86.avx512.cvtmask2w.256" => "__builtin_ia32_cvtmask2w256", - "llvm.x86.avx512.cvtmask2w.512" => "__builtin_ia32_cvtmask2w512", - "llvm.x86.avx512.cvtq2mask.128" => "__builtin_ia32_cvtq2mask128", - "llvm.x86.avx512.cvtq2mask.256" => "__builtin_ia32_cvtq2mask256", - "llvm.x86.avx512.cvtq2mask.512" => "__builtin_ia32_cvtq2mask512", - 
"llvm.x86.avx512.cvtsd2usi" => "__builtin_ia32_cvtsd2usi", - "llvm.x86.avx512.cvtsd2usi64" => "__builtin_ia32_cvtsd2usi64", - "llvm.x86.avx512.cvtsi2sd32" => "__builtin_ia32_cvtsi2sd32", - "llvm.x86.avx512.cvtsi2sd64" => "__builtin_ia32_cvtsi2sd64", - "llvm.x86.avx512.cvtsi2ss32" => "__builtin_ia32_cvtsi2ss32", - "llvm.x86.avx512.cvtsi2ss64" => "__builtin_ia32_cvtsi2ss64", - "llvm.x86.avx512.cvtss2usi" => "__builtin_ia32_cvtss2usi", - "llvm.x86.avx512.cvtss2usi64" => "__builtin_ia32_cvtss2usi64", - "llvm.x86.avx512.cvttsd2si" => "__builtin_ia32_vcvttsd2si32", - "llvm.x86.avx512.cvttsd2si64" => "__builtin_ia32_vcvttsd2si64", - "llvm.x86.avx512.cvttsd2usi" => "__builtin_ia32_vcvttsd2usi32", - // [DUPLICATE]: "llvm.x86.avx512.cvttsd2usi" => "__builtin_ia32_cvttsd2usi", - "llvm.x86.avx512.cvttsd2usi64" => "__builtin_ia32_vcvttsd2usi64", - // [DUPLICATE]: "llvm.x86.avx512.cvttsd2usi64" => "__builtin_ia32_cvttsd2usi64", - "llvm.x86.avx512.cvttss2si" => "__builtin_ia32_vcvttss2si32", - "llvm.x86.avx512.cvttss2si64" => "__builtin_ia32_vcvttss2si64", - "llvm.x86.avx512.cvttss2usi" => "__builtin_ia32_vcvttss2usi32", - // [DUPLICATE]: "llvm.x86.avx512.cvttss2usi" => "__builtin_ia32_cvttss2usi", - "llvm.x86.avx512.cvttss2usi64" => "__builtin_ia32_vcvttss2usi64", - // [DUPLICATE]: "llvm.x86.avx512.cvttss2usi64" => "__builtin_ia32_cvttss2usi64", - "llvm.x86.avx512.cvtusi2sd" => "__builtin_ia32_cvtusi2sd", - // [DUPLICATE]: "llvm.x86.avx512.cvtusi2sd" => "__builtin_ia32_cvtusi2sd32", - "llvm.x86.avx512.cvtusi2ss" => "__builtin_ia32_cvtusi2ss32", - // [DUPLICATE]: "llvm.x86.avx512.cvtusi2ss" => "__builtin_ia32_cvtusi2ss", - "llvm.x86.avx512.cvtusi642sd" => "__builtin_ia32_cvtusi2sd64", - // [DUPLICATE]: "llvm.x86.avx512.cvtusi642sd" => "__builtin_ia32_cvtusi642sd", - "llvm.x86.avx512.cvtusi642ss" => "__builtin_ia32_cvtusi2ss64", - // [DUPLICATE]: "llvm.x86.avx512.cvtusi642ss" => "__builtin_ia32_cvtusi642ss", - "llvm.x86.avx512.cvtw2mask.128" => "__builtin_ia32_cvtw2mask128", - "llvm.x86.avx512.cvtw2mask.256" => "__builtin_ia32_cvtw2mask256", - "llvm.x86.avx512.cvtw2mask.512" => "__builtin_ia32_cvtw2mask512", - "llvm.x86.avx512.dbpsadbw.128" => "__builtin_ia32_dbpsadbw128", - "llvm.x86.avx512.dbpsadbw.256" => "__builtin_ia32_dbpsadbw256", - "llvm.x86.avx512.dbpsadbw.512" => "__builtin_ia32_dbpsadbw512", - "llvm.x86.avx512.div.pd.512" => "__builtin_ia32_divpd512", - "llvm.x86.avx512.div.ps.512" => "__builtin_ia32_divps512", - "llvm.x86.avx512.exp2.pd" => "__builtin_ia32_exp2pd_mask", - "llvm.x86.avx512.exp2.ps" => "__builtin_ia32_exp2ps_mask", - "llvm.x86.avx512.gather.dpd.512" => "__builtin_ia32_gathersiv8df", - "llvm.x86.avx512.gather.dpi.512" => "__builtin_ia32_gathersiv16si", - "llvm.x86.avx512.gather.dpq.512" => "__builtin_ia32_gathersiv8di", - "llvm.x86.avx512.gather.dps.512" => "__builtin_ia32_gathersiv16sf", - "llvm.x86.avx512.gather.qpd.512" => "__builtin_ia32_gatherdiv8df", - "llvm.x86.avx512.gather.qpi.512" => "__builtin_ia32_gatherdiv16si", - "llvm.x86.avx512.gather.qpq.512" => "__builtin_ia32_gatherdiv8di", - "llvm.x86.avx512.gather.qps.512" => "__builtin_ia32_gatherdiv16sf", - "llvm.x86.avx512.gather3div2.df" => "__builtin_ia32_gather3div2df", - "llvm.x86.avx512.gather3div2.di" => "__builtin_ia32_gather3div2di", - "llvm.x86.avx512.gather3div4.df" => "__builtin_ia32_gather3div4df", - "llvm.x86.avx512.gather3div4.di" => "__builtin_ia32_gather3div4di", - "llvm.x86.avx512.gather3div4.sf" => "__builtin_ia32_gather3div4sf", - "llvm.x86.avx512.gather3div4.si" => "__builtin_ia32_gather3div4si", - 
"llvm.x86.avx512.gather3div8.sf" => "__builtin_ia32_gather3div8sf", - "llvm.x86.avx512.gather3div8.si" => "__builtin_ia32_gather3div8si", - "llvm.x86.avx512.gather3siv2.df" => "__builtin_ia32_gather3siv2df", - "llvm.x86.avx512.gather3siv2.di" => "__builtin_ia32_gather3siv2di", - "llvm.x86.avx512.gather3siv4.df" => "__builtin_ia32_gather3siv4df", - "llvm.x86.avx512.gather3siv4.di" => "__builtin_ia32_gather3siv4di", - "llvm.x86.avx512.gather3siv4.sf" => "__builtin_ia32_gather3siv4sf", - "llvm.x86.avx512.gather3siv4.si" => "__builtin_ia32_gather3siv4si", - "llvm.x86.avx512.gather3siv8.sf" => "__builtin_ia32_gather3siv8sf", - "llvm.x86.avx512.gather3siv8.si" => "__builtin_ia32_gather3siv8si", - "llvm.x86.avx512.gatherpf.dpd.512" => "__builtin_ia32_gatherpfdpd", - "llvm.x86.avx512.gatherpf.dps.512" => "__builtin_ia32_gatherpfdps", - "llvm.x86.avx512.gatherpf.qpd.512" => "__builtin_ia32_gatherpfqpd", - "llvm.x86.avx512.gatherpf.qps.512" => "__builtin_ia32_gatherpfqps", - "llvm.x86.avx512.kand.w" => "__builtin_ia32_kandhi", - "llvm.x86.avx512.kandn.w" => "__builtin_ia32_kandnhi", - "llvm.x86.avx512.knot.w" => "__builtin_ia32_knothi", - "llvm.x86.avx512.kor.w" => "__builtin_ia32_korhi", - "llvm.x86.avx512.kortestc.w" => "__builtin_ia32_kortestchi", - "llvm.x86.avx512.kortestz.w" => "__builtin_ia32_kortestzhi", - "llvm.x86.avx512.kunpck.bw" => "__builtin_ia32_kunpckhi", - "llvm.x86.avx512.kunpck.dq" => "__builtin_ia32_kunpckdi", - "llvm.x86.avx512.kunpck.wd" => "__builtin_ia32_kunpcksi", - "llvm.x86.avx512.kxnor.w" => "__builtin_ia32_kxnorhi", - "llvm.x86.avx512.kxor.w" => "__builtin_ia32_kxorhi", - "llvm.x86.avx512.mask.add.pd.128" => "__builtin_ia32_addpd128_mask", - "llvm.x86.avx512.mask.add.pd.256" => "__builtin_ia32_addpd256_mask", - "llvm.x86.avx512.mask.add.pd.512" => "__builtin_ia32_addpd512_mask", - "llvm.x86.avx512.mask.add.ps.128" => "__builtin_ia32_addps128_mask", - "llvm.x86.avx512.mask.add.ps.256" => "__builtin_ia32_addps256_mask", - "llvm.x86.avx512.mask.add.ps.512" => "__builtin_ia32_addps512_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512.mask.add.sd.round" => "__builtin_ia32_addsd_round_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512.mask.add.ss.round" => "__builtin_ia32_addss_round_mask", - "llvm.x86.avx512.mask.and.pd.128" => "__builtin_ia32_andpd128_mask", - "llvm.x86.avx512.mask.and.pd.256" => "__builtin_ia32_andpd256_mask", - "llvm.x86.avx512.mask.and.pd.512" => "__builtin_ia32_andpd512_mask", - "llvm.x86.avx512.mask.and.ps.128" => "__builtin_ia32_andps128_mask", - "llvm.x86.avx512.mask.and.ps.256" => "__builtin_ia32_andps256_mask", - "llvm.x86.avx512.mask.and.ps.512" => "__builtin_ia32_andps512_mask", - "llvm.x86.avx512.mask.andn.pd.128" => "__builtin_ia32_andnpd128_mask", - "llvm.x86.avx512.mask.andn.pd.256" => "__builtin_ia32_andnpd256_mask", - "llvm.x86.avx512.mask.andn.pd.512" => "__builtin_ia32_andnpd512_mask", - "llvm.x86.avx512.mask.andn.ps.128" => "__builtin_ia32_andnps128_mask", - "llvm.x86.avx512.mask.andn.ps.256" => "__builtin_ia32_andnps256_mask", - "llvm.x86.avx512.mask.andn.ps.512" => "__builtin_ia32_andnps512_mask", - "llvm.x86.avx512.mask.blend.d.512" => "__builtin_ia32_blendmd_512_mask", - "llvm.x86.avx512.mask.blend.pd.512" => "__builtin_ia32_blendmpd_512_mask", - "llvm.x86.avx512.mask.blend.ps.512" => "__builtin_ia32_blendmps_512_mask", - "llvm.x86.avx512.mask.blend.q.512" => "__builtin_ia32_blendmq_512_mask", - "llvm.x86.avx512.mask.broadcastf32x2.256" => "__builtin_ia32_broadcastf32x2_256_mask", - "llvm.x86.avx512.mask.broadcastf32x2.512" => 
"__builtin_ia32_broadcastf32x2_512_mask", - "llvm.x86.avx512.mask.broadcastf32x4.256" => "__builtin_ia32_broadcastf32x4_256_mask", - "llvm.x86.avx512.mask.broadcastf32x4.512" => "__builtin_ia32_broadcastf32x4_512", - "llvm.x86.avx512.mask.broadcastf32x8.512" => "__builtin_ia32_broadcastf32x8_512_mask", - "llvm.x86.avx512.mask.broadcastf64x2.256" => "__builtin_ia32_broadcastf64x2_256_mask", - "llvm.x86.avx512.mask.broadcastf64x2.512" => "__builtin_ia32_broadcastf64x2_512_mask", - "llvm.x86.avx512.mask.broadcastf64x4.512" => "__builtin_ia32_broadcastf64x4_512", - "llvm.x86.avx512.mask.broadcasti32x2.128" => "__builtin_ia32_broadcasti32x2_128_mask", - "llvm.x86.avx512.mask.broadcasti32x2.256" => "__builtin_ia32_broadcasti32x2_256_mask", - "llvm.x86.avx512.mask.broadcasti32x2.512" => "__builtin_ia32_broadcasti32x2_512_mask", - "llvm.x86.avx512.mask.broadcasti32x4.256" => "__builtin_ia32_broadcasti32x4_256_mask", - "llvm.x86.avx512.mask.broadcasti32x4.512" => "__builtin_ia32_broadcasti32x4_512", - "llvm.x86.avx512.mask.broadcasti32x8.512" => "__builtin_ia32_broadcasti32x8_512_mask", - "llvm.x86.avx512.mask.broadcasti64x2.256" => "__builtin_ia32_broadcasti64x2_256_mask", - "llvm.x86.avx512.mask.broadcasti64x2.512" => "__builtin_ia32_broadcasti64x2_512_mask", - "llvm.x86.avx512.mask.broadcasti64x4.512" => "__builtin_ia32_broadcasti64x4_512", - "llvm.x86.avx512.mask.cmp.pd.128" => "__builtin_ia32_cmppd128_mask", - "llvm.x86.avx512.mask.cmp.pd.256" => "__builtin_ia32_cmppd256_mask", - "llvm.x86.avx512.mask.cmp.pd.512" => "__builtin_ia32_cmppd512_mask", - "llvm.x86.avx512.mask.cmp.ps.128" => "__builtin_ia32_cmpps128_mask", - "llvm.x86.avx512.mask.cmp.ps.256" => "__builtin_ia32_cmpps256_mask", - "llvm.x86.avx512.mask.cmp.ps.512" => "__builtin_ia32_cmpps512_mask", - "llvm.x86.avx512.mask.cmp.sd" => "__builtin_ia32_cmpsd_mask", - "llvm.x86.avx512.mask.cmp.ss" => "__builtin_ia32_cmpss_mask", - "llvm.x86.avx512.mask.compress.d.128" => "__builtin_ia32_compresssi128_mask", - "llvm.x86.avx512.mask.compress.d.256" => "__builtin_ia32_compresssi256_mask", - "llvm.x86.avx512.mask.compress.d.512" => "__builtin_ia32_compresssi512_mask", - "llvm.x86.avx512.mask.compress.pd.128" => "__builtin_ia32_compressdf128_mask", - "llvm.x86.avx512.mask.compress.pd.256" => "__builtin_ia32_compressdf256_mask", - "llvm.x86.avx512.mask.compress.pd.512" => "__builtin_ia32_compressdf512_mask", - "llvm.x86.avx512.mask.compress.ps.128" => "__builtin_ia32_compresssf128_mask", - "llvm.x86.avx512.mask.compress.ps.256" => "__builtin_ia32_compresssf256_mask", - "llvm.x86.avx512.mask.compress.ps.512" => "__builtin_ia32_compresssf512_mask", - "llvm.x86.avx512.mask.compress.q.128" => "__builtin_ia32_compressdi128_mask", - "llvm.x86.avx512.mask.compress.q.256" => "__builtin_ia32_compressdi256_mask", - "llvm.x86.avx512.mask.compress.q.512" => "__builtin_ia32_compressdi512_mask", - "llvm.x86.avx512.mask.compress.store.d.128" => "__builtin_ia32_compressstoresi128_mask", - "llvm.x86.avx512.mask.compress.store.d.256" => "__builtin_ia32_compressstoresi256_mask", - "llvm.x86.avx512.mask.compress.store.d.512" => "__builtin_ia32_compressstoresi512_mask", - "llvm.x86.avx512.mask.compress.store.pd.128" => "__builtin_ia32_compressstoredf128_mask", - "llvm.x86.avx512.mask.compress.store.pd.256" => "__builtin_ia32_compressstoredf256_mask", - "llvm.x86.avx512.mask.compress.store.pd.512" => "__builtin_ia32_compressstoredf512_mask", - "llvm.x86.avx512.mask.compress.store.ps.128" => "__builtin_ia32_compressstoresf128_mask", - 
"llvm.x86.avx512.mask.compress.store.ps.256" => "__builtin_ia32_compressstoresf256_mask", - "llvm.x86.avx512.mask.compress.store.ps.512" => "__builtin_ia32_compressstoresf512_mask", - "llvm.x86.avx512.mask.compress.store.q.128" => "__builtin_ia32_compressstoredi128_mask", - "llvm.x86.avx512.mask.compress.store.q.256" => "__builtin_ia32_compressstoredi256_mask", - "llvm.x86.avx512.mask.compress.store.q.512" => "__builtin_ia32_compressstoredi512_mask", - "llvm.x86.avx512.mask.conflict.d.128" => "__builtin_ia32_vpconflictsi_128_mask", - "llvm.x86.avx512.mask.conflict.d.256" => "__builtin_ia32_vpconflictsi_256_mask", - "llvm.x86.avx512.mask.conflict.d.512" => "__builtin_ia32_vpconflictsi_512_mask", - "llvm.x86.avx512.mask.conflict.q.128" => "__builtin_ia32_vpconflictdi_128_mask", - "llvm.x86.avx512.mask.conflict.q.256" => "__builtin_ia32_vpconflictdi_256_mask", - "llvm.x86.avx512.mask.conflict.q.512" => "__builtin_ia32_vpconflictdi_512_mask", - "llvm.x86.avx512.mask.cvtdq2pd.128" => "__builtin_ia32_cvtdq2pd128_mask", - "llvm.x86.avx512.mask.cvtdq2pd.256" => "__builtin_ia32_cvtdq2pd256_mask", - "llvm.x86.avx512.mask.cvtdq2pd.512" => "__builtin_ia32_cvtdq2pd512_mask", - "llvm.x86.avx512.mask.cvtdq2ps.128" => "__builtin_ia32_cvtdq2ps128_mask", - "llvm.x86.avx512.mask.cvtdq2ps.256" => "__builtin_ia32_cvtdq2ps256_mask", - "llvm.x86.avx512.mask.cvtdq2ps.512" => "__builtin_ia32_cvtdq2ps512_mask", - "llvm.x86.avx512.mask.cvtpd2dq.128" => "__builtin_ia32_cvtpd2dq128_mask", - "llvm.x86.avx512.mask.cvtpd2dq.256" => "__builtin_ia32_cvtpd2dq256_mask", - "llvm.x86.avx512.mask.cvtpd2dq.512" => "__builtin_ia32_cvtpd2dq512_mask", - "llvm.x86.avx512.mask.cvtpd2ps" => "__builtin_ia32_cvtpd2ps_mask", - "llvm.x86.avx512.mask.cvtpd2ps.256" => "__builtin_ia32_cvtpd2ps256_mask", - "llvm.x86.avx512.mask.cvtpd2ps.512" => "__builtin_ia32_cvtpd2ps512_mask", - "llvm.x86.avx512.mask.cvtpd2qq.128" => "__builtin_ia32_cvtpd2qq128_mask", - "llvm.x86.avx512.mask.cvtpd2qq.256" => "__builtin_ia32_cvtpd2qq256_mask", - "llvm.x86.avx512.mask.cvtpd2qq.512" => "__builtin_ia32_cvtpd2qq512_mask", - "llvm.x86.avx512.mask.cvtpd2udq.128" => "__builtin_ia32_cvtpd2udq128_mask", - "llvm.x86.avx512.mask.cvtpd2udq.256" => "__builtin_ia32_cvtpd2udq256_mask", - "llvm.x86.avx512.mask.cvtpd2udq.512" => "__builtin_ia32_cvtpd2udq512_mask", - "llvm.x86.avx512.mask.cvtpd2uqq.128" => "__builtin_ia32_cvtpd2uqq128_mask", - "llvm.x86.avx512.mask.cvtpd2uqq.256" => "__builtin_ia32_cvtpd2uqq256_mask", - "llvm.x86.avx512.mask.cvtpd2uqq.512" => "__builtin_ia32_cvtpd2uqq512_mask", - "llvm.x86.avx512.mask.cvtps2dq.128" => "__builtin_ia32_cvtps2dq128_mask", - "llvm.x86.avx512.mask.cvtps2dq.256" => "__builtin_ia32_cvtps2dq256_mask", - "llvm.x86.avx512.mask.cvtps2dq.512" => "__builtin_ia32_cvtps2dq512_mask", - "llvm.x86.avx512.mask.cvtps2pd.128" => "__builtin_ia32_cvtps2pd128_mask", - "llvm.x86.avx512.mask.cvtps2pd.256" => "__builtin_ia32_cvtps2pd256_mask", - "llvm.x86.avx512.mask.cvtps2pd.512" => "__builtin_ia32_cvtps2pd512_mask", - "llvm.x86.avx512.mask.cvtps2qq.128" => "__builtin_ia32_cvtps2qq128_mask", - "llvm.x86.avx512.mask.cvtps2qq.256" => "__builtin_ia32_cvtps2qq256_mask", - "llvm.x86.avx512.mask.cvtps2qq.512" => "__builtin_ia32_cvtps2qq512_mask", - "llvm.x86.avx512.mask.cvtps2udq.128" => "__builtin_ia32_cvtps2udq128_mask", - "llvm.x86.avx512.mask.cvtps2udq.256" => "__builtin_ia32_cvtps2udq256_mask", - "llvm.x86.avx512.mask.cvtps2udq.512" => "__builtin_ia32_cvtps2udq512_mask", - "llvm.x86.avx512.mask.cvtps2uqq.128" => "__builtin_ia32_cvtps2uqq128_mask", - 
"llvm.x86.avx512.mask.cvtps2uqq.256" => "__builtin_ia32_cvtps2uqq256_mask", - "llvm.x86.avx512.mask.cvtps2uqq.512" => "__builtin_ia32_cvtps2uqq512_mask", - "llvm.x86.avx512.mask.cvtqq2pd.128" => "__builtin_ia32_cvtqq2pd128_mask", - "llvm.x86.avx512.mask.cvtqq2pd.256" => "__builtin_ia32_cvtqq2pd256_mask", - "llvm.x86.avx512.mask.cvtqq2pd.512" => "__builtin_ia32_cvtqq2pd512_mask", - "llvm.x86.avx512.mask.cvtqq2ps.128" => "__builtin_ia32_cvtqq2ps128_mask", - "llvm.x86.avx512.mask.cvtqq2ps.256" => "__builtin_ia32_cvtqq2ps256_mask", - "llvm.x86.avx512.mask.cvtqq2ps.512" => "__builtin_ia32_cvtqq2ps512_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512.mask.cvtsd2ss.round" => "__builtin_ia32_cvtsd2ss_round_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512.mask.cvtss2sd.round" => "__builtin_ia32_cvtss2sd_round_mask", - "llvm.x86.avx512.mask.cvttpd2dq.128" => "__builtin_ia32_cvttpd2dq128_mask", - "llvm.x86.avx512.mask.cvttpd2dq.256" => "__builtin_ia32_cvttpd2dq256_mask", - "llvm.x86.avx512.mask.cvttpd2dq.512" => "__builtin_ia32_cvttpd2dq512_mask", - "llvm.x86.avx512.mask.cvttpd2qq.128" => "__builtin_ia32_cvttpd2qq128_mask", - "llvm.x86.avx512.mask.cvttpd2qq.256" => "__builtin_ia32_cvttpd2qq256_mask", - "llvm.x86.avx512.mask.cvttpd2qq.512" => "__builtin_ia32_cvttpd2qq512_mask", - "llvm.x86.avx512.mask.cvttpd2udq.128" => "__builtin_ia32_cvttpd2udq128_mask", - "llvm.x86.avx512.mask.cvttpd2udq.256" => "__builtin_ia32_cvttpd2udq256_mask", - "llvm.x86.avx512.mask.cvttpd2udq.512" => "__builtin_ia32_cvttpd2udq512_mask", - "llvm.x86.avx512.mask.cvttpd2uqq.128" => "__builtin_ia32_cvttpd2uqq128_mask", - "llvm.x86.avx512.mask.cvttpd2uqq.256" => "__builtin_ia32_cvttpd2uqq256_mask", - "llvm.x86.avx512.mask.cvttpd2uqq.512" => "__builtin_ia32_cvttpd2uqq512_mask", - "llvm.x86.avx512.mask.cvttps2dq.128" => "__builtin_ia32_cvttps2dq128_mask", - "llvm.x86.avx512.mask.cvttps2dq.256" => "__builtin_ia32_cvttps2dq256_mask", - "llvm.x86.avx512.mask.cvttps2dq.512" => "__builtin_ia32_cvttps2dq512_mask", - "llvm.x86.avx512.mask.cvttps2qq.128" => "__builtin_ia32_cvttps2qq128_mask", - "llvm.x86.avx512.mask.cvttps2qq.256" => "__builtin_ia32_cvttps2qq256_mask", - "llvm.x86.avx512.mask.cvttps2qq.512" => "__builtin_ia32_cvttps2qq512_mask", - "llvm.x86.avx512.mask.cvttps2udq.128" => "__builtin_ia32_cvttps2udq128_mask", - "llvm.x86.avx512.mask.cvttps2udq.256" => "__builtin_ia32_cvttps2udq256_mask", - "llvm.x86.avx512.mask.cvttps2udq.512" => "__builtin_ia32_cvttps2udq512_mask", - "llvm.x86.avx512.mask.cvttps2uqq.128" => "__builtin_ia32_cvttps2uqq128_mask", - "llvm.x86.avx512.mask.cvttps2uqq.256" => "__builtin_ia32_cvttps2uqq256_mask", - "llvm.x86.avx512.mask.cvttps2uqq.512" => "__builtin_ia32_cvttps2uqq512_mask", - "llvm.x86.avx512.mask.cvtudq2pd.128" => "__builtin_ia32_cvtudq2pd128_mask", - "llvm.x86.avx512.mask.cvtudq2pd.256" => "__builtin_ia32_cvtudq2pd256_mask", - "llvm.x86.avx512.mask.cvtudq2pd.512" => "__builtin_ia32_cvtudq2pd512_mask", - "llvm.x86.avx512.mask.cvtudq2ps.128" => "__builtin_ia32_cvtudq2ps128_mask", - "llvm.x86.avx512.mask.cvtudq2ps.256" => "__builtin_ia32_cvtudq2ps256_mask", - "llvm.x86.avx512.mask.cvtudq2ps.512" => "__builtin_ia32_cvtudq2ps512_mask", - "llvm.x86.avx512.mask.cvtuqq2pd.128" => "__builtin_ia32_cvtuqq2pd128_mask", - "llvm.x86.avx512.mask.cvtuqq2pd.256" => "__builtin_ia32_cvtuqq2pd256_mask", - "llvm.x86.avx512.mask.cvtuqq2pd.512" => "__builtin_ia32_cvtuqq2pd512_mask", - "llvm.x86.avx512.mask.cvtuqq2ps.128" => "__builtin_ia32_cvtuqq2ps128_mask", - "llvm.x86.avx512.mask.cvtuqq2ps.256" => 
"__builtin_ia32_cvtuqq2ps256_mask", - "llvm.x86.avx512.mask.cvtuqq2ps.512" => "__builtin_ia32_cvtuqq2ps512_mask", - "llvm.x86.avx512.mask.dbpsadbw.128" => "__builtin_ia32_dbpsadbw128_mask", - "llvm.x86.avx512.mask.dbpsadbw.256" => "__builtin_ia32_dbpsadbw256_mask", - "llvm.x86.avx512.mask.dbpsadbw.512" => "__builtin_ia32_dbpsadbw512_mask", - "llvm.x86.avx512.mask.div.pd.128" => "__builtin_ia32_divpd_mask", - "llvm.x86.avx512.mask.div.pd.256" => "__builtin_ia32_divpd256_mask", - "llvm.x86.avx512.mask.div.pd.512" => "__builtin_ia32_divpd512_mask", - "llvm.x86.avx512.mask.div.ps.128" => "__builtin_ia32_divps_mask", - "llvm.x86.avx512.mask.div.ps.256" => "__builtin_ia32_divps256_mask", - "llvm.x86.avx512.mask.div.ps.512" => "__builtin_ia32_divps512_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512.mask.div.sd.round" => "__builtin_ia32_divsd_round_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512.mask.div.ss.round" => "__builtin_ia32_divss_round_mask", - "llvm.x86.avx512.mask.expand.d.128" => "__builtin_ia32_expandsi128_mask", - "llvm.x86.avx512.mask.expand.d.256" => "__builtin_ia32_expandsi256_mask", - "llvm.x86.avx512.mask.expand.d.512" => "__builtin_ia32_expandsi512_mask", - "llvm.x86.avx512.mask.expand.load.d.128" => "__builtin_ia32_expandloadsi128_mask", - "llvm.x86.avx512.mask.expand.load.d.256" => "__builtin_ia32_expandloadsi256_mask", - "llvm.x86.avx512.mask.expand.load.d.512" => "__builtin_ia32_expandloadsi512_mask", - "llvm.x86.avx512.mask.expand.load.pd.128" => "__builtin_ia32_expandloaddf128_mask", - "llvm.x86.avx512.mask.expand.load.pd.256" => "__builtin_ia32_expandloaddf256_mask", - "llvm.x86.avx512.mask.expand.load.pd.512" => "__builtin_ia32_expandloaddf512_mask", - "llvm.x86.avx512.mask.expand.load.ps.128" => "__builtin_ia32_expandloadsf128_mask", - "llvm.x86.avx512.mask.expand.load.ps.256" => "__builtin_ia32_expandloadsf256_mask", - "llvm.x86.avx512.mask.expand.load.ps.512" => "__builtin_ia32_expandloadsf512_mask", - "llvm.x86.avx512.mask.expand.load.q.128" => "__builtin_ia32_expandloaddi128_mask", - "llvm.x86.avx512.mask.expand.load.q.256" => "__builtin_ia32_expandloaddi256_mask", - "llvm.x86.avx512.mask.expand.load.q.512" => "__builtin_ia32_expandloaddi512_mask", - "llvm.x86.avx512.mask.expand.pd.128" => "__builtin_ia32_expanddf128_mask", - "llvm.x86.avx512.mask.expand.pd.256" => "__builtin_ia32_expanddf256_mask", - "llvm.x86.avx512.mask.expand.pd.512" => "__builtin_ia32_expanddf512_mask", - "llvm.x86.avx512.mask.expand.ps.128" => "__builtin_ia32_expandsf128_mask", - "llvm.x86.avx512.mask.expand.ps.256" => "__builtin_ia32_expandsf256_mask", - "llvm.x86.avx512.mask.expand.ps.512" => "__builtin_ia32_expandsf512_mask", - "llvm.x86.avx512.mask.expand.q.128" => "__builtin_ia32_expanddi128_mask", - "llvm.x86.avx512.mask.expand.q.256" => "__builtin_ia32_expanddi256_mask", - "llvm.x86.avx512.mask.expand.q.512" => "__builtin_ia32_expanddi512_mask", - "llvm.x86.avx512.mask.fixupimm.pd.128" => "__builtin_ia32_fixupimmpd128_mask", - "llvm.x86.avx512.mask.fixupimm.pd.256" => "__builtin_ia32_fixupimmpd256_mask", - "llvm.x86.avx512.mask.fixupimm.pd.512" => "__builtin_ia32_fixupimmpd512_mask", - "llvm.x86.avx512.mask.fixupimm.ps.128" => "__builtin_ia32_fixupimmps128_mask", - "llvm.x86.avx512.mask.fixupimm.ps.256" => "__builtin_ia32_fixupimmps256_mask", - "llvm.x86.avx512.mask.fixupimm.ps.512" => "__builtin_ia32_fixupimmps512_mask", - "llvm.x86.avx512.mask.fixupimm.sd" => "__builtin_ia32_fixupimmsd_mask", - "llvm.x86.avx512.mask.fixupimm.ss" => "__builtin_ia32_fixupimmss_mask", - 
"llvm.x86.avx512.mask.fpclass.pd.128" => "__builtin_ia32_fpclasspd128_mask", - "llvm.x86.avx512.mask.fpclass.pd.256" => "__builtin_ia32_fpclasspd256_mask", - "llvm.x86.avx512.mask.fpclass.pd.512" => "__builtin_ia32_fpclasspd512_mask", - "llvm.x86.avx512.mask.fpclass.ps.128" => "__builtin_ia32_fpclassps128_mask", - "llvm.x86.avx512.mask.fpclass.ps.256" => "__builtin_ia32_fpclassps256_mask", - "llvm.x86.avx512.mask.fpclass.ps.512" => "__builtin_ia32_fpclassps512_mask", - "llvm.x86.avx512.mask.fpclass.sd" => "__builtin_ia32_fpclasssd_mask", - "llvm.x86.avx512.mask.fpclass.ss" => "__builtin_ia32_fpclassss_mask", - "llvm.x86.avx512.mask.getexp.pd.128" => "__builtin_ia32_getexppd128_mask", - "llvm.x86.avx512.mask.getexp.pd.256" => "__builtin_ia32_getexppd256_mask", - "llvm.x86.avx512.mask.getexp.pd.512" => "__builtin_ia32_getexppd512_mask", - "llvm.x86.avx512.mask.getexp.ps.128" => "__builtin_ia32_getexpps128_mask", - "llvm.x86.avx512.mask.getexp.ps.256" => "__builtin_ia32_getexpps256_mask", - "llvm.x86.avx512.mask.getexp.ps.512" => "__builtin_ia32_getexpps512_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512.mask.getexp.sd" => "__builtin_ia32_getexpsd128_round_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512.mask.getexp.ss" => "__builtin_ia32_getexpss128_round_mask", - "llvm.x86.avx512.mask.getmant.pd.128" => "__builtin_ia32_getmantpd128_mask", - "llvm.x86.avx512.mask.getmant.pd.256" => "__builtin_ia32_getmantpd256_mask", - "llvm.x86.avx512.mask.getmant.pd.512" => "__builtin_ia32_getmantpd512_mask", - "llvm.x86.avx512.mask.getmant.ps.128" => "__builtin_ia32_getmantps128_mask", - "llvm.x86.avx512.mask.getmant.ps.256" => "__builtin_ia32_getmantps256_mask", - "llvm.x86.avx512.mask.getmant.ps.512" => "__builtin_ia32_getmantps512_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512.mask.getmant.sd" => "__builtin_ia32_getmantsd_round_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512.mask.getmant.ss" => "__builtin_ia32_getmantss_round_mask", - "llvm.x86.avx512.mask.insertf32x4.256" => "__builtin_ia32_insertf32x4_256_mask", - "llvm.x86.avx512.mask.insertf32x4.512" => "__builtin_ia32_insertf32x4_mask", - "llvm.x86.avx512.mask.insertf32x8.512" => "__builtin_ia32_insertf32x8_mask", - "llvm.x86.avx512.mask.insertf64x2.256" => "__builtin_ia32_insertf64x2_256_mask", - "llvm.x86.avx512.mask.insertf64x2.512" => "__builtin_ia32_insertf64x2_512_mask", - "llvm.x86.avx512.mask.insertf64x4.512" => "__builtin_ia32_insertf64x4_mask", - "llvm.x86.avx512.mask.inserti32x4.256" => "__builtin_ia32_inserti32x4_256_mask", - "llvm.x86.avx512.mask.inserti32x4.512" => "__builtin_ia32_inserti32x4_mask", - "llvm.x86.avx512.mask.inserti32x8.512" => "__builtin_ia32_inserti32x8_mask", - "llvm.x86.avx512.mask.inserti64x2.256" => "__builtin_ia32_inserti64x2_256_mask", - "llvm.x86.avx512.mask.inserti64x2.512" => "__builtin_ia32_inserti64x2_512_mask", - "llvm.x86.avx512.mask.inserti64x4.512" => "__builtin_ia32_inserti64x4_mask", - "llvm.x86.avx512.mask.loadu.d.512" => "__builtin_ia32_loaddqusi512_mask", - "llvm.x86.avx512.mask.loadu.pd.512" => "__builtin_ia32_loadupd512_mask", - "llvm.x86.avx512.mask.loadu.ps.512" => "__builtin_ia32_loadups512_mask", - "llvm.x86.avx512.mask.loadu.q.512" => "__builtin_ia32_loaddqudi512_mask", - "llvm.x86.avx512.mask.lzcnt.d.512" => "__builtin_ia32_vplzcntd_512_mask", - "llvm.x86.avx512.mask.lzcnt.q.512" => "__builtin_ia32_vplzcntq_512_mask", - "llvm.x86.avx512.mask.max.pd.128" => "__builtin_ia32_maxpd_mask", - "llvm.x86.avx512.mask.max.pd.256" => "__builtin_ia32_maxpd256_mask", - 
"llvm.x86.avx512.mask.max.pd.512" => "__builtin_ia32_maxpd512_mask", - "llvm.x86.avx512.mask.max.ps.128" => "__builtin_ia32_maxps_mask", - "llvm.x86.avx512.mask.max.ps.256" => "__builtin_ia32_maxps256_mask", - "llvm.x86.avx512.mask.max.ps.512" => "__builtin_ia32_maxps512_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512.mask.max.sd.round" => "__builtin_ia32_maxsd_round_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512.mask.max.ss.round" => "__builtin_ia32_maxss_round_mask", - "llvm.x86.avx512.mask.min.pd.128" => "__builtin_ia32_minpd_mask", - "llvm.x86.avx512.mask.min.pd.256" => "__builtin_ia32_minpd256_mask", - "llvm.x86.avx512.mask.min.pd.512" => "__builtin_ia32_minpd512_mask", - "llvm.x86.avx512.mask.min.ps.128" => "__builtin_ia32_minps_mask", - "llvm.x86.avx512.mask.min.ps.256" => "__builtin_ia32_minps256_mask", - "llvm.x86.avx512.mask.min.ps.512" => "__builtin_ia32_minps512_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512.mask.min.sd.round" => "__builtin_ia32_minsd_round_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512.mask.min.ss.round" => "__builtin_ia32_minss_round_mask", - "llvm.x86.avx512.mask.move.sd" => "__builtin_ia32_movsd_mask", - "llvm.x86.avx512.mask.move.ss" => "__builtin_ia32_movss_mask", - "llvm.x86.avx512.mask.mul.pd.128" => "__builtin_ia32_mulpd_mask", - "llvm.x86.avx512.mask.mul.pd.256" => "__builtin_ia32_mulpd256_mask", - "llvm.x86.avx512.mask.mul.pd.512" => "__builtin_ia32_mulpd512_mask", - "llvm.x86.avx512.mask.mul.ps.128" => "__builtin_ia32_mulps_mask", - "llvm.x86.avx512.mask.mul.ps.256" => "__builtin_ia32_mulps256_mask", - "llvm.x86.avx512.mask.mul.ps.512" => "__builtin_ia32_mulps512_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512.mask.mul.sd.round" => "__builtin_ia32_mulsd_round_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512.mask.mul.ss.round" => "__builtin_ia32_mulss_round_mask", - "llvm.x86.avx512.mask.or.pd.128" => "__builtin_ia32_orpd128_mask", - "llvm.x86.avx512.mask.or.pd.256" => "__builtin_ia32_orpd256_mask", - "llvm.x86.avx512.mask.or.pd.512" => "__builtin_ia32_orpd512_mask", - "llvm.x86.avx512.mask.or.ps.128" => "__builtin_ia32_orps128_mask", - "llvm.x86.avx512.mask.or.ps.256" => "__builtin_ia32_orps256_mask", - "llvm.x86.avx512.mask.or.ps.512" => "__builtin_ia32_orps512_mask", - "llvm.x86.avx512.mask.pabs.b.128" => "__builtin_ia32_pabsb128_mask", - "llvm.x86.avx512.mask.pabs.b.256" => "__builtin_ia32_pabsb256_mask", - "llvm.x86.avx512.mask.pabs.b.512" => "__builtin_ia32_pabsb512_mask", - "llvm.x86.avx512.mask.pabs.d.128" => "__builtin_ia32_pabsd128_mask", - "llvm.x86.avx512.mask.pabs.d.256" => "__builtin_ia32_pabsd256_mask", - "llvm.x86.avx512.mask.pabs.d.512" => "__builtin_ia32_pabsd512_mask", - "llvm.x86.avx512.mask.pabs.q.128" => "__builtin_ia32_pabsq128_mask", - "llvm.x86.avx512.mask.pabs.q.256" => "__builtin_ia32_pabsq256_mask", - "llvm.x86.avx512.mask.pabs.q.512" => "__builtin_ia32_pabsq512_mask", - "llvm.x86.avx512.mask.pabs.w.128" => "__builtin_ia32_pabsw128_mask", - "llvm.x86.avx512.mask.pabs.w.256" => "__builtin_ia32_pabsw256_mask", - "llvm.x86.avx512.mask.pabs.w.512" => "__builtin_ia32_pabsw512_mask", - "llvm.x86.avx512.mask.packssdw.128" => "__builtin_ia32_packssdw128_mask", - "llvm.x86.avx512.mask.packssdw.256" => "__builtin_ia32_packssdw256_mask", - "llvm.x86.avx512.mask.packssdw.512" => "__builtin_ia32_packssdw512_mask", - "llvm.x86.avx512.mask.packsswb.128" => "__builtin_ia32_packsswb128_mask", - "llvm.x86.avx512.mask.packsswb.256" => "__builtin_ia32_packsswb256_mask", - "llvm.x86.avx512.mask.packsswb.512" => 
"__builtin_ia32_packsswb512_mask", - "llvm.x86.avx512.mask.packusdw.128" => "__builtin_ia32_packusdw128_mask", - "llvm.x86.avx512.mask.packusdw.256" => "__builtin_ia32_packusdw256_mask", - "llvm.x86.avx512.mask.packusdw.512" => "__builtin_ia32_packusdw512_mask", - "llvm.x86.avx512.mask.packuswb.128" => "__builtin_ia32_packuswb128_mask", - "llvm.x86.avx512.mask.packuswb.256" => "__builtin_ia32_packuswb256_mask", - "llvm.x86.avx512.mask.packuswb.512" => "__builtin_ia32_packuswb512_mask", - "llvm.x86.avx512.mask.padd.b.128" => "__builtin_ia32_paddb128_mask", - "llvm.x86.avx512.mask.padd.b.256" => "__builtin_ia32_paddb256_mask", - "llvm.x86.avx512.mask.padd.b.512" => "__builtin_ia32_paddb512_mask", - "llvm.x86.avx512.mask.padd.d.128" => "__builtin_ia32_paddd128_mask", - "llvm.x86.avx512.mask.padd.d.256" => "__builtin_ia32_paddd256_mask", - "llvm.x86.avx512.mask.padd.d.512" => "__builtin_ia32_paddd512_mask", - "llvm.x86.avx512.mask.padd.q.128" => "__builtin_ia32_paddq128_mask", - "llvm.x86.avx512.mask.padd.q.256" => "__builtin_ia32_paddq256_mask", - "llvm.x86.avx512.mask.padd.q.512" => "__builtin_ia32_paddq512_mask", - "llvm.x86.avx512.mask.padd.w.128" => "__builtin_ia32_paddw128_mask", - "llvm.x86.avx512.mask.padd.w.256" => "__builtin_ia32_paddw256_mask", - "llvm.x86.avx512.mask.padd.w.512" => "__builtin_ia32_paddw512_mask", - "llvm.x86.avx512.mask.padds.b.128" => "__builtin_ia32_paddsb128_mask", - "llvm.x86.avx512.mask.padds.b.256" => "__builtin_ia32_paddsb256_mask", - "llvm.x86.avx512.mask.padds.b.512" => "__builtin_ia32_paddsb512_mask", - "llvm.x86.avx512.mask.padds.w.128" => "__builtin_ia32_paddsw128_mask", - "llvm.x86.avx512.mask.padds.w.256" => "__builtin_ia32_paddsw256_mask", - "llvm.x86.avx512.mask.padds.w.512" => "__builtin_ia32_paddsw512_mask", - "llvm.x86.avx512.mask.paddus.b.128" => "__builtin_ia32_paddusb128_mask", - "llvm.x86.avx512.mask.paddus.b.256" => "__builtin_ia32_paddusb256_mask", - "llvm.x86.avx512.mask.paddus.b.512" => "__builtin_ia32_paddusb512_mask", - "llvm.x86.avx512.mask.paddus.w.128" => "__builtin_ia32_paddusw128_mask", - "llvm.x86.avx512.mask.paddus.w.256" => "__builtin_ia32_paddusw256_mask", - "llvm.x86.avx512.mask.paddus.w.512" => "__builtin_ia32_paddusw512_mask", - "llvm.x86.avx512.mask.pand.d.512" => "__builtin_ia32_pandd512_mask", - "llvm.x86.avx512.mask.pand.q.512" => "__builtin_ia32_pandq512_mask", - "llvm.x86.avx512.mask.pavg.b.128" => "__builtin_ia32_pavgb128_mask", - "llvm.x86.avx512.mask.pavg.b.256" => "__builtin_ia32_pavgb256_mask", - "llvm.x86.avx512.mask.pavg.b.512" => "__builtin_ia32_pavgb512_mask", - "llvm.x86.avx512.mask.pavg.w.128" => "__builtin_ia32_pavgw128_mask", - "llvm.x86.avx512.mask.pavg.w.256" => "__builtin_ia32_pavgw256_mask", - "llvm.x86.avx512.mask.pavg.w.512" => "__builtin_ia32_pavgw512_mask", - "llvm.x86.avx512.mask.pbroadcast.b.gpr.128" => "__builtin_ia32_pbroadcastb128_gpr_mask", - "llvm.x86.avx512.mask.pbroadcast.b.gpr.256" => "__builtin_ia32_pbroadcastb256_gpr_mask", - "llvm.x86.avx512.mask.pbroadcast.b.gpr.512" => "__builtin_ia32_pbroadcastb512_gpr_mask", - "llvm.x86.avx512.mask.pbroadcast.d.gpr.128" => "__builtin_ia32_pbroadcastd128_gpr_mask", - "llvm.x86.avx512.mask.pbroadcast.d.gpr.256" => "__builtin_ia32_pbroadcastd256_gpr_mask", - "llvm.x86.avx512.mask.pbroadcast.d.gpr.512" => "__builtin_ia32_pbroadcastd512_gpr_mask", - "llvm.x86.avx512.mask.pbroadcast.q.gpr.128" => "__builtin_ia32_pbroadcastq128_gpr_mask", - "llvm.x86.avx512.mask.pbroadcast.q.gpr.256" => "__builtin_ia32_pbroadcastq256_gpr_mask", - 
"llvm.x86.avx512.mask.pbroadcast.q.gpr.512" => "__builtin_ia32_pbroadcastq512_gpr_mask", - "llvm.x86.avx512.mask.pbroadcast.q.mem.512" => "__builtin_ia32_pbroadcastq512_mem_mask", - "llvm.x86.avx512.mask.pbroadcast.w.gpr.128" => "__builtin_ia32_pbroadcastw128_gpr_mask", - "llvm.x86.avx512.mask.pbroadcast.w.gpr.256" => "__builtin_ia32_pbroadcastw256_gpr_mask", - "llvm.x86.avx512.mask.pbroadcast.w.gpr.512" => "__builtin_ia32_pbroadcastw512_gpr_mask", - "llvm.x86.avx512.mask.pcmpeq.b.128" => "__builtin_ia32_pcmpeqb128_mask", - "llvm.x86.avx512.mask.pcmpeq.b.256" => "__builtin_ia32_pcmpeqb256_mask", - "llvm.x86.avx512.mask.pcmpeq.b.512" => "__builtin_ia32_pcmpeqb512_mask", - "llvm.x86.avx512.mask.pcmpeq.d.128" => "__builtin_ia32_pcmpeqd128_mask", - "llvm.x86.avx512.mask.pcmpeq.d.256" => "__builtin_ia32_pcmpeqd256_mask", - "llvm.x86.avx512.mask.pcmpeq.d.512" => "__builtin_ia32_pcmpeqd512_mask", - "llvm.x86.avx512.mask.pcmpeq.q.128" => "__builtin_ia32_pcmpeqq128_mask", - "llvm.x86.avx512.mask.pcmpeq.q.256" => "__builtin_ia32_pcmpeqq256_mask", - "llvm.x86.avx512.mask.pcmpeq.q.512" => "__builtin_ia32_pcmpeqq512_mask", - "llvm.x86.avx512.mask.pcmpeq.w.128" => "__builtin_ia32_pcmpeqw128_mask", - "llvm.x86.avx512.mask.pcmpeq.w.256" => "__builtin_ia32_pcmpeqw256_mask", - "llvm.x86.avx512.mask.pcmpeq.w.512" => "__builtin_ia32_pcmpeqw512_mask", - "llvm.x86.avx512.mask.pcmpgt.b.128" => "__builtin_ia32_pcmpgtb128_mask", - "llvm.x86.avx512.mask.pcmpgt.b.256" => "__builtin_ia32_pcmpgtb256_mask", - "llvm.x86.avx512.mask.pcmpgt.b.512" => "__builtin_ia32_pcmpgtb512_mask", - "llvm.x86.avx512.mask.pcmpgt.d.128" => "__builtin_ia32_pcmpgtd128_mask", - "llvm.x86.avx512.mask.pcmpgt.d.256" => "__builtin_ia32_pcmpgtd256_mask", - "llvm.x86.avx512.mask.pcmpgt.d.512" => "__builtin_ia32_pcmpgtd512_mask", - "llvm.x86.avx512.mask.pcmpgt.q.128" => "__builtin_ia32_pcmpgtq128_mask", - "llvm.x86.avx512.mask.pcmpgt.q.256" => "__builtin_ia32_pcmpgtq256_mask", - "llvm.x86.avx512.mask.pcmpgt.q.512" => "__builtin_ia32_pcmpgtq512_mask", - "llvm.x86.avx512.mask.pcmpgt.w.128" => "__builtin_ia32_pcmpgtw128_mask", - "llvm.x86.avx512.mask.pcmpgt.w.256" => "__builtin_ia32_pcmpgtw256_mask", - "llvm.x86.avx512.mask.pcmpgt.w.512" => "__builtin_ia32_pcmpgtw512_mask", - "llvm.x86.avx512.mask.permvar.df.256" => "__builtin_ia32_permvardf256_mask", - "llvm.x86.avx512.mask.permvar.df.512" => "__builtin_ia32_permvardf512_mask", - "llvm.x86.avx512.mask.permvar.di.256" => "__builtin_ia32_permvardi256_mask", - "llvm.x86.avx512.mask.permvar.di.512" => "__builtin_ia32_permvardi512_mask", - "llvm.x86.avx512.mask.permvar.hi.128" => "__builtin_ia32_permvarhi128_mask", - "llvm.x86.avx512.mask.permvar.hi.256" => "__builtin_ia32_permvarhi256_mask", - "llvm.x86.avx512.mask.permvar.hi.512" => "__builtin_ia32_permvarhi512_mask", - "llvm.x86.avx512.mask.permvar.qi.128" => "__builtin_ia32_permvarqi128_mask", - "llvm.x86.avx512.mask.permvar.qi.256" => "__builtin_ia32_permvarqi256_mask", - "llvm.x86.avx512.mask.permvar.qi.512" => "__builtin_ia32_permvarqi512_mask", - "llvm.x86.avx512.mask.permvar.sf.256" => "__builtin_ia32_permvarsf256_mask", - "llvm.x86.avx512.mask.permvar.sf.512" => "__builtin_ia32_permvarsf512_mask", - "llvm.x86.avx512.mask.permvar.si.256" => "__builtin_ia32_permvarsi256_mask", - "llvm.x86.avx512.mask.permvar.si.512" => "__builtin_ia32_permvarsi512_mask", - "llvm.x86.avx512.mask.pmaddubs.w.128" => "__builtin_ia32_pmaddubsw128_mask", - "llvm.x86.avx512.mask.pmaddubs.w.256" => "__builtin_ia32_pmaddubsw256_mask", - 
"llvm.x86.avx512.mask.pmaddubs.w.512" => "__builtin_ia32_pmaddubsw512_mask", - "llvm.x86.avx512.mask.pmaddw.d.128" => "__builtin_ia32_pmaddwd128_mask", - "llvm.x86.avx512.mask.pmaddw.d.256" => "__builtin_ia32_pmaddwd256_mask", - "llvm.x86.avx512.mask.pmaddw.d.512" => "__builtin_ia32_pmaddwd512_mask", - "llvm.x86.avx512.mask.pmaxs.b.128" => "__builtin_ia32_pmaxsb128_mask", - "llvm.x86.avx512.mask.pmaxs.b.256" => "__builtin_ia32_pmaxsb256_mask", - "llvm.x86.avx512.mask.pmaxs.b.512" => "__builtin_ia32_pmaxsb512_mask", - "llvm.x86.avx512.mask.pmaxs.d.128" => "__builtin_ia32_pmaxsd128_mask", - "llvm.x86.avx512.mask.pmaxs.d.256" => "__builtin_ia32_pmaxsd256_mask", - "llvm.x86.avx512.mask.pmaxs.d.512" => "__builtin_ia32_pmaxsd512_mask", - "llvm.x86.avx512.mask.pmaxs.q.128" => "__builtin_ia32_pmaxsq128_mask", - "llvm.x86.avx512.mask.pmaxs.q.256" => "__builtin_ia32_pmaxsq256_mask", - "llvm.x86.avx512.mask.pmaxs.q.512" => "__builtin_ia32_pmaxsq512_mask", - "llvm.x86.avx512.mask.pmaxs.w.128" => "__builtin_ia32_pmaxsw128_mask", - "llvm.x86.avx512.mask.pmaxs.w.256" => "__builtin_ia32_pmaxsw256_mask", - "llvm.x86.avx512.mask.pmaxs.w.512" => "__builtin_ia32_pmaxsw512_mask", - "llvm.x86.avx512.mask.pmaxu.b.128" => "__builtin_ia32_pmaxub128_mask", - "llvm.x86.avx512.mask.pmaxu.b.256" => "__builtin_ia32_pmaxub256_mask", - "llvm.x86.avx512.mask.pmaxu.b.512" => "__builtin_ia32_pmaxub512_mask", - "llvm.x86.avx512.mask.pmaxu.d.128" => "__builtin_ia32_pmaxud128_mask", - "llvm.x86.avx512.mask.pmaxu.d.256" => "__builtin_ia32_pmaxud256_mask", - "llvm.x86.avx512.mask.pmaxu.d.512" => "__builtin_ia32_pmaxud512_mask", - "llvm.x86.avx512.mask.pmaxu.q.128" => "__builtin_ia32_pmaxuq128_mask", - "llvm.x86.avx512.mask.pmaxu.q.256" => "__builtin_ia32_pmaxuq256_mask", - "llvm.x86.avx512.mask.pmaxu.q.512" => "__builtin_ia32_pmaxuq512_mask", - "llvm.x86.avx512.mask.pmaxu.w.128" => "__builtin_ia32_pmaxuw128_mask", - "llvm.x86.avx512.mask.pmaxu.w.256" => "__builtin_ia32_pmaxuw256_mask", - "llvm.x86.avx512.mask.pmaxu.w.512" => "__builtin_ia32_pmaxuw512_mask", - "llvm.x86.avx512.mask.pmins.b.128" => "__builtin_ia32_pminsb128_mask", - "llvm.x86.avx512.mask.pmins.b.256" => "__builtin_ia32_pminsb256_mask", - "llvm.x86.avx512.mask.pmins.b.512" => "__builtin_ia32_pminsb512_mask", - "llvm.x86.avx512.mask.pmins.d.128" => "__builtin_ia32_pminsd128_mask", - "llvm.x86.avx512.mask.pmins.d.256" => "__builtin_ia32_pminsd256_mask", - "llvm.x86.avx512.mask.pmins.d.512" => "__builtin_ia32_pminsd512_mask", - "llvm.x86.avx512.mask.pmins.q.128" => "__builtin_ia32_pminsq128_mask", - "llvm.x86.avx512.mask.pmins.q.256" => "__builtin_ia32_pminsq256_mask", - "llvm.x86.avx512.mask.pmins.q.512" => "__builtin_ia32_pminsq512_mask", - "llvm.x86.avx512.mask.pmins.w.128" => "__builtin_ia32_pminsw128_mask", - "llvm.x86.avx512.mask.pmins.w.256" => "__builtin_ia32_pminsw256_mask", - "llvm.x86.avx512.mask.pmins.w.512" => "__builtin_ia32_pminsw512_mask", - "llvm.x86.avx512.mask.pminu.b.128" => "__builtin_ia32_pminub128_mask", - "llvm.x86.avx512.mask.pminu.b.256" => "__builtin_ia32_pminub256_mask", - "llvm.x86.avx512.mask.pminu.b.512" => "__builtin_ia32_pminub512_mask", - "llvm.x86.avx512.mask.pminu.d.128" => "__builtin_ia32_pminud128_mask", - "llvm.x86.avx512.mask.pminu.d.256" => "__builtin_ia32_pminud256_mask", - "llvm.x86.avx512.mask.pminu.d.512" => "__builtin_ia32_pminud512_mask", - "llvm.x86.avx512.mask.pminu.q.128" => "__builtin_ia32_pminuq128_mask", - "llvm.x86.avx512.mask.pminu.q.256" => "__builtin_ia32_pminuq256_mask", - "llvm.x86.avx512.mask.pminu.q.512" => 
"__builtin_ia32_pminuq512_mask", - "llvm.x86.avx512.mask.pminu.w.128" => "__builtin_ia32_pminuw128_mask", - "llvm.x86.avx512.mask.pminu.w.256" => "__builtin_ia32_pminuw256_mask", - "llvm.x86.avx512.mask.pminu.w.512" => "__builtin_ia32_pminuw512_mask", - "llvm.x86.avx512.mask.pmov.db.128" => "__builtin_ia32_pmovdb128_mask", - "llvm.x86.avx512.mask.pmov.db.256" => "__builtin_ia32_pmovdb256_mask", - "llvm.x86.avx512.mask.pmov.db.512" => "__builtin_ia32_pmovdb512_mask", - "llvm.x86.avx512.mask.pmov.db.mem.128" => "__builtin_ia32_pmovdb128mem_mask", - "llvm.x86.avx512.mask.pmov.db.mem.256" => "__builtin_ia32_pmovdb256mem_mask", - "llvm.x86.avx512.mask.pmov.db.mem.512" => "__builtin_ia32_pmovdb512mem_mask", - "llvm.x86.avx512.mask.pmov.dw.128" => "__builtin_ia32_pmovdw128_mask", - "llvm.x86.avx512.mask.pmov.dw.256" => "__builtin_ia32_pmovdw256_mask", - "llvm.x86.avx512.mask.pmov.dw.512" => "__builtin_ia32_pmovdw512_mask", - "llvm.x86.avx512.mask.pmov.dw.mem.128" => "__builtin_ia32_pmovdw128mem_mask", - "llvm.x86.avx512.mask.pmov.dw.mem.256" => "__builtin_ia32_pmovdw256mem_mask", - "llvm.x86.avx512.mask.pmov.dw.mem.512" => "__builtin_ia32_pmovdw512mem_mask", - "llvm.x86.avx512.mask.pmov.qb.128" => "__builtin_ia32_pmovqb128_mask", - "llvm.x86.avx512.mask.pmov.qb.256" => "__builtin_ia32_pmovqb256_mask", - "llvm.x86.avx512.mask.pmov.qb.512" => "__builtin_ia32_pmovqb512_mask", - "llvm.x86.avx512.mask.pmov.qb.mem.128" => "__builtin_ia32_pmovqb128mem_mask", - "llvm.x86.avx512.mask.pmov.qb.mem.256" => "__builtin_ia32_pmovqb256mem_mask", - "llvm.x86.avx512.mask.pmov.qb.mem.512" => "__builtin_ia32_pmovqb512mem_mask", - "llvm.x86.avx512.mask.pmov.qd.128" => "__builtin_ia32_pmovqd128_mask", - "llvm.x86.avx512.mask.pmov.qd.256" => "__builtin_ia32_pmovqd256_mask", - "llvm.x86.avx512.mask.pmov.qd.512" => "__builtin_ia32_pmovqd512_mask", - "llvm.x86.avx512.mask.pmov.qd.mem.128" => "__builtin_ia32_pmovqd128mem_mask", - "llvm.x86.avx512.mask.pmov.qd.mem.256" => "__builtin_ia32_pmovqd256mem_mask", - "llvm.x86.avx512.mask.pmov.qd.mem.512" => "__builtin_ia32_pmovqd512mem_mask", - "llvm.x86.avx512.mask.pmov.qw.128" => "__builtin_ia32_pmovqw128_mask", - "llvm.x86.avx512.mask.pmov.qw.256" => "__builtin_ia32_pmovqw256_mask", - "llvm.x86.avx512.mask.pmov.qw.512" => "__builtin_ia32_pmovqw512_mask", - "llvm.x86.avx512.mask.pmov.qw.mem.128" => "__builtin_ia32_pmovqw128mem_mask", - "llvm.x86.avx512.mask.pmov.qw.mem.256" => "__builtin_ia32_pmovqw256mem_mask", - "llvm.x86.avx512.mask.pmov.qw.mem.512" => "__builtin_ia32_pmovqw512mem_mask", - "llvm.x86.avx512.mask.pmov.wb.128" => "__builtin_ia32_pmovwb128_mask", - "llvm.x86.avx512.mask.pmov.wb.256" => "__builtin_ia32_pmovwb256_mask", - "llvm.x86.avx512.mask.pmov.wb.512" => "__builtin_ia32_pmovwb512_mask", - "llvm.x86.avx512.mask.pmov.wb.mem.128" => "__builtin_ia32_pmovwb128mem_mask", - "llvm.x86.avx512.mask.pmov.wb.mem.256" => "__builtin_ia32_pmovwb256mem_mask", - "llvm.x86.avx512.mask.pmov.wb.mem.512" => "__builtin_ia32_pmovwb512mem_mask", - "llvm.x86.avx512.mask.pmovs.db.128" => "__builtin_ia32_pmovsdb128_mask", - "llvm.x86.avx512.mask.pmovs.db.256" => "__builtin_ia32_pmovsdb256_mask", - "llvm.x86.avx512.mask.pmovs.db.512" => "__builtin_ia32_pmovsdb512_mask", - "llvm.x86.avx512.mask.pmovs.db.mem.128" => "__builtin_ia32_pmovsdb128mem_mask", - "llvm.x86.avx512.mask.pmovs.db.mem.256" => "__builtin_ia32_pmovsdb256mem_mask", - "llvm.x86.avx512.mask.pmovs.db.mem.512" => "__builtin_ia32_pmovsdb512mem_mask", - "llvm.x86.avx512.mask.pmovs.dw.128" => "__builtin_ia32_pmovsdw128_mask", - 
"llvm.x86.avx512.mask.pmovs.dw.256" => "__builtin_ia32_pmovsdw256_mask", - "llvm.x86.avx512.mask.pmovs.dw.512" => "__builtin_ia32_pmovsdw512_mask", - "llvm.x86.avx512.mask.pmovs.dw.mem.128" => "__builtin_ia32_pmovsdw128mem_mask", - "llvm.x86.avx512.mask.pmovs.dw.mem.256" => "__builtin_ia32_pmovsdw256mem_mask", - "llvm.x86.avx512.mask.pmovs.dw.mem.512" => "__builtin_ia32_pmovsdw512mem_mask", - "llvm.x86.avx512.mask.pmovs.qb.128" => "__builtin_ia32_pmovsqb128_mask", - "llvm.x86.avx512.mask.pmovs.qb.256" => "__builtin_ia32_pmovsqb256_mask", - "llvm.x86.avx512.mask.pmovs.qb.512" => "__builtin_ia32_pmovsqb512_mask", - "llvm.x86.avx512.mask.pmovs.qb.mem.128" => "__builtin_ia32_pmovsqb128mem_mask", - "llvm.x86.avx512.mask.pmovs.qb.mem.256" => "__builtin_ia32_pmovsqb256mem_mask", - "llvm.x86.avx512.mask.pmovs.qb.mem.512" => "__builtin_ia32_pmovsqb512mem_mask", - "llvm.x86.avx512.mask.pmovs.qd.128" => "__builtin_ia32_pmovsqd128_mask", - "llvm.x86.avx512.mask.pmovs.qd.256" => "__builtin_ia32_pmovsqd256_mask", - "llvm.x86.avx512.mask.pmovs.qd.512" => "__builtin_ia32_pmovsqd512_mask", - "llvm.x86.avx512.mask.pmovs.qd.mem.128" => "__builtin_ia32_pmovsqd128mem_mask", - "llvm.x86.avx512.mask.pmovs.qd.mem.256" => "__builtin_ia32_pmovsqd256mem_mask", - "llvm.x86.avx512.mask.pmovs.qd.mem.512" => "__builtin_ia32_pmovsqd512mem_mask", - "llvm.x86.avx512.mask.pmovs.qw.128" => "__builtin_ia32_pmovsqw128_mask", - "llvm.x86.avx512.mask.pmovs.qw.256" => "__builtin_ia32_pmovsqw256_mask", - "llvm.x86.avx512.mask.pmovs.qw.512" => "__builtin_ia32_pmovsqw512_mask", - "llvm.x86.avx512.mask.pmovs.qw.mem.128" => "__builtin_ia32_pmovsqw128mem_mask", - "llvm.x86.avx512.mask.pmovs.qw.mem.256" => "__builtin_ia32_pmovsqw256mem_mask", - "llvm.x86.avx512.mask.pmovs.qw.mem.512" => "__builtin_ia32_pmovsqw512mem_mask", - "llvm.x86.avx512.mask.pmovs.wb.128" => "__builtin_ia32_pmovswb128_mask", - "llvm.x86.avx512.mask.pmovs.wb.256" => "__builtin_ia32_pmovswb256_mask", - "llvm.x86.avx512.mask.pmovs.wb.512" => "__builtin_ia32_pmovswb512_mask", - "llvm.x86.avx512.mask.pmovs.wb.mem.128" => "__builtin_ia32_pmovswb128mem_mask", - "llvm.x86.avx512.mask.pmovs.wb.mem.256" => "__builtin_ia32_pmovswb256mem_mask", - "llvm.x86.avx512.mask.pmovs.wb.mem.512" => "__builtin_ia32_pmovswb512mem_mask", - "llvm.x86.avx512.mask.pmovsxb.d.128" => "__builtin_ia32_pmovsxbd128_mask", - "llvm.x86.avx512.mask.pmovsxb.d.256" => "__builtin_ia32_pmovsxbd256_mask", - "llvm.x86.avx512.mask.pmovsxb.d.512" => "__builtin_ia32_pmovsxbd512_mask", - "llvm.x86.avx512.mask.pmovsxb.q.128" => "__builtin_ia32_pmovsxbq128_mask", - "llvm.x86.avx512.mask.pmovsxb.q.256" => "__builtin_ia32_pmovsxbq256_mask", - "llvm.x86.avx512.mask.pmovsxb.q.512" => "__builtin_ia32_pmovsxbq512_mask", - "llvm.x86.avx512.mask.pmovsxb.w.128" => "__builtin_ia32_pmovsxbw128_mask", - "llvm.x86.avx512.mask.pmovsxb.w.256" => "__builtin_ia32_pmovsxbw256_mask", - "llvm.x86.avx512.mask.pmovsxb.w.512" => "__builtin_ia32_pmovsxbw512_mask", - "llvm.x86.avx512.mask.pmovsxd.q.128" => "__builtin_ia32_pmovsxdq128_mask", - "llvm.x86.avx512.mask.pmovsxd.q.256" => "__builtin_ia32_pmovsxdq256_mask", - "llvm.x86.avx512.mask.pmovsxd.q.512" => "__builtin_ia32_pmovsxdq512_mask", - "llvm.x86.avx512.mask.pmovsxw.d.128" => "__builtin_ia32_pmovsxwd128_mask", - "llvm.x86.avx512.mask.pmovsxw.d.256" => "__builtin_ia32_pmovsxwd256_mask", - "llvm.x86.avx512.mask.pmovsxw.d.512" => "__builtin_ia32_pmovsxwd512_mask", - "llvm.x86.avx512.mask.pmovsxw.q.128" => "__builtin_ia32_pmovsxwq128_mask", - "llvm.x86.avx512.mask.pmovsxw.q.256" => 
"__builtin_ia32_pmovsxwq256_mask", - "llvm.x86.avx512.mask.pmovsxw.q.512" => "__builtin_ia32_pmovsxwq512_mask", - "llvm.x86.avx512.mask.pmovus.db.128" => "__builtin_ia32_pmovusdb128_mask", - "llvm.x86.avx512.mask.pmovus.db.256" => "__builtin_ia32_pmovusdb256_mask", - "llvm.x86.avx512.mask.pmovus.db.512" => "__builtin_ia32_pmovusdb512_mask", - "llvm.x86.avx512.mask.pmovus.db.mem.128" => "__builtin_ia32_pmovusdb128mem_mask", - "llvm.x86.avx512.mask.pmovus.db.mem.256" => "__builtin_ia32_pmovusdb256mem_mask", - "llvm.x86.avx512.mask.pmovus.db.mem.512" => "__builtin_ia32_pmovusdb512mem_mask", - "llvm.x86.avx512.mask.pmovus.dw.128" => "__builtin_ia32_pmovusdw128_mask", - "llvm.x86.avx512.mask.pmovus.dw.256" => "__builtin_ia32_pmovusdw256_mask", - "llvm.x86.avx512.mask.pmovus.dw.512" => "__builtin_ia32_pmovusdw512_mask", - "llvm.x86.avx512.mask.pmovus.dw.mem.128" => "__builtin_ia32_pmovusdw128mem_mask", - "llvm.x86.avx512.mask.pmovus.dw.mem.256" => "__builtin_ia32_pmovusdw256mem_mask", - "llvm.x86.avx512.mask.pmovus.dw.mem.512" => "__builtin_ia32_pmovusdw512mem_mask", - "llvm.x86.avx512.mask.pmovus.qb.128" => "__builtin_ia32_pmovusqb128_mask", - "llvm.x86.avx512.mask.pmovus.qb.256" => "__builtin_ia32_pmovusqb256_mask", - "llvm.x86.avx512.mask.pmovus.qb.512" => "__builtin_ia32_pmovusqb512_mask", - "llvm.x86.avx512.mask.pmovus.qb.mem.128" => "__builtin_ia32_pmovusqb128mem_mask", - "llvm.x86.avx512.mask.pmovus.qb.mem.256" => "__builtin_ia32_pmovusqb256mem_mask", - "llvm.x86.avx512.mask.pmovus.qb.mem.512" => "__builtin_ia32_pmovusqb512mem_mask", - "llvm.x86.avx512.mask.pmovus.qd.128" => "__builtin_ia32_pmovusqd128_mask", - "llvm.x86.avx512.mask.pmovus.qd.256" => "__builtin_ia32_pmovusqd256_mask", - "llvm.x86.avx512.mask.pmovus.qd.512" => "__builtin_ia32_pmovusqd512_mask", - "llvm.x86.avx512.mask.pmovus.qd.mem.128" => "__builtin_ia32_pmovusqd128mem_mask", - "llvm.x86.avx512.mask.pmovus.qd.mem.256" => "__builtin_ia32_pmovusqd256mem_mask", - "llvm.x86.avx512.mask.pmovus.qd.mem.512" => "__builtin_ia32_pmovusqd512mem_mask", - "llvm.x86.avx512.mask.pmovus.qw.128" => "__builtin_ia32_pmovusqw128_mask", - "llvm.x86.avx512.mask.pmovus.qw.256" => "__builtin_ia32_pmovusqw256_mask", - "llvm.x86.avx512.mask.pmovus.qw.512" => "__builtin_ia32_pmovusqw512_mask", - "llvm.x86.avx512.mask.pmovus.qw.mem.128" => "__builtin_ia32_pmovusqw128mem_mask", - "llvm.x86.avx512.mask.pmovus.qw.mem.256" => "__builtin_ia32_pmovusqw256mem_mask", - "llvm.x86.avx512.mask.pmovus.qw.mem.512" => "__builtin_ia32_pmovusqw512mem_mask", - "llvm.x86.avx512.mask.pmovus.wb.128" => "__builtin_ia32_pmovuswb128_mask", - "llvm.x86.avx512.mask.pmovus.wb.256" => "__builtin_ia32_pmovuswb256_mask", - "llvm.x86.avx512.mask.pmovus.wb.512" => "__builtin_ia32_pmovuswb512_mask", - "llvm.x86.avx512.mask.pmovus.wb.mem.128" => "__builtin_ia32_pmovuswb128mem_mask", - "llvm.x86.avx512.mask.pmovus.wb.mem.256" => "__builtin_ia32_pmovuswb256mem_mask", - "llvm.x86.avx512.mask.pmovus.wb.mem.512" => "__builtin_ia32_pmovuswb512mem_mask", - "llvm.x86.avx512.mask.pmovzxb.d.128" => "__builtin_ia32_pmovzxbd128_mask", - "llvm.x86.avx512.mask.pmovzxb.d.256" => "__builtin_ia32_pmovzxbd256_mask", - "llvm.x86.avx512.mask.pmovzxb.d.512" => "__builtin_ia32_pmovzxbd512_mask", - "llvm.x86.avx512.mask.pmovzxb.q.128" => "__builtin_ia32_pmovzxbq128_mask", - "llvm.x86.avx512.mask.pmovzxb.q.256" => "__builtin_ia32_pmovzxbq256_mask", - "llvm.x86.avx512.mask.pmovzxb.q.512" => "__builtin_ia32_pmovzxbq512_mask", - "llvm.x86.avx512.mask.pmovzxb.w.128" => "__builtin_ia32_pmovzxbw128_mask", - 
"llvm.x86.avx512.mask.pmovzxb.w.256" => "__builtin_ia32_pmovzxbw256_mask", - "llvm.x86.avx512.mask.pmovzxb.w.512" => "__builtin_ia32_pmovzxbw512_mask", - "llvm.x86.avx512.mask.pmovzxd.q.128" => "__builtin_ia32_pmovzxdq128_mask", - "llvm.x86.avx512.mask.pmovzxd.q.256" => "__builtin_ia32_pmovzxdq256_mask", - "llvm.x86.avx512.mask.pmovzxd.q.512" => "__builtin_ia32_pmovzxdq512_mask", - "llvm.x86.avx512.mask.pmovzxw.d.128" => "__builtin_ia32_pmovzxwd128_mask", - "llvm.x86.avx512.mask.pmovzxw.d.256" => "__builtin_ia32_pmovzxwd256_mask", - "llvm.x86.avx512.mask.pmovzxw.d.512" => "__builtin_ia32_pmovzxwd512_mask", - "llvm.x86.avx512.mask.pmovzxw.q.128" => "__builtin_ia32_pmovzxwq128_mask", - "llvm.x86.avx512.mask.pmovzxw.q.256" => "__builtin_ia32_pmovzxwq256_mask", - "llvm.x86.avx512.mask.pmovzxw.q.512" => "__builtin_ia32_pmovzxwq512_mask", - "llvm.x86.avx512.mask.pmul.dq.128" => "__builtin_ia32_pmuldq128_mask", - "llvm.x86.avx512.mask.pmul.dq.256" => "__builtin_ia32_pmuldq256_mask", - "llvm.x86.avx512.mask.pmul.dq.512" => "__builtin_ia32_pmuldq512_mask", - "llvm.x86.avx512.mask.pmul.hr.sw.128" => "__builtin_ia32_pmulhrsw128_mask", - "llvm.x86.avx512.mask.pmul.hr.sw.256" => "__builtin_ia32_pmulhrsw256_mask", - "llvm.x86.avx512.mask.pmul.hr.sw.512" => "__builtin_ia32_pmulhrsw512_mask", - "llvm.x86.avx512.mask.pmulh.w.128" => "__builtin_ia32_pmulhw128_mask", - "llvm.x86.avx512.mask.pmulh.w.256" => "__builtin_ia32_pmulhw256_mask", - "llvm.x86.avx512.mask.pmulh.w.512" => "__builtin_ia32_pmulhw512_mask", - "llvm.x86.avx512.mask.pmulhu.w.128" => "__builtin_ia32_pmulhuw128_mask", - "llvm.x86.avx512.mask.pmulhu.w.256" => "__builtin_ia32_pmulhuw256_mask", - "llvm.x86.avx512.mask.pmulhu.w.512" => "__builtin_ia32_pmulhuw512_mask", - "llvm.x86.avx512.mask.pmull.d.128" => "__builtin_ia32_pmulld128_mask", - "llvm.x86.avx512.mask.pmull.d.256" => "__builtin_ia32_pmulld256_mask", - "llvm.x86.avx512.mask.pmull.d.512" => "__builtin_ia32_pmulld512_mask", - "llvm.x86.avx512.mask.pmull.q.128" => "__builtin_ia32_pmullq128_mask", - "llvm.x86.avx512.mask.pmull.q.256" => "__builtin_ia32_pmullq256_mask", - "llvm.x86.avx512.mask.pmull.q.512" => "__builtin_ia32_pmullq512_mask", - "llvm.x86.avx512.mask.pmull.w.128" => "__builtin_ia32_pmullw128_mask", - "llvm.x86.avx512.mask.pmull.w.256" => "__builtin_ia32_pmullw256_mask", - "llvm.x86.avx512.mask.pmull.w.512" => "__builtin_ia32_pmullw512_mask", - "llvm.x86.avx512.mask.pmultishift.qb.128" => "__builtin_ia32_vpmultishiftqb128_mask", - "llvm.x86.avx512.mask.pmultishift.qb.256" => "__builtin_ia32_vpmultishiftqb256_mask", - "llvm.x86.avx512.mask.pmultishift.qb.512" => "__builtin_ia32_vpmultishiftqb512_mask", - "llvm.x86.avx512.mask.pmulu.dq.128" => "__builtin_ia32_pmuludq128_mask", - "llvm.x86.avx512.mask.pmulu.dq.256" => "__builtin_ia32_pmuludq256_mask", - "llvm.x86.avx512.mask.pmulu.dq.512" => "__builtin_ia32_pmuludq512_mask", - "llvm.x86.avx512.mask.prol.d.128" => "__builtin_ia32_prold128_mask", - "llvm.x86.avx512.mask.prol.d.256" => "__builtin_ia32_prold256_mask", - "llvm.x86.avx512.mask.prol.d.512" => "__builtin_ia32_prold512_mask", - "llvm.x86.avx512.mask.prol.q.128" => "__builtin_ia32_prolq128_mask", - "llvm.x86.avx512.mask.prol.q.256" => "__builtin_ia32_prolq256_mask", - "llvm.x86.avx512.mask.prol.q.512" => "__builtin_ia32_prolq512_mask", - "llvm.x86.avx512.mask.prolv.d.128" => "__builtin_ia32_prolvd128_mask", - "llvm.x86.avx512.mask.prolv.d.256" => "__builtin_ia32_prolvd256_mask", - "llvm.x86.avx512.mask.prolv.d.512" => "__builtin_ia32_prolvd512_mask", - 
"llvm.x86.avx512.mask.prolv.q.128" => "__builtin_ia32_prolvq128_mask", - "llvm.x86.avx512.mask.prolv.q.256" => "__builtin_ia32_prolvq256_mask", - "llvm.x86.avx512.mask.prolv.q.512" => "__builtin_ia32_prolvq512_mask", - "llvm.x86.avx512.mask.pror.d.128" => "__builtin_ia32_prord128_mask", - "llvm.x86.avx512.mask.pror.d.256" => "__builtin_ia32_prord256_mask", - "llvm.x86.avx512.mask.pror.d.512" => "__builtin_ia32_prord512_mask", - "llvm.x86.avx512.mask.pror.q.128" => "__builtin_ia32_prorq128_mask", - "llvm.x86.avx512.mask.pror.q.256" => "__builtin_ia32_prorq256_mask", - "llvm.x86.avx512.mask.pror.q.512" => "__builtin_ia32_prorq512_mask", - "llvm.x86.avx512.mask.prorv.d.128" => "__builtin_ia32_prorvd128_mask", - "llvm.x86.avx512.mask.prorv.d.256" => "__builtin_ia32_prorvd256_mask", - "llvm.x86.avx512.mask.prorv.d.512" => "__builtin_ia32_prorvd512_mask", - "llvm.x86.avx512.mask.prorv.q.128" => "__builtin_ia32_prorvq128_mask", - "llvm.x86.avx512.mask.prorv.q.256" => "__builtin_ia32_prorvq256_mask", - "llvm.x86.avx512.mask.prorv.q.512" => "__builtin_ia32_prorvq512_mask", - "llvm.x86.avx512.mask.pshuf.b.128" => "__builtin_ia32_pshufb128_mask", - "llvm.x86.avx512.mask.pshuf.b.256" => "__builtin_ia32_pshufb256_mask", - "llvm.x86.avx512.mask.pshuf.b.512" => "__builtin_ia32_pshufb512_mask", - "llvm.x86.avx512.mask.psll.d" => "__builtin_ia32_pslld512_mask", - "llvm.x86.avx512.mask.psll.d.128" => "__builtin_ia32_pslld128_mask", - "llvm.x86.avx512.mask.psll.d.256" => "__builtin_ia32_pslld256_mask", - "llvm.x86.avx512.mask.psll.di.128" => "__builtin_ia32_pslldi128_mask", - "llvm.x86.avx512.mask.psll.di.256" => "__builtin_ia32_pslldi256_mask", - "llvm.x86.avx512.mask.psll.di.512" => "__builtin_ia32_pslldi512_mask", - "llvm.x86.avx512.mask.psll.q" => "__builtin_ia32_psllq512_mask", - "llvm.x86.avx512.mask.psll.q.128" => "__builtin_ia32_psllq128_mask", - "llvm.x86.avx512.mask.psll.q.256" => "__builtin_ia32_psllq256_mask", - "llvm.x86.avx512.mask.psll.qi.128" => "__builtin_ia32_psllqi128_mask", - "llvm.x86.avx512.mask.psll.qi.256" => "__builtin_ia32_psllqi256_mask", - "llvm.x86.avx512.mask.psll.qi.512" => "__builtin_ia32_psllqi512_mask", - "llvm.x86.avx512.mask.psll.w.128" => "__builtin_ia32_psllw128_mask", - "llvm.x86.avx512.mask.psll.w.256" => "__builtin_ia32_psllw256_mask", - "llvm.x86.avx512.mask.psll.w.512" => "__builtin_ia32_psllw512_mask", - "llvm.x86.avx512.mask.psll.wi.128" => "__builtin_ia32_psllwi128_mask", - "llvm.x86.avx512.mask.psll.wi.256" => "__builtin_ia32_psllwi256_mask", - "llvm.x86.avx512.mask.psll.wi.512" => "__builtin_ia32_psllwi512_mask", - "llvm.x86.avx512.mask.psllv.d" => "__builtin_ia32_psllv16si_mask", - "llvm.x86.avx512.mask.psllv.q" => "__builtin_ia32_psllv8di_mask", - "llvm.x86.avx512.mask.psllv16.hi" => "__builtin_ia32_psllv16hi_mask", - "llvm.x86.avx512.mask.psllv2.di" => "__builtin_ia32_psllv2di_mask", - "llvm.x86.avx512.mask.psllv32hi" => "__builtin_ia32_psllv32hi_mask", - "llvm.x86.avx512.mask.psllv4.di" => "__builtin_ia32_psllv4di_mask", - "llvm.x86.avx512.mask.psllv4.si" => "__builtin_ia32_psllv4si_mask", - "llvm.x86.avx512.mask.psllv8.hi" => "__builtin_ia32_psllv8hi_mask", - "llvm.x86.avx512.mask.psllv8.si" => "__builtin_ia32_psllv8si_mask", - "llvm.x86.avx512.mask.psra.d" => "__builtin_ia32_psrad512_mask", - "llvm.x86.avx512.mask.psra.d.128" => "__builtin_ia32_psrad128_mask", - "llvm.x86.avx512.mask.psra.d.256" => "__builtin_ia32_psrad256_mask", - "llvm.x86.avx512.mask.psra.di.128" => "__builtin_ia32_psradi128_mask", - "llvm.x86.avx512.mask.psra.di.256" => 
"__builtin_ia32_psradi256_mask", - "llvm.x86.avx512.mask.psra.di.512" => "__builtin_ia32_psradi512_mask", - "llvm.x86.avx512.mask.psra.q" => "__builtin_ia32_psraq512_mask", - "llvm.x86.avx512.mask.psra.q.128" => "__builtin_ia32_psraq128_mask", - "llvm.x86.avx512.mask.psra.q.256" => "__builtin_ia32_psraq256_mask", - "llvm.x86.avx512.mask.psra.qi.128" => "__builtin_ia32_psraqi128_mask", - "llvm.x86.avx512.mask.psra.qi.256" => "__builtin_ia32_psraqi256_mask", - "llvm.x86.avx512.mask.psra.qi.512" => "__builtin_ia32_psraqi512_mask", - "llvm.x86.avx512.mask.psra.w.128" => "__builtin_ia32_psraw128_mask", - "llvm.x86.avx512.mask.psra.w.256" => "__builtin_ia32_psraw256_mask", - "llvm.x86.avx512.mask.psra.w.512" => "__builtin_ia32_psraw512_mask", - "llvm.x86.avx512.mask.psra.wi.128" => "__builtin_ia32_psrawi128_mask", - "llvm.x86.avx512.mask.psra.wi.256" => "__builtin_ia32_psrawi256_mask", - "llvm.x86.avx512.mask.psra.wi.512" => "__builtin_ia32_psrawi512_mask", - "llvm.x86.avx512.mask.psrav.d" => "__builtin_ia32_psrav16si_mask", - "llvm.x86.avx512.mask.psrav.q" => "__builtin_ia32_psrav8di_mask", - "llvm.x86.avx512.mask.psrav.q.128" => "__builtin_ia32_psravq128_mask", - "llvm.x86.avx512.mask.psrav.q.256" => "__builtin_ia32_psravq256_mask", - "llvm.x86.avx512.mask.psrav16.hi" => "__builtin_ia32_psrav16hi_mask", - "llvm.x86.avx512.mask.psrav32.hi" => "__builtin_ia32_psrav32hi_mask", - "llvm.x86.avx512.mask.psrav4.si" => "__builtin_ia32_psrav4si_mask", - "llvm.x86.avx512.mask.psrav8.hi" => "__builtin_ia32_psrav8hi_mask", - "llvm.x86.avx512.mask.psrav8.si" => "__builtin_ia32_psrav8si_mask", - "llvm.x86.avx512.mask.psrl.d" => "__builtin_ia32_psrld512_mask", - "llvm.x86.avx512.mask.psrl.d.128" => "__builtin_ia32_psrld128_mask", - "llvm.x86.avx512.mask.psrl.d.256" => "__builtin_ia32_psrld256_mask", - "llvm.x86.avx512.mask.psrl.di.128" => "__builtin_ia32_psrldi128_mask", - "llvm.x86.avx512.mask.psrl.di.256" => "__builtin_ia32_psrldi256_mask", - "llvm.x86.avx512.mask.psrl.di.512" => "__builtin_ia32_psrldi512_mask", - "llvm.x86.avx512.mask.psrl.q" => "__builtin_ia32_psrlq512_mask", - "llvm.x86.avx512.mask.psrl.q.128" => "__builtin_ia32_psrlq128_mask", - "llvm.x86.avx512.mask.psrl.q.256" => "__builtin_ia32_psrlq256_mask", - "llvm.x86.avx512.mask.psrl.qi.128" => "__builtin_ia32_psrlqi128_mask", - "llvm.x86.avx512.mask.psrl.qi.256" => "__builtin_ia32_psrlqi256_mask", - "llvm.x86.avx512.mask.psrl.qi.512" => "__builtin_ia32_psrlqi512_mask", - "llvm.x86.avx512.mask.psrl.w.128" => "__builtin_ia32_psrlw128_mask", - "llvm.x86.avx512.mask.psrl.w.256" => "__builtin_ia32_psrlw256_mask", - "llvm.x86.avx512.mask.psrl.w.512" => "__builtin_ia32_psrlw512_mask", - "llvm.x86.avx512.mask.psrl.wi.128" => "__builtin_ia32_psrlwi128_mask", - "llvm.x86.avx512.mask.psrl.wi.256" => "__builtin_ia32_psrlwi256_mask", - "llvm.x86.avx512.mask.psrl.wi.512" => "__builtin_ia32_psrlwi512_mask", - "llvm.x86.avx512.mask.psrlv.d" => "__builtin_ia32_psrlv16si_mask", - "llvm.x86.avx512.mask.psrlv.q" => "__builtin_ia32_psrlv8di_mask", - "llvm.x86.avx512.mask.psrlv16.hi" => "__builtin_ia32_psrlv16hi_mask", - "llvm.x86.avx512.mask.psrlv2.di" => "__builtin_ia32_psrlv2di_mask", - "llvm.x86.avx512.mask.psrlv32hi" => "__builtin_ia32_psrlv32hi_mask", - "llvm.x86.avx512.mask.psrlv4.di" => "__builtin_ia32_psrlv4di_mask", - "llvm.x86.avx512.mask.psrlv4.si" => "__builtin_ia32_psrlv4si_mask", - "llvm.x86.avx512.mask.psrlv8.hi" => "__builtin_ia32_psrlv8hi_mask", - "llvm.x86.avx512.mask.psrlv8.si" => "__builtin_ia32_psrlv8si_mask", - 
"llvm.x86.avx512.mask.psub.b.128" => "__builtin_ia32_psubb128_mask", - "llvm.x86.avx512.mask.psub.b.256" => "__builtin_ia32_psubb256_mask", - "llvm.x86.avx512.mask.psub.b.512" => "__builtin_ia32_psubb512_mask", - "llvm.x86.avx512.mask.psub.d.128" => "__builtin_ia32_psubd128_mask", - "llvm.x86.avx512.mask.psub.d.256" => "__builtin_ia32_psubd256_mask", - "llvm.x86.avx512.mask.psub.d.512" => "__builtin_ia32_psubd512_mask", - "llvm.x86.avx512.mask.psub.q.128" => "__builtin_ia32_psubq128_mask", - "llvm.x86.avx512.mask.psub.q.256" => "__builtin_ia32_psubq256_mask", - "llvm.x86.avx512.mask.psub.q.512" => "__builtin_ia32_psubq512_mask", - "llvm.x86.avx512.mask.psub.w.128" => "__builtin_ia32_psubw128_mask", - "llvm.x86.avx512.mask.psub.w.256" => "__builtin_ia32_psubw256_mask", - "llvm.x86.avx512.mask.psub.w.512" => "__builtin_ia32_psubw512_mask", - "llvm.x86.avx512.mask.psubs.b.128" => "__builtin_ia32_psubsb128_mask", - "llvm.x86.avx512.mask.psubs.b.256" => "__builtin_ia32_psubsb256_mask", - "llvm.x86.avx512.mask.psubs.b.512" => "__builtin_ia32_psubsb512_mask", - "llvm.x86.avx512.mask.psubs.w.128" => "__builtin_ia32_psubsw128_mask", - "llvm.x86.avx512.mask.psubs.w.256" => "__builtin_ia32_psubsw256_mask", - "llvm.x86.avx512.mask.psubs.w.512" => "__builtin_ia32_psubsw512_mask", - "llvm.x86.avx512.mask.psubus.b.128" => "__builtin_ia32_psubusb128_mask", - "llvm.x86.avx512.mask.psubus.b.256" => "__builtin_ia32_psubusb256_mask", - "llvm.x86.avx512.mask.psubus.b.512" => "__builtin_ia32_psubusb512_mask", - "llvm.x86.avx512.mask.psubus.w.128" => "__builtin_ia32_psubusw128_mask", - "llvm.x86.avx512.mask.psubus.w.256" => "__builtin_ia32_psubusw256_mask", - "llvm.x86.avx512.mask.psubus.w.512" => "__builtin_ia32_psubusw512_mask", - "llvm.x86.avx512.mask.pternlog.d.128" => "__builtin_ia32_pternlogd128_mask", - "llvm.x86.avx512.mask.pternlog.d.256" => "__builtin_ia32_pternlogd256_mask", - "llvm.x86.avx512.mask.pternlog.d.512" => "__builtin_ia32_pternlogd512_mask", - "llvm.x86.avx512.mask.pternlog.q.128" => "__builtin_ia32_pternlogq128_mask", - "llvm.x86.avx512.mask.pternlog.q.256" => "__builtin_ia32_pternlogq256_mask", - "llvm.x86.avx512.mask.pternlog.q.512" => "__builtin_ia32_pternlogq512_mask", - "llvm.x86.avx512.mask.ptestm.d.512" => "__builtin_ia32_ptestmd512", - "llvm.x86.avx512.mask.ptestm.q.512" => "__builtin_ia32_ptestmq512", - "llvm.x86.avx512.mask.range.pd.128" => "__builtin_ia32_rangepd128_mask", - "llvm.x86.avx512.mask.range.pd.256" => "__builtin_ia32_rangepd256_mask", - "llvm.x86.avx512.mask.range.pd.512" => "__builtin_ia32_rangepd512_mask", - "llvm.x86.avx512.mask.range.ps.128" => "__builtin_ia32_rangeps128_mask", - "llvm.x86.avx512.mask.range.ps.256" => "__builtin_ia32_rangeps256_mask", - "llvm.x86.avx512.mask.range.ps.512" => "__builtin_ia32_rangeps512_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512.mask.range.sd" => "__builtin_ia32_rangesd128_round_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512.mask.range.ss" => "__builtin_ia32_rangess128_round_mask", - "llvm.x86.avx512.mask.reduce.pd.128" => "__builtin_ia32_reducepd128_mask", - "llvm.x86.avx512.mask.reduce.pd.256" => "__builtin_ia32_reducepd256_mask", - "llvm.x86.avx512.mask.reduce.pd.512" => "__builtin_ia32_reducepd512_mask", - "llvm.x86.avx512.mask.reduce.ps.128" => "__builtin_ia32_reduceps128_mask", - "llvm.x86.avx512.mask.reduce.ps.256" => "__builtin_ia32_reduceps256_mask", - "llvm.x86.avx512.mask.reduce.ps.512" => "__builtin_ia32_reduceps512_mask", - "llvm.x86.avx512.mask.reduce.sd" => "__builtin_ia32_reducesd_mask", - 
"llvm.x86.avx512.mask.reduce.ss" => "__builtin_ia32_reducess_mask", - "llvm.x86.avx512.mask.rndscale.pd.128" => "__builtin_ia32_rndscalepd_128_mask", - "llvm.x86.avx512.mask.rndscale.pd.256" => "__builtin_ia32_rndscalepd_256_mask", - "llvm.x86.avx512.mask.rndscale.pd.512" => "__builtin_ia32_rndscalepd_mask", - "llvm.x86.avx512.mask.rndscale.ps.128" => "__builtin_ia32_rndscaleps_128_mask", - "llvm.x86.avx512.mask.rndscale.ps.256" => "__builtin_ia32_rndscaleps_256_mask", - "llvm.x86.avx512.mask.rndscale.ps.512" => "__builtin_ia32_rndscaleps_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512.mask.rndscale.sd" => "__builtin_ia32_rndscalesd_round_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512.mask.rndscale.ss" => "__builtin_ia32_rndscaless_round_mask", - "llvm.x86.avx512.mask.scalef.pd.128" => "__builtin_ia32_scalefpd128_mask", - "llvm.x86.avx512.mask.scalef.pd.256" => "__builtin_ia32_scalefpd256_mask", - "llvm.x86.avx512.mask.scalef.pd.512" => "__builtin_ia32_scalefpd512_mask", - "llvm.x86.avx512.mask.scalef.ps.128" => "__builtin_ia32_scalefps128_mask", - "llvm.x86.avx512.mask.scalef.ps.256" => "__builtin_ia32_scalefps256_mask", - "llvm.x86.avx512.mask.scalef.ps.512" => "__builtin_ia32_scalefps512_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512.mask.scalef.sd" => "__builtin_ia32_scalefsd_round_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512.mask.scalef.ss" => "__builtin_ia32_scalefss_round_mask", - "llvm.x86.avx512.mask.shuf.f32x4" => "__builtin_ia32_shuf_f32x4_mask", - "llvm.x86.avx512.mask.shuf.f32x4.256" => "__builtin_ia32_shuf_f32x4_256_mask", - "llvm.x86.avx512.mask.shuf.f64x2" => "__builtin_ia32_shuf_f64x2_mask", - "llvm.x86.avx512.mask.shuf.f64x2.256" => "__builtin_ia32_shuf_f64x2_256_mask", - "llvm.x86.avx512.mask.shuf.i32x4" => "__builtin_ia32_shuf_i32x4_mask", - "llvm.x86.avx512.mask.shuf.i32x4.256" => "__builtin_ia32_shuf_i32x4_256_mask", - "llvm.x86.avx512.mask.shuf.i64x2" => "__builtin_ia32_shuf_i64x2_mask", - "llvm.x86.avx512.mask.shuf.i64x2.256" => "__builtin_ia32_shuf_i64x2_256_mask", - "llvm.x86.avx512.mask.shuf.pd.128" => "__builtin_ia32_shufpd128_mask", - "llvm.x86.avx512.mask.shuf.pd.256" => "__builtin_ia32_shufpd256_mask", - "llvm.x86.avx512.mask.shuf.pd.512" => "__builtin_ia32_shufpd512_mask", - "llvm.x86.avx512.mask.shuf.ps.128" => "__builtin_ia32_shufps128_mask", - "llvm.x86.avx512.mask.shuf.ps.256" => "__builtin_ia32_shufps256_mask", - "llvm.x86.avx512.mask.shuf.ps.512" => "__builtin_ia32_shufps512_mask", - "llvm.x86.avx512.mask.sqrt.pd.128" => "__builtin_ia32_sqrtpd128_mask", - "llvm.x86.avx512.mask.sqrt.pd.256" => "__builtin_ia32_sqrtpd256_mask", - "llvm.x86.avx512.mask.sqrt.pd.512" => "__builtin_ia32_sqrtpd512_mask", - "llvm.x86.avx512.mask.sqrt.ps.128" => "__builtin_ia32_sqrtps128_mask", - "llvm.x86.avx512.mask.sqrt.ps.256" => "__builtin_ia32_sqrtps256_mask", - "llvm.x86.avx512.mask.sqrt.ps.512" => "__builtin_ia32_sqrtps512_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512.mask.sqrt.sd" => "__builtin_ia32_sqrtsd_round_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512.mask.sqrt.ss" => "__builtin_ia32_sqrtss_round_mask", - "llvm.x86.avx512.mask.store.ss" => "__builtin_ia32_storess_mask", - "llvm.x86.avx512.mask.storeu.d.512" => "__builtin_ia32_storedqusi512_mask", - "llvm.x86.avx512.mask.storeu.pd.512" => "__builtin_ia32_storeupd512_mask", - "llvm.x86.avx512.mask.storeu.ps.512" => "__builtin_ia32_storeups512_mask", - "llvm.x86.avx512.mask.storeu.q.512" => "__builtin_ia32_storedqudi512_mask", - "llvm.x86.avx512.mask.sub.pd.128" => 
"__builtin_ia32_subpd128_mask", - "llvm.x86.avx512.mask.sub.pd.256" => "__builtin_ia32_subpd256_mask", - "llvm.x86.avx512.mask.sub.pd.512" => "__builtin_ia32_subpd512_mask", - "llvm.x86.avx512.mask.sub.ps.128" => "__builtin_ia32_subps128_mask", - "llvm.x86.avx512.mask.sub.ps.256" => "__builtin_ia32_subps256_mask", - "llvm.x86.avx512.mask.sub.ps.512" => "__builtin_ia32_subps512_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512.mask.sub.sd.round" => "__builtin_ia32_subsd_round_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512.mask.sub.ss.round" => "__builtin_ia32_subss_round_mask", - "llvm.x86.avx512.mask.valign.d.128" => "__builtin_ia32_alignd128_mask", - "llvm.x86.avx512.mask.valign.d.256" => "__builtin_ia32_alignd256_mask", - "llvm.x86.avx512.mask.valign.d.512" => "__builtin_ia32_alignd512_mask", - "llvm.x86.avx512.mask.valign.q.128" => "__builtin_ia32_alignq128_mask", - "llvm.x86.avx512.mask.valign.q.256" => "__builtin_ia32_alignq256_mask", - "llvm.x86.avx512.mask.valign.q.512" => "__builtin_ia32_alignq512_mask", - "llvm.x86.avx512.mask.vcvtph2ps.128" => "__builtin_ia32_vcvtph2ps_mask", - "llvm.x86.avx512.mask.vcvtph2ps.256" => "__builtin_ia32_vcvtph2ps256_mask", - "llvm.x86.avx512.mask.vcvtph2ps.512" => "__builtin_ia32_vcvtph2ps512_mask", - "llvm.x86.avx512.mask.vcvtps2ph.128" => "__builtin_ia32_vcvtps2ph_mask", - "llvm.x86.avx512.mask.vcvtps2ph.256" => "__builtin_ia32_vcvtps2ph256_mask", - "llvm.x86.avx512.mask.vcvtps2ph.512" => "__builtin_ia32_vcvtps2ph512_mask", - "llvm.x86.avx512.mask.vextractf32x4.256" => "__builtin_ia32_extractf32x4_256_mask", - "llvm.x86.avx512.mask.vextractf32x4.512" => "__builtin_ia32_extractf32x4_mask", - "llvm.x86.avx512.mask.vextractf32x8.512" => "__builtin_ia32_extractf32x8_mask", - "llvm.x86.avx512.mask.vextractf64x2.256" => "__builtin_ia32_extractf64x2_256_mask", - "llvm.x86.avx512.mask.vextractf64x2.512" => "__builtin_ia32_extractf64x2_512_mask", - "llvm.x86.avx512.mask.vextractf64x4.512" => "__builtin_ia32_extractf64x4_mask", - "llvm.x86.avx512.mask.vextracti32x4.256" => "__builtin_ia32_extracti32x4_256_mask", - "llvm.x86.avx512.mask.vextracti32x4.512" => "__builtin_ia32_extracti32x4_mask", - "llvm.x86.avx512.mask.vextracti32x8.512" => "__builtin_ia32_extracti32x8_mask", - "llvm.x86.avx512.mask.vextracti64x2.256" => "__builtin_ia32_extracti64x2_256_mask", - "llvm.x86.avx512.mask.vextracti64x2.512" => "__builtin_ia32_extracti64x2_512_mask", - "llvm.x86.avx512.mask.vextracti64x4.512" => "__builtin_ia32_extracti64x4_mask", - "llvm.x86.avx512.mask.vfmadd.pd.128" => "__builtin_ia32_vfmaddpd128_mask", - "llvm.x86.avx512.mask.vfmadd.pd.256" => "__builtin_ia32_vfmaddpd256_mask", - "llvm.x86.avx512.mask.vfmadd.pd.512" => "__builtin_ia32_vfmaddpd512_mask", - "llvm.x86.avx512.mask.vfmadd.ps.128" => "__builtin_ia32_vfmaddps128_mask", - "llvm.x86.avx512.mask.vfmadd.ps.256" => "__builtin_ia32_vfmaddps256_mask", - "llvm.x86.avx512.mask.vfmadd.ps.512" => "__builtin_ia32_vfmaddps512_mask", - "llvm.x86.avx512.mask.vfmadd.sd" => "__builtin_ia32_vfmaddsd3_mask", - "llvm.x86.avx512.mask.vfmadd.ss" => "__builtin_ia32_vfmaddss3_mask", - "llvm.x86.avx512.mask.vfmaddsub.pd.128" => "__builtin_ia32_vfmaddsubpd128_mask", - "llvm.x86.avx512.mask.vfmaddsub.pd.256" => "__builtin_ia32_vfmaddsubpd256_mask", - "llvm.x86.avx512.mask.vfmaddsub.pd.512" => "__builtin_ia32_vfmaddsubpd512_mask", - "llvm.x86.avx512.mask.vfmaddsub.ps.128" => "__builtin_ia32_vfmaddsubps128_mask", - "llvm.x86.avx512.mask.vfmaddsub.ps.256" => "__builtin_ia32_vfmaddsubps256_mask", - 
"llvm.x86.avx512.mask.vfmaddsub.ps.512" => "__builtin_ia32_vfmaddsubps512_mask", - "llvm.x86.avx512.mask.vfnmadd.pd.128" => "__builtin_ia32_vfnmaddpd128_mask", - "llvm.x86.avx512.mask.vfnmadd.pd.256" => "__builtin_ia32_vfnmaddpd256_mask", - "llvm.x86.avx512.mask.vfnmadd.pd.512" => "__builtin_ia32_vfnmaddpd512_mask", - "llvm.x86.avx512.mask.vfnmadd.ps.128" => "__builtin_ia32_vfnmaddps128_mask", - "llvm.x86.avx512.mask.vfnmadd.ps.256" => "__builtin_ia32_vfnmaddps256_mask", - "llvm.x86.avx512.mask.vfnmadd.ps.512" => "__builtin_ia32_vfnmaddps512_mask", - "llvm.x86.avx512.mask.vfnmsub.pd.128" => "__builtin_ia32_vfnmsubpd128_mask", - "llvm.x86.avx512.mask.vfnmsub.pd.256" => "__builtin_ia32_vfnmsubpd256_mask", - "llvm.x86.avx512.mask.vfnmsub.pd.512" => "__builtin_ia32_vfnmsubpd512_mask", - "llvm.x86.avx512.mask.vfnmsub.ps.128" => "__builtin_ia32_vfnmsubps128_mask", - "llvm.x86.avx512.mask.vfnmsub.ps.256" => "__builtin_ia32_vfnmsubps256_mask", - "llvm.x86.avx512.mask.vfnmsub.ps.512" => "__builtin_ia32_vfnmsubps512_mask", - "llvm.x86.avx512.mask.vpermi2var.d.128" => "__builtin_ia32_vpermi2vard128_mask", - "llvm.x86.avx512.mask.vpermi2var.d.256" => "__builtin_ia32_vpermi2vard256_mask", - "llvm.x86.avx512.mask.vpermi2var.d.512" => "__builtin_ia32_vpermi2vard512_mask", - "llvm.x86.avx512.mask.vpermi2var.hi.128" => "__builtin_ia32_vpermi2varhi128_mask", - "llvm.x86.avx512.mask.vpermi2var.hi.256" => "__builtin_ia32_vpermi2varhi256_mask", - "llvm.x86.avx512.mask.vpermi2var.hi.512" => "__builtin_ia32_vpermi2varhi512_mask", - "llvm.x86.avx512.mask.vpermi2var.pd.128" => "__builtin_ia32_vpermi2varpd128_mask", - "llvm.x86.avx512.mask.vpermi2var.pd.256" => "__builtin_ia32_vpermi2varpd256_mask", - "llvm.x86.avx512.mask.vpermi2var.pd.512" => "__builtin_ia32_vpermi2varpd512_mask", - "llvm.x86.avx512.mask.vpermi2var.ps.128" => "__builtin_ia32_vpermi2varps128_mask", - "llvm.x86.avx512.mask.vpermi2var.ps.256" => "__builtin_ia32_vpermi2varps256_mask", - "llvm.x86.avx512.mask.vpermi2var.ps.512" => "__builtin_ia32_vpermi2varps512_mask", - "llvm.x86.avx512.mask.vpermi2var.q.128" => "__builtin_ia32_vpermi2varq128_mask", - "llvm.x86.avx512.mask.vpermi2var.q.256" => "__builtin_ia32_vpermi2varq256_mask", - "llvm.x86.avx512.mask.vpermi2var.q.512" => "__builtin_ia32_vpermi2varq512_mask", - "llvm.x86.avx512.mask.vpermi2var.qi.128" => "__builtin_ia32_vpermi2varqi128_mask", - "llvm.x86.avx512.mask.vpermi2var.qi.256" => "__builtin_ia32_vpermi2varqi256_mask", - "llvm.x86.avx512.mask.vpermi2var.qi.512" => "__builtin_ia32_vpermi2varqi512_mask", - "llvm.x86.avx512.mask.vpermilvar.pd.128" => "__builtin_ia32_vpermilvarpd_mask", - "llvm.x86.avx512.mask.vpermilvar.pd.256" => "__builtin_ia32_vpermilvarpd256_mask", - "llvm.x86.avx512.mask.vpermilvar.pd.512" => "__builtin_ia32_vpermilvarpd512_mask", - "llvm.x86.avx512.mask.vpermilvar.ps.128" => "__builtin_ia32_vpermilvarps_mask", - "llvm.x86.avx512.mask.vpermilvar.ps.256" => "__builtin_ia32_vpermilvarps256_mask", - "llvm.x86.avx512.mask.vpermilvar.ps.512" => "__builtin_ia32_vpermilvarps512_mask", - "llvm.x86.avx512.mask.vpermt.d.512" => "__builtin_ia32_vpermt2vard512_mask", - "llvm.x86.avx512.mask.vpermt.pd.512" => "__builtin_ia32_vpermt2varpd512_mask", - "llvm.x86.avx512.mask.vpermt.ps.512" => "__builtin_ia32_vpermt2varps512_mask", - "llvm.x86.avx512.mask.vpermt.q.512" => "__builtin_ia32_vpermt2varq512_mask", - "llvm.x86.avx512.mask.vpermt2var.d.128" => "__builtin_ia32_vpermt2vard128_mask", - "llvm.x86.avx512.mask.vpermt2var.d.256" => "__builtin_ia32_vpermt2vard256_mask", - 
"llvm.x86.avx512.mask.vpermt2var.d.512" => "__builtin_ia32_vpermt2vard512_mask", - "llvm.x86.avx512.mask.vpermt2var.hi.128" => "__builtin_ia32_vpermt2varhi128_mask", - "llvm.x86.avx512.mask.vpermt2var.hi.256" => "__builtin_ia32_vpermt2varhi256_mask", - "llvm.x86.avx512.mask.vpermt2var.hi.512" => "__builtin_ia32_vpermt2varhi512_mask", - "llvm.x86.avx512.mask.vpermt2var.pd.128" => "__builtin_ia32_vpermt2varpd128_mask", - "llvm.x86.avx512.mask.vpermt2var.pd.256" => "__builtin_ia32_vpermt2varpd256_mask", - "llvm.x86.avx512.mask.vpermt2var.pd.512" => "__builtin_ia32_vpermt2varpd512_mask", - "llvm.x86.avx512.mask.vpermt2var.ps.128" => "__builtin_ia32_vpermt2varps128_mask", - "llvm.x86.avx512.mask.vpermt2var.ps.256" => "__builtin_ia32_vpermt2varps256_mask", - "llvm.x86.avx512.mask.vpermt2var.ps.512" => "__builtin_ia32_vpermt2varps512_mask", - "llvm.x86.avx512.mask.vpermt2var.q.128" => "__builtin_ia32_vpermt2varq128_mask", - "llvm.x86.avx512.mask.vpermt2var.q.256" => "__builtin_ia32_vpermt2varq256_mask", - "llvm.x86.avx512.mask.vpermt2var.q.512" => "__builtin_ia32_vpermt2varq512_mask", - "llvm.x86.avx512.mask.vpermt2var.qi.128" => "__builtin_ia32_vpermt2varqi128_mask", - "llvm.x86.avx512.mask.vpermt2var.qi.256" => "__builtin_ia32_vpermt2varqi256_mask", - "llvm.x86.avx512.mask.vpermt2var.qi.512" => "__builtin_ia32_vpermt2varqi512_mask", - "llvm.x86.avx512.mask.vpmadd52h.uq.128" => "__builtin_ia32_vpmadd52huq128_mask", - "llvm.x86.avx512.mask.vpmadd52h.uq.256" => "__builtin_ia32_vpmadd52huq256_mask", - "llvm.x86.avx512.mask.vpmadd52h.uq.512" => "__builtin_ia32_vpmadd52huq512_mask", - "llvm.x86.avx512.mask.vpmadd52l.uq.128" => "__builtin_ia32_vpmadd52luq128_mask", - "llvm.x86.avx512.mask.vpmadd52l.uq.256" => "__builtin_ia32_vpmadd52luq256_mask", - "llvm.x86.avx512.mask.vpmadd52l.uq.512" => "__builtin_ia32_vpmadd52luq512_mask", - "llvm.x86.avx512.mask.xor.pd.128" => "__builtin_ia32_xorpd128_mask", - "llvm.x86.avx512.mask.xor.pd.256" => "__builtin_ia32_xorpd256_mask", - "llvm.x86.avx512.mask.xor.pd.512" => "__builtin_ia32_xorpd512_mask", - "llvm.x86.avx512.mask.xor.ps.128" => "__builtin_ia32_xorps128_mask", - "llvm.x86.avx512.mask.xor.ps.256" => "__builtin_ia32_xorps256_mask", - "llvm.x86.avx512.mask.xor.ps.512" => "__builtin_ia32_xorps512_mask", - "llvm.x86.avx512.mask3.vfmadd.pd.128" => "__builtin_ia32_vfmaddpd128_mask3", - "llvm.x86.avx512.mask3.vfmadd.pd.256" => "__builtin_ia32_vfmaddpd256_mask3", - "llvm.x86.avx512.mask3.vfmadd.pd.512" => "__builtin_ia32_vfmaddpd512_mask3", - "llvm.x86.avx512.mask3.vfmadd.ps.128" => "__builtin_ia32_vfmaddps128_mask3", - "llvm.x86.avx512.mask3.vfmadd.ps.256" => "__builtin_ia32_vfmaddps256_mask3", - "llvm.x86.avx512.mask3.vfmadd.ps.512" => "__builtin_ia32_vfmaddps512_mask3", - "llvm.x86.avx512.mask3.vfmadd.sd" => "__builtin_ia32_vfmaddsd3_mask3", - "llvm.x86.avx512.mask3.vfmadd.ss" => "__builtin_ia32_vfmaddss3_mask3", - "llvm.x86.avx512.mask3.vfmaddsub.pd.128" => "__builtin_ia32_vfmaddsubpd128_mask3", - "llvm.x86.avx512.mask3.vfmaddsub.pd.256" => "__builtin_ia32_vfmaddsubpd256_mask3", - "llvm.x86.avx512.mask3.vfmaddsub.pd.512" => "__builtin_ia32_vfmaddsubpd512_mask3", - "llvm.x86.avx512.mask3.vfmaddsub.ps.128" => "__builtin_ia32_vfmaddsubps128_mask3", - "llvm.x86.avx512.mask3.vfmaddsub.ps.256" => "__builtin_ia32_vfmaddsubps256_mask3", - "llvm.x86.avx512.mask3.vfmaddsub.ps.512" => "__builtin_ia32_vfmaddsubps512_mask3", - "llvm.x86.avx512.mask3.vfmsub.pd.128" => "__builtin_ia32_vfmsubpd128_mask3", - "llvm.x86.avx512.mask3.vfmsub.pd.256" => 
"__builtin_ia32_vfmsubpd256_mask3", - "llvm.x86.avx512.mask3.vfmsub.pd.512" => "__builtin_ia32_vfmsubpd512_mask3", - "llvm.x86.avx512.mask3.vfmsub.ps.128" => "__builtin_ia32_vfmsubps128_mask3", - "llvm.x86.avx512.mask3.vfmsub.ps.256" => "__builtin_ia32_vfmsubps256_mask3", - "llvm.x86.avx512.mask3.vfmsub.ps.512" => "__builtin_ia32_vfmsubps512_mask3", - "llvm.x86.avx512.mask3.vfmsubadd.pd.128" => "__builtin_ia32_vfmsubaddpd128_mask3", - "llvm.x86.avx512.mask3.vfmsubadd.pd.256" => "__builtin_ia32_vfmsubaddpd256_mask3", - "llvm.x86.avx512.mask3.vfmsubadd.pd.512" => "__builtin_ia32_vfmsubaddpd512_mask3", - "llvm.x86.avx512.mask3.vfmsubadd.ps.128" => "__builtin_ia32_vfmsubaddps128_mask3", - "llvm.x86.avx512.mask3.vfmsubadd.ps.256" => "__builtin_ia32_vfmsubaddps256_mask3", - "llvm.x86.avx512.mask3.vfmsubadd.ps.512" => "__builtin_ia32_vfmsubaddps512_mask3", - "llvm.x86.avx512.mask3.vfnmsub.pd.128" => "__builtin_ia32_vfnmsubpd128_mask3", - "llvm.x86.avx512.mask3.vfnmsub.pd.256" => "__builtin_ia32_vfnmsubpd256_mask3", - "llvm.x86.avx512.mask3.vfnmsub.pd.512" => "__builtin_ia32_vfnmsubpd512_mask3", - "llvm.x86.avx512.mask3.vfnmsub.ps.128" => "__builtin_ia32_vfnmsubps128_mask3", - "llvm.x86.avx512.mask3.vfnmsub.ps.256" => "__builtin_ia32_vfnmsubps256_mask3", - "llvm.x86.avx512.mask3.vfnmsub.ps.512" => "__builtin_ia32_vfnmsubps512_mask3", - "llvm.x86.avx512.maskz.fixupimm.pd.128" => "__builtin_ia32_fixupimmpd128_maskz", - "llvm.x86.avx512.maskz.fixupimm.pd.256" => "__builtin_ia32_fixupimmpd256_maskz", - "llvm.x86.avx512.maskz.fixupimm.pd.512" => "__builtin_ia32_fixupimmpd512_maskz", - "llvm.x86.avx512.maskz.fixupimm.ps.128" => "__builtin_ia32_fixupimmps128_maskz", - "llvm.x86.avx512.maskz.fixupimm.ps.256" => "__builtin_ia32_fixupimmps256_maskz", - "llvm.x86.avx512.maskz.fixupimm.ps.512" => "__builtin_ia32_fixupimmps512_maskz", - "llvm.x86.avx512.maskz.fixupimm.sd" => "__builtin_ia32_fixupimmsd_maskz", - "llvm.x86.avx512.maskz.fixupimm.ss" => "__builtin_ia32_fixupimmss_maskz", - "llvm.x86.avx512.maskz.pternlog.d.128" => "__builtin_ia32_pternlogd128_maskz", - "llvm.x86.avx512.maskz.pternlog.d.256" => "__builtin_ia32_pternlogd256_maskz", - "llvm.x86.avx512.maskz.pternlog.d.512" => "__builtin_ia32_pternlogd512_maskz", - "llvm.x86.avx512.maskz.pternlog.q.128" => "__builtin_ia32_pternlogq128_maskz", - "llvm.x86.avx512.maskz.pternlog.q.256" => "__builtin_ia32_pternlogq256_maskz", - "llvm.x86.avx512.maskz.pternlog.q.512" => "__builtin_ia32_pternlogq512_maskz", - "llvm.x86.avx512.maskz.vfmadd.pd.128" => "__builtin_ia32_vfmaddpd128_maskz", - "llvm.x86.avx512.maskz.vfmadd.pd.256" => "__builtin_ia32_vfmaddpd256_maskz", - "llvm.x86.avx512.maskz.vfmadd.pd.512" => "__builtin_ia32_vfmaddpd512_maskz", - "llvm.x86.avx512.maskz.vfmadd.ps.128" => "__builtin_ia32_vfmaddps128_maskz", - "llvm.x86.avx512.maskz.vfmadd.ps.256" => "__builtin_ia32_vfmaddps256_maskz", - "llvm.x86.avx512.maskz.vfmadd.ps.512" => "__builtin_ia32_vfmaddps512_maskz", - "llvm.x86.avx512.maskz.vfmadd.sd" => "__builtin_ia32_vfmaddsd3_maskz", - "llvm.x86.avx512.maskz.vfmadd.ss" => "__builtin_ia32_vfmaddss3_maskz", - "llvm.x86.avx512.maskz.vfmaddsub.pd.128" => "__builtin_ia32_vfmaddsubpd128_maskz", - "llvm.x86.avx512.maskz.vfmaddsub.pd.256" => "__builtin_ia32_vfmaddsubpd256_maskz", - "llvm.x86.avx512.maskz.vfmaddsub.pd.512" => "__builtin_ia32_vfmaddsubpd512_maskz", - "llvm.x86.avx512.maskz.vfmaddsub.ps.128" => "__builtin_ia32_vfmaddsubps128_maskz", - "llvm.x86.avx512.maskz.vfmaddsub.ps.256" => "__builtin_ia32_vfmaddsubps256_maskz", - 
"llvm.x86.avx512.maskz.vfmaddsub.ps.512" => "__builtin_ia32_vfmaddsubps512_maskz", - "llvm.x86.avx512.maskz.vpermt2var.d.128" => "__builtin_ia32_vpermt2vard128_maskz", - "llvm.x86.avx512.maskz.vpermt2var.d.256" => "__builtin_ia32_vpermt2vard256_maskz", - "llvm.x86.avx512.maskz.vpermt2var.d.512" => "__builtin_ia32_vpermt2vard512_maskz", - "llvm.x86.avx512.maskz.vpermt2var.hi.128" => "__builtin_ia32_vpermt2varhi128_maskz", - "llvm.x86.avx512.maskz.vpermt2var.hi.256" => "__builtin_ia32_vpermt2varhi256_maskz", - "llvm.x86.avx512.maskz.vpermt2var.hi.512" => "__builtin_ia32_vpermt2varhi512_maskz", - "llvm.x86.avx512.maskz.vpermt2var.pd.128" => "__builtin_ia32_vpermt2varpd128_maskz", - "llvm.x86.avx512.maskz.vpermt2var.pd.256" => "__builtin_ia32_vpermt2varpd256_maskz", - "llvm.x86.avx512.maskz.vpermt2var.pd.512" => "__builtin_ia32_vpermt2varpd512_maskz", - "llvm.x86.avx512.maskz.vpermt2var.ps.128" => "__builtin_ia32_vpermt2varps128_maskz", - "llvm.x86.avx512.maskz.vpermt2var.ps.256" => "__builtin_ia32_vpermt2varps256_maskz", - "llvm.x86.avx512.maskz.vpermt2var.ps.512" => "__builtin_ia32_vpermt2varps512_maskz", - "llvm.x86.avx512.maskz.vpermt2var.q.128" => "__builtin_ia32_vpermt2varq128_maskz", - "llvm.x86.avx512.maskz.vpermt2var.q.256" => "__builtin_ia32_vpermt2varq256_maskz", - "llvm.x86.avx512.maskz.vpermt2var.q.512" => "__builtin_ia32_vpermt2varq512_maskz", - "llvm.x86.avx512.maskz.vpermt2var.qi.128" => "__builtin_ia32_vpermt2varqi128_maskz", - "llvm.x86.avx512.maskz.vpermt2var.qi.256" => "__builtin_ia32_vpermt2varqi256_maskz", - "llvm.x86.avx512.maskz.vpermt2var.qi.512" => "__builtin_ia32_vpermt2varqi512_maskz", - "llvm.x86.avx512.maskz.vpmadd52h.uq.128" => "__builtin_ia32_vpmadd52huq128_maskz", - "llvm.x86.avx512.maskz.vpmadd52h.uq.256" => "__builtin_ia32_vpmadd52huq256_maskz", - "llvm.x86.avx512.maskz.vpmadd52h.uq.512" => "__builtin_ia32_vpmadd52huq512_maskz", - "llvm.x86.avx512.maskz.vpmadd52l.uq.128" => "__builtin_ia32_vpmadd52luq128_maskz", - "llvm.x86.avx512.maskz.vpmadd52l.uq.256" => "__builtin_ia32_vpmadd52luq256_maskz", - "llvm.x86.avx512.maskz.vpmadd52l.uq.512" => "__builtin_ia32_vpmadd52luq512_maskz", - "llvm.x86.avx512.max.pd.512" => "__builtin_ia32_maxpd512", - "llvm.x86.avx512.max.ps.512" => "__builtin_ia32_maxps512", - "llvm.x86.avx512.min.pd.512" => "__builtin_ia32_minpd512", - "llvm.x86.avx512.min.ps.512" => "__builtin_ia32_minps512", - "llvm.x86.avx512.movntdqa" => "__builtin_ia32_movntdqa512", - "llvm.x86.avx512.mul.pd.512" => "__builtin_ia32_mulpd512", - "llvm.x86.avx512.mul.ps.512" => "__builtin_ia32_mulps512", - "llvm.x86.avx512.packssdw.512" => "__builtin_ia32_packssdw512", - "llvm.x86.avx512.packsswb.512" => "__builtin_ia32_packsswb512", - "llvm.x86.avx512.packusdw.512" => "__builtin_ia32_packusdw512", - "llvm.x86.avx512.packuswb.512" => "__builtin_ia32_packuswb512", - "llvm.x86.avx512.pavg.b.512" => "__builtin_ia32_pavgb512", - "llvm.x86.avx512.pavg.w.512" => "__builtin_ia32_pavgw512", - "llvm.x86.avx512.pbroadcastd.512" => "__builtin_ia32_pbroadcastd512", - "llvm.x86.avx512.pbroadcastq.512" => "__builtin_ia32_pbroadcastq512", - "llvm.x86.avx512.permvar.df.256" => "__builtin_ia32_permvardf256", - "llvm.x86.avx512.permvar.df.512" => "__builtin_ia32_permvardf512", - "llvm.x86.avx512.permvar.di.256" => "__builtin_ia32_permvardi256", - "llvm.x86.avx512.permvar.di.512" => "__builtin_ia32_permvardi512", - "llvm.x86.avx512.permvar.hi.128" => "__builtin_ia32_permvarhi128", - "llvm.x86.avx512.permvar.hi.256" => "__builtin_ia32_permvarhi256", - "llvm.x86.avx512.permvar.hi.512" 
=> "__builtin_ia32_permvarhi512", - "llvm.x86.avx512.permvar.qi.128" => "__builtin_ia32_permvarqi128", - "llvm.x86.avx512.permvar.qi.256" => "__builtin_ia32_permvarqi256", - "llvm.x86.avx512.permvar.qi.512" => "__builtin_ia32_permvarqi512", - "llvm.x86.avx512.permvar.sf.512" => "__builtin_ia32_permvarsf512", - "llvm.x86.avx512.permvar.si.512" => "__builtin_ia32_permvarsi512", - "llvm.x86.avx512.pmaddubs.w.512" => "__builtin_ia32_pmaddubsw512", - "llvm.x86.avx512.pmaddw.d.512" => "__builtin_ia32_pmaddwd512", - "llvm.x86.avx512.pmovzxbd" => "__builtin_ia32_pmovzxbd512", - "llvm.x86.avx512.pmovzxbq" => "__builtin_ia32_pmovzxbq512", - "llvm.x86.avx512.pmovzxdq" => "__builtin_ia32_pmovzxdq512", - "llvm.x86.avx512.pmovzxwd" => "__builtin_ia32_pmovzxwd512", - "llvm.x86.avx512.pmovzxwq" => "__builtin_ia32_pmovzxwq512", - "llvm.x86.avx512.pmul.hr.sw.512" => "__builtin_ia32_pmulhrsw512", - "llvm.x86.avx512.pmulh.w.512" => "__builtin_ia32_pmulhw512", - "llvm.x86.avx512.pmulhu.w.512" => "__builtin_ia32_pmulhuw512", - "llvm.x86.avx512.pmultishift.qb.128" => "__builtin_ia32_vpmultishiftqb128", - "llvm.x86.avx512.pmultishift.qb.256" => "__builtin_ia32_vpmultishiftqb256", - "llvm.x86.avx512.pmultishift.qb.512" => "__builtin_ia32_vpmultishiftqb512", - "llvm.x86.avx512.psad.bw.512" => "__builtin_ia32_psadbw512", - "llvm.x86.avx512.pshuf.b.512" => "__builtin_ia32_pshufb512", - "llvm.x86.avx512.psll.d.512" => "__builtin_ia32_pslld512", - "llvm.x86.avx512.psll.dq" => "__builtin_ia32_pslldqi512", - "llvm.x86.avx512.psll.dq.bs" => "__builtin_ia32_pslldqi512_byteshift", - "llvm.x86.avx512.psll.q.512" => "__builtin_ia32_psllq512", - "llvm.x86.avx512.psll.w.512" => "__builtin_ia32_psllw512", - "llvm.x86.avx512.pslli.d.512" => "__builtin_ia32_pslldi512", - "llvm.x86.avx512.pslli.q.512" => "__builtin_ia32_psllqi512", - "llvm.x86.avx512.pslli.w.512" => "__builtin_ia32_psllwi512", - "llvm.x86.avx512.psllv.d.512" => "__builtin_ia32_psllv16si", - "llvm.x86.avx512.psllv.q.512" => "__builtin_ia32_psllv8di", - "llvm.x86.avx512.psllv.w.128" => "__builtin_ia32_psllv8hi", - "llvm.x86.avx512.psllv.w.256" => "__builtin_ia32_psllv16hi", - "llvm.x86.avx512.psllv.w.512" => "__builtin_ia32_psllv32hi", - "llvm.x86.avx512.psra.d.512" => "__builtin_ia32_psrad512", - "llvm.x86.avx512.psra.q.128" => "__builtin_ia32_psraq128", - "llvm.x86.avx512.psra.q.256" => "__builtin_ia32_psraq256", - "llvm.x86.avx512.psra.q.512" => "__builtin_ia32_psraq512", - "llvm.x86.avx512.psra.w.512" => "__builtin_ia32_psraw512", - "llvm.x86.avx512.psrai.d.512" => "__builtin_ia32_psradi512", - "llvm.x86.avx512.psrai.q.128" => "__builtin_ia32_psraqi128", - "llvm.x86.avx512.psrai.q.256" => "__builtin_ia32_psraqi256", - "llvm.x86.avx512.psrai.q.512" => "__builtin_ia32_psraqi512", - "llvm.x86.avx512.psrai.w.512" => "__builtin_ia32_psrawi512", - "llvm.x86.avx512.psrav.d.512" => "__builtin_ia32_psrav16si", - "llvm.x86.avx512.psrav.q.128" => "__builtin_ia32_psravq128", - "llvm.x86.avx512.psrav.q.256" => "__builtin_ia32_psravq256", - "llvm.x86.avx512.psrav.q.512" => "__builtin_ia32_psrav8di", - "llvm.x86.avx512.psrav.w.128" => "__builtin_ia32_psrav8hi", - "llvm.x86.avx512.psrav.w.256" => "__builtin_ia32_psrav16hi", - "llvm.x86.avx512.psrav.w.512" => "__builtin_ia32_psrav32hi", - "llvm.x86.avx512.psrl.d.512" => "__builtin_ia32_psrld512", - "llvm.x86.avx512.psrl.dq" => "__builtin_ia32_psrldqi512", - "llvm.x86.avx512.psrl.dq.bs" => "__builtin_ia32_psrldqi512_byteshift", - "llvm.x86.avx512.psrl.q.512" => "__builtin_ia32_psrlq512", - "llvm.x86.avx512.psrl.w.512" => 
"__builtin_ia32_psrlw512", - "llvm.x86.avx512.psrli.d.512" => "__builtin_ia32_psrldi512", - "llvm.x86.avx512.psrli.q.512" => "__builtin_ia32_psrlqi512", - "llvm.x86.avx512.psrli.w.512" => "__builtin_ia32_psrlwi512", - "llvm.x86.avx512.psrlv.d.512" => "__builtin_ia32_psrlv16si", - "llvm.x86.avx512.psrlv.q.512" => "__builtin_ia32_psrlv8di", - "llvm.x86.avx512.psrlv.w.128" => "__builtin_ia32_psrlv8hi", - "llvm.x86.avx512.psrlv.w.256" => "__builtin_ia32_psrlv16hi", - "llvm.x86.avx512.psrlv.w.512" => "__builtin_ia32_psrlv32hi", - "llvm.x86.avx512.pternlog.d.128" => "__builtin_ia32_pternlogd128", - "llvm.x86.avx512.pternlog.d.256" => "__builtin_ia32_pternlogd256", - "llvm.x86.avx512.pternlog.d.512" => "__builtin_ia32_pternlogd512", - "llvm.x86.avx512.pternlog.q.128" => "__builtin_ia32_pternlogq128", - "llvm.x86.avx512.pternlog.q.256" => "__builtin_ia32_pternlogq256", - "llvm.x86.avx512.pternlog.q.512" => "__builtin_ia32_pternlogq512", - "llvm.x86.avx512.ptestm.b.128" => "__builtin_ia32_ptestmb128", - "llvm.x86.avx512.ptestm.b.256" => "__builtin_ia32_ptestmb256", - "llvm.x86.avx512.ptestm.b.512" => "__builtin_ia32_ptestmb512", - "llvm.x86.avx512.ptestm.d.128" => "__builtin_ia32_ptestmd128", - "llvm.x86.avx512.ptestm.d.256" => "__builtin_ia32_ptestmd256", - "llvm.x86.avx512.ptestm.d.512" => "__builtin_ia32_ptestmd512", - "llvm.x86.avx512.ptestm.q.128" => "__builtin_ia32_ptestmq128", - "llvm.x86.avx512.ptestm.q.256" => "__builtin_ia32_ptestmq256", - "llvm.x86.avx512.ptestm.q.512" => "__builtin_ia32_ptestmq512", - "llvm.x86.avx512.ptestm.w.128" => "__builtin_ia32_ptestmw128", - "llvm.x86.avx512.ptestm.w.256" => "__builtin_ia32_ptestmw256", - "llvm.x86.avx512.ptestm.w.512" => "__builtin_ia32_ptestmw512", - "llvm.x86.avx512.ptestnm.b.128" => "__builtin_ia32_ptestnmb128", - "llvm.x86.avx512.ptestnm.b.256" => "__builtin_ia32_ptestnmb256", - "llvm.x86.avx512.ptestnm.b.512" => "__builtin_ia32_ptestnmb512", - "llvm.x86.avx512.ptestnm.d.128" => "__builtin_ia32_ptestnmd128", - "llvm.x86.avx512.ptestnm.d.256" => "__builtin_ia32_ptestnmd256", - "llvm.x86.avx512.ptestnm.d.512" => "__builtin_ia32_ptestnmd512", - "llvm.x86.avx512.ptestnm.q.128" => "__builtin_ia32_ptestnmq128", - "llvm.x86.avx512.ptestnm.q.256" => "__builtin_ia32_ptestnmq256", - "llvm.x86.avx512.ptestnm.q.512" => "__builtin_ia32_ptestnmq512", - "llvm.x86.avx512.ptestnm.w.128" => "__builtin_ia32_ptestnmw128", - "llvm.x86.avx512.ptestnm.w.256" => "__builtin_ia32_ptestnmw256", - "llvm.x86.avx512.ptestnm.w.512" => "__builtin_ia32_ptestnmw512", - "llvm.x86.avx512.rcp14.pd.128" => "__builtin_ia32_rcp14pd128_mask", - "llvm.x86.avx512.rcp14.pd.256" => "__builtin_ia32_rcp14pd256_mask", - "llvm.x86.avx512.rcp14.pd.512" => "__builtin_ia32_rcp14pd512_mask", - "llvm.x86.avx512.rcp14.ps.128" => "__builtin_ia32_rcp14ps128_mask", - "llvm.x86.avx512.rcp14.ps.256" => "__builtin_ia32_rcp14ps256_mask", - "llvm.x86.avx512.rcp14.ps.512" => "__builtin_ia32_rcp14ps512_mask", - "llvm.x86.avx512.rcp14.sd" => "__builtin_ia32_rcp14sd_mask", - "llvm.x86.avx512.rcp14.ss" => "__builtin_ia32_rcp14ss_mask", - "llvm.x86.avx512.rcp28.pd" => "__builtin_ia32_rcp28pd_mask", - "llvm.x86.avx512.rcp28.ps" => "__builtin_ia32_rcp28ps_mask", - "llvm.x86.avx512.rcp28.sd" => "__builtin_ia32_rcp28sd_mask", - // [DUPLICATE]: "llvm.x86.avx512.rcp28.sd" => "__builtin_ia32_rcp28sd_round_mask", - "llvm.x86.avx512.rcp28.ss" => "__builtin_ia32_rcp28ss_mask", - // [DUPLICATE]: "llvm.x86.avx512.rcp28.ss" => "__builtin_ia32_rcp28ss_round_mask", - "llvm.x86.avx512.rndscale.sd" => 
"__builtin_ia32_rndscalesd", - "llvm.x86.avx512.rndscale.ss" => "__builtin_ia32_rndscaless", - "llvm.x86.avx512.rsqrt14.pd.128" => "__builtin_ia32_rsqrt14pd128_mask", - "llvm.x86.avx512.rsqrt14.pd.256" => "__builtin_ia32_rsqrt14pd256_mask", - "llvm.x86.avx512.rsqrt14.pd.512" => "__builtin_ia32_rsqrt14pd512_mask", - "llvm.x86.avx512.rsqrt14.ps.128" => "__builtin_ia32_rsqrt14ps128_mask", - "llvm.x86.avx512.rsqrt14.ps.256" => "__builtin_ia32_rsqrt14ps256_mask", - "llvm.x86.avx512.rsqrt14.ps.512" => "__builtin_ia32_rsqrt14ps512_mask", - "llvm.x86.avx512.rsqrt14.sd" => "__builtin_ia32_rsqrt14sd_mask", - "llvm.x86.avx512.rsqrt14.ss" => "__builtin_ia32_rsqrt14ss_mask", - "llvm.x86.avx512.rsqrt28.pd" => "__builtin_ia32_rsqrt28pd_mask", - "llvm.x86.avx512.rsqrt28.ps" => "__builtin_ia32_rsqrt28ps_mask", - "llvm.x86.avx512.rsqrt28.sd" => "__builtin_ia32_rsqrt28sd_mask", - // [DUPLICATE]: "llvm.x86.avx512.rsqrt28.sd" => "__builtin_ia32_rsqrt28sd_round_mask", - "llvm.x86.avx512.rsqrt28.ss" => "__builtin_ia32_rsqrt28ss_mask", - // [DUPLICATE]: "llvm.x86.avx512.rsqrt28.ss" => "__builtin_ia32_rsqrt28ss_round_mask", - "llvm.x86.avx512.scatter.dpd.512" => "__builtin_ia32_scattersiv8df", - "llvm.x86.avx512.scatter.dpi.512" => "__builtin_ia32_scattersiv16si", - "llvm.x86.avx512.scatter.dpq.512" => "__builtin_ia32_scattersiv8di", - "llvm.x86.avx512.scatter.dps.512" => "__builtin_ia32_scattersiv16sf", - "llvm.x86.avx512.scatter.qpd.512" => "__builtin_ia32_scatterdiv8df", - "llvm.x86.avx512.scatter.qpi.512" => "__builtin_ia32_scatterdiv16si", - "llvm.x86.avx512.scatter.qpq.512" => "__builtin_ia32_scatterdiv8di", - "llvm.x86.avx512.scatter.qps.512" => "__builtin_ia32_scatterdiv16sf", - "llvm.x86.avx512.scatterdiv2.df" => "__builtin_ia32_scatterdiv2df", - "llvm.x86.avx512.scatterdiv2.di" => "__builtin_ia32_scatterdiv2di", - "llvm.x86.avx512.scatterdiv4.df" => "__builtin_ia32_scatterdiv4df", - "llvm.x86.avx512.scatterdiv4.di" => "__builtin_ia32_scatterdiv4di", - "llvm.x86.avx512.scatterdiv4.sf" => "__builtin_ia32_scatterdiv4sf", - "llvm.x86.avx512.scatterdiv4.si" => "__builtin_ia32_scatterdiv4si", - "llvm.x86.avx512.scatterdiv8.sf" => "__builtin_ia32_scatterdiv8sf", - "llvm.x86.avx512.scatterdiv8.si" => "__builtin_ia32_scatterdiv8si", - "llvm.x86.avx512.scatterpf.dpd.512" => "__builtin_ia32_scatterpfdpd", - "llvm.x86.avx512.scatterpf.dps.512" => "__builtin_ia32_scatterpfdps", - "llvm.x86.avx512.scatterpf.qpd.512" => "__builtin_ia32_scatterpfqpd", - "llvm.x86.avx512.scatterpf.qps.512" => "__builtin_ia32_scatterpfqps", - "llvm.x86.avx512.scattersiv2.df" => "__builtin_ia32_scattersiv2df", - "llvm.x86.avx512.scattersiv2.di" => "__builtin_ia32_scattersiv2di", - "llvm.x86.avx512.scattersiv4.df" => "__builtin_ia32_scattersiv4df", - "llvm.x86.avx512.scattersiv4.di" => "__builtin_ia32_scattersiv4di", - "llvm.x86.avx512.scattersiv4.sf" => "__builtin_ia32_scattersiv4sf", - "llvm.x86.avx512.scattersiv4.si" => "__builtin_ia32_scattersiv4si", - "llvm.x86.avx512.scattersiv8.sf" => "__builtin_ia32_scattersiv8sf", - "llvm.x86.avx512.scattersiv8.si" => "__builtin_ia32_scattersiv8si", - "llvm.x86.avx512.sqrt.pd.512" => "__builtin_ia32_sqrtpd512_mask", - "llvm.x86.avx512.sqrt.ps.512" => "__builtin_ia32_sqrtps512_mask", - "llvm.x86.avx512.sqrt.sd" => "__builtin_ia32_sqrtrndsd", - "llvm.x86.avx512.sqrt.ss" => "__builtin_ia32_sqrtrndss", - "llvm.x86.avx512.sub.pd.512" => "__builtin_ia32_subpd512", - "llvm.x86.avx512.sub.ps.512" => "__builtin_ia32_subps512", - "llvm.x86.avx512.vbroadcast.sd.512" => "__builtin_ia32_vbroadcastsd512", - 
"llvm.x86.avx512.vbroadcast.sd.pd.512" => "__builtin_ia32_vbroadcastsd_pd512", - "llvm.x86.avx512.vbroadcast.ss.512" => "__builtin_ia32_vbroadcastss512", - "llvm.x86.avx512.vbroadcast.ss.ps.512" => "__builtin_ia32_vbroadcastss_ps512", - "llvm.x86.avx512.vcomi.sd" => "__builtin_ia32_vcomisd", - "llvm.x86.avx512.vcomi.ss" => "__builtin_ia32_vcomiss", - "llvm.x86.avx512.vcvtsd2si32" => "__builtin_ia32_vcvtsd2si32", - "llvm.x86.avx512.vcvtsd2si64" => "__builtin_ia32_vcvtsd2si64", - "llvm.x86.avx512.vcvtsd2usi32" => "__builtin_ia32_vcvtsd2usi32", - "llvm.x86.avx512.vcvtsd2usi64" => "__builtin_ia32_vcvtsd2usi64", - "llvm.x86.avx512.vcvtss2si32" => "__builtin_ia32_vcvtss2si32", - "llvm.x86.avx512.vcvtss2si64" => "__builtin_ia32_vcvtss2si64", - "llvm.x86.avx512.vcvtss2usi32" => "__builtin_ia32_vcvtss2usi32", - "llvm.x86.avx512.vcvtss2usi64" => "__builtin_ia32_vcvtss2usi64", - "llvm.x86.avx512.vpdpbusd.128" => "__builtin_ia32_vpdpbusd128", - "llvm.x86.avx512.vpdpbusd.256" => "__builtin_ia32_vpdpbusd256", - "llvm.x86.avx512.vpdpbusd.512" => "__builtin_ia32_vpdpbusd512", - "llvm.x86.avx512.vpdpbusds.128" => "__builtin_ia32_vpdpbusds128", - "llvm.x86.avx512.vpdpbusds.256" => "__builtin_ia32_vpdpbusds256", - "llvm.x86.avx512.vpdpbusds.512" => "__builtin_ia32_vpdpbusds512", - "llvm.x86.avx512.vpdpwssd.128" => "__builtin_ia32_vpdpwssd128", - "llvm.x86.avx512.vpdpwssd.256" => "__builtin_ia32_vpdpwssd256", - "llvm.x86.avx512.vpdpwssd.512" => "__builtin_ia32_vpdpwssd512", - "llvm.x86.avx512.vpdpwssds.128" => "__builtin_ia32_vpdpwssds128", - "llvm.x86.avx512.vpdpwssds.256" => "__builtin_ia32_vpdpwssds256", - "llvm.x86.avx512.vpdpwssds.512" => "__builtin_ia32_vpdpwssds512", - "llvm.x86.avx512.vpermi2var.d.128" => "__builtin_ia32_vpermi2vard128", - "llvm.x86.avx512.vpermi2var.d.256" => "__builtin_ia32_vpermi2vard256", - "llvm.x86.avx512.vpermi2var.d.512" => "__builtin_ia32_vpermi2vard512", - "llvm.x86.avx512.vpermi2var.hi.128" => "__builtin_ia32_vpermi2varhi128", - "llvm.x86.avx512.vpermi2var.hi.256" => "__builtin_ia32_vpermi2varhi256", - "llvm.x86.avx512.vpermi2var.hi.512" => "__builtin_ia32_vpermi2varhi512", - "llvm.x86.avx512.vpermi2var.pd.128" => "__builtin_ia32_vpermi2varpd128", - "llvm.x86.avx512.vpermi2var.pd.256" => "__builtin_ia32_vpermi2varpd256", - "llvm.x86.avx512.vpermi2var.pd.512" => "__builtin_ia32_vpermi2varpd512", - "llvm.x86.avx512.vpermi2var.ps.128" => "__builtin_ia32_vpermi2varps128", - "llvm.x86.avx512.vpermi2var.ps.256" => "__builtin_ia32_vpermi2varps256", - "llvm.x86.avx512.vpermi2var.ps.512" => "__builtin_ia32_vpermi2varps512", - "llvm.x86.avx512.vpermi2var.q.128" => "__builtin_ia32_vpermi2varq128", - "llvm.x86.avx512.vpermi2var.q.256" => "__builtin_ia32_vpermi2varq256", - "llvm.x86.avx512.vpermi2var.q.512" => "__builtin_ia32_vpermi2varq512", - "llvm.x86.avx512.vpermi2var.qi.128" => "__builtin_ia32_vpermi2varqi128", - "llvm.x86.avx512.vpermi2var.qi.256" => "__builtin_ia32_vpermi2varqi256", - "llvm.x86.avx512.vpermi2var.qi.512" => "__builtin_ia32_vpermi2varqi512", - "llvm.x86.avx512.vpermilvar.pd.512" => "__builtin_ia32_vpermilvarpd512", - "llvm.x86.avx512.vpermilvar.ps.512" => "__builtin_ia32_vpermilvarps512", - "llvm.x86.avx512.vpmadd52h.uq.128" => "__builtin_ia32_vpmadd52huq128", - "llvm.x86.avx512.vpmadd52h.uq.256" => "__builtin_ia32_vpmadd52huq256", - "llvm.x86.avx512.vpmadd52h.uq.512" => "__builtin_ia32_vpmadd52huq512", - "llvm.x86.avx512.vpmadd52l.uq.128" => "__builtin_ia32_vpmadd52luq128", - "llvm.x86.avx512.vpmadd52l.uq.256" => "__builtin_ia32_vpmadd52luq256", - 
"llvm.x86.avx512.vpmadd52l.uq.512" => "__builtin_ia32_vpmadd52luq512", - "llvm.x86.avx512bf16.cvtne2ps2bf16.128" => "__builtin_ia32_cvtne2ps2bf16_128", - "llvm.x86.avx512bf16.cvtne2ps2bf16.256" => "__builtin_ia32_cvtne2ps2bf16_256", - "llvm.x86.avx512bf16.cvtne2ps2bf16.512" => "__builtin_ia32_cvtne2ps2bf16_512", - "llvm.x86.avx512bf16.cvtneps2bf16.256" => "__builtin_ia32_cvtneps2bf16_256", - "llvm.x86.avx512bf16.cvtneps2bf16.512" => "__builtin_ia32_cvtneps2bf16_512", - "llvm.x86.avx512bf16.dpbf16ps.128" => "__builtin_ia32_dpbf16ps_128", - "llvm.x86.avx512bf16.dpbf16ps.256" => "__builtin_ia32_dpbf16ps_256", - "llvm.x86.avx512bf16.dpbf16ps.512" => "__builtin_ia32_dpbf16ps_512", - "llvm.x86.avx512fp16.add.ph.512" => "__builtin_ia32_addph512", - "llvm.x86.avx512fp16.div.ph.512" => "__builtin_ia32_divph512", - // [INVALID CONVERSION]: "llvm.x86.avx512fp16.mask.add.sh.round" => "__builtin_ia32_addsh_round_mask", - "llvm.x86.avx512fp16.mask.cmp.sh" => "__builtin_ia32_cmpsh_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512fp16.mask.div.sh.round" => "__builtin_ia32_divsh_round_mask", - "llvm.x86.avx512fp16.mask.fpclass.sh" => "__builtin_ia32_fpclasssh_mask", - "llvm.x86.avx512fp16.mask.getexp.ph.128" => "__builtin_ia32_getexpph128_mask", - "llvm.x86.avx512fp16.mask.getexp.ph.256" => "__builtin_ia32_getexpph256_mask", - "llvm.x86.avx512fp16.mask.getexp.ph.512" => "__builtin_ia32_getexpph512_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512fp16.mask.getexp.sh" => "__builtin_ia32_getexpsh128_round_mask", - "llvm.x86.avx512fp16.mask.getmant.ph.128" => "__builtin_ia32_getmantph128_mask", - "llvm.x86.avx512fp16.mask.getmant.ph.256" => "__builtin_ia32_getmantph256_mask", - "llvm.x86.avx512fp16.mask.getmant.ph.512" => "__builtin_ia32_getmantph512_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512fp16.mask.getmant.sh" => "__builtin_ia32_getmantsh_round_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512fp16.mask.max.sh.round" => "__builtin_ia32_maxsh_round_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512fp16.mask.min.sh.round" => "__builtin_ia32_minsh_round_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512fp16.mask.mul.sh.round" => "__builtin_ia32_mulsh_round_mask", - "llvm.x86.avx512fp16.mask.rcp.ph.128" => "__builtin_ia32_rcpph128_mask", - "llvm.x86.avx512fp16.mask.rcp.ph.256" => "__builtin_ia32_rcpph256_mask", - "llvm.x86.avx512fp16.mask.rcp.ph.512" => "__builtin_ia32_rcpph512_mask", - "llvm.x86.avx512fp16.mask.rcp.sh" => "__builtin_ia32_rcpsh_mask", - "llvm.x86.avx512fp16.mask.reduce.ph.128" => "__builtin_ia32_reduceph128_mask", - "llvm.x86.avx512fp16.mask.reduce.ph.256" => "__builtin_ia32_reduceph256_mask", - "llvm.x86.avx512fp16.mask.reduce.ph.512" => "__builtin_ia32_reduceph512_mask", - "llvm.x86.avx512fp16.mask.reduce.sh" => "__builtin_ia32_reducesh_mask", - "llvm.x86.avx512fp16.mask.rndscale.ph.128" => "__builtin_ia32_rndscaleph_128_mask", - "llvm.x86.avx512fp16.mask.rndscale.ph.256" => "__builtin_ia32_rndscaleph_256_mask", - "llvm.x86.avx512fp16.mask.rndscale.ph.512" => "__builtin_ia32_rndscaleph_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512fp16.mask.rndscale.sh" => "__builtin_ia32_rndscalesh_round_mask", - "llvm.x86.avx512fp16.mask.rsqrt.ph.128" => "__builtin_ia32_rsqrtph128_mask", - "llvm.x86.avx512fp16.mask.rsqrt.ph.256" => "__builtin_ia32_rsqrtph256_mask", - "llvm.x86.avx512fp16.mask.rsqrt.ph.512" => "__builtin_ia32_rsqrtph512_mask", - "llvm.x86.avx512fp16.mask.rsqrt.sh" => "__builtin_ia32_rsqrtsh_mask", - "llvm.x86.avx512fp16.mask.scalef.ph.128" => "__builtin_ia32_scalefph128_mask", 
- "llvm.x86.avx512fp16.mask.scalef.ph.256" => "__builtin_ia32_scalefph256_mask", - "llvm.x86.avx512fp16.mask.scalef.ph.512" => "__builtin_ia32_scalefph512_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512fp16.mask.scalef.sh" => "__builtin_ia32_scalefsh_round_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512fp16.mask.sub.sh.round" => "__builtin_ia32_subsh_round_mask", - "llvm.x86.avx512fp16.mask.vcvtdq2ph.128" => "__builtin_ia32_vcvtdq2ph128_mask", - "llvm.x86.avx512fp16.mask.vcvtpd2ph.128" => "__builtin_ia32_vcvtpd2ph128_mask", - "llvm.x86.avx512fp16.mask.vcvtpd2ph.256" => "__builtin_ia32_vcvtpd2ph256_mask", - "llvm.x86.avx512fp16.mask.vcvtpd2ph.512" => "__builtin_ia32_vcvtpd2ph512_mask", - "llvm.x86.avx512fp16.mask.vcvtph2dq.128" => "__builtin_ia32_vcvtph2dq128_mask", - "llvm.x86.avx512fp16.mask.vcvtph2dq.256" => "__builtin_ia32_vcvtph2dq256_mask", - "llvm.x86.avx512fp16.mask.vcvtph2dq.512" => "__builtin_ia32_vcvtph2dq512_mask", - "llvm.x86.avx512fp16.mask.vcvtph2pd.128" => "__builtin_ia32_vcvtph2pd128_mask", - "llvm.x86.avx512fp16.mask.vcvtph2pd.256" => "__builtin_ia32_vcvtph2pd256_mask", - "llvm.x86.avx512fp16.mask.vcvtph2pd.512" => "__builtin_ia32_vcvtph2pd512_mask", - "llvm.x86.avx512fp16.mask.vcvtph2psx.128" => "__builtin_ia32_vcvtph2psx128_mask", - "llvm.x86.avx512fp16.mask.vcvtph2psx.256" => "__builtin_ia32_vcvtph2psx256_mask", - "llvm.x86.avx512fp16.mask.vcvtph2psx.512" => "__builtin_ia32_vcvtph2psx512_mask", - "llvm.x86.avx512fp16.mask.vcvtph2qq.128" => "__builtin_ia32_vcvtph2qq128_mask", - "llvm.x86.avx512fp16.mask.vcvtph2qq.256" => "__builtin_ia32_vcvtph2qq256_mask", - "llvm.x86.avx512fp16.mask.vcvtph2qq.512" => "__builtin_ia32_vcvtph2qq512_mask", - "llvm.x86.avx512fp16.mask.vcvtph2udq.128" => "__builtin_ia32_vcvtph2udq128_mask", - "llvm.x86.avx512fp16.mask.vcvtph2udq.256" => "__builtin_ia32_vcvtph2udq256_mask", - "llvm.x86.avx512fp16.mask.vcvtph2udq.512" => "__builtin_ia32_vcvtph2udq512_mask", - "llvm.x86.avx512fp16.mask.vcvtph2uqq.128" => "__builtin_ia32_vcvtph2uqq128_mask", - "llvm.x86.avx512fp16.mask.vcvtph2uqq.256" => "__builtin_ia32_vcvtph2uqq256_mask", - "llvm.x86.avx512fp16.mask.vcvtph2uqq.512" => "__builtin_ia32_vcvtph2uqq512_mask", - "llvm.x86.avx512fp16.mask.vcvtph2uw.128" => "__builtin_ia32_vcvtph2uw128_mask", - "llvm.x86.avx512fp16.mask.vcvtph2uw.256" => "__builtin_ia32_vcvtph2uw256_mask", - "llvm.x86.avx512fp16.mask.vcvtph2uw.512" => "__builtin_ia32_vcvtph2uw512_mask", - "llvm.x86.avx512fp16.mask.vcvtph2w.128" => "__builtin_ia32_vcvtph2w128_mask", - "llvm.x86.avx512fp16.mask.vcvtph2w.256" => "__builtin_ia32_vcvtph2w256_mask", - "llvm.x86.avx512fp16.mask.vcvtph2w.512" => "__builtin_ia32_vcvtph2w512_mask", - "llvm.x86.avx512fp16.mask.vcvtps2phx.128" => "__builtin_ia32_vcvtps2phx128_mask", - "llvm.x86.avx512fp16.mask.vcvtps2phx.256" => "__builtin_ia32_vcvtps2phx256_mask", - "llvm.x86.avx512fp16.mask.vcvtps2phx.512" => "__builtin_ia32_vcvtps2phx512_mask", - "llvm.x86.avx512fp16.mask.vcvtqq2ph.128" => "__builtin_ia32_vcvtqq2ph128_mask", - "llvm.x86.avx512fp16.mask.vcvtqq2ph.256" => "__builtin_ia32_vcvtqq2ph256_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512fp16.mask.vcvtsd2sh.round" => "__builtin_ia32_vcvtsd2sh_round_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512fp16.mask.vcvtsh2sd.round" => "__builtin_ia32_vcvtsh2sd_round_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512fp16.mask.vcvtsh2ss.round" => "__builtin_ia32_vcvtsh2ss_round_mask", - // [INVALID CONVERSION]: "llvm.x86.avx512fp16.mask.vcvtss2sh.round" => "__builtin_ia32_vcvtss2sh_round_mask", - 
"llvm.x86.avx512fp16.mask.vcvttph2dq.128" => "__builtin_ia32_vcvttph2dq128_mask", - "llvm.x86.avx512fp16.mask.vcvttph2dq.256" => "__builtin_ia32_vcvttph2dq256_mask", - "llvm.x86.avx512fp16.mask.vcvttph2dq.512" => "__builtin_ia32_vcvttph2dq512_mask", - "llvm.x86.avx512fp16.mask.vcvttph2qq.128" => "__builtin_ia32_vcvttph2qq128_mask", - "llvm.x86.avx512fp16.mask.vcvttph2qq.256" => "__builtin_ia32_vcvttph2qq256_mask", - "llvm.x86.avx512fp16.mask.vcvttph2qq.512" => "__builtin_ia32_vcvttph2qq512_mask", - "llvm.x86.avx512fp16.mask.vcvttph2udq.128" => "__builtin_ia32_vcvttph2udq128_mask", - "llvm.x86.avx512fp16.mask.vcvttph2udq.256" => "__builtin_ia32_vcvttph2udq256_mask", - "llvm.x86.avx512fp16.mask.vcvttph2udq.512" => "__builtin_ia32_vcvttph2udq512_mask", - "llvm.x86.avx512fp16.mask.vcvttph2uqq.128" => "__builtin_ia32_vcvttph2uqq128_mask", - "llvm.x86.avx512fp16.mask.vcvttph2uqq.256" => "__builtin_ia32_vcvttph2uqq256_mask", - "llvm.x86.avx512fp16.mask.vcvttph2uqq.512" => "__builtin_ia32_vcvttph2uqq512_mask", - "llvm.x86.avx512fp16.mask.vcvttph2uw.128" => "__builtin_ia32_vcvttph2uw128_mask", - "llvm.x86.avx512fp16.mask.vcvttph2uw.256" => "__builtin_ia32_vcvttph2uw256_mask", - "llvm.x86.avx512fp16.mask.vcvttph2uw.512" => "__builtin_ia32_vcvttph2uw512_mask", - "llvm.x86.avx512fp16.mask.vcvttph2w.128" => "__builtin_ia32_vcvttph2w128_mask", - "llvm.x86.avx512fp16.mask.vcvttph2w.256" => "__builtin_ia32_vcvttph2w256_mask", - "llvm.x86.avx512fp16.mask.vcvttph2w.512" => "__builtin_ia32_vcvttph2w512_mask", - "llvm.x86.avx512fp16.mask.vcvtudq2ph.128" => "__builtin_ia32_vcvtudq2ph128_mask", - "llvm.x86.avx512fp16.mask.vcvtuqq2ph.128" => "__builtin_ia32_vcvtuqq2ph128_mask", - "llvm.x86.avx512fp16.mask.vcvtuqq2ph.256" => "__builtin_ia32_vcvtuqq2ph256_mask", - "llvm.x86.avx512fp16.mask.vfcmadd.cph.128" => "__builtin_ia32_vfcmaddcph128_mask", - "llvm.x86.avx512fp16.mask.vfcmadd.cph.256" => "__builtin_ia32_vfcmaddcph256_mask", - "llvm.x86.avx512fp16.mask.vfcmadd.cph.512" => "__builtin_ia32_vfcmaddcph512_mask3", - "llvm.x86.avx512fp16.mask.vfcmadd.csh" => "__builtin_ia32_vfcmaddcsh_mask", - "llvm.x86.avx512fp16.mask.vfcmul.cph.128" => "__builtin_ia32_vfcmulcph128_mask", - "llvm.x86.avx512fp16.mask.vfcmul.cph.256" => "__builtin_ia32_vfcmulcph256_mask", - "llvm.x86.avx512fp16.mask.vfcmul.cph.512" => "__builtin_ia32_vfcmulcph512_mask", - "llvm.x86.avx512fp16.mask.vfcmul.csh" => "__builtin_ia32_vfcmulcsh_mask", - "llvm.x86.avx512fp16.mask.vfmadd.cph.128" => "__builtin_ia32_vfmaddcph128_mask", - "llvm.x86.avx512fp16.mask.vfmadd.cph.256" => "__builtin_ia32_vfmaddcph256_mask", - "llvm.x86.avx512fp16.mask.vfmadd.cph.512" => "__builtin_ia32_vfmaddcph512_mask3", - "llvm.x86.avx512fp16.mask.vfmadd.csh" => "__builtin_ia32_vfmaddcsh_mask", - "llvm.x86.avx512fp16.mask.vfmul.cph.128" => "__builtin_ia32_vfmulcph128_mask", - "llvm.x86.avx512fp16.mask.vfmul.cph.256" => "__builtin_ia32_vfmulcph256_mask", - "llvm.x86.avx512fp16.mask.vfmul.cph.512" => "__builtin_ia32_vfmulcph512_mask", - "llvm.x86.avx512fp16.mask.vfmul.csh" => "__builtin_ia32_vfmulcsh_mask", - "llvm.x86.avx512fp16.maskz.vfcmadd.cph.128" => "__builtin_ia32_vfcmaddcph128_maskz", - "llvm.x86.avx512fp16.maskz.vfcmadd.cph.256" => "__builtin_ia32_vfcmaddcph256_maskz", - "llvm.x86.avx512fp16.maskz.vfcmadd.cph.512" => "__builtin_ia32_vfcmaddcph512_maskz", - "llvm.x86.avx512fp16.maskz.vfcmadd.csh" => "__builtin_ia32_vfcmaddcsh_maskz", - "llvm.x86.avx512fp16.maskz.vfmadd.cph.128" => "__builtin_ia32_vfmaddcph128_maskz", - "llvm.x86.avx512fp16.maskz.vfmadd.cph.256" => 
"__builtin_ia32_vfmaddcph256_maskz", - "llvm.x86.avx512fp16.maskz.vfmadd.cph.512" => "__builtin_ia32_vfmaddcph512_maskz", - "llvm.x86.avx512fp16.maskz.vfmadd.csh" => "__builtin_ia32_vfmaddcsh_maskz", - "llvm.x86.avx512fp16.max.ph.128" => "__builtin_ia32_maxph128", - "llvm.x86.avx512fp16.max.ph.256" => "__builtin_ia32_maxph256", - "llvm.x86.avx512fp16.max.ph.512" => "__builtin_ia32_maxph512", - "llvm.x86.avx512fp16.min.ph.128" => "__builtin_ia32_minph128", - "llvm.x86.avx512fp16.min.ph.256" => "__builtin_ia32_minph256", - "llvm.x86.avx512fp16.min.ph.512" => "__builtin_ia32_minph512", - "llvm.x86.avx512fp16.mul.ph.512" => "__builtin_ia32_mulph512", - "llvm.x86.avx512fp16.sub.ph.512" => "__builtin_ia32_subph512", - "llvm.x86.avx512fp16.vcomi.sh" => "__builtin_ia32_vcomish", - "llvm.x86.avx512fp16.vcvtsh2si32" => "__builtin_ia32_vcvtsh2si32", - "llvm.x86.avx512fp16.vcvtsh2si64" => "__builtin_ia32_vcvtsh2si64", - "llvm.x86.avx512fp16.vcvtsh2usi32" => "__builtin_ia32_vcvtsh2usi32", - "llvm.x86.avx512fp16.vcvtsh2usi64" => "__builtin_ia32_vcvtsh2usi64", - "llvm.x86.avx512fp16.vcvtsi2sh" => "__builtin_ia32_vcvtsi2sh", - "llvm.x86.avx512fp16.vcvtsi642sh" => "__builtin_ia32_vcvtsi642sh", - "llvm.x86.avx512fp16.vcvttsh2si32" => "__builtin_ia32_vcvttsh2si32", - "llvm.x86.avx512fp16.vcvttsh2si64" => "__builtin_ia32_vcvttsh2si64", - "llvm.x86.avx512fp16.vcvttsh2usi32" => "__builtin_ia32_vcvttsh2usi32", - "llvm.x86.avx512fp16.vcvttsh2usi64" => "__builtin_ia32_vcvttsh2usi64", - "llvm.x86.avx512fp16.vcvtusi2sh" => "__builtin_ia32_vcvtusi2sh", - "llvm.x86.avx512fp16.vcvtusi642sh" => "__builtin_ia32_vcvtusi642sh", - "llvm.x86.avx512fp16.vfmaddsub.ph.128" => "__builtin_ia32_vfmaddsubph", - "llvm.x86.avx512fp16.vfmaddsub.ph.256" => "__builtin_ia32_vfmaddsubph256", - "llvm.x86.axor32" => "__builtin_ia32_axor32", - "llvm.x86.axor64" => "__builtin_ia32_axor64", - "llvm.x86.bmi.bextr.32" => "__builtin_ia32_bextr_u32", - "llvm.x86.bmi.bextr.64" => "__builtin_ia32_bextr_u64", - "llvm.x86.bmi.bzhi.32" => "__builtin_ia32_bzhi_si", - "llvm.x86.bmi.bzhi.64" => "__builtin_ia32_bzhi_di", - "llvm.x86.bmi.pdep.32" => "__builtin_ia32_pdep_si", - "llvm.x86.bmi.pdep.64" => "__builtin_ia32_pdep_di", - "llvm.x86.bmi.pext.32" => "__builtin_ia32_pext_si", - "llvm.x86.bmi.pext.64" => "__builtin_ia32_pext_di", - "llvm.x86.cldemote" => "__builtin_ia32_cldemote", - "llvm.x86.clflushopt" => "__builtin_ia32_clflushopt", - "llvm.x86.clrssbsy" => "__builtin_ia32_clrssbsy", - "llvm.x86.clui" => "__builtin_ia32_clui", - "llvm.x86.clwb" => "__builtin_ia32_clwb", - "llvm.x86.clzero" => "__builtin_ia32_clzero", - "llvm.x86.cmpccxadd32" => "__builtin_ia32_cmpccxadd32", - "llvm.x86.cmpccxadd64" => "__builtin_ia32_cmpccxadd64", - "llvm.x86.directstore32" => "__builtin_ia32_directstore_u32", - "llvm.x86.directstore64" => "__builtin_ia32_directstore_u64", - "llvm.x86.enqcmd" => "__builtin_ia32_enqcmd", - "llvm.x86.enqcmds" => "__builtin_ia32_enqcmds", - "llvm.x86.flags.read.u32" => "__builtin_ia32_readeflags_u32", - "llvm.x86.flags.read.u64" => "__builtin_ia32_readeflags_u64", - "llvm.x86.flags.write.u32" => "__builtin_ia32_writeeflags_u32", - "llvm.x86.flags.write.u64" => "__builtin_ia32_writeeflags_u64", - "llvm.x86.fma.mask.vfmadd.pd.512" => "__builtin_ia32_vfmaddpd512_mask", - "llvm.x86.fma.mask.vfmadd.ps.512" => "__builtin_ia32_vfmaddps512_mask", - "llvm.x86.fma.mask.vfmaddsub.pd.512" => "__builtin_ia32_vfmaddsubpd512_mask", - "llvm.x86.fma.mask.vfmaddsub.ps.512" => "__builtin_ia32_vfmaddsubps512_mask", - "llvm.x86.fma.mask.vfmsub.pd.512" => 
"__builtin_ia32_vfmsubpd512_mask", - "llvm.x86.fma.mask.vfmsub.ps.512" => "__builtin_ia32_vfmsubps512_mask", - "llvm.x86.fma.mask.vfmsubadd.pd.512" => "__builtin_ia32_vfmsubaddpd512_mask", - "llvm.x86.fma.mask.vfmsubadd.ps.512" => "__builtin_ia32_vfmsubaddps512_mask", - "llvm.x86.fma.mask.vfnmadd.pd.512" => "__builtin_ia32_vfnmaddpd512_mask", - "llvm.x86.fma.mask.vfnmadd.ps.512" => "__builtin_ia32_vfnmaddps512_mask", - "llvm.x86.fma.mask.vfnmsub.pd.512" => "__builtin_ia32_vfnmsubpd512_mask", - "llvm.x86.fma.mask.vfnmsub.ps.512" => "__builtin_ia32_vfnmsubps512_mask", - "llvm.x86.fma.vfmadd.pd" => "__builtin_ia32_vfmaddpd", - "llvm.x86.fma.vfmadd.pd.256" => "__builtin_ia32_vfmaddpd256", - "llvm.x86.fma.vfmadd.ps" => "__builtin_ia32_vfmaddps", - "llvm.x86.fma.vfmadd.ps.256" => "__builtin_ia32_vfmaddps256", - "llvm.x86.fma.vfmadd.sd" => "__builtin_ia32_vfmaddsd", - "llvm.x86.fma.vfmadd.ss" => "__builtin_ia32_vfmaddss", - "llvm.x86.fma.vfmaddsub.pd" => "__builtin_ia32_vfmaddsubpd", - "llvm.x86.fma.vfmaddsub.pd.256" => "__builtin_ia32_vfmaddsubpd256", - "llvm.x86.fma.vfmaddsub.ps" => "__builtin_ia32_vfmaddsubps", - "llvm.x86.fma.vfmaddsub.ps.256" => "__builtin_ia32_vfmaddsubps256", - "llvm.x86.fma.vfmsub.pd" => "__builtin_ia32_vfmsubpd", - "llvm.x86.fma.vfmsub.pd.256" => "__builtin_ia32_vfmsubpd256", - "llvm.x86.fma.vfmsub.ps" => "__builtin_ia32_vfmsubps", - "llvm.x86.fma.vfmsub.ps.256" => "__builtin_ia32_vfmsubps256", - "llvm.x86.fma.vfmsub.sd" => "__builtin_ia32_vfmsubsd", - "llvm.x86.fma.vfmsub.ss" => "__builtin_ia32_vfmsubss", - "llvm.x86.fma.vfmsubadd.pd" => "__builtin_ia32_vfmsubaddpd", - "llvm.x86.fma.vfmsubadd.pd.256" => "__builtin_ia32_vfmsubaddpd256", - "llvm.x86.fma.vfmsubadd.ps" => "__builtin_ia32_vfmsubaddps", - "llvm.x86.fma.vfmsubadd.ps.256" => "__builtin_ia32_vfmsubaddps256", - "llvm.x86.fma.vfnmadd.pd" => "__builtin_ia32_vfnmaddpd", - "llvm.x86.fma.vfnmadd.pd.256" => "__builtin_ia32_vfnmaddpd256", - "llvm.x86.fma.vfnmadd.ps" => "__builtin_ia32_vfnmaddps", - "llvm.x86.fma.vfnmadd.ps.256" => "__builtin_ia32_vfnmaddps256", - "llvm.x86.fma.vfnmadd.sd" => "__builtin_ia32_vfnmaddsd", - "llvm.x86.fma.vfnmadd.ss" => "__builtin_ia32_vfnmaddss", - "llvm.x86.fma.vfnmsub.pd" => "__builtin_ia32_vfnmsubpd", - "llvm.x86.fma.vfnmsub.pd.256" => "__builtin_ia32_vfnmsubpd256", - "llvm.x86.fma.vfnmsub.ps" => "__builtin_ia32_vfnmsubps", - "llvm.x86.fma.vfnmsub.ps.256" => "__builtin_ia32_vfnmsubps256", - "llvm.x86.fma.vfnmsub.sd" => "__builtin_ia32_vfnmsubsd", - "llvm.x86.fma.vfnmsub.ss" => "__builtin_ia32_vfnmsubss", - "llvm.x86.fxrstor" => "__builtin_ia32_fxrstor", - "llvm.x86.fxrstor64" => "__builtin_ia32_fxrstor64", - "llvm.x86.fxsave" => "__builtin_ia32_fxsave", - "llvm.x86.fxsave64" => "__builtin_ia32_fxsave64", - "llvm.x86.incsspd" => "__builtin_ia32_incsspd", - "llvm.x86.incsspq" => "__builtin_ia32_incsspq", - "llvm.x86.invpcid" => "__builtin_ia32_invpcid", - "llvm.x86.ldtilecfg" => "__builtin_ia32_tile_loadconfig", - "llvm.x86.ldtilecfg.internal" => "__builtin_ia32_tile_loadconfig_internal", - "llvm.x86.llwpcb" => "__builtin_ia32_llwpcb", - "llvm.x86.loadiwkey" => "__builtin_ia32_loadiwkey", - "llvm.x86.lwpins32" => "__builtin_ia32_lwpins32", - "llvm.x86.lwpins64" => "__builtin_ia32_lwpins64", - "llvm.x86.lwpval32" => "__builtin_ia32_lwpval32", - "llvm.x86.lwpval64" => "__builtin_ia32_lwpval64", - "llvm.x86.mmx.emms" => "__builtin_ia32_emms", - "llvm.x86.mmx.femms" => "__builtin_ia32_femms", - "llvm.x86.monitorx" => "__builtin_ia32_monitorx", - "llvm.x86.movdir64b" => 
"__builtin_ia32_movdir64b", - "llvm.x86.movrsdi" => "__builtin_ia32_movrsdi", - "llvm.x86.movrshi" => "__builtin_ia32_movrshi", - "llvm.x86.movrsqi" => "__builtin_ia32_movrsqi", - "llvm.x86.movrssi" => "__builtin_ia32_movrssi", - "llvm.x86.mwaitx" => "__builtin_ia32_mwaitx", - "llvm.x86.pclmulqdq" => "__builtin_ia32_pclmulqdq128", - "llvm.x86.pclmulqdq.256" => "__builtin_ia32_pclmulqdq256", - "llvm.x86.pclmulqdq.512" => "__builtin_ia32_pclmulqdq512", - "llvm.x86.prefetchrs" => "__builtin_ia32_prefetchrs", - "llvm.x86.ptwrite32" => "__builtin_ia32_ptwrite32", - "llvm.x86.ptwrite64" => "__builtin_ia32_ptwrite64", - "llvm.x86.rdfsbase.32" => "__builtin_ia32_rdfsbase32", - "llvm.x86.rdfsbase.64" => "__builtin_ia32_rdfsbase64", - "llvm.x86.rdgsbase.32" => "__builtin_ia32_rdgsbase32", - "llvm.x86.rdgsbase.64" => "__builtin_ia32_rdgsbase64", - "llvm.x86.rdpid" => "__builtin_ia32_rdpid", - "llvm.x86.rdpkru" => "__builtin_ia32_rdpkru", - "llvm.x86.rdpmc" => "__builtin_ia32_rdpmc", - "llvm.x86.rdpru" => "__builtin_ia32_rdpru", - "llvm.x86.rdsspd" => "__builtin_ia32_rdsspd", - "llvm.x86.rdsspq" => "__builtin_ia32_rdsspq", - "llvm.x86.rdtsc" => "__builtin_ia32_rdtsc", - "llvm.x86.rdtscp" => "__builtin_ia32_rdtscp", - "llvm.x86.rstorssp" => "__builtin_ia32_rstorssp", - "llvm.x86.saveprevssp" => "__builtin_ia32_saveprevssp", - "llvm.x86.senduipi" => "__builtin_ia32_senduipi", - "llvm.x86.serialize" => "__builtin_ia32_serialize", - "llvm.x86.setssbsy" => "__builtin_ia32_setssbsy", - "llvm.x86.sha1msg1" => "__builtin_ia32_sha1msg1", - "llvm.x86.sha1msg2" => "__builtin_ia32_sha1msg2", - "llvm.x86.sha1nexte" => "__builtin_ia32_sha1nexte", - "llvm.x86.sha1rnds4" => "__builtin_ia32_sha1rnds4", - "llvm.x86.sha256msg1" => "__builtin_ia32_sha256msg1", - "llvm.x86.sha256msg2" => "__builtin_ia32_sha256msg2", - "llvm.x86.sha256rnds2" => "__builtin_ia32_sha256rnds2", - "llvm.x86.slwpcb" => "__builtin_ia32_slwpcb", - "llvm.x86.sse.add.ss" => "__builtin_ia32_addss", - "llvm.x86.sse.cmp.ps" => "__builtin_ia32_cmpps", - "llvm.x86.sse.cmp.ss" => "__builtin_ia32_cmpss", - "llvm.x86.sse.comieq.ss" => "__builtin_ia32_comieq", - "llvm.x86.sse.comige.ss" => "__builtin_ia32_comige", - "llvm.x86.sse.comigt.ss" => "__builtin_ia32_comigt", - "llvm.x86.sse.comile.ss" => "__builtin_ia32_comile", - "llvm.x86.sse.comilt.ss" => "__builtin_ia32_comilt", - "llvm.x86.sse.comineq.ss" => "__builtin_ia32_comineq", - "llvm.x86.sse.cvtsi2ss" => "__builtin_ia32_cvtsi2ss", - "llvm.x86.sse.cvtsi642ss" => "__builtin_ia32_cvtsi642ss", - "llvm.x86.sse.cvtss2si" => "__builtin_ia32_cvtss2si", - "llvm.x86.sse.cvtss2si64" => "__builtin_ia32_cvtss2si64", - "llvm.x86.sse.cvttss2si" => "__builtin_ia32_cvttss2si", - "llvm.x86.sse.cvttss2si64" => "__builtin_ia32_cvttss2si64", - "llvm.x86.sse.div.ss" => "__builtin_ia32_divss", - "llvm.x86.sse.max.ps" => "__builtin_ia32_maxps", - "llvm.x86.sse.max.ss" => "__builtin_ia32_maxss", - "llvm.x86.sse.min.ps" => "__builtin_ia32_minps", - "llvm.x86.sse.min.ss" => "__builtin_ia32_minss", - "llvm.x86.sse.movmsk.ps" => "__builtin_ia32_movmskps", - "llvm.x86.sse.mul.ss" => "__builtin_ia32_mulss", - "llvm.x86.sse.rcp.ps" => "__builtin_ia32_rcpps", - "llvm.x86.sse.rcp.ss" => "__builtin_ia32_rcpss", - "llvm.x86.sse.rsqrt.ps" => "__builtin_ia32_rsqrtps", - "llvm.x86.sse.rsqrt.ss" => "__builtin_ia32_rsqrtss", - "llvm.x86.sse.sfence" => "__builtin_ia32_sfence", - "llvm.x86.sse.sqrt.ps" => "__builtin_ia32_sqrtps", - "llvm.x86.sse.sqrt.ss" => "__builtin_ia32_sqrtss", - "llvm.x86.sse.storeu.ps" => "__builtin_ia32_storeups", - 
"llvm.x86.sse.sub.ss" => "__builtin_ia32_subss", - "llvm.x86.sse.ucomieq.ss" => "__builtin_ia32_ucomieq", - "llvm.x86.sse.ucomige.ss" => "__builtin_ia32_ucomige", - "llvm.x86.sse.ucomigt.ss" => "__builtin_ia32_ucomigt", - "llvm.x86.sse.ucomile.ss" => "__builtin_ia32_ucomile", - "llvm.x86.sse.ucomilt.ss" => "__builtin_ia32_ucomilt", - "llvm.x86.sse.ucomineq.ss" => "__builtin_ia32_ucomineq", - "llvm.x86.sse2.add.sd" => "__builtin_ia32_addsd", - "llvm.x86.sse2.clflush" => "__builtin_ia32_clflush", - "llvm.x86.sse2.cmp.pd" => "__builtin_ia32_cmppd", - "llvm.x86.sse2.cmp.sd" => "__builtin_ia32_cmpsd", - "llvm.x86.sse2.comieq.sd" => "__builtin_ia32_comisdeq", - "llvm.x86.sse2.comige.sd" => "__builtin_ia32_comisdge", - "llvm.x86.sse2.comigt.sd" => "__builtin_ia32_comisdgt", - "llvm.x86.sse2.comile.sd" => "__builtin_ia32_comisdle", - "llvm.x86.sse2.comilt.sd" => "__builtin_ia32_comisdlt", - "llvm.x86.sse2.comineq.sd" => "__builtin_ia32_comisdneq", - "llvm.x86.sse2.cvtdq2pd" => "__builtin_ia32_cvtdq2pd", - "llvm.x86.sse2.cvtdq2ps" => "__builtin_ia32_cvtdq2ps", - "llvm.x86.sse2.cvtpd2dq" => "__builtin_ia32_cvtpd2dq", - "llvm.x86.sse2.cvtpd2ps" => "__builtin_ia32_cvtpd2ps", - "llvm.x86.sse2.cvtps2dq" => "__builtin_ia32_cvtps2dq", - "llvm.x86.sse2.cvtps2pd" => "__builtin_ia32_cvtps2pd", - "llvm.x86.sse2.cvtsd2si" => "__builtin_ia32_cvtsd2si", - "llvm.x86.sse2.cvtsd2si64" => "__builtin_ia32_cvtsd2si64", - "llvm.x86.sse2.cvtsd2ss" => "__builtin_ia32_cvtsd2ss", - "llvm.x86.sse2.cvtsi2sd" => "__builtin_ia32_cvtsi2sd", - "llvm.x86.sse2.cvtsi642sd" => "__builtin_ia32_cvtsi642sd", - "llvm.x86.sse2.cvtss2sd" => "__builtin_ia32_cvtss2sd", - "llvm.x86.sse2.cvttpd2dq" => "__builtin_ia32_cvttpd2dq", - "llvm.x86.sse2.cvttps2dq" => "__builtin_ia32_cvttps2dq", - "llvm.x86.sse2.cvttsd2si" => "__builtin_ia32_cvttsd2si", - "llvm.x86.sse2.cvttsd2si64" => "__builtin_ia32_cvttsd2si64", - "llvm.x86.sse2.div.sd" => "__builtin_ia32_divsd", - "llvm.x86.sse2.lfence" => "__builtin_ia32_lfence", - "llvm.x86.sse2.maskmov.dqu" => "__builtin_ia32_maskmovdqu", - "llvm.x86.sse2.max.pd" => "__builtin_ia32_maxpd", - "llvm.x86.sse2.max.sd" => "__builtin_ia32_maxsd", - "llvm.x86.sse2.mfence" => "__builtin_ia32_mfence", - "llvm.x86.sse2.min.pd" => "__builtin_ia32_minpd", - "llvm.x86.sse2.min.sd" => "__builtin_ia32_minsd", - "llvm.x86.sse2.movmsk.pd" => "__builtin_ia32_movmskpd", - "llvm.x86.sse2.mul.sd" => "__builtin_ia32_mulsd", - "llvm.x86.sse2.packssdw.128" => "__builtin_ia32_packssdw128", - "llvm.x86.sse2.packsswb.128" => "__builtin_ia32_packsswb128", - "llvm.x86.sse2.packuswb.128" => "__builtin_ia32_packuswb128", - "llvm.x86.sse2.padds.b" => "__builtin_ia32_paddsb128", - "llvm.x86.sse2.padds.w" => "__builtin_ia32_paddsw128", - "llvm.x86.sse2.paddus.b" => "__builtin_ia32_paddusb128", - "llvm.x86.sse2.paddus.w" => "__builtin_ia32_paddusw128", - "llvm.x86.sse2.pause" => "__builtin_ia32_pause", - "llvm.x86.sse2.pavg.b" => "__builtin_ia32_pavgb128", - "llvm.x86.sse2.pavg.w" => "__builtin_ia32_pavgw128", - "llvm.x86.sse2.pmadd.wd" => "__builtin_ia32_pmaddwd128", - "llvm.x86.sse2.pmaxs.w" => "__builtin_ia32_pmaxsw128", - "llvm.x86.sse2.pmaxu.b" => "__builtin_ia32_pmaxub128", - "llvm.x86.sse2.pmins.w" => "__builtin_ia32_pminsw128", - "llvm.x86.sse2.pminu.b" => "__builtin_ia32_pminub128", - "llvm.x86.sse2.pmovmskb.128" => "__builtin_ia32_pmovmskb128", - "llvm.x86.sse2.pmulh.w" => "__builtin_ia32_pmulhw128", - "llvm.x86.sse2.pmulhu.w" => "__builtin_ia32_pmulhuw128", - "llvm.x86.sse2.pmulu.dq" => "__builtin_ia32_pmuludq128", - 
"llvm.x86.sse2.psad.bw" => "__builtin_ia32_psadbw128", - "llvm.x86.sse2.pshuf.d" => "__builtin_ia32_pshufd", - "llvm.x86.sse2.pshufh.w" => "__builtin_ia32_pshufhw", - "llvm.x86.sse2.pshufl.w" => "__builtin_ia32_pshuflw", - "llvm.x86.sse2.psll.d" => "__builtin_ia32_pslld128", - "llvm.x86.sse2.psll.dq" => "__builtin_ia32_pslldqi128", - "llvm.x86.sse2.psll.dq.bs" => "__builtin_ia32_pslldqi128_byteshift", - "llvm.x86.sse2.psll.q" => "__builtin_ia32_psllq128", - "llvm.x86.sse2.psll.w" => "__builtin_ia32_psllw128", - "llvm.x86.sse2.pslli.d" => "__builtin_ia32_pslldi128", - "llvm.x86.sse2.pslli.q" => "__builtin_ia32_psllqi128", - "llvm.x86.sse2.pslli.w" => "__builtin_ia32_psllwi128", - "llvm.x86.sse2.psra.d" => "__builtin_ia32_psrad128", - "llvm.x86.sse2.psra.w" => "__builtin_ia32_psraw128", - "llvm.x86.sse2.psrai.d" => "__builtin_ia32_psradi128", - "llvm.x86.sse2.psrai.w" => "__builtin_ia32_psrawi128", - "llvm.x86.sse2.psrl.d" => "__builtin_ia32_psrld128", - "llvm.x86.sse2.psrl.dq" => "__builtin_ia32_psrldqi128", - "llvm.x86.sse2.psrl.dq.bs" => "__builtin_ia32_psrldqi128_byteshift", - "llvm.x86.sse2.psrl.q" => "__builtin_ia32_psrlq128", - "llvm.x86.sse2.psrl.w" => "__builtin_ia32_psrlw128", - "llvm.x86.sse2.psrli.d" => "__builtin_ia32_psrldi128", - "llvm.x86.sse2.psrli.q" => "__builtin_ia32_psrlqi128", - "llvm.x86.sse2.psrli.w" => "__builtin_ia32_psrlwi128", - "llvm.x86.sse2.psubs.b" => "__builtin_ia32_psubsb128", - "llvm.x86.sse2.psubs.w" => "__builtin_ia32_psubsw128", - "llvm.x86.sse2.psubus.b" => "__builtin_ia32_psubusb128", - "llvm.x86.sse2.psubus.w" => "__builtin_ia32_psubusw128", - "llvm.x86.sse2.sqrt.pd" => "__builtin_ia32_sqrtpd", - "llvm.x86.sse2.sqrt.sd" => "__builtin_ia32_sqrtsd", - "llvm.x86.sse2.storel.dq" => "__builtin_ia32_storelv4si", - "llvm.x86.sse2.storeu.dq" => "__builtin_ia32_storedqu", - "llvm.x86.sse2.storeu.pd" => "__builtin_ia32_storeupd", - "llvm.x86.sse2.sub.sd" => "__builtin_ia32_subsd", - "llvm.x86.sse2.ucomieq.sd" => "__builtin_ia32_ucomisdeq", - "llvm.x86.sse2.ucomige.sd" => "__builtin_ia32_ucomisdge", - "llvm.x86.sse2.ucomigt.sd" => "__builtin_ia32_ucomisdgt", - "llvm.x86.sse2.ucomile.sd" => "__builtin_ia32_ucomisdle", - "llvm.x86.sse2.ucomilt.sd" => "__builtin_ia32_ucomisdlt", - "llvm.x86.sse2.ucomineq.sd" => "__builtin_ia32_ucomisdneq", - "llvm.x86.sse3.addsub.pd" => "__builtin_ia32_addsubpd", - "llvm.x86.sse3.addsub.ps" => "__builtin_ia32_addsubps", - "llvm.x86.sse3.hadd.pd" => "__builtin_ia32_haddpd", - "llvm.x86.sse3.hadd.ps" => "__builtin_ia32_haddps", - "llvm.x86.sse3.hsub.pd" => "__builtin_ia32_hsubpd", - "llvm.x86.sse3.hsub.ps" => "__builtin_ia32_hsubps", - "llvm.x86.sse3.ldu.dq" => "__builtin_ia32_lddqu", - "llvm.x86.sse3.monitor" => "__builtin_ia32_monitor", - "llvm.x86.sse3.mwait" => "__builtin_ia32_mwait", - "llvm.x86.sse41.blendpd" => "__builtin_ia32_blendpd", - "llvm.x86.sse41.blendps" => "__builtin_ia32_blendps", - "llvm.x86.sse41.blendvpd" => "__builtin_ia32_blendvpd", - "llvm.x86.sse41.blendvps" => "__builtin_ia32_blendvps", - "llvm.x86.sse41.dppd" => "__builtin_ia32_dppd", - "llvm.x86.sse41.dpps" => "__builtin_ia32_dpps", - "llvm.x86.sse41.extractps" => "__builtin_ia32_extractps128", - "llvm.x86.sse41.insertps" => "__builtin_ia32_insertps128", - "llvm.x86.sse41.movntdqa" => "__builtin_ia32_movntdqa", - "llvm.x86.sse41.mpsadbw" => "__builtin_ia32_mpsadbw128", - "llvm.x86.sse41.packusdw" => "__builtin_ia32_packusdw128", - "llvm.x86.sse41.pblendvb" => "__builtin_ia32_pblendvb128", - "llvm.x86.sse41.pblendw" => "__builtin_ia32_pblendw128", - 
"llvm.x86.sse41.phminposuw" => "__builtin_ia32_phminposuw128", - "llvm.x86.sse41.pmaxsb" => "__builtin_ia32_pmaxsb128", - "llvm.x86.sse41.pmaxsd" => "__builtin_ia32_pmaxsd128", - "llvm.x86.sse41.pmaxud" => "__builtin_ia32_pmaxud128", - "llvm.x86.sse41.pmaxuw" => "__builtin_ia32_pmaxuw128", - "llvm.x86.sse41.pminsb" => "__builtin_ia32_pminsb128", - "llvm.x86.sse41.pminsd" => "__builtin_ia32_pminsd128", - "llvm.x86.sse41.pminud" => "__builtin_ia32_pminud128", - "llvm.x86.sse41.pminuw" => "__builtin_ia32_pminuw128", - "llvm.x86.sse41.pmovsxbd" => "__builtin_ia32_pmovsxbd128", - "llvm.x86.sse41.pmovsxbq" => "__builtin_ia32_pmovsxbq128", - "llvm.x86.sse41.pmovsxbw" => "__builtin_ia32_pmovsxbw128", - "llvm.x86.sse41.pmovsxdq" => "__builtin_ia32_pmovsxdq128", - "llvm.x86.sse41.pmovsxwd" => "__builtin_ia32_pmovsxwd128", - "llvm.x86.sse41.pmovsxwq" => "__builtin_ia32_pmovsxwq128", - "llvm.x86.sse41.pmovzxbd" => "__builtin_ia32_pmovzxbd128", - "llvm.x86.sse41.pmovzxbq" => "__builtin_ia32_pmovzxbq128", - "llvm.x86.sse41.pmovzxbw" => "__builtin_ia32_pmovzxbw128", - "llvm.x86.sse41.pmovzxdq" => "__builtin_ia32_pmovzxdq128", - "llvm.x86.sse41.pmovzxwd" => "__builtin_ia32_pmovzxwd128", - "llvm.x86.sse41.pmovzxwq" => "__builtin_ia32_pmovzxwq128", - "llvm.x86.sse41.pmuldq" => "__builtin_ia32_pmuldq128", - "llvm.x86.sse41.ptestc" => "__builtin_ia32_ptestc128", - "llvm.x86.sse41.ptestnzc" => "__builtin_ia32_ptestnzc128", - "llvm.x86.sse41.ptestz" => "__builtin_ia32_ptestz128", - "llvm.x86.sse41.round.pd" => "__builtin_ia32_roundpd", - "llvm.x86.sse41.round.ps" => "__builtin_ia32_roundps", - "llvm.x86.sse41.round.sd" => "__builtin_ia32_roundsd", - "llvm.x86.sse41.round.ss" => "__builtin_ia32_roundss", - "llvm.x86.sse42.crc32.32.16" => "__builtin_ia32_crc32hi", - "llvm.x86.sse42.crc32.32.32" => "__builtin_ia32_crc32si", - "llvm.x86.sse42.crc32.32.8" => "__builtin_ia32_crc32qi", - "llvm.x86.sse42.crc32.64.64" => "__builtin_ia32_crc32di", - "llvm.x86.sse42.pcmpestri128" => "__builtin_ia32_pcmpestri128", - "llvm.x86.sse42.pcmpestria128" => "__builtin_ia32_pcmpestria128", - "llvm.x86.sse42.pcmpestric128" => "__builtin_ia32_pcmpestric128", - "llvm.x86.sse42.pcmpestrio128" => "__builtin_ia32_pcmpestrio128", - "llvm.x86.sse42.pcmpestris128" => "__builtin_ia32_pcmpestris128", - "llvm.x86.sse42.pcmpestriz128" => "__builtin_ia32_pcmpestriz128", - "llvm.x86.sse42.pcmpestrm128" => "__builtin_ia32_pcmpestrm128", - "llvm.x86.sse42.pcmpistri128" => "__builtin_ia32_pcmpistri128", - "llvm.x86.sse42.pcmpistria128" => "__builtin_ia32_pcmpistria128", - "llvm.x86.sse42.pcmpistric128" => "__builtin_ia32_pcmpistric128", - "llvm.x86.sse42.pcmpistrio128" => "__builtin_ia32_pcmpistrio128", - "llvm.x86.sse42.pcmpistris128" => "__builtin_ia32_pcmpistris128", - "llvm.x86.sse42.pcmpistriz128" => "__builtin_ia32_pcmpistriz128", - "llvm.x86.sse42.pcmpistrm128" => "__builtin_ia32_pcmpistrm128", - "llvm.x86.sse4a.extrq" => "__builtin_ia32_extrq", - "llvm.x86.sse4a.extrqi" => "__builtin_ia32_extrqi", - "llvm.x86.sse4a.insertq" => "__builtin_ia32_insertq", - "llvm.x86.sse4a.insertqi" => "__builtin_ia32_insertqi", - "llvm.x86.sse4a.movnt.sd" => "__builtin_ia32_movntsd", - "llvm.x86.sse4a.movnt.ss" => "__builtin_ia32_movntss", - "llvm.x86.ssse3.pabs.b.128" => "__builtin_ia32_pabsb128", - "llvm.x86.ssse3.pabs.d.128" => "__builtin_ia32_pabsd128", - "llvm.x86.ssse3.pabs.w.128" => "__builtin_ia32_pabsw128", - "llvm.x86.ssse3.phadd.d.128" => "__builtin_ia32_phaddd128", - "llvm.x86.ssse3.phadd.sw.128" => "__builtin_ia32_phaddsw128", - 
"llvm.x86.ssse3.phadd.w.128" => "__builtin_ia32_phaddw128", - "llvm.x86.ssse3.phsub.d.128" => "__builtin_ia32_phsubd128", - "llvm.x86.ssse3.phsub.sw.128" => "__builtin_ia32_phsubsw128", - "llvm.x86.ssse3.phsub.w.128" => "__builtin_ia32_phsubw128", - "llvm.x86.ssse3.pmadd.ub.sw.128" => "__builtin_ia32_pmaddubsw128", - "llvm.x86.ssse3.pmul.hr.sw.128" => "__builtin_ia32_pmulhrsw128", - "llvm.x86.ssse3.pshuf.b.128" => "__builtin_ia32_pshufb128", - "llvm.x86.ssse3.psign.b.128" => "__builtin_ia32_psignb128", - "llvm.x86.ssse3.psign.d.128" => "__builtin_ia32_psignd128", - "llvm.x86.ssse3.psign.w.128" => "__builtin_ia32_psignw128", - "llvm.x86.sttilecfg" => "__builtin_ia32_tile_storeconfig", - "llvm.x86.stui" => "__builtin_ia32_stui", - "llvm.x86.subborrow.u32" => "__builtin_ia32_subborrow_u32", - "llvm.x86.subborrow.u64" => "__builtin_ia32_subborrow_u64", - "llvm.x86.t2rpntlvwz0" => "__builtin_ia32_t2rpntlvwz0", - "llvm.x86.t2rpntlvwz0rs" => "__builtin_ia32_t2rpntlvwz0rs", - "llvm.x86.t2rpntlvwz0rst1" => "__builtin_ia32_t2rpntlvwz0rst1", - "llvm.x86.t2rpntlvwz0t1" => "__builtin_ia32_t2rpntlvwz0t1", - "llvm.x86.t2rpntlvwz1" => "__builtin_ia32_t2rpntlvwz1", - "llvm.x86.t2rpntlvwz1rs" => "__builtin_ia32_t2rpntlvwz1rs", - "llvm.x86.t2rpntlvwz1rst1" => "__builtin_ia32_t2rpntlvwz1rst1", - "llvm.x86.t2rpntlvwz1t1" => "__builtin_ia32_t2rpntlvwz1t1", - "llvm.x86.tbm.bextri.u32" => "__builtin_ia32_bextri_u32", - "llvm.x86.tbm.bextri.u64" => "__builtin_ia32_bextri_u64", - "llvm.x86.tcmmimfp16ps" => "__builtin_ia32_tcmmimfp16ps", - "llvm.x86.tcmmimfp16ps.internal" => "__builtin_ia32_tcmmimfp16ps_internal", - "llvm.x86.tcmmrlfp16ps" => "__builtin_ia32_tcmmrlfp16ps", - "llvm.x86.tcmmrlfp16ps.internal" => "__builtin_ia32_tcmmrlfp16ps_internal", - "llvm.x86.tconjtcmmimfp16ps" => "__builtin_ia32_tconjtcmmimfp16ps", - "llvm.x86.tconjtcmmimfp16ps.internal" => "__builtin_ia32_tconjtcmmimfp16ps_internal", - "llvm.x86.tconjtfp16" => "__builtin_ia32_tconjtfp16", - "llvm.x86.tconjtfp16.internal" => "__builtin_ia32_tconjtfp16_internal", - "llvm.x86.tcvtrowd2ps" => "__builtin_ia32_tcvtrowd2ps", - "llvm.x86.tcvtrowd2ps.internal" => "__builtin_ia32_tcvtrowd2ps_internal", - "llvm.x86.tcvtrowps2bf16h" => "__builtin_ia32_tcvtrowps2bf16h", - "llvm.x86.tcvtrowps2bf16h.internal" => "__builtin_ia32_tcvtrowps2bf16h_internal", - "llvm.x86.tcvtrowps2bf16l" => "__builtin_ia32_tcvtrowps2bf16l", - "llvm.x86.tcvtrowps2bf16l.internal" => "__builtin_ia32_tcvtrowps2bf16l_internal", - "llvm.x86.tcvtrowps2phh" => "__builtin_ia32_tcvtrowps2phh", - "llvm.x86.tcvtrowps2phh.internal" => "__builtin_ia32_tcvtrowps2phh_internal", - "llvm.x86.tcvtrowps2phl" => "__builtin_ia32_tcvtrowps2phl", - "llvm.x86.tcvtrowps2phl.internal" => "__builtin_ia32_tcvtrowps2phl_internal", - "llvm.x86.tdpbf16ps" => "__builtin_ia32_tdpbf16ps", - "llvm.x86.tdpbf16ps.internal" => "__builtin_ia32_tdpbf16ps_internal", - "llvm.x86.tdpbf8ps" => "__builtin_ia32_tdpbf8ps", - "llvm.x86.tdpbf8ps.internal" => "__builtin_ia32_tdpbf8ps_internal", - "llvm.x86.tdpbhf8ps" => "__builtin_ia32_tdpbhf8ps", - "llvm.x86.tdpbhf8ps.internal" => "__builtin_ia32_tdpbhf8ps_internal", - "llvm.x86.tdpbssd" => "__builtin_ia32_tdpbssd", - "llvm.x86.tdpbssd.internal" => "__builtin_ia32_tdpbssd_internal", - "llvm.x86.tdpbsud" => "__builtin_ia32_tdpbsud", - "llvm.x86.tdpbsud.internal" => "__builtin_ia32_tdpbsud_internal", - "llvm.x86.tdpbusd" => "__builtin_ia32_tdpbusd", - "llvm.x86.tdpbusd.internal" => "__builtin_ia32_tdpbusd_internal", - "llvm.x86.tdpbuud" => "__builtin_ia32_tdpbuud", - 
"llvm.x86.tdpbuud.internal" => "__builtin_ia32_tdpbuud_internal", - "llvm.x86.tdpfp16ps" => "__builtin_ia32_tdpfp16ps", - "llvm.x86.tdpfp16ps.internal" => "__builtin_ia32_tdpfp16ps_internal", - "llvm.x86.tdphbf8ps" => "__builtin_ia32_tdphbf8ps", - "llvm.x86.tdphbf8ps.internal" => "__builtin_ia32_tdphbf8ps_internal", - "llvm.x86.tdphf8ps" => "__builtin_ia32_tdphf8ps", - "llvm.x86.tdphf8ps.internal" => "__builtin_ia32_tdphf8ps_internal", - "llvm.x86.testui" => "__builtin_ia32_testui", - "llvm.x86.tileloadd64" => "__builtin_ia32_tileloadd64", - "llvm.x86.tileloadd64.internal" => "__builtin_ia32_tileloadd64_internal", - "llvm.x86.tileloaddrs64" => "__builtin_ia32_tileloaddrs64", - "llvm.x86.tileloaddrs64.internal" => "__builtin_ia32_tileloaddrs64_internal", - "llvm.x86.tileloaddrst164" => "__builtin_ia32_tileloaddrst164", - "llvm.x86.tileloaddrst164.internal" => "__builtin_ia32_tileloaddrst164_internal", - "llvm.x86.tileloaddt164" => "__builtin_ia32_tileloaddt164", - "llvm.x86.tileloaddt164.internal" => "__builtin_ia32_tileloaddt164_internal", - "llvm.x86.tilemovrow" => "__builtin_ia32_tilemovrow", - "llvm.x86.tilemovrow.internal" => "__builtin_ia32_tilemovrow_internal", - "llvm.x86.tilerelease" => "__builtin_ia32_tilerelease", - "llvm.x86.tilestored64" => "__builtin_ia32_tilestored64", - "llvm.x86.tilestored64.internal" => "__builtin_ia32_tilestored64_internal", - "llvm.x86.tilezero" => "__builtin_ia32_tilezero", - "llvm.x86.tilezero.internal" => "__builtin_ia32_tilezero_internal", - "llvm.x86.tmmultf32ps" => "__builtin_ia32_tmmultf32ps", - "llvm.x86.tmmultf32ps.internal" => "__builtin_ia32_tmmultf32ps_internal", - "llvm.x86.tpause" => "__builtin_ia32_tpause", - "llvm.x86.ttcmmimfp16ps" => "__builtin_ia32_ttcmmimfp16ps", - "llvm.x86.ttcmmimfp16ps.internal" => "__builtin_ia32_ttcmmimfp16ps_internal", - "llvm.x86.ttcmmrlfp16ps" => "__builtin_ia32_ttcmmrlfp16ps", - "llvm.x86.ttcmmrlfp16ps.internal" => "__builtin_ia32_ttcmmrlfp16ps_internal", - "llvm.x86.ttdpbf16ps" => "__builtin_ia32_ttdpbf16ps", - "llvm.x86.ttdpbf16ps.internal" => "__builtin_ia32_ttdpbf16ps_internal", - "llvm.x86.ttdpfp16ps" => "__builtin_ia32_ttdpfp16ps", - "llvm.x86.ttdpfp16ps.internal" => "__builtin_ia32_ttdpfp16ps_internal", - "llvm.x86.ttmmultf32ps" => "__builtin_ia32_ttmmultf32ps", - "llvm.x86.ttmmultf32ps.internal" => "__builtin_ia32_ttmmultf32ps_internal", - "llvm.x86.ttransposed" => "__builtin_ia32_ttransposed", - "llvm.x86.ttransposed.internal" => "__builtin_ia32_ttransposed_internal", - "llvm.x86.umonitor" => "__builtin_ia32_umonitor", - "llvm.x86.umwait" => "__builtin_ia32_umwait", - "llvm.x86.urdmsr" => "__builtin_ia32_urdmsr", - "llvm.x86.uwrmsr" => "__builtin_ia32_uwrmsr", - "llvm.x86.vbcstnebf162ps128" => "__builtin_ia32_vbcstnebf162ps128", - "llvm.x86.vbcstnebf162ps256" => "__builtin_ia32_vbcstnebf162ps256", - "llvm.x86.vbcstnesh2ps128" => "__builtin_ia32_vbcstnesh2ps128", - "llvm.x86.vbcstnesh2ps256" => "__builtin_ia32_vbcstnesh2ps256", - "llvm.x86.vcvtneebf162ps128" => "__builtin_ia32_vcvtneebf162ps128", - "llvm.x86.vcvtneebf162ps256" => "__builtin_ia32_vcvtneebf162ps256", - "llvm.x86.vcvtneeph2ps128" => "__builtin_ia32_vcvtneeph2ps128", - "llvm.x86.vcvtneeph2ps256" => "__builtin_ia32_vcvtneeph2ps256", - "llvm.x86.vcvtneobf162ps128" => "__builtin_ia32_vcvtneobf162ps128", - "llvm.x86.vcvtneobf162ps256" => "__builtin_ia32_vcvtneobf162ps256", - "llvm.x86.vcvtneoph2ps128" => "__builtin_ia32_vcvtneoph2ps128", - "llvm.x86.vcvtneoph2ps256" => "__builtin_ia32_vcvtneoph2ps256", - "llvm.x86.vcvtneps2bf16128" => 
"__builtin_ia32_vcvtneps2bf16128", - "llvm.x86.vcvtneps2bf16256" => "__builtin_ia32_vcvtneps2bf16256", - "llvm.x86.vcvtph2ps.128" => "__builtin_ia32_vcvtph2ps", - "llvm.x86.vcvtph2ps.256" => "__builtin_ia32_vcvtph2ps256", - "llvm.x86.vcvtps2ph.128" => "__builtin_ia32_vcvtps2ph", - "llvm.x86.vcvtps2ph.256" => "__builtin_ia32_vcvtps2ph256", - "llvm.x86.vgf2p8affineinvqb.128" => "__builtin_ia32_vgf2p8affineinvqb_v16qi", - "llvm.x86.vgf2p8affineinvqb.256" => "__builtin_ia32_vgf2p8affineinvqb_v32qi", - "llvm.x86.vgf2p8affineinvqb.512" => "__builtin_ia32_vgf2p8affineinvqb_v64qi", - "llvm.x86.vgf2p8affineqb.128" => "__builtin_ia32_vgf2p8affineqb_v16qi", - "llvm.x86.vgf2p8affineqb.256" => "__builtin_ia32_vgf2p8affineqb_v32qi", - "llvm.x86.vgf2p8affineqb.512" => "__builtin_ia32_vgf2p8affineqb_v64qi", - "llvm.x86.vgf2p8mulb.128" => "__builtin_ia32_vgf2p8mulb_v16qi", - "llvm.x86.vgf2p8mulb.256" => "__builtin_ia32_vgf2p8mulb_v32qi", - "llvm.x86.vgf2p8mulb.512" => "__builtin_ia32_vgf2p8mulb_v64qi", - "llvm.x86.vsha512msg1" => "__builtin_ia32_vsha512msg1", - "llvm.x86.vsha512msg2" => "__builtin_ia32_vsha512msg2", - "llvm.x86.vsha512rnds2" => "__builtin_ia32_vsha512rnds2", - "llvm.x86.vsm3msg1" => "__builtin_ia32_vsm3msg1", - "llvm.x86.vsm3msg2" => "__builtin_ia32_vsm3msg2", - "llvm.x86.vsm3rnds2" => "__builtin_ia32_vsm3rnds2", - "llvm.x86.vsm4key4128" => "__builtin_ia32_vsm4key4128", - "llvm.x86.vsm4key4256" => "__builtin_ia32_vsm4key4256", - "llvm.x86.vsm4key4512" => "__builtin_ia32_vsm4key4512", - "llvm.x86.vsm4rnds4128" => "__builtin_ia32_vsm4rnds4128", - "llvm.x86.vsm4rnds4256" => "__builtin_ia32_vsm4rnds4256", - "llvm.x86.vsm4rnds4512" => "__builtin_ia32_vsm4rnds4512", - "llvm.x86.wbinvd" => "__builtin_ia32_wbinvd", - "llvm.x86.wbnoinvd" => "__builtin_ia32_wbnoinvd", - "llvm.x86.wrfsbase.32" => "__builtin_ia32_wrfsbase32", - "llvm.x86.wrfsbase.64" => "__builtin_ia32_wrfsbase64", - "llvm.x86.wrgsbase.32" => "__builtin_ia32_wrgsbase32", - "llvm.x86.wrgsbase.64" => "__builtin_ia32_wrgsbase64", - "llvm.x86.wrpkru" => "__builtin_ia32_wrpkru", - "llvm.x86.wrssd" => "__builtin_ia32_wrssd", - "llvm.x86.wrssq" => "__builtin_ia32_wrssq", - "llvm.x86.wrussd" => "__builtin_ia32_wrussd", - "llvm.x86.wrussq" => "__builtin_ia32_wrussq", - "llvm.x86.xabort" => "__builtin_ia32_xabort", - "llvm.x86.xbegin" => "__builtin_ia32_xbegin", - "llvm.x86.xend" => "__builtin_ia32_xend", - "llvm.x86.xop.vfrcz.pd" => "__builtin_ia32_vfrczpd", - "llvm.x86.xop.vfrcz.pd.256" => "__builtin_ia32_vfrczpd256", - "llvm.x86.xop.vfrcz.ps" => "__builtin_ia32_vfrczps", - "llvm.x86.xop.vfrcz.ps.256" => "__builtin_ia32_vfrczps256", - "llvm.x86.xop.vfrcz.sd" => "__builtin_ia32_vfrczsd", - "llvm.x86.xop.vfrcz.ss" => "__builtin_ia32_vfrczss", - "llvm.x86.xop.vpcmov" => "__builtin_ia32_vpcmov", - "llvm.x86.xop.vpcmov.256" => "__builtin_ia32_vpcmov_256", - "llvm.x86.xop.vpcomb" => "__builtin_ia32_vpcomb", - "llvm.x86.xop.vpcomd" => "__builtin_ia32_vpcomd", - "llvm.x86.xop.vpcomq" => "__builtin_ia32_vpcomq", - "llvm.x86.xop.vpcomub" => "__builtin_ia32_vpcomub", - "llvm.x86.xop.vpcomud" => "__builtin_ia32_vpcomud", - "llvm.x86.xop.vpcomuq" => "__builtin_ia32_vpcomuq", - "llvm.x86.xop.vpcomuw" => "__builtin_ia32_vpcomuw", - "llvm.x86.xop.vpcomw" => "__builtin_ia32_vpcomw", - "llvm.x86.xop.vpermil2pd" => "__builtin_ia32_vpermil2pd", - "llvm.x86.xop.vpermil2pd.256" => "__builtin_ia32_vpermil2pd256", - "llvm.x86.xop.vpermil2ps" => "__builtin_ia32_vpermil2ps", - "llvm.x86.xop.vpermil2ps.256" => "__builtin_ia32_vpermil2ps256", - "llvm.x86.xop.vphaddbd" 
=> "__builtin_ia32_vphaddbd", - "llvm.x86.xop.vphaddbq" => "__builtin_ia32_vphaddbq", - "llvm.x86.xop.vphaddbw" => "__builtin_ia32_vphaddbw", - "llvm.x86.xop.vphadddq" => "__builtin_ia32_vphadddq", - "llvm.x86.xop.vphaddubd" => "__builtin_ia32_vphaddubd", - "llvm.x86.xop.vphaddubq" => "__builtin_ia32_vphaddubq", - "llvm.x86.xop.vphaddubw" => "__builtin_ia32_vphaddubw", - "llvm.x86.xop.vphaddudq" => "__builtin_ia32_vphaddudq", - "llvm.x86.xop.vphadduwd" => "__builtin_ia32_vphadduwd", - "llvm.x86.xop.vphadduwq" => "__builtin_ia32_vphadduwq", - "llvm.x86.xop.vphaddwd" => "__builtin_ia32_vphaddwd", - "llvm.x86.xop.vphaddwq" => "__builtin_ia32_vphaddwq", - "llvm.x86.xop.vphsubbw" => "__builtin_ia32_vphsubbw", - "llvm.x86.xop.vphsubdq" => "__builtin_ia32_vphsubdq", - "llvm.x86.xop.vphsubwd" => "__builtin_ia32_vphsubwd", - "llvm.x86.xop.vpmacsdd" => "__builtin_ia32_vpmacsdd", - "llvm.x86.xop.vpmacsdqh" => "__builtin_ia32_vpmacsdqh", - "llvm.x86.xop.vpmacsdql" => "__builtin_ia32_vpmacsdql", - "llvm.x86.xop.vpmacssdd" => "__builtin_ia32_vpmacssdd", - "llvm.x86.xop.vpmacssdqh" => "__builtin_ia32_vpmacssdqh", - "llvm.x86.xop.vpmacssdql" => "__builtin_ia32_vpmacssdql", - "llvm.x86.xop.vpmacsswd" => "__builtin_ia32_vpmacsswd", - "llvm.x86.xop.vpmacssww" => "__builtin_ia32_vpmacssww", - "llvm.x86.xop.vpmacswd" => "__builtin_ia32_vpmacswd", - "llvm.x86.xop.vpmacsww" => "__builtin_ia32_vpmacsww", - "llvm.x86.xop.vpmadcsswd" => "__builtin_ia32_vpmadcsswd", - "llvm.x86.xop.vpmadcswd" => "__builtin_ia32_vpmadcswd", - "llvm.x86.xop.vpperm" => "__builtin_ia32_vpperm", - "llvm.x86.xop.vprotb" => "__builtin_ia32_vprotb", - "llvm.x86.xop.vprotbi" => "__builtin_ia32_vprotbi", - "llvm.x86.xop.vprotd" => "__builtin_ia32_vprotd", - "llvm.x86.xop.vprotdi" => "__builtin_ia32_vprotdi", - "llvm.x86.xop.vprotq" => "__builtin_ia32_vprotq", - "llvm.x86.xop.vprotqi" => "__builtin_ia32_vprotqi", - "llvm.x86.xop.vprotw" => "__builtin_ia32_vprotw", - "llvm.x86.xop.vprotwi" => "__builtin_ia32_vprotwi", - "llvm.x86.xop.vpshab" => "__builtin_ia32_vpshab", - "llvm.x86.xop.vpshad" => "__builtin_ia32_vpshad", - "llvm.x86.xop.vpshaq" => "__builtin_ia32_vpshaq", - "llvm.x86.xop.vpshaw" => "__builtin_ia32_vpshaw", - "llvm.x86.xop.vpshlb" => "__builtin_ia32_vpshlb", - "llvm.x86.xop.vpshld" => "__builtin_ia32_vpshld", - "llvm.x86.xop.vpshlq" => "__builtin_ia32_vpshlq", - "llvm.x86.xop.vpshlw" => "__builtin_ia32_vpshlw", - "llvm.x86.xresldtrk" => "__builtin_ia32_xresldtrk", - "llvm.x86.xsusldtrk" => "__builtin_ia32_xsusldtrk", - "llvm.x86.xtest" => "__builtin_ia32_xtest", - // xcore - "llvm.xcore.bitrev" => "__builtin_bitrev", - "llvm.xcore.getid" => "__builtin_getid", - "llvm.xcore.getps" => "__builtin_getps", - "llvm.xcore.setps" => "__builtin_setps", - _ => unimplemented!("***** unsupported LLVM intrinsic {}", name), +/// Translate a given LLVM intrinsic name to an equivalent GCC one. 
+fn map_arch_intrinsic(name: &str) -> &str { + let Some(name) = name.strip_prefix("llvm.") else { + unimplemented!("***** unsupported LLVM intrinsic {}", name) + }; + let Some((arch, name)) = name.split_once('.') else { + unimplemented!("***** unsupported LLVM intrinsic {}", name) + }; + match arch { + "AMDGPU" => { + #[allow(non_snake_case)] + fn AMDGPU(name: &str) -> &str { + match name { + // AMDGPU + "div.fixup.f32" => "__builtin_amdgpu_div_fixup", + "div.fixup.f64" => "__builtin_amdgpu_div_fixup", + "div.fixup.v2f64" => "__builtin_amdgpu_div_fixup", + "div.fixup.v4f32" => "__builtin_amdgpu_div_fixup", + "div.fmas.f32" => "__builtin_amdgpu_div_fmas", + "div.fmas.f64" => "__builtin_amdgpu_div_fmas", + "div.fmas.v2f64" => "__builtin_amdgpu_div_fmas", + "div.fmas.v4f32" => "__builtin_amdgpu_div_fmas", + "ldexp.f32" => "__builtin_amdgpu_ldexp", + "ldexp.f64" => "__builtin_amdgpu_ldexp", + "ldexp.v2f64" => "__builtin_amdgpu_ldexp", + "ldexp.v4f32" => "__builtin_amdgpu_ldexp", + "rcp.f32" => "__builtin_amdgpu_rcp", + "rcp.f64" => "__builtin_amdgpu_rcp", + "rcp.v2f64" => "__builtin_amdgpu_rcp", + "rcp.v4f32" => "__builtin_amdgpu_rcp", + "rsq.clamped.f32" => "__builtin_amdgpu_rsq_clamped", + "rsq.clamped.f64" => "__builtin_amdgpu_rsq_clamped", + "rsq.clamped.v2f64" => "__builtin_amdgpu_rsq_clamped", + "rsq.clamped.v4f32" => "__builtin_amdgpu_rsq_clamped", + "rsq.f32" => "__builtin_amdgpu_rsq", + "rsq.f64" => "__builtin_amdgpu_rsq", + "rsq.v2f64" => "__builtin_amdgpu_rsq", + "rsq.v4f32" => "__builtin_amdgpu_rsq", + "trig.preop.f32" => "__builtin_amdgpu_trig_preop", + "trig.preop.f64" => "__builtin_amdgpu_trig_preop", + "trig.preop.v2f64" => "__builtin_amdgpu_trig_preop", + "trig.preop.v4f32" => "__builtin_amdgpu_trig_preop", + _ => unimplemented!("***** unsupported LLVM intrinsic {}", name), + } + } + AMDGPU(name) + } + "aarch64" => { + #[allow(non_snake_case)] + fn aarch64(name: &str) -> &str { + match name { + // aarch64 + "chkfeat" => "__builtin_arm_chkfeat", + "dmb" => "__builtin_arm_dmb", + "dsb" => "__builtin_arm_dsb", + "gcspopm" => "__builtin_arm_gcspopm", + "gcsss" => "__builtin_arm_gcsss", + "isb" => "__builtin_arm_isb", + "prefetch" => "__builtin_arm_prefetch", + "sme.in.streaming.mode" => "__builtin_arm_in_streaming_mode", + "sve.aesd" => "__builtin_sve_svaesd_u8", + "sve.aese" => "__builtin_sve_svaese_u8", + "sve.aesimc" => "__builtin_sve_svaesimc_u8", + "sve.aesmc" => "__builtin_sve_svaesmc_u8", + "sve.rax1" => "__builtin_sve_svrax1_u64", + "sve.rdffr" => "__builtin_sve_svrdffr", + "sve.rdffr.z" => "__builtin_sve_svrdffr_z", + "sve.setffr" => "__builtin_sve_svsetffr", + "sve.sm4e" => "__builtin_sve_svsm4e_u32", + "sve.sm4ekey" => "__builtin_sve_svsm4ekey_u32", + "sve.wrffr" => "__builtin_sve_svwrffr", + "tcancel" => "__builtin_arm_tcancel", + "tcommit" => "__builtin_arm_tcommit", + "tstart" => "__builtin_arm_tstart", + "ttest" => "__builtin_arm_ttest", + _ => unimplemented!("***** unsupported LLVM intrinsic {}", name), + } + } + aarch64(name) + } + "amdgcn" => { + #[allow(non_snake_case)] + fn amdgcn(name: &str) -> &str { + match name { + // amdgcn + "alignbyte" => "__builtin_amdgcn_alignbyte", + "ashr.pk.i8.i32" => "__builtin_amdgcn_ashr_pk_i8_i32", + "ashr.pk.u8.i32" => "__builtin_amdgcn_ashr_pk_u8_i32", + "buffer.wbinvl1" => "__builtin_amdgcn_buffer_wbinvl1", + "buffer.wbinvl1.sc" => "__builtin_amdgcn_buffer_wbinvl1_sc", + "buffer.wbinvl1.vol" => "__builtin_amdgcn_buffer_wbinvl1_vol", + "cubeid" => "__builtin_amdgcn_cubeid", + "cubema" => "__builtin_amdgcn_cubema", + "cubesc" 
=> "__builtin_amdgcn_cubesc", + "cubetc" => "__builtin_amdgcn_cubetc", + "cvt.f32.bf8" => "__builtin_amdgcn_cvt_f32_bf8", + "cvt.f32.fp8" => "__builtin_amdgcn_cvt_f32_fp8", + "cvt.off.f32.i4" => "__builtin_amdgcn_cvt_off_f32_i4", + "cvt.pk.bf8.f32" => "__builtin_amdgcn_cvt_pk_bf8_f32", + "cvt.pk.f32.bf8" => "__builtin_amdgcn_cvt_pk_f32_bf8", + "cvt.pk.f32.fp8" => "__builtin_amdgcn_cvt_pk_f32_fp8", + "cvt.pk.fp8.f32" => "__builtin_amdgcn_cvt_pk_fp8_f32", + "cvt.pk.i16" => "__builtin_amdgcn_cvt_pk_i16", + "cvt.pk.u16" => "__builtin_amdgcn_cvt_pk_u16", + "cvt.pk.u8.f32" => "__builtin_amdgcn_cvt_pk_u8_f32", + "cvt.pknorm.i16" => "__builtin_amdgcn_cvt_pknorm_i16", + "cvt.pknorm.u16" => "__builtin_amdgcn_cvt_pknorm_u16", + "cvt.pkrtz" => "__builtin_amdgcn_cvt_pkrtz", + "cvt.scalef32.2xpk16.bf6.f32" => "__builtin_amdgcn_cvt_scalef32_2xpk16_bf6_f32", + "cvt.scalef32.2xpk16.fp6.f32" => "__builtin_amdgcn_cvt_scalef32_2xpk16_fp6_f32", + "cvt.scalef32.f16.bf8" => "__builtin_amdgcn_cvt_scalef32_f16_bf8", + "cvt.scalef32.f16.fp8" => "__builtin_amdgcn_cvt_scalef32_f16_fp8", + "cvt.scalef32.f32.bf8" => "__builtin_amdgcn_cvt_scalef32_f32_bf8", + "cvt.scalef32.f32.fp8" => "__builtin_amdgcn_cvt_scalef32_f32_fp8", + "cvt.scalef32.pk.bf16.bf8" => "__builtin_amdgcn_cvt_scalef32_pk_bf16_bf8", + "cvt.scalef32.pk.bf16.fp4" => "__builtin_amdgcn_cvt_scalef32_pk_bf16_fp4", + "cvt.scalef32.pk.bf16.fp8" => "__builtin_amdgcn_cvt_scalef32_pk_bf16_fp8", + "cvt.scalef32.pk.bf8.bf16" => "__builtin_amdgcn_cvt_scalef32_pk_bf8_bf16", + "cvt.scalef32.pk.bf8.f16" => "__builtin_amdgcn_cvt_scalef32_pk_bf8_f16", + "cvt.scalef32.pk.bf8.f32" => "__builtin_amdgcn_cvt_scalef32_pk_bf8_f32", + "cvt.scalef32.pk.f16.bf8" => "__builtin_amdgcn_cvt_scalef32_pk_f16_bf8", + "cvt.scalef32.pk.f16.fp4" => "__builtin_amdgcn_cvt_scalef32_pk_f16_fp4", + "cvt.scalef32.pk.f16.fp8" => "__builtin_amdgcn_cvt_scalef32_pk_f16_fp8", + "cvt.scalef32.pk.f32.bf8" => "__builtin_amdgcn_cvt_scalef32_pk_f32_bf8", + "cvt.scalef32.pk.f32.fp4" => "__builtin_amdgcn_cvt_scalef32_pk_f32_fp4", + "cvt.scalef32.pk.f32.fp8" => "__builtin_amdgcn_cvt_scalef32_pk_f32_fp8", + "cvt.scalef32.pk.fp4.bf16" => "__builtin_amdgcn_cvt_scalef32_pk_fp4_bf16", + "cvt.scalef32.pk.fp4.f16" => "__builtin_amdgcn_cvt_scalef32_pk_fp4_f16", + "cvt.scalef32.pk.fp4.f32" => "__builtin_amdgcn_cvt_scalef32_pk_fp4_f32", + "cvt.scalef32.pk.fp8.bf16" => "__builtin_amdgcn_cvt_scalef32_pk_fp8_bf16", + "cvt.scalef32.pk.fp8.f16" => "__builtin_amdgcn_cvt_scalef32_pk_fp8_f16", + "cvt.scalef32.pk.fp8.f32" => "__builtin_amdgcn_cvt_scalef32_pk_fp8_f32", + "cvt.scalef32.pk32.bf16.bf6" => "__builtin_amdgcn_cvt_scalef32_pk32_bf16_bf6", + "cvt.scalef32.pk32.bf16.fp6" => "__builtin_amdgcn_cvt_scalef32_pk32_bf16_fp6", + "cvt.scalef32.pk32.bf6.bf16" => "__builtin_amdgcn_cvt_scalef32_pk32_bf6_bf16", + "cvt.scalef32.pk32.bf6.f16" => "__builtin_amdgcn_cvt_scalef32_pk32_bf6_f16", + "cvt.scalef32.pk32.f16.bf6" => "__builtin_amdgcn_cvt_scalef32_pk32_f16_bf6", + "cvt.scalef32.pk32.f16.fp6" => "__builtin_amdgcn_cvt_scalef32_pk32_f16_fp6", + "cvt.scalef32.pk32.f32.bf6" => "__builtin_amdgcn_cvt_scalef32_pk32_f32_bf6", + "cvt.scalef32.pk32.f32.fp6" => "__builtin_amdgcn_cvt_scalef32_pk32_f32_fp6", + "cvt.scalef32.pk32.fp6.bf16" => "__builtin_amdgcn_cvt_scalef32_pk32_fp6_bf16", + "cvt.scalef32.pk32.fp6.f16" => "__builtin_amdgcn_cvt_scalef32_pk32_fp6_f16", + "cvt.scalef32.sr.bf8.bf16" => "__builtin_amdgcn_cvt_scalef32_sr_bf8_bf16", + "cvt.scalef32.sr.bf8.f16" => "__builtin_amdgcn_cvt_scalef32_sr_bf8_f16", + "cvt.scalef32.sr.bf8.f32" 
=> "__builtin_amdgcn_cvt_scalef32_sr_bf8_f32", + "cvt.scalef32.sr.fp8.bf16" => "__builtin_amdgcn_cvt_scalef32_sr_fp8_bf16", + "cvt.scalef32.sr.fp8.f16" => "__builtin_amdgcn_cvt_scalef32_sr_fp8_f16", + "cvt.scalef32.sr.fp8.f32" => "__builtin_amdgcn_cvt_scalef32_sr_fp8_f32", + "cvt.scalef32.sr.pk.fp4.bf16" => "__builtin_amdgcn_cvt_scalef32_sr_pk_fp4_bf16", + "cvt.scalef32.sr.pk.fp4.f16" => "__builtin_amdgcn_cvt_scalef32_sr_pk_fp4_f16", + "cvt.scalef32.sr.pk.fp4.f32" => "__builtin_amdgcn_cvt_scalef32_sr_pk_fp4_f32", + "cvt.scalef32.sr.pk32.bf6.bf16" => { + "__builtin_amdgcn_cvt_scalef32_sr_pk32_bf6_bf16" + } + "cvt.scalef32.sr.pk32.bf6.f16" => { + "__builtin_amdgcn_cvt_scalef32_sr_pk32_bf6_f16" + } + "cvt.scalef32.sr.pk32.bf6.f32" => { + "__builtin_amdgcn_cvt_scalef32_sr_pk32_bf6_f32" + } + "cvt.scalef32.sr.pk32.fp6.bf16" => { + "__builtin_amdgcn_cvt_scalef32_sr_pk32_fp6_bf16" + } + "cvt.scalef32.sr.pk32.fp6.f16" => { + "__builtin_amdgcn_cvt_scalef32_sr_pk32_fp6_f16" + } + "cvt.scalef32.sr.pk32.fp6.f32" => { + "__builtin_amdgcn_cvt_scalef32_sr_pk32_fp6_f32" + } + "cvt.sr.bf16.f32" => "__builtin_amdgcn_cvt_sr_bf16_f32", + "cvt.sr.bf8.f32" => "__builtin_amdgcn_cvt_sr_bf8_f32", + "cvt.sr.f16.f32" => "__builtin_amdgcn_cvt_sr_f16_f32", + "cvt.sr.fp8.f32" => "__builtin_amdgcn_cvt_sr_fp8_f32", + "dispatch.id" => "__builtin_amdgcn_dispatch_id", + "dot4.f32.bf8.bf8" => "__builtin_amdgcn_dot4_f32_bf8_bf8", + "dot4.f32.bf8.fp8" => "__builtin_amdgcn_dot4_f32_bf8_fp8", + "dot4.f32.fp8.bf8" => "__builtin_amdgcn_dot4_f32_fp8_bf8", + "dot4.f32.fp8.fp8" => "__builtin_amdgcn_dot4_f32_fp8_fp8", + "ds.add.gs.reg.rtn" => "__builtin_amdgcn_ds_add_gs_reg_rtn", + "ds.bpermute" => "__builtin_amdgcn_ds_bpermute", + "ds.bpermute.fi.b32" => "__builtin_amdgcn_ds_bpermute_fi_b32", + "ds.gws.barrier" => "__builtin_amdgcn_ds_gws_barrier", + "ds.gws.init" => "__builtin_amdgcn_ds_gws_init", + "ds.gws.sema.br" => "__builtin_amdgcn_ds_gws_sema_br", + "ds.gws.sema.p" => "__builtin_amdgcn_ds_gws_sema_p", + "ds.gws.sema.release.all" => "__builtin_amdgcn_ds_gws_sema_release_all", + "ds.gws.sema.v" => "__builtin_amdgcn_ds_gws_sema_v", + "ds.permute" => "__builtin_amdgcn_ds_permute", + "ds.sub.gs.reg.rtn" => "__builtin_amdgcn_ds_sub_gs_reg_rtn", + "ds.swizzle" => "__builtin_amdgcn_ds_swizzle", + "endpgm" => "__builtin_amdgcn_endpgm", + "fdot2" => "__builtin_amdgcn_fdot2", + "fdot2.bf16.bf16" => "__builtin_amdgcn_fdot2_bf16_bf16", + "fdot2.f16.f16" => "__builtin_amdgcn_fdot2_f16_f16", + "fdot2.f32.bf16" => "__builtin_amdgcn_fdot2_f32_bf16", + "fdot2c.f32.bf16" => "__builtin_amdgcn_fdot2c_f32_bf16", + "fmul.legacy" => "__builtin_amdgcn_fmul_legacy", + "global.load.lds" => "__builtin_amdgcn_global_load_lds", + "groupstaticsize" => "__builtin_amdgcn_groupstaticsize", + "iglp.opt" => "__builtin_amdgcn_iglp_opt", + "implicit.buffer.ptr" => "__builtin_amdgcn_implicit_buffer_ptr", + "implicitarg.ptr" => "__builtin_amdgcn_implicitarg_ptr", + "interp.mov" => "__builtin_amdgcn_interp_mov", + "interp.p1" => "__builtin_amdgcn_interp_p1", + "interp.p1.f16" => "__builtin_amdgcn_interp_p1_f16", + "interp.p2" => "__builtin_amdgcn_interp_p2", + "interp.p2.f16" => "__builtin_amdgcn_interp_p2_f16", + "is.private" => "__builtin_amdgcn_is_private", + "is.shared" => "__builtin_amdgcn_is_shared", + "kernarg.segment.ptr" => "__builtin_amdgcn_kernarg_segment_ptr", + "lerp" => "__builtin_amdgcn_lerp", + "mbcnt.hi" => "__builtin_amdgcn_mbcnt_hi", + "mbcnt.lo" => "__builtin_amdgcn_mbcnt_lo", + "mfma.f32.16x16x16bf16.1k" => 
"__builtin_amdgcn_mfma_f32_16x16x16bf16_1k", + "mfma.f32.16x16x16f16" => "__builtin_amdgcn_mfma_f32_16x16x16f16", + "mfma.f32.16x16x1f32" => "__builtin_amdgcn_mfma_f32_16x16x1f32", + "mfma.f32.16x16x2bf16" => "__builtin_amdgcn_mfma_f32_16x16x2bf16", + "mfma.f32.16x16x32.bf16" => "__builtin_amdgcn_mfma_f32_16x16x32_bf16", + "mfma.f32.16x16x32.bf8.bf8" => "__builtin_amdgcn_mfma_f32_16x16x32_bf8_bf8", + "mfma.f32.16x16x32.bf8.fp8" => "__builtin_amdgcn_mfma_f32_16x16x32_bf8_fp8", + "mfma.f32.16x16x32.f16" => "__builtin_amdgcn_mfma_f32_16x16x32_f16", + "mfma.f32.16x16x32.fp8.bf8" => "__builtin_amdgcn_mfma_f32_16x16x32_fp8_bf8", + "mfma.f32.16x16x32.fp8.fp8" => "__builtin_amdgcn_mfma_f32_16x16x32_fp8_fp8", + "mfma.f32.16x16x4bf16.1k" => "__builtin_amdgcn_mfma_f32_16x16x4bf16_1k", + "mfma.f32.16x16x4f16" => "__builtin_amdgcn_mfma_f32_16x16x4f16", + "mfma.f32.16x16x4f32" => "__builtin_amdgcn_mfma_f32_16x16x4f32", + "mfma.f32.16x16x8.xf32" => "__builtin_amdgcn_mfma_f32_16x16x8_xf32", + "mfma.f32.16x16x8bf16" => "__builtin_amdgcn_mfma_f32_16x16x8bf16", + "mfma.f32.32x32x16.bf16" => "__builtin_amdgcn_mfma_f32_32x32x16_bf16", + "mfma.f32.32x32x16.bf8.bf8" => "__builtin_amdgcn_mfma_f32_32x32x16_bf8_bf8", + "mfma.f32.32x32x16.bf8.fp8" => "__builtin_amdgcn_mfma_f32_32x32x16_bf8_fp8", + "mfma.f32.32x32x16.f16" => "__builtin_amdgcn_mfma_f32_32x32x16_f16", + "mfma.f32.32x32x16.fp8.bf8" => "__builtin_amdgcn_mfma_f32_32x32x16_fp8_bf8", + "mfma.f32.32x32x16.fp8.fp8" => "__builtin_amdgcn_mfma_f32_32x32x16_fp8_fp8", + "mfma.f32.32x32x1f32" => "__builtin_amdgcn_mfma_f32_32x32x1f32", + "mfma.f32.32x32x2bf16" => "__builtin_amdgcn_mfma_f32_32x32x2bf16", + "mfma.f32.32x32x2f32" => "__builtin_amdgcn_mfma_f32_32x32x2f32", + "mfma.f32.32x32x4.xf32" => "__builtin_amdgcn_mfma_f32_32x32x4_xf32", + "mfma.f32.32x32x4bf16" => "__builtin_amdgcn_mfma_f32_32x32x4bf16", + "mfma.f32.32x32x4bf16.1k" => "__builtin_amdgcn_mfma_f32_32x32x4bf16_1k", + "mfma.f32.32x32x4f16" => "__builtin_amdgcn_mfma_f32_32x32x4f16", + "mfma.f32.32x32x8bf16.1k" => "__builtin_amdgcn_mfma_f32_32x32x8bf16_1k", + "mfma.f32.32x32x8f16" => "__builtin_amdgcn_mfma_f32_32x32x8f16", + "mfma.f32.4x4x1f32" => "__builtin_amdgcn_mfma_f32_4x4x1f32", + "mfma.f32.4x4x2bf16" => "__builtin_amdgcn_mfma_f32_4x4x2bf16", + "mfma.f32.4x4x4bf16.1k" => "__builtin_amdgcn_mfma_f32_4x4x4bf16_1k", + "mfma.f32.4x4x4f16" => "__builtin_amdgcn_mfma_f32_4x4x4f16", + "mfma.f64.16x16x4f64" => "__builtin_amdgcn_mfma_f64_16x16x4f64", + "mfma.f64.4x4x4f64" => "__builtin_amdgcn_mfma_f64_4x4x4f64", + "mfma.i32.16x16x16i8" => "__builtin_amdgcn_mfma_i32_16x16x16i8", + "mfma.i32.16x16x32.i8" => "__builtin_amdgcn_mfma_i32_16x16x32_i8", + "mfma.i32.16x16x4i8" => "__builtin_amdgcn_mfma_i32_16x16x4i8", + "mfma.i32.16x16x64.i8" => "__builtin_amdgcn_mfma_i32_16x16x64_i8", + "mfma.i32.32x32x16.i8" => "__builtin_amdgcn_mfma_i32_32x32x16_i8", + "mfma.i32.32x32x32.i8" => "__builtin_amdgcn_mfma_i32_32x32x32_i8", + "mfma.i32.32x32x4i8" => "__builtin_amdgcn_mfma_i32_32x32x4i8", + "mfma.i32.32x32x8i8" => "__builtin_amdgcn_mfma_i32_32x32x8i8", + "mfma.i32.4x4x4i8" => "__builtin_amdgcn_mfma_i32_4x4x4i8", + "mqsad.pk.u16.u8" => "__builtin_amdgcn_mqsad_pk_u16_u8", + "mqsad.u32.u8" => "__builtin_amdgcn_mqsad_u32_u8", + "msad.u8" => "__builtin_amdgcn_msad_u8", + "perm" => "__builtin_amdgcn_perm", + "permlane16.var" => "__builtin_amdgcn_permlane16_var", + "permlanex16.var" => "__builtin_amdgcn_permlanex16_var", + "prng.b32" => "__builtin_amdgcn_prng_b32", + "qsad.pk.u16.u8" => "__builtin_amdgcn_qsad_pk_u16_u8", + 
"queue.ptr" => "__builtin_amdgcn_queue_ptr", + "raw.ptr.buffer.load.lds" => "__builtin_amdgcn_raw_ptr_buffer_load_lds", + "rcp.legacy" => "__builtin_amdgcn_rcp_legacy", + "rsq.legacy" => "__builtin_amdgcn_rsq_legacy", + "s.barrier" => "__builtin_amdgcn_s_barrier", + "s.barrier.signal" => "__builtin_amdgcn_s_barrier_signal", + "s.barrier.signal.isfirst" => "__builtin_amdgcn_s_barrier_signal_isfirst", + "s.barrier.signal.var" => "__builtin_amdgcn_s_barrier_signal_var", + "s.barrier.wait" => "__builtin_amdgcn_s_barrier_wait", + "s.buffer.prefetch.data" => "__builtin_amdgcn_s_buffer_prefetch_data", + "s.dcache.inv" => "__builtin_amdgcn_s_dcache_inv", + "s.dcache.inv.vol" => "__builtin_amdgcn_s_dcache_inv_vol", + "s.dcache.wb" => "__builtin_amdgcn_s_dcache_wb", + "s.dcache.wb.vol" => "__builtin_amdgcn_s_dcache_wb_vol", + "s.decperflevel" => "__builtin_amdgcn_s_decperflevel", + "s.get.barrier.state" => "__builtin_amdgcn_s_get_barrier_state", + "s.get.named.barrier.state" => "__builtin_amdgcn_s_get_named_barrier_state", + "s.get.waveid.in.workgroup" => "__builtin_amdgcn_s_get_waveid_in_workgroup", + "s.getpc" => "__builtin_amdgcn_s_getpc", + "s.getreg" => "__builtin_amdgcn_s_getreg", + "s.incperflevel" => "__builtin_amdgcn_s_incperflevel", + "s.memrealtime" => "__builtin_amdgcn_s_memrealtime", + "s.memtime" => "__builtin_amdgcn_s_memtime", + "s.sendmsg" => "__builtin_amdgcn_s_sendmsg", + "s.sendmsghalt" => "__builtin_amdgcn_s_sendmsghalt", + "s.setprio" => "__builtin_amdgcn_s_setprio", + "s.setreg" => "__builtin_amdgcn_s_setreg", + "s.sleep" => "__builtin_amdgcn_s_sleep", + "s.sleep.var" => "__builtin_amdgcn_s_sleep_var", + "s.ttracedata" => "__builtin_amdgcn_s_ttracedata", + "s.ttracedata.imm" => "__builtin_amdgcn_s_ttracedata_imm", + "s.wait.event.export.ready" => "__builtin_amdgcn_s_wait_event_export_ready", + "s.waitcnt" => "__builtin_amdgcn_s_waitcnt", + "sad.hi.u8" => "__builtin_amdgcn_sad_hi_u8", + "sad.u16" => "__builtin_amdgcn_sad_u16", + "sad.u8" => "__builtin_amdgcn_sad_u8", + "sched.barrier" => "__builtin_amdgcn_sched_barrier", + "sched.group.barrier" => "__builtin_amdgcn_sched_group_barrier", + "sdot2" => "__builtin_amdgcn_sdot2", + "sdot4" => "__builtin_amdgcn_sdot4", + "sdot8" => "__builtin_amdgcn_sdot8", + "smfmac.f32.16x16x128.bf8.bf8" => { + "__builtin_amdgcn_smfmac_f32_16x16x128_bf8_bf8" + } + "smfmac.f32.16x16x128.bf8.fp8" => { + "__builtin_amdgcn_smfmac_f32_16x16x128_bf8_fp8" + } + "smfmac.f32.16x16x128.fp8.bf8" => { + "__builtin_amdgcn_smfmac_f32_16x16x128_fp8_bf8" + } + "smfmac.f32.16x16x128.fp8.fp8" => { + "__builtin_amdgcn_smfmac_f32_16x16x128_fp8_fp8" + } + "smfmac.f32.16x16x32.bf16" => "__builtin_amdgcn_smfmac_f32_16x16x32_bf16", + "smfmac.f32.16x16x32.f16" => "__builtin_amdgcn_smfmac_f32_16x16x32_f16", + "smfmac.f32.16x16x64.bf16" => "__builtin_amdgcn_smfmac_f32_16x16x64_bf16", + "smfmac.f32.16x16x64.bf8.bf8" => "__builtin_amdgcn_smfmac_f32_16x16x64_bf8_bf8", + "smfmac.f32.16x16x64.bf8.fp8" => "__builtin_amdgcn_smfmac_f32_16x16x64_bf8_fp8", + "smfmac.f32.16x16x64.f16" => "__builtin_amdgcn_smfmac_f32_16x16x64_f16", + "smfmac.f32.16x16x64.fp8.bf8" => "__builtin_amdgcn_smfmac_f32_16x16x64_fp8_bf8", + "smfmac.f32.16x16x64.fp8.fp8" => "__builtin_amdgcn_smfmac_f32_16x16x64_fp8_fp8", + "smfmac.f32.32x32x16.bf16" => "__builtin_amdgcn_smfmac_f32_32x32x16_bf16", + "smfmac.f32.32x32x16.f16" => "__builtin_amdgcn_smfmac_f32_32x32x16_f16", + "smfmac.f32.32x32x32.bf16" => "__builtin_amdgcn_smfmac_f32_32x32x32_bf16", + "smfmac.f32.32x32x32.bf8.bf8" => 
"__builtin_amdgcn_smfmac_f32_32x32x32_bf8_bf8", + "smfmac.f32.32x32x32.bf8.fp8" => "__builtin_amdgcn_smfmac_f32_32x32x32_bf8_fp8", + "smfmac.f32.32x32x32.f16" => "__builtin_amdgcn_smfmac_f32_32x32x32_f16", + "smfmac.f32.32x32x32.fp8.bf8" => "__builtin_amdgcn_smfmac_f32_32x32x32_fp8_bf8", + "smfmac.f32.32x32x32.fp8.fp8" => "__builtin_amdgcn_smfmac_f32_32x32x32_fp8_fp8", + "smfmac.f32.32x32x64.bf8.bf8" => "__builtin_amdgcn_smfmac_f32_32x32x64_bf8_bf8", + "smfmac.f32.32x32x64.bf8.fp8" => "__builtin_amdgcn_smfmac_f32_32x32x64_bf8_fp8", + "smfmac.f32.32x32x64.fp8.bf8" => "__builtin_amdgcn_smfmac_f32_32x32x64_fp8_bf8", + "smfmac.f32.32x32x64.fp8.fp8" => "__builtin_amdgcn_smfmac_f32_32x32x64_fp8_fp8", + "smfmac.i32.16x16x128.i8" => "__builtin_amdgcn_smfmac_i32_16x16x128_i8", + "smfmac.i32.16x16x64.i8" => "__builtin_amdgcn_smfmac_i32_16x16x64_i8", + "smfmac.i32.32x32x32.i8" => "__builtin_amdgcn_smfmac_i32_32x32x32_i8", + "smfmac.i32.32x32x64.i8" => "__builtin_amdgcn_smfmac_i32_32x32x64_i8", + "sudot4" => "__builtin_amdgcn_sudot4", + "sudot8" => "__builtin_amdgcn_sudot8", + "udot2" => "__builtin_amdgcn_udot2", + "udot4" => "__builtin_amdgcn_udot4", + "udot8" => "__builtin_amdgcn_udot8", + "wave.barrier" => "__builtin_amdgcn_wave_barrier", + "wavefrontsize" => "__builtin_amdgcn_wavefrontsize", + "workgroup.id.x" => "__builtin_amdgcn_workgroup_id_x", + "workgroup.id.y" => "__builtin_amdgcn_workgroup_id_y", + "workgroup.id.z" => "__builtin_amdgcn_workgroup_id_z", + "workitem.id.x" => "__builtin_amdgcn_workitem_id_x", + "workitem.id.y" => "__builtin_amdgcn_workitem_id_y", + "workitem.id.z" => "__builtin_amdgcn_workitem_id_z", + _ => unimplemented!("***** unsupported LLVM intrinsic {}", name), + } + } + amdgcn(name) + } + "arm" => { + #[allow(non_snake_case)] + fn arm(name: &str) -> &str { + match name { + // arm + "cdp" => "__builtin_arm_cdp", + "cdp2" => "__builtin_arm_cdp2", + "cmse.tt" => "__builtin_arm_cmse_TT", + "cmse.tta" => "__builtin_arm_cmse_TTA", + "cmse.ttat" => "__builtin_arm_cmse_TTAT", + "cmse.ttt" => "__builtin_arm_cmse_TTT", + "dmb" => "__builtin_arm_dmb", + "dsb" => "__builtin_arm_dsb", + "get.fpscr" => "__builtin_arm_get_fpscr", + "isb" => "__builtin_arm_isb", + "ldc" => "__builtin_arm_ldc", + "ldc2" => "__builtin_arm_ldc2", + "ldc2l" => "__builtin_arm_ldc2l", + "ldcl" => "__builtin_arm_ldcl", + "mcr" => "__builtin_arm_mcr", + "mcr2" => "__builtin_arm_mcr2", + "mcrr" => "__builtin_arm_mcrr", + "mcrr2" => "__builtin_arm_mcrr2", + "mrc" => "__builtin_arm_mrc", + "mrc2" => "__builtin_arm_mrc2", + "qadd" => "__builtin_arm_qadd", + "qadd16" => "__builtin_arm_qadd16", + "qadd8" => "__builtin_arm_qadd8", + "qasx" => "__builtin_arm_qasx", + "qsax" => "__builtin_arm_qsax", + "qsub" => "__builtin_arm_qsub", + "qsub16" => "__builtin_arm_qsub16", + "qsub8" => "__builtin_arm_qsub8", + "sadd16" => "__builtin_arm_sadd16", + "sadd8" => "__builtin_arm_sadd8", + "sasx" => "__builtin_arm_sasx", + "sel" => "__builtin_arm_sel", + "set.fpscr" => "__builtin_arm_set_fpscr", + "shadd16" => "__builtin_arm_shadd16", + "shadd8" => "__builtin_arm_shadd8", + "shasx" => "__builtin_arm_shasx", + "shsax" => "__builtin_arm_shsax", + "shsub16" => "__builtin_arm_shsub16", + "shsub8" => "__builtin_arm_shsub8", + "smlabb" => "__builtin_arm_smlabb", + "smlabt" => "__builtin_arm_smlabt", + "smlad" => "__builtin_arm_smlad", + "smladx" => "__builtin_arm_smladx", + "smlald" => "__builtin_arm_smlald", + "smlaldx" => "__builtin_arm_smlaldx", + "smlatb" => "__builtin_arm_smlatb", + "smlatt" => "__builtin_arm_smlatt", + "smlawb" 
=> "__builtin_arm_smlawb", + "smlawt" => "__builtin_arm_smlawt", + "smlsd" => "__builtin_arm_smlsd", + "smlsdx" => "__builtin_arm_smlsdx", + "smlsld" => "__builtin_arm_smlsld", + "smlsldx" => "__builtin_arm_smlsldx", + "smuad" => "__builtin_arm_smuad", + "smuadx" => "__builtin_arm_smuadx", + "smulbb" => "__builtin_arm_smulbb", + "smulbt" => "__builtin_arm_smulbt", + "smultb" => "__builtin_arm_smultb", + "smultt" => "__builtin_arm_smultt", + "smulwb" => "__builtin_arm_smulwb", + "smulwt" => "__builtin_arm_smulwt", + "smusd" => "__builtin_arm_smusd", + "smusdx" => "__builtin_arm_smusdx", + "ssat" => "__builtin_arm_ssat", + "ssat16" => "__builtin_arm_ssat16", + "ssax" => "__builtin_arm_ssax", + "ssub16" => "__builtin_arm_ssub16", + "ssub8" => "__builtin_arm_ssub8", + "stc" => "__builtin_arm_stc", + "stc2" => "__builtin_arm_stc2", + "stc2l" => "__builtin_arm_stc2l", + "stcl" => "__builtin_arm_stcl", + "sxtab16" => "__builtin_arm_sxtab16", + "sxtb16" => "__builtin_arm_sxtb16", + "thread.pointer" => "__builtin_thread_pointer", + "uadd16" => "__builtin_arm_uadd16", + "uadd8" => "__builtin_arm_uadd8", + "uasx" => "__builtin_arm_uasx", + "uhadd16" => "__builtin_arm_uhadd16", + "uhadd8" => "__builtin_arm_uhadd8", + "uhasx" => "__builtin_arm_uhasx", + "uhsax" => "__builtin_arm_uhsax", + "uhsub16" => "__builtin_arm_uhsub16", + "uhsub8" => "__builtin_arm_uhsub8", + "uqadd16" => "__builtin_arm_uqadd16", + "uqadd8" => "__builtin_arm_uqadd8", + "uqasx" => "__builtin_arm_uqasx", + "uqsax" => "__builtin_arm_uqsax", + "uqsub16" => "__builtin_arm_uqsub16", + "uqsub8" => "__builtin_arm_uqsub8", + "usad8" => "__builtin_arm_usad8", + "usada8" => "__builtin_arm_usada8", + "usat" => "__builtin_arm_usat", + "usat16" => "__builtin_arm_usat16", + "usax" => "__builtin_arm_usax", + "usub16" => "__builtin_arm_usub16", + "usub8" => "__builtin_arm_usub8", + "uxtab16" => "__builtin_arm_uxtab16", + "uxtb16" => "__builtin_arm_uxtb16", + _ => unimplemented!("***** unsupported LLVM intrinsic {}", name), + } + } + arm(name) + } + "bpf" => { + #[allow(non_snake_case)] + fn bpf(name: &str) -> &str { + match name { + // bpf + "btf.type.id" => "__builtin_bpf_btf_type_id", + "compare" => "__builtin_bpf_compare", + "getelementptr.and.load" => "__builtin_bpf_getelementptr_and_load", + "getelementptr.and.store" => "__builtin_bpf_getelementptr_and_store", + "load.byte" => "__builtin_bpf_load_byte", + "load.half" => "__builtin_bpf_load_half", + "load.word" => "__builtin_bpf_load_word", + "passthrough" => "__builtin_bpf_passthrough", + "preserve.enum.value" => "__builtin_bpf_preserve_enum_value", + "preserve.field.info" => "__builtin_bpf_preserve_field_info", + "preserve.type.info" => "__builtin_bpf_preserve_type_info", + "pseudo" => "__builtin_bpf_pseudo", + _ => unimplemented!("***** unsupported LLVM intrinsic {}", name), + } + } + bpf(name) + } + "cuda" => { + #[allow(non_snake_case)] + fn cuda(name: &str) -> &str { + match name { + // cuda + "syncthreads" => "__syncthreads", + _ => unimplemented!("***** unsupported LLVM intrinsic {}", name), + } + } + cuda(name) + } + "hexagon" => { + #[allow(non_snake_case)] + fn hexagon(name: &str) -> &str { + match name { + // hexagon + "A2.abs" => "__builtin_HEXAGON_A2_abs", + "A2.absp" => "__builtin_HEXAGON_A2_absp", + "A2.abssat" => "__builtin_HEXAGON_A2_abssat", + "A2.add" => "__builtin_HEXAGON_A2_add", + "A2.addh.h16.hh" => "__builtin_HEXAGON_A2_addh_h16_hh", + "A2.addh.h16.hl" => "__builtin_HEXAGON_A2_addh_h16_hl", + "A2.addh.h16.lh" => "__builtin_HEXAGON_A2_addh_h16_lh", + "A2.addh.h16.ll" 
=> "__builtin_HEXAGON_A2_addh_h16_ll", + "A2.addh.h16.sat.hh" => "__builtin_HEXAGON_A2_addh_h16_sat_hh", + "A2.addh.h16.sat.hl" => "__builtin_HEXAGON_A2_addh_h16_sat_hl", + "A2.addh.h16.sat.lh" => "__builtin_HEXAGON_A2_addh_h16_sat_lh", + "A2.addh.h16.sat.ll" => "__builtin_HEXAGON_A2_addh_h16_sat_ll", + "A2.addh.l16.hl" => "__builtin_HEXAGON_A2_addh_l16_hl", + "A2.addh.l16.ll" => "__builtin_HEXAGON_A2_addh_l16_ll", + "A2.addh.l16.sat.hl" => "__builtin_HEXAGON_A2_addh_l16_sat_hl", + "A2.addh.l16.sat.ll" => "__builtin_HEXAGON_A2_addh_l16_sat_ll", + "A2.addi" => "__builtin_HEXAGON_A2_addi", + "A2.addp" => "__builtin_HEXAGON_A2_addp", + "A2.addpsat" => "__builtin_HEXAGON_A2_addpsat", + "A2.addsat" => "__builtin_HEXAGON_A2_addsat", + "A2.addsp" => "__builtin_HEXAGON_A2_addsp", + "A2.and" => "__builtin_HEXAGON_A2_and", + "A2.andir" => "__builtin_HEXAGON_A2_andir", + "A2.andp" => "__builtin_HEXAGON_A2_andp", + "A2.aslh" => "__builtin_HEXAGON_A2_aslh", + "A2.asrh" => "__builtin_HEXAGON_A2_asrh", + "A2.combine.hh" => "__builtin_HEXAGON_A2_combine_hh", + "A2.combine.hl" => "__builtin_HEXAGON_A2_combine_hl", + "A2.combine.lh" => "__builtin_HEXAGON_A2_combine_lh", + "A2.combine.ll" => "__builtin_HEXAGON_A2_combine_ll", + "A2.combineii" => "__builtin_HEXAGON_A2_combineii", + "A2.combinew" => "__builtin_HEXAGON_A2_combinew", + "A2.max" => "__builtin_HEXAGON_A2_max", + "A2.maxp" => "__builtin_HEXAGON_A2_maxp", + "A2.maxu" => "__builtin_HEXAGON_A2_maxu", + "A2.maxup" => "__builtin_HEXAGON_A2_maxup", + "A2.min" => "__builtin_HEXAGON_A2_min", + "A2.minp" => "__builtin_HEXAGON_A2_minp", + "A2.minu" => "__builtin_HEXAGON_A2_minu", + "A2.minup" => "__builtin_HEXAGON_A2_minup", + "A2.neg" => "__builtin_HEXAGON_A2_neg", + "A2.negp" => "__builtin_HEXAGON_A2_negp", + "A2.negsat" => "__builtin_HEXAGON_A2_negsat", + "A2.not" => "__builtin_HEXAGON_A2_not", + "A2.notp" => "__builtin_HEXAGON_A2_notp", + "A2.or" => "__builtin_HEXAGON_A2_or", + "A2.orir" => "__builtin_HEXAGON_A2_orir", + "A2.orp" => "__builtin_HEXAGON_A2_orp", + "A2.roundsat" => "__builtin_HEXAGON_A2_roundsat", + "A2.sat" => "__builtin_HEXAGON_A2_sat", + "A2.satb" => "__builtin_HEXAGON_A2_satb", + "A2.sath" => "__builtin_HEXAGON_A2_sath", + "A2.satub" => "__builtin_HEXAGON_A2_satub", + "A2.satuh" => "__builtin_HEXAGON_A2_satuh", + "A2.sub" => "__builtin_HEXAGON_A2_sub", + "A2.subh.h16.hh" => "__builtin_HEXAGON_A2_subh_h16_hh", + "A2.subh.h16.hl" => "__builtin_HEXAGON_A2_subh_h16_hl", + "A2.subh.h16.lh" => "__builtin_HEXAGON_A2_subh_h16_lh", + "A2.subh.h16.ll" => "__builtin_HEXAGON_A2_subh_h16_ll", + "A2.subh.h16.sat.hh" => "__builtin_HEXAGON_A2_subh_h16_sat_hh", + "A2.subh.h16.sat.hl" => "__builtin_HEXAGON_A2_subh_h16_sat_hl", + "A2.subh.h16.sat.lh" => "__builtin_HEXAGON_A2_subh_h16_sat_lh", + "A2.subh.h16.sat.ll" => "__builtin_HEXAGON_A2_subh_h16_sat_ll", + "A2.subh.l16.hl" => "__builtin_HEXAGON_A2_subh_l16_hl", + "A2.subh.l16.ll" => "__builtin_HEXAGON_A2_subh_l16_ll", + "A2.subh.l16.sat.hl" => "__builtin_HEXAGON_A2_subh_l16_sat_hl", + "A2.subh.l16.sat.ll" => "__builtin_HEXAGON_A2_subh_l16_sat_ll", + "A2.subp" => "__builtin_HEXAGON_A2_subp", + "A2.subri" => "__builtin_HEXAGON_A2_subri", + "A2.subsat" => "__builtin_HEXAGON_A2_subsat", + "A2.svaddh" => "__builtin_HEXAGON_A2_svaddh", + "A2.svaddhs" => "__builtin_HEXAGON_A2_svaddhs", + "A2.svadduhs" => "__builtin_HEXAGON_A2_svadduhs", + "A2.svavgh" => "__builtin_HEXAGON_A2_svavgh", + "A2.svavghs" => "__builtin_HEXAGON_A2_svavghs", + "A2.svnavgh" => "__builtin_HEXAGON_A2_svnavgh", + "A2.svsubh" => 
"__builtin_HEXAGON_A2_svsubh", + "A2.svsubhs" => "__builtin_HEXAGON_A2_svsubhs", + "A2.svsubuhs" => "__builtin_HEXAGON_A2_svsubuhs", + "A2.swiz" => "__builtin_HEXAGON_A2_swiz", + "A2.sxtb" => "__builtin_HEXAGON_A2_sxtb", + "A2.sxth" => "__builtin_HEXAGON_A2_sxth", + "A2.sxtw" => "__builtin_HEXAGON_A2_sxtw", + "A2.tfr" => "__builtin_HEXAGON_A2_tfr", + "A2.tfrih" => "__builtin_HEXAGON_A2_tfrih", + "A2.tfril" => "__builtin_HEXAGON_A2_tfril", + "A2.tfrp" => "__builtin_HEXAGON_A2_tfrp", + "A2.tfrpi" => "__builtin_HEXAGON_A2_tfrpi", + "A2.tfrsi" => "__builtin_HEXAGON_A2_tfrsi", + "A2.vabsh" => "__builtin_HEXAGON_A2_vabsh", + "A2.vabshsat" => "__builtin_HEXAGON_A2_vabshsat", + "A2.vabsw" => "__builtin_HEXAGON_A2_vabsw", + "A2.vabswsat" => "__builtin_HEXAGON_A2_vabswsat", + "A2.vaddb.map" => "__builtin_HEXAGON_A2_vaddb_map", + "A2.vaddh" => "__builtin_HEXAGON_A2_vaddh", + "A2.vaddhs" => "__builtin_HEXAGON_A2_vaddhs", + "A2.vaddub" => "__builtin_HEXAGON_A2_vaddub", + "A2.vaddubs" => "__builtin_HEXAGON_A2_vaddubs", + "A2.vadduhs" => "__builtin_HEXAGON_A2_vadduhs", + "A2.vaddw" => "__builtin_HEXAGON_A2_vaddw", + "A2.vaddws" => "__builtin_HEXAGON_A2_vaddws", + "A2.vavgh" => "__builtin_HEXAGON_A2_vavgh", + "A2.vavghcr" => "__builtin_HEXAGON_A2_vavghcr", + "A2.vavghr" => "__builtin_HEXAGON_A2_vavghr", + "A2.vavgub" => "__builtin_HEXAGON_A2_vavgub", + "A2.vavgubr" => "__builtin_HEXAGON_A2_vavgubr", + "A2.vavguh" => "__builtin_HEXAGON_A2_vavguh", + "A2.vavguhr" => "__builtin_HEXAGON_A2_vavguhr", + "A2.vavguw" => "__builtin_HEXAGON_A2_vavguw", + "A2.vavguwr" => "__builtin_HEXAGON_A2_vavguwr", + "A2.vavgw" => "__builtin_HEXAGON_A2_vavgw", + "A2.vavgwcr" => "__builtin_HEXAGON_A2_vavgwcr", + "A2.vavgwr" => "__builtin_HEXAGON_A2_vavgwr", + "A2.vcmpbeq" => "__builtin_HEXAGON_A2_vcmpbeq", + "A2.vcmpbgtu" => "__builtin_HEXAGON_A2_vcmpbgtu", + "A2.vcmpheq" => "__builtin_HEXAGON_A2_vcmpheq", + "A2.vcmphgt" => "__builtin_HEXAGON_A2_vcmphgt", + "A2.vcmphgtu" => "__builtin_HEXAGON_A2_vcmphgtu", + "A2.vcmpweq" => "__builtin_HEXAGON_A2_vcmpweq", + "A2.vcmpwgt" => "__builtin_HEXAGON_A2_vcmpwgt", + "A2.vcmpwgtu" => "__builtin_HEXAGON_A2_vcmpwgtu", + "A2.vconj" => "__builtin_HEXAGON_A2_vconj", + "A2.vmaxb" => "__builtin_HEXAGON_A2_vmaxb", + "A2.vmaxh" => "__builtin_HEXAGON_A2_vmaxh", + "A2.vmaxub" => "__builtin_HEXAGON_A2_vmaxub", + "A2.vmaxuh" => "__builtin_HEXAGON_A2_vmaxuh", + "A2.vmaxuw" => "__builtin_HEXAGON_A2_vmaxuw", + "A2.vmaxw" => "__builtin_HEXAGON_A2_vmaxw", + "A2.vminb" => "__builtin_HEXAGON_A2_vminb", + "A2.vminh" => "__builtin_HEXAGON_A2_vminh", + "A2.vminub" => "__builtin_HEXAGON_A2_vminub", + "A2.vminuh" => "__builtin_HEXAGON_A2_vminuh", + "A2.vminuw" => "__builtin_HEXAGON_A2_vminuw", + "A2.vminw" => "__builtin_HEXAGON_A2_vminw", + "A2.vnavgh" => "__builtin_HEXAGON_A2_vnavgh", + "A2.vnavghcr" => "__builtin_HEXAGON_A2_vnavghcr", + "A2.vnavghr" => "__builtin_HEXAGON_A2_vnavghr", + "A2.vnavgw" => "__builtin_HEXAGON_A2_vnavgw", + "A2.vnavgwcr" => "__builtin_HEXAGON_A2_vnavgwcr", + "A2.vnavgwr" => "__builtin_HEXAGON_A2_vnavgwr", + "A2.vraddub" => "__builtin_HEXAGON_A2_vraddub", + "A2.vraddub.acc" => "__builtin_HEXAGON_A2_vraddub_acc", + "A2.vrsadub" => "__builtin_HEXAGON_A2_vrsadub", + "A2.vrsadub.acc" => "__builtin_HEXAGON_A2_vrsadub_acc", + "A2.vsubb.map" => "__builtin_HEXAGON_A2_vsubb_map", + "A2.vsubh" => "__builtin_HEXAGON_A2_vsubh", + "A2.vsubhs" => "__builtin_HEXAGON_A2_vsubhs", + "A2.vsubub" => "__builtin_HEXAGON_A2_vsubub", + "A2.vsububs" => "__builtin_HEXAGON_A2_vsububs", + "A2.vsubuhs" => 
"__builtin_HEXAGON_A2_vsubuhs", + "A2.vsubw" => "__builtin_HEXAGON_A2_vsubw", + "A2.vsubws" => "__builtin_HEXAGON_A2_vsubws", + "A2.xor" => "__builtin_HEXAGON_A2_xor", + "A2.xorp" => "__builtin_HEXAGON_A2_xorp", + "A2.zxtb" => "__builtin_HEXAGON_A2_zxtb", + "A2.zxth" => "__builtin_HEXAGON_A2_zxth", + "A4.andn" => "__builtin_HEXAGON_A4_andn", + "A4.andnp" => "__builtin_HEXAGON_A4_andnp", + "A4.bitsplit" => "__builtin_HEXAGON_A4_bitsplit", + "A4.bitspliti" => "__builtin_HEXAGON_A4_bitspliti", + "A4.boundscheck" => "__builtin_HEXAGON_A4_boundscheck", + "A4.cmpbeq" => "__builtin_HEXAGON_A4_cmpbeq", + "A4.cmpbeqi" => "__builtin_HEXAGON_A4_cmpbeqi", + "A4.cmpbgt" => "__builtin_HEXAGON_A4_cmpbgt", + "A4.cmpbgti" => "__builtin_HEXAGON_A4_cmpbgti", + "A4.cmpbgtu" => "__builtin_HEXAGON_A4_cmpbgtu", + "A4.cmpbgtui" => "__builtin_HEXAGON_A4_cmpbgtui", + "A4.cmpheq" => "__builtin_HEXAGON_A4_cmpheq", + "A4.cmpheqi" => "__builtin_HEXAGON_A4_cmpheqi", + "A4.cmphgt" => "__builtin_HEXAGON_A4_cmphgt", + "A4.cmphgti" => "__builtin_HEXAGON_A4_cmphgti", + "A4.cmphgtu" => "__builtin_HEXAGON_A4_cmphgtu", + "A4.cmphgtui" => "__builtin_HEXAGON_A4_cmphgtui", + "A4.combineir" => "__builtin_HEXAGON_A4_combineir", + "A4.combineri" => "__builtin_HEXAGON_A4_combineri", + "A4.cround.ri" => "__builtin_HEXAGON_A4_cround_ri", + "A4.cround.rr" => "__builtin_HEXAGON_A4_cround_rr", + "A4.modwrapu" => "__builtin_HEXAGON_A4_modwrapu", + "A4.orn" => "__builtin_HEXAGON_A4_orn", + "A4.ornp" => "__builtin_HEXAGON_A4_ornp", + "A4.rcmpeq" => "__builtin_HEXAGON_A4_rcmpeq", + "A4.rcmpeqi" => "__builtin_HEXAGON_A4_rcmpeqi", + "A4.rcmpneq" => "__builtin_HEXAGON_A4_rcmpneq", + "A4.rcmpneqi" => "__builtin_HEXAGON_A4_rcmpneqi", + "A4.round.ri" => "__builtin_HEXAGON_A4_round_ri", + "A4.round.ri.sat" => "__builtin_HEXAGON_A4_round_ri_sat", + "A4.round.rr" => "__builtin_HEXAGON_A4_round_rr", + "A4.round.rr.sat" => "__builtin_HEXAGON_A4_round_rr_sat", + "A4.tlbmatch" => "__builtin_HEXAGON_A4_tlbmatch", + "A4.vcmpbeq.any" => "__builtin_HEXAGON_A4_vcmpbeq_any", + "A4.vcmpbeqi" => "__builtin_HEXAGON_A4_vcmpbeqi", + "A4.vcmpbgt" => "__builtin_HEXAGON_A4_vcmpbgt", + "A4.vcmpbgti" => "__builtin_HEXAGON_A4_vcmpbgti", + "A4.vcmpbgtui" => "__builtin_HEXAGON_A4_vcmpbgtui", + "A4.vcmpheqi" => "__builtin_HEXAGON_A4_vcmpheqi", + "A4.vcmphgti" => "__builtin_HEXAGON_A4_vcmphgti", + "A4.vcmphgtui" => "__builtin_HEXAGON_A4_vcmphgtui", + "A4.vcmpweqi" => "__builtin_HEXAGON_A4_vcmpweqi", + "A4.vcmpwgti" => "__builtin_HEXAGON_A4_vcmpwgti", + "A4.vcmpwgtui" => "__builtin_HEXAGON_A4_vcmpwgtui", + "A4.vrmaxh" => "__builtin_HEXAGON_A4_vrmaxh", + "A4.vrmaxuh" => "__builtin_HEXAGON_A4_vrmaxuh", + "A4.vrmaxuw" => "__builtin_HEXAGON_A4_vrmaxuw", + "A4.vrmaxw" => "__builtin_HEXAGON_A4_vrmaxw", + "A4.vrminh" => "__builtin_HEXAGON_A4_vrminh", + "A4.vrminuh" => "__builtin_HEXAGON_A4_vrminuh", + "A4.vrminuw" => "__builtin_HEXAGON_A4_vrminuw", + "A4.vrminw" => "__builtin_HEXAGON_A4_vrminw", + "A5.vaddhubs" => "__builtin_HEXAGON_A5_vaddhubs", + "A6.vcmpbeq.notany" => "__builtin_HEXAGON_A6_vcmpbeq_notany", + "A7.clip" => "__builtin_HEXAGON_A7_clip", + "A7.croundd.ri" => "__builtin_HEXAGON_A7_croundd_ri", + "A7.croundd.rr" => "__builtin_HEXAGON_A7_croundd_rr", + "A7.vclip" => "__builtin_HEXAGON_A7_vclip", + "C2.all8" => "__builtin_HEXAGON_C2_all8", + "C2.and" => "__builtin_HEXAGON_C2_and", + "C2.andn" => "__builtin_HEXAGON_C2_andn", + "C2.any8" => "__builtin_HEXAGON_C2_any8", + "C2.bitsclr" => "__builtin_HEXAGON_C2_bitsclr", + "C2.bitsclri" => "__builtin_HEXAGON_C2_bitsclri", + 
"C2.bitsset" => "__builtin_HEXAGON_C2_bitsset", + "C2.cmpeq" => "__builtin_HEXAGON_C2_cmpeq", + "C2.cmpeqi" => "__builtin_HEXAGON_C2_cmpeqi", + "C2.cmpeqp" => "__builtin_HEXAGON_C2_cmpeqp", + "C2.cmpgei" => "__builtin_HEXAGON_C2_cmpgei", + "C2.cmpgeui" => "__builtin_HEXAGON_C2_cmpgeui", + "C2.cmpgt" => "__builtin_HEXAGON_C2_cmpgt", + "C2.cmpgti" => "__builtin_HEXAGON_C2_cmpgti", + "C2.cmpgtp" => "__builtin_HEXAGON_C2_cmpgtp", + "C2.cmpgtu" => "__builtin_HEXAGON_C2_cmpgtu", + "C2.cmpgtui" => "__builtin_HEXAGON_C2_cmpgtui", + "C2.cmpgtup" => "__builtin_HEXAGON_C2_cmpgtup", + "C2.cmplt" => "__builtin_HEXAGON_C2_cmplt", + "C2.cmpltu" => "__builtin_HEXAGON_C2_cmpltu", + "C2.mask" => "__builtin_HEXAGON_C2_mask", + "C2.mux" => "__builtin_HEXAGON_C2_mux", + "C2.muxii" => "__builtin_HEXAGON_C2_muxii", + "C2.muxir" => "__builtin_HEXAGON_C2_muxir", + "C2.muxri" => "__builtin_HEXAGON_C2_muxri", + "C2.not" => "__builtin_HEXAGON_C2_not", + "C2.or" => "__builtin_HEXAGON_C2_or", + "C2.orn" => "__builtin_HEXAGON_C2_orn", + "C2.pxfer.map" => "__builtin_HEXAGON_C2_pxfer_map", + "C2.tfrpr" => "__builtin_HEXAGON_C2_tfrpr", + "C2.tfrrp" => "__builtin_HEXAGON_C2_tfrrp", + "C2.vitpack" => "__builtin_HEXAGON_C2_vitpack", + "C2.vmux" => "__builtin_HEXAGON_C2_vmux", + "C2.xor" => "__builtin_HEXAGON_C2_xor", + "C4.and.and" => "__builtin_HEXAGON_C4_and_and", + "C4.and.andn" => "__builtin_HEXAGON_C4_and_andn", + "C4.and.or" => "__builtin_HEXAGON_C4_and_or", + "C4.and.orn" => "__builtin_HEXAGON_C4_and_orn", + "C4.cmplte" => "__builtin_HEXAGON_C4_cmplte", + "C4.cmpltei" => "__builtin_HEXAGON_C4_cmpltei", + "C4.cmplteu" => "__builtin_HEXAGON_C4_cmplteu", + "C4.cmplteui" => "__builtin_HEXAGON_C4_cmplteui", + "C4.cmpneq" => "__builtin_HEXAGON_C4_cmpneq", + "C4.cmpneqi" => "__builtin_HEXAGON_C4_cmpneqi", + "C4.fastcorner9" => "__builtin_HEXAGON_C4_fastcorner9", + "C4.fastcorner9.not" => "__builtin_HEXAGON_C4_fastcorner9_not", + "C4.nbitsclr" => "__builtin_HEXAGON_C4_nbitsclr", + "C4.nbitsclri" => "__builtin_HEXAGON_C4_nbitsclri", + "C4.nbitsset" => "__builtin_HEXAGON_C4_nbitsset", + "C4.or.and" => "__builtin_HEXAGON_C4_or_and", + "C4.or.andn" => "__builtin_HEXAGON_C4_or_andn", + "C4.or.or" => "__builtin_HEXAGON_C4_or_or", + "C4.or.orn" => "__builtin_HEXAGON_C4_or_orn", + "F2.conv.d2df" => "__builtin_HEXAGON_F2_conv_d2df", + "F2.conv.d2sf" => "__builtin_HEXAGON_F2_conv_d2sf", + "F2.conv.df2d" => "__builtin_HEXAGON_F2_conv_df2d", + "F2.conv.df2d.chop" => "__builtin_HEXAGON_F2_conv_df2d_chop", + "F2.conv.df2sf" => "__builtin_HEXAGON_F2_conv_df2sf", + "F2.conv.df2ud" => "__builtin_HEXAGON_F2_conv_df2ud", + "F2.conv.df2ud.chop" => "__builtin_HEXAGON_F2_conv_df2ud_chop", + "F2.conv.df2uw" => "__builtin_HEXAGON_F2_conv_df2uw", + "F2.conv.df2uw.chop" => "__builtin_HEXAGON_F2_conv_df2uw_chop", + "F2.conv.df2w" => "__builtin_HEXAGON_F2_conv_df2w", + "F2.conv.df2w.chop" => "__builtin_HEXAGON_F2_conv_df2w_chop", + "F2.conv.sf2d" => "__builtin_HEXAGON_F2_conv_sf2d", + "F2.conv.sf2d.chop" => "__builtin_HEXAGON_F2_conv_sf2d_chop", + "F2.conv.sf2df" => "__builtin_HEXAGON_F2_conv_sf2df", + "F2.conv.sf2ud" => "__builtin_HEXAGON_F2_conv_sf2ud", + "F2.conv.sf2ud.chop" => "__builtin_HEXAGON_F2_conv_sf2ud_chop", + "F2.conv.sf2uw" => "__builtin_HEXAGON_F2_conv_sf2uw", + "F2.conv.sf2uw.chop" => "__builtin_HEXAGON_F2_conv_sf2uw_chop", + "F2.conv.sf2w" => "__builtin_HEXAGON_F2_conv_sf2w", + "F2.conv.sf2w.chop" => "__builtin_HEXAGON_F2_conv_sf2w_chop", + "F2.conv.ud2df" => "__builtin_HEXAGON_F2_conv_ud2df", + "F2.conv.ud2sf" => 
"__builtin_HEXAGON_F2_conv_ud2sf", + "F2.conv.uw2df" => "__builtin_HEXAGON_F2_conv_uw2df", + "F2.conv.uw2sf" => "__builtin_HEXAGON_F2_conv_uw2sf", + "F2.conv.w2df" => "__builtin_HEXAGON_F2_conv_w2df", + "F2.conv.w2sf" => "__builtin_HEXAGON_F2_conv_w2sf", + "F2.dfadd" => "__builtin_HEXAGON_F2_dfadd", + "F2.dfclass" => "__builtin_HEXAGON_F2_dfclass", + "F2.dfcmpeq" => "__builtin_HEXAGON_F2_dfcmpeq", + "F2.dfcmpge" => "__builtin_HEXAGON_F2_dfcmpge", + "F2.dfcmpgt" => "__builtin_HEXAGON_F2_dfcmpgt", + "F2.dfcmpuo" => "__builtin_HEXAGON_F2_dfcmpuo", + "F2.dffixupd" => "__builtin_HEXAGON_F2_dffixupd", + "F2.dffixupn" => "__builtin_HEXAGON_F2_dffixupn", + "F2.dffixupr" => "__builtin_HEXAGON_F2_dffixupr", + "F2.dffma" => "__builtin_HEXAGON_F2_dffma", + "F2.dffma.lib" => "__builtin_HEXAGON_F2_dffma_lib", + "F2.dffma.sc" => "__builtin_HEXAGON_F2_dffma_sc", + "F2.dffms" => "__builtin_HEXAGON_F2_dffms", + "F2.dffms.lib" => "__builtin_HEXAGON_F2_dffms_lib", + "F2.dfimm.n" => "__builtin_HEXAGON_F2_dfimm_n", + "F2.dfimm.p" => "__builtin_HEXAGON_F2_dfimm_p", + "F2.dfmax" => "__builtin_HEXAGON_F2_dfmax", + "F2.dfmin" => "__builtin_HEXAGON_F2_dfmin", + "F2.dfmpy" => "__builtin_HEXAGON_F2_dfmpy", + "F2.dfmpyfix" => "__builtin_HEXAGON_F2_dfmpyfix", + "F2.dfmpyhh" => "__builtin_HEXAGON_F2_dfmpyhh", + "F2.dfmpylh" => "__builtin_HEXAGON_F2_dfmpylh", + "F2.dfmpyll" => "__builtin_HEXAGON_F2_dfmpyll", + "F2.dfsub" => "__builtin_HEXAGON_F2_dfsub", + "F2.sfadd" => "__builtin_HEXAGON_F2_sfadd", + "F2.sfclass" => "__builtin_HEXAGON_F2_sfclass", + "F2.sfcmpeq" => "__builtin_HEXAGON_F2_sfcmpeq", + "F2.sfcmpge" => "__builtin_HEXAGON_F2_sfcmpge", + "F2.sfcmpgt" => "__builtin_HEXAGON_F2_sfcmpgt", + "F2.sfcmpuo" => "__builtin_HEXAGON_F2_sfcmpuo", + "F2.sffixupd" => "__builtin_HEXAGON_F2_sffixupd", + "F2.sffixupn" => "__builtin_HEXAGON_F2_sffixupn", + "F2.sffixupr" => "__builtin_HEXAGON_F2_sffixupr", + "F2.sffma" => "__builtin_HEXAGON_F2_sffma", + "F2.sffma.lib" => "__builtin_HEXAGON_F2_sffma_lib", + "F2.sffma.sc" => "__builtin_HEXAGON_F2_sffma_sc", + "F2.sffms" => "__builtin_HEXAGON_F2_sffms", + "F2.sffms.lib" => "__builtin_HEXAGON_F2_sffms_lib", + "F2.sfimm.n" => "__builtin_HEXAGON_F2_sfimm_n", + "F2.sfimm.p" => "__builtin_HEXAGON_F2_sfimm_p", + "F2.sfmax" => "__builtin_HEXAGON_F2_sfmax", + "F2.sfmin" => "__builtin_HEXAGON_F2_sfmin", + "F2.sfmpy" => "__builtin_HEXAGON_F2_sfmpy", + "F2.sfsub" => "__builtin_HEXAGON_F2_sfsub", + "L2.loadw.locked" => "__builtin_HEXAGON_L2_loadw_locked", + "L4.loadd.locked" => "__builtin__HEXAGON_L4_loadd_locked", + "M2.acci" => "__builtin_HEXAGON_M2_acci", + "M2.accii" => "__builtin_HEXAGON_M2_accii", + "M2.cmaci.s0" => "__builtin_HEXAGON_M2_cmaci_s0", + "M2.cmacr.s0" => "__builtin_HEXAGON_M2_cmacr_s0", + "M2.cmacs.s0" => "__builtin_HEXAGON_M2_cmacs_s0", + "M2.cmacs.s1" => "__builtin_HEXAGON_M2_cmacs_s1", + "M2.cmacsc.s0" => "__builtin_HEXAGON_M2_cmacsc_s0", + "M2.cmacsc.s1" => "__builtin_HEXAGON_M2_cmacsc_s1", + "M2.cmpyi.s0" => "__builtin_HEXAGON_M2_cmpyi_s0", + "M2.cmpyr.s0" => "__builtin_HEXAGON_M2_cmpyr_s0", + "M2.cmpyrs.s0" => "__builtin_HEXAGON_M2_cmpyrs_s0", + "M2.cmpyrs.s1" => "__builtin_HEXAGON_M2_cmpyrs_s1", + "M2.cmpyrsc.s0" => "__builtin_HEXAGON_M2_cmpyrsc_s0", + "M2.cmpyrsc.s1" => "__builtin_HEXAGON_M2_cmpyrsc_s1", + "M2.cmpys.s0" => "__builtin_HEXAGON_M2_cmpys_s0", + "M2.cmpys.s1" => "__builtin_HEXAGON_M2_cmpys_s1", + "M2.cmpysc.s0" => "__builtin_HEXAGON_M2_cmpysc_s0", + "M2.cmpysc.s1" => "__builtin_HEXAGON_M2_cmpysc_s1", + "M2.cnacs.s0" => "__builtin_HEXAGON_M2_cnacs_s0", + 
"M2.cnacs.s1" => "__builtin_HEXAGON_M2_cnacs_s1", + "M2.cnacsc.s0" => "__builtin_HEXAGON_M2_cnacsc_s0", + "M2.cnacsc.s1" => "__builtin_HEXAGON_M2_cnacsc_s1", + "M2.dpmpyss.acc.s0" => "__builtin_HEXAGON_M2_dpmpyss_acc_s0", + "M2.dpmpyss.nac.s0" => "__builtin_HEXAGON_M2_dpmpyss_nac_s0", + "M2.dpmpyss.rnd.s0" => "__builtin_HEXAGON_M2_dpmpyss_rnd_s0", + "M2.dpmpyss.s0" => "__builtin_HEXAGON_M2_dpmpyss_s0", + "M2.dpmpyuu.acc.s0" => "__builtin_HEXAGON_M2_dpmpyuu_acc_s0", + "M2.dpmpyuu.nac.s0" => "__builtin_HEXAGON_M2_dpmpyuu_nac_s0", + "M2.dpmpyuu.s0" => "__builtin_HEXAGON_M2_dpmpyuu_s0", + "M2.hmmpyh.rs1" => "__builtin_HEXAGON_M2_hmmpyh_rs1", + "M2.hmmpyh.s1" => "__builtin_HEXAGON_M2_hmmpyh_s1", + "M2.hmmpyl.rs1" => "__builtin_HEXAGON_M2_hmmpyl_rs1", + "M2.hmmpyl.s1" => "__builtin_HEXAGON_M2_hmmpyl_s1", + "M2.maci" => "__builtin_HEXAGON_M2_maci", + "M2.macsin" => "__builtin_HEXAGON_M2_macsin", + "M2.macsip" => "__builtin_HEXAGON_M2_macsip", + "M2.mmachs.rs0" => "__builtin_HEXAGON_M2_mmachs_rs0", + "M2.mmachs.rs1" => "__builtin_HEXAGON_M2_mmachs_rs1", + "M2.mmachs.s0" => "__builtin_HEXAGON_M2_mmachs_s0", + "M2.mmachs.s1" => "__builtin_HEXAGON_M2_mmachs_s1", + "M2.mmacls.rs0" => "__builtin_HEXAGON_M2_mmacls_rs0", + "M2.mmacls.rs1" => "__builtin_HEXAGON_M2_mmacls_rs1", + "M2.mmacls.s0" => "__builtin_HEXAGON_M2_mmacls_s0", + "M2.mmacls.s1" => "__builtin_HEXAGON_M2_mmacls_s1", + "M2.mmacuhs.rs0" => "__builtin_HEXAGON_M2_mmacuhs_rs0", + "M2.mmacuhs.rs1" => "__builtin_HEXAGON_M2_mmacuhs_rs1", + "M2.mmacuhs.s0" => "__builtin_HEXAGON_M2_mmacuhs_s0", + "M2.mmacuhs.s1" => "__builtin_HEXAGON_M2_mmacuhs_s1", + "M2.mmaculs.rs0" => "__builtin_HEXAGON_M2_mmaculs_rs0", + "M2.mmaculs.rs1" => "__builtin_HEXAGON_M2_mmaculs_rs1", + "M2.mmaculs.s0" => "__builtin_HEXAGON_M2_mmaculs_s0", + "M2.mmaculs.s1" => "__builtin_HEXAGON_M2_mmaculs_s1", + "M2.mmpyh.rs0" => "__builtin_HEXAGON_M2_mmpyh_rs0", + "M2.mmpyh.rs1" => "__builtin_HEXAGON_M2_mmpyh_rs1", + "M2.mmpyh.s0" => "__builtin_HEXAGON_M2_mmpyh_s0", + "M2.mmpyh.s1" => "__builtin_HEXAGON_M2_mmpyh_s1", + "M2.mmpyl.rs0" => "__builtin_HEXAGON_M2_mmpyl_rs0", + "M2.mmpyl.rs1" => "__builtin_HEXAGON_M2_mmpyl_rs1", + "M2.mmpyl.s0" => "__builtin_HEXAGON_M2_mmpyl_s0", + "M2.mmpyl.s1" => "__builtin_HEXAGON_M2_mmpyl_s1", + "M2.mmpyuh.rs0" => "__builtin_HEXAGON_M2_mmpyuh_rs0", + "M2.mmpyuh.rs1" => "__builtin_HEXAGON_M2_mmpyuh_rs1", + "M2.mmpyuh.s0" => "__builtin_HEXAGON_M2_mmpyuh_s0", + "M2.mmpyuh.s1" => "__builtin_HEXAGON_M2_mmpyuh_s1", + "M2.mmpyul.rs0" => "__builtin_HEXAGON_M2_mmpyul_rs0", + "M2.mmpyul.rs1" => "__builtin_HEXAGON_M2_mmpyul_rs1", + "M2.mmpyul.s0" => "__builtin_HEXAGON_M2_mmpyul_s0", + "M2.mmpyul.s1" => "__builtin_HEXAGON_M2_mmpyul_s1", + "M2.mnaci" => "__builtin_HEXAGON_M2_mnaci", + "M2.mpy.acc.hh.s0" => "__builtin_HEXAGON_M2_mpy_acc_hh_s0", + "M2.mpy.acc.hh.s1" => "__builtin_HEXAGON_M2_mpy_acc_hh_s1", + "M2.mpy.acc.hl.s0" => "__builtin_HEXAGON_M2_mpy_acc_hl_s0", + "M2.mpy.acc.hl.s1" => "__builtin_HEXAGON_M2_mpy_acc_hl_s1", + "M2.mpy.acc.lh.s0" => "__builtin_HEXAGON_M2_mpy_acc_lh_s0", + "M2.mpy.acc.lh.s1" => "__builtin_HEXAGON_M2_mpy_acc_lh_s1", + "M2.mpy.acc.ll.s0" => "__builtin_HEXAGON_M2_mpy_acc_ll_s0", + "M2.mpy.acc.ll.s1" => "__builtin_HEXAGON_M2_mpy_acc_ll_s1", + "M2.mpy.acc.sat.hh.s0" => "__builtin_HEXAGON_M2_mpy_acc_sat_hh_s0", + "M2.mpy.acc.sat.hh.s1" => "__builtin_HEXAGON_M2_mpy_acc_sat_hh_s1", + "M2.mpy.acc.sat.hl.s0" => "__builtin_HEXAGON_M2_mpy_acc_sat_hl_s0", + "M2.mpy.acc.sat.hl.s1" => "__builtin_HEXAGON_M2_mpy_acc_sat_hl_s1", + 
"M2.mpy.acc.sat.lh.s0" => "__builtin_HEXAGON_M2_mpy_acc_sat_lh_s0", + "M2.mpy.acc.sat.lh.s1" => "__builtin_HEXAGON_M2_mpy_acc_sat_lh_s1", + "M2.mpy.acc.sat.ll.s0" => "__builtin_HEXAGON_M2_mpy_acc_sat_ll_s0", + "M2.mpy.acc.sat.ll.s1" => "__builtin_HEXAGON_M2_mpy_acc_sat_ll_s1", + "M2.mpy.hh.s0" => "__builtin_HEXAGON_M2_mpy_hh_s0", + "M2.mpy.hh.s1" => "__builtin_HEXAGON_M2_mpy_hh_s1", + "M2.mpy.hl.s0" => "__builtin_HEXAGON_M2_mpy_hl_s0", + "M2.mpy.hl.s1" => "__builtin_HEXAGON_M2_mpy_hl_s1", + "M2.mpy.lh.s0" => "__builtin_HEXAGON_M2_mpy_lh_s0", + "M2.mpy.lh.s1" => "__builtin_HEXAGON_M2_mpy_lh_s1", + "M2.mpy.ll.s0" => "__builtin_HEXAGON_M2_mpy_ll_s0", + "M2.mpy.ll.s1" => "__builtin_HEXAGON_M2_mpy_ll_s1", + "M2.mpy.nac.hh.s0" => "__builtin_HEXAGON_M2_mpy_nac_hh_s0", + "M2.mpy.nac.hh.s1" => "__builtin_HEXAGON_M2_mpy_nac_hh_s1", + "M2.mpy.nac.hl.s0" => "__builtin_HEXAGON_M2_mpy_nac_hl_s0", + "M2.mpy.nac.hl.s1" => "__builtin_HEXAGON_M2_mpy_nac_hl_s1", + "M2.mpy.nac.lh.s0" => "__builtin_HEXAGON_M2_mpy_nac_lh_s0", + "M2.mpy.nac.lh.s1" => "__builtin_HEXAGON_M2_mpy_nac_lh_s1", + "M2.mpy.nac.ll.s0" => "__builtin_HEXAGON_M2_mpy_nac_ll_s0", + "M2.mpy.nac.ll.s1" => "__builtin_HEXAGON_M2_mpy_nac_ll_s1", + "M2.mpy.nac.sat.hh.s0" => "__builtin_HEXAGON_M2_mpy_nac_sat_hh_s0", + "M2.mpy.nac.sat.hh.s1" => "__builtin_HEXAGON_M2_mpy_nac_sat_hh_s1", + "M2.mpy.nac.sat.hl.s0" => "__builtin_HEXAGON_M2_mpy_nac_sat_hl_s0", + "M2.mpy.nac.sat.hl.s1" => "__builtin_HEXAGON_M2_mpy_nac_sat_hl_s1", + "M2.mpy.nac.sat.lh.s0" => "__builtin_HEXAGON_M2_mpy_nac_sat_lh_s0", + "M2.mpy.nac.sat.lh.s1" => "__builtin_HEXAGON_M2_mpy_nac_sat_lh_s1", + "M2.mpy.nac.sat.ll.s0" => "__builtin_HEXAGON_M2_mpy_nac_sat_ll_s0", + "M2.mpy.nac.sat.ll.s1" => "__builtin_HEXAGON_M2_mpy_nac_sat_ll_s1", + "M2.mpy.rnd.hh.s0" => "__builtin_HEXAGON_M2_mpy_rnd_hh_s0", + "M2.mpy.rnd.hh.s1" => "__builtin_HEXAGON_M2_mpy_rnd_hh_s1", + "M2.mpy.rnd.hl.s0" => "__builtin_HEXAGON_M2_mpy_rnd_hl_s0", + "M2.mpy.rnd.hl.s1" => "__builtin_HEXAGON_M2_mpy_rnd_hl_s1", + "M2.mpy.rnd.lh.s0" => "__builtin_HEXAGON_M2_mpy_rnd_lh_s0", + "M2.mpy.rnd.lh.s1" => "__builtin_HEXAGON_M2_mpy_rnd_lh_s1", + "M2.mpy.rnd.ll.s0" => "__builtin_HEXAGON_M2_mpy_rnd_ll_s0", + "M2.mpy.rnd.ll.s1" => "__builtin_HEXAGON_M2_mpy_rnd_ll_s1", + "M2.mpy.sat.hh.s0" => "__builtin_HEXAGON_M2_mpy_sat_hh_s0", + "M2.mpy.sat.hh.s1" => "__builtin_HEXAGON_M2_mpy_sat_hh_s1", + "M2.mpy.sat.hl.s0" => "__builtin_HEXAGON_M2_mpy_sat_hl_s0", + "M2.mpy.sat.hl.s1" => "__builtin_HEXAGON_M2_mpy_sat_hl_s1", + "M2.mpy.sat.lh.s0" => "__builtin_HEXAGON_M2_mpy_sat_lh_s0", + "M2.mpy.sat.lh.s1" => "__builtin_HEXAGON_M2_mpy_sat_lh_s1", + "M2.mpy.sat.ll.s0" => "__builtin_HEXAGON_M2_mpy_sat_ll_s0", + "M2.mpy.sat.ll.s1" => "__builtin_HEXAGON_M2_mpy_sat_ll_s1", + "M2.mpy.sat.rnd.hh.s0" => "__builtin_HEXAGON_M2_mpy_sat_rnd_hh_s0", + "M2.mpy.sat.rnd.hh.s1" => "__builtin_HEXAGON_M2_mpy_sat_rnd_hh_s1", + "M2.mpy.sat.rnd.hl.s0" => "__builtin_HEXAGON_M2_mpy_sat_rnd_hl_s0", + "M2.mpy.sat.rnd.hl.s1" => "__builtin_HEXAGON_M2_mpy_sat_rnd_hl_s1", + "M2.mpy.sat.rnd.lh.s0" => "__builtin_HEXAGON_M2_mpy_sat_rnd_lh_s0", + "M2.mpy.sat.rnd.lh.s1" => "__builtin_HEXAGON_M2_mpy_sat_rnd_lh_s1", + "M2.mpy.sat.rnd.ll.s0" => "__builtin_HEXAGON_M2_mpy_sat_rnd_ll_s0", + "M2.mpy.sat.rnd.ll.s1" => "__builtin_HEXAGON_M2_mpy_sat_rnd_ll_s1", + "M2.mpy.up" => "__builtin_HEXAGON_M2_mpy_up", + "M2.mpy.up.s1" => "__builtin_HEXAGON_M2_mpy_up_s1", + "M2.mpy.up.s1.sat" => "__builtin_HEXAGON_M2_mpy_up_s1_sat", + "M2.mpyd.acc.hh.s0" => "__builtin_HEXAGON_M2_mpyd_acc_hh_s0", + 
"M2.mpyd.acc.hh.s1" => "__builtin_HEXAGON_M2_mpyd_acc_hh_s1", + "M2.mpyd.acc.hl.s0" => "__builtin_HEXAGON_M2_mpyd_acc_hl_s0", + "M2.mpyd.acc.hl.s1" => "__builtin_HEXAGON_M2_mpyd_acc_hl_s1", + "M2.mpyd.acc.lh.s0" => "__builtin_HEXAGON_M2_mpyd_acc_lh_s0", + "M2.mpyd.acc.lh.s1" => "__builtin_HEXAGON_M2_mpyd_acc_lh_s1", + "M2.mpyd.acc.ll.s0" => "__builtin_HEXAGON_M2_mpyd_acc_ll_s0", + "M2.mpyd.acc.ll.s1" => "__builtin_HEXAGON_M2_mpyd_acc_ll_s1", + "M2.mpyd.hh.s0" => "__builtin_HEXAGON_M2_mpyd_hh_s0", + "M2.mpyd.hh.s1" => "__builtin_HEXAGON_M2_mpyd_hh_s1", + "M2.mpyd.hl.s0" => "__builtin_HEXAGON_M2_mpyd_hl_s0", + "M2.mpyd.hl.s1" => "__builtin_HEXAGON_M2_mpyd_hl_s1", + "M2.mpyd.lh.s0" => "__builtin_HEXAGON_M2_mpyd_lh_s0", + "M2.mpyd.lh.s1" => "__builtin_HEXAGON_M2_mpyd_lh_s1", + "M2.mpyd.ll.s0" => "__builtin_HEXAGON_M2_mpyd_ll_s0", + "M2.mpyd.ll.s1" => "__builtin_HEXAGON_M2_mpyd_ll_s1", + "M2.mpyd.nac.hh.s0" => "__builtin_HEXAGON_M2_mpyd_nac_hh_s0", + "M2.mpyd.nac.hh.s1" => "__builtin_HEXAGON_M2_mpyd_nac_hh_s1", + "M2.mpyd.nac.hl.s0" => "__builtin_HEXAGON_M2_mpyd_nac_hl_s0", + "M2.mpyd.nac.hl.s1" => "__builtin_HEXAGON_M2_mpyd_nac_hl_s1", + "M2.mpyd.nac.lh.s0" => "__builtin_HEXAGON_M2_mpyd_nac_lh_s0", + "M2.mpyd.nac.lh.s1" => "__builtin_HEXAGON_M2_mpyd_nac_lh_s1", + "M2.mpyd.nac.ll.s0" => "__builtin_HEXAGON_M2_mpyd_nac_ll_s0", + "M2.mpyd.nac.ll.s1" => "__builtin_HEXAGON_M2_mpyd_nac_ll_s1", + "M2.mpyd.rnd.hh.s0" => "__builtin_HEXAGON_M2_mpyd_rnd_hh_s0", + "M2.mpyd.rnd.hh.s1" => "__builtin_HEXAGON_M2_mpyd_rnd_hh_s1", + "M2.mpyd.rnd.hl.s0" => "__builtin_HEXAGON_M2_mpyd_rnd_hl_s0", + "M2.mpyd.rnd.hl.s1" => "__builtin_HEXAGON_M2_mpyd_rnd_hl_s1", + "M2.mpyd.rnd.lh.s0" => "__builtin_HEXAGON_M2_mpyd_rnd_lh_s0", + "M2.mpyd.rnd.lh.s1" => "__builtin_HEXAGON_M2_mpyd_rnd_lh_s1", + "M2.mpyd.rnd.ll.s0" => "__builtin_HEXAGON_M2_mpyd_rnd_ll_s0", + "M2.mpyd.rnd.ll.s1" => "__builtin_HEXAGON_M2_mpyd_rnd_ll_s1", + "M2.mpyi" => "__builtin_HEXAGON_M2_mpyi", + "M2.mpysmi" => "__builtin_HEXAGON_M2_mpysmi", + "M2.mpysu.up" => "__builtin_HEXAGON_M2_mpysu_up", + "M2.mpyu.acc.hh.s0" => "__builtin_HEXAGON_M2_mpyu_acc_hh_s0", + "M2.mpyu.acc.hh.s1" => "__builtin_HEXAGON_M2_mpyu_acc_hh_s1", + "M2.mpyu.acc.hl.s0" => "__builtin_HEXAGON_M2_mpyu_acc_hl_s0", + "M2.mpyu.acc.hl.s1" => "__builtin_HEXAGON_M2_mpyu_acc_hl_s1", + "M2.mpyu.acc.lh.s0" => "__builtin_HEXAGON_M2_mpyu_acc_lh_s0", + "M2.mpyu.acc.lh.s1" => "__builtin_HEXAGON_M2_mpyu_acc_lh_s1", + "M2.mpyu.acc.ll.s0" => "__builtin_HEXAGON_M2_mpyu_acc_ll_s0", + "M2.mpyu.acc.ll.s1" => "__builtin_HEXAGON_M2_mpyu_acc_ll_s1", + "M2.mpyu.hh.s0" => "__builtin_HEXAGON_M2_mpyu_hh_s0", + "M2.mpyu.hh.s1" => "__builtin_HEXAGON_M2_mpyu_hh_s1", + "M2.mpyu.hl.s0" => "__builtin_HEXAGON_M2_mpyu_hl_s0", + "M2.mpyu.hl.s1" => "__builtin_HEXAGON_M2_mpyu_hl_s1", + "M2.mpyu.lh.s0" => "__builtin_HEXAGON_M2_mpyu_lh_s0", + "M2.mpyu.lh.s1" => "__builtin_HEXAGON_M2_mpyu_lh_s1", + "M2.mpyu.ll.s0" => "__builtin_HEXAGON_M2_mpyu_ll_s0", + "M2.mpyu.ll.s1" => "__builtin_HEXAGON_M2_mpyu_ll_s1", + "M2.mpyu.nac.hh.s0" => "__builtin_HEXAGON_M2_mpyu_nac_hh_s0", + "M2.mpyu.nac.hh.s1" => "__builtin_HEXAGON_M2_mpyu_nac_hh_s1", + "M2.mpyu.nac.hl.s0" => "__builtin_HEXAGON_M2_mpyu_nac_hl_s0", + "M2.mpyu.nac.hl.s1" => "__builtin_HEXAGON_M2_mpyu_nac_hl_s1", + "M2.mpyu.nac.lh.s0" => "__builtin_HEXAGON_M2_mpyu_nac_lh_s0", + "M2.mpyu.nac.lh.s1" => "__builtin_HEXAGON_M2_mpyu_nac_lh_s1", + "M2.mpyu.nac.ll.s0" => "__builtin_HEXAGON_M2_mpyu_nac_ll_s0", + "M2.mpyu.nac.ll.s1" => "__builtin_HEXAGON_M2_mpyu_nac_ll_s1", + "M2.mpyu.up" => 
"__builtin_HEXAGON_M2_mpyu_up", + "M2.mpyud.acc.hh.s0" => "__builtin_HEXAGON_M2_mpyud_acc_hh_s0", + "M2.mpyud.acc.hh.s1" => "__builtin_HEXAGON_M2_mpyud_acc_hh_s1", + "M2.mpyud.acc.hl.s0" => "__builtin_HEXAGON_M2_mpyud_acc_hl_s0", + "M2.mpyud.acc.hl.s1" => "__builtin_HEXAGON_M2_mpyud_acc_hl_s1", + "M2.mpyud.acc.lh.s0" => "__builtin_HEXAGON_M2_mpyud_acc_lh_s0", + "M2.mpyud.acc.lh.s1" => "__builtin_HEXAGON_M2_mpyud_acc_lh_s1", + "M2.mpyud.acc.ll.s0" => "__builtin_HEXAGON_M2_mpyud_acc_ll_s0", + "M2.mpyud.acc.ll.s1" => "__builtin_HEXAGON_M2_mpyud_acc_ll_s1", + "M2.mpyud.hh.s0" => "__builtin_HEXAGON_M2_mpyud_hh_s0", + "M2.mpyud.hh.s1" => "__builtin_HEXAGON_M2_mpyud_hh_s1", + "M2.mpyud.hl.s0" => "__builtin_HEXAGON_M2_mpyud_hl_s0", + "M2.mpyud.hl.s1" => "__builtin_HEXAGON_M2_mpyud_hl_s1", + "M2.mpyud.lh.s0" => "__builtin_HEXAGON_M2_mpyud_lh_s0", + "M2.mpyud.lh.s1" => "__builtin_HEXAGON_M2_mpyud_lh_s1", + "M2.mpyud.ll.s0" => "__builtin_HEXAGON_M2_mpyud_ll_s0", + "M2.mpyud.ll.s1" => "__builtin_HEXAGON_M2_mpyud_ll_s1", + "M2.mpyud.nac.hh.s0" => "__builtin_HEXAGON_M2_mpyud_nac_hh_s0", + "M2.mpyud.nac.hh.s1" => "__builtin_HEXAGON_M2_mpyud_nac_hh_s1", + "M2.mpyud.nac.hl.s0" => "__builtin_HEXAGON_M2_mpyud_nac_hl_s0", + "M2.mpyud.nac.hl.s1" => "__builtin_HEXAGON_M2_mpyud_nac_hl_s1", + "M2.mpyud.nac.lh.s0" => "__builtin_HEXAGON_M2_mpyud_nac_lh_s0", + "M2.mpyud.nac.lh.s1" => "__builtin_HEXAGON_M2_mpyud_nac_lh_s1", + "M2.mpyud.nac.ll.s0" => "__builtin_HEXAGON_M2_mpyud_nac_ll_s0", + "M2.mpyud.nac.ll.s1" => "__builtin_HEXAGON_M2_mpyud_nac_ll_s1", + "M2.mpyui" => "__builtin_HEXAGON_M2_mpyui", + "M2.nacci" => "__builtin_HEXAGON_M2_nacci", + "M2.naccii" => "__builtin_HEXAGON_M2_naccii", + "M2.subacc" => "__builtin_HEXAGON_M2_subacc", + "M2.vabsdiffh" => "__builtin_HEXAGON_M2_vabsdiffh", + "M2.vabsdiffw" => "__builtin_HEXAGON_M2_vabsdiffw", + "M2.vcmac.s0.sat.i" => "__builtin_HEXAGON_M2_vcmac_s0_sat_i", + "M2.vcmac.s0.sat.r" => "__builtin_HEXAGON_M2_vcmac_s0_sat_r", + "M2.vcmpy.s0.sat.i" => "__builtin_HEXAGON_M2_vcmpy_s0_sat_i", + "M2.vcmpy.s0.sat.r" => "__builtin_HEXAGON_M2_vcmpy_s0_sat_r", + "M2.vcmpy.s1.sat.i" => "__builtin_HEXAGON_M2_vcmpy_s1_sat_i", + "M2.vcmpy.s1.sat.r" => "__builtin_HEXAGON_M2_vcmpy_s1_sat_r", + "M2.vdmacs.s0" => "__builtin_HEXAGON_M2_vdmacs_s0", + "M2.vdmacs.s1" => "__builtin_HEXAGON_M2_vdmacs_s1", + "M2.vdmpyrs.s0" => "__builtin_HEXAGON_M2_vdmpyrs_s0", + "M2.vdmpyrs.s1" => "__builtin_HEXAGON_M2_vdmpyrs_s1", + "M2.vdmpys.s0" => "__builtin_HEXAGON_M2_vdmpys_s0", + "M2.vdmpys.s1" => "__builtin_HEXAGON_M2_vdmpys_s1", + "M2.vmac2" => "__builtin_HEXAGON_M2_vmac2", + "M2.vmac2es" => "__builtin_HEXAGON_M2_vmac2es", + "M2.vmac2es.s0" => "__builtin_HEXAGON_M2_vmac2es_s0", + "M2.vmac2es.s1" => "__builtin_HEXAGON_M2_vmac2es_s1", + "M2.vmac2s.s0" => "__builtin_HEXAGON_M2_vmac2s_s0", + "M2.vmac2s.s1" => "__builtin_HEXAGON_M2_vmac2s_s1", + "M2.vmac2su.s0" => "__builtin_HEXAGON_M2_vmac2su_s0", + "M2.vmac2su.s1" => "__builtin_HEXAGON_M2_vmac2su_s1", + "M2.vmpy2es.s0" => "__builtin_HEXAGON_M2_vmpy2es_s0", + "M2.vmpy2es.s1" => "__builtin_HEXAGON_M2_vmpy2es_s1", + "M2.vmpy2s.s0" => "__builtin_HEXAGON_M2_vmpy2s_s0", + "M2.vmpy2s.s0pack" => "__builtin_HEXAGON_M2_vmpy2s_s0pack", + "M2.vmpy2s.s1" => "__builtin_HEXAGON_M2_vmpy2s_s1", + "M2.vmpy2s.s1pack" => "__builtin_HEXAGON_M2_vmpy2s_s1pack", + "M2.vmpy2su.s0" => "__builtin_HEXAGON_M2_vmpy2su_s0", + "M2.vmpy2su.s1" => "__builtin_HEXAGON_M2_vmpy2su_s1", + "M2.vraddh" => "__builtin_HEXAGON_M2_vraddh", + "M2.vradduh" => "__builtin_HEXAGON_M2_vradduh", + 
"M2.vrcmaci.s0" => "__builtin_HEXAGON_M2_vrcmaci_s0", + "M2.vrcmaci.s0c" => "__builtin_HEXAGON_M2_vrcmaci_s0c", + "M2.vrcmacr.s0" => "__builtin_HEXAGON_M2_vrcmacr_s0", + "M2.vrcmacr.s0c" => "__builtin_HEXAGON_M2_vrcmacr_s0c", + "M2.vrcmpyi.s0" => "__builtin_HEXAGON_M2_vrcmpyi_s0", + "M2.vrcmpyi.s0c" => "__builtin_HEXAGON_M2_vrcmpyi_s0c", + "M2.vrcmpyr.s0" => "__builtin_HEXAGON_M2_vrcmpyr_s0", + "M2.vrcmpyr.s0c" => "__builtin_HEXAGON_M2_vrcmpyr_s0c", + "M2.vrcmpys.acc.s1" => "__builtin_HEXAGON_M2_vrcmpys_acc_s1", + "M2.vrcmpys.s1" => "__builtin_HEXAGON_M2_vrcmpys_s1", + "M2.vrcmpys.s1rp" => "__builtin_HEXAGON_M2_vrcmpys_s1rp", + "M2.vrmac.s0" => "__builtin_HEXAGON_M2_vrmac_s0", + "M2.vrmpy.s0" => "__builtin_HEXAGON_M2_vrmpy_s0", + "M2.xor.xacc" => "__builtin_HEXAGON_M2_xor_xacc", + "M4.and.and" => "__builtin_HEXAGON_M4_and_and", + "M4.and.andn" => "__builtin_HEXAGON_M4_and_andn", + "M4.and.or" => "__builtin_HEXAGON_M4_and_or", + "M4.and.xor" => "__builtin_HEXAGON_M4_and_xor", + "M4.cmpyi.wh" => "__builtin_HEXAGON_M4_cmpyi_wh", + "M4.cmpyi.whc" => "__builtin_HEXAGON_M4_cmpyi_whc", + "M4.cmpyr.wh" => "__builtin_HEXAGON_M4_cmpyr_wh", + "M4.cmpyr.whc" => "__builtin_HEXAGON_M4_cmpyr_whc", + "M4.mac.up.s1.sat" => "__builtin_HEXAGON_M4_mac_up_s1_sat", + "M4.mpyri.addi" => "__builtin_HEXAGON_M4_mpyri_addi", + "M4.mpyri.addr" => "__builtin_HEXAGON_M4_mpyri_addr", + "M4.mpyri.addr.u2" => "__builtin_HEXAGON_M4_mpyri_addr_u2", + "M4.mpyrr.addi" => "__builtin_HEXAGON_M4_mpyrr_addi", + "M4.mpyrr.addr" => "__builtin_HEXAGON_M4_mpyrr_addr", + "M4.nac.up.s1.sat" => "__builtin_HEXAGON_M4_nac_up_s1_sat", + "M4.or.and" => "__builtin_HEXAGON_M4_or_and", + "M4.or.andn" => "__builtin_HEXAGON_M4_or_andn", + "M4.or.or" => "__builtin_HEXAGON_M4_or_or", + "M4.or.xor" => "__builtin_HEXAGON_M4_or_xor", + "M4.pmpyw" => "__builtin_HEXAGON_M4_pmpyw", + "M4.pmpyw.acc" => "__builtin_HEXAGON_M4_pmpyw_acc", + "M4.vpmpyh" => "__builtin_HEXAGON_M4_vpmpyh", + "M4.vpmpyh.acc" => "__builtin_HEXAGON_M4_vpmpyh_acc", + "M4.vrmpyeh.acc.s0" => "__builtin_HEXAGON_M4_vrmpyeh_acc_s0", + "M4.vrmpyeh.acc.s1" => "__builtin_HEXAGON_M4_vrmpyeh_acc_s1", + "M4.vrmpyeh.s0" => "__builtin_HEXAGON_M4_vrmpyeh_s0", + "M4.vrmpyeh.s1" => "__builtin_HEXAGON_M4_vrmpyeh_s1", + "M4.vrmpyoh.acc.s0" => "__builtin_HEXAGON_M4_vrmpyoh_acc_s0", + "M4.vrmpyoh.acc.s1" => "__builtin_HEXAGON_M4_vrmpyoh_acc_s1", + "M4.vrmpyoh.s0" => "__builtin_HEXAGON_M4_vrmpyoh_s0", + "M4.vrmpyoh.s1" => "__builtin_HEXAGON_M4_vrmpyoh_s1", + "M4.xor.and" => "__builtin_HEXAGON_M4_xor_and", + "M4.xor.andn" => "__builtin_HEXAGON_M4_xor_andn", + "M4.xor.or" => "__builtin_HEXAGON_M4_xor_or", + "M4.xor.xacc" => "__builtin_HEXAGON_M4_xor_xacc", + "M5.vdmacbsu" => "__builtin_HEXAGON_M5_vdmacbsu", + "M5.vdmpybsu" => "__builtin_HEXAGON_M5_vdmpybsu", + "M5.vmacbsu" => "__builtin_HEXAGON_M5_vmacbsu", + "M5.vmacbuu" => "__builtin_HEXAGON_M5_vmacbuu", + "M5.vmpybsu" => "__builtin_HEXAGON_M5_vmpybsu", + "M5.vmpybuu" => "__builtin_HEXAGON_M5_vmpybuu", + "M5.vrmacbsu" => "__builtin_HEXAGON_M5_vrmacbsu", + "M5.vrmacbuu" => "__builtin_HEXAGON_M5_vrmacbuu", + "M5.vrmpybsu" => "__builtin_HEXAGON_M5_vrmpybsu", + "M5.vrmpybuu" => "__builtin_HEXAGON_M5_vrmpybuu", + "M6.vabsdiffb" => "__builtin_HEXAGON_M6_vabsdiffb", + "M6.vabsdiffub" => "__builtin_HEXAGON_M6_vabsdiffub", + "M7.dcmpyiw" => "__builtin_HEXAGON_M7_dcmpyiw", + "M7.dcmpyiw.acc" => "__builtin_HEXAGON_M7_dcmpyiw_acc", + "M7.dcmpyiwc" => "__builtin_HEXAGON_M7_dcmpyiwc", + "M7.dcmpyiwc.acc" => "__builtin_HEXAGON_M7_dcmpyiwc_acc", + "M7.dcmpyrw" => 
"__builtin_HEXAGON_M7_dcmpyrw", + "M7.dcmpyrw.acc" => "__builtin_HEXAGON_M7_dcmpyrw_acc", + "M7.dcmpyrwc" => "__builtin_HEXAGON_M7_dcmpyrwc", + "M7.dcmpyrwc.acc" => "__builtin_HEXAGON_M7_dcmpyrwc_acc", + "M7.vdmpy" => "__builtin_HEXAGON_M7_vdmpy", + "M7.vdmpy.acc" => "__builtin_HEXAGON_M7_vdmpy_acc", + "M7.wcmpyiw" => "__builtin_HEXAGON_M7_wcmpyiw", + "M7.wcmpyiw.rnd" => "__builtin_HEXAGON_M7_wcmpyiw_rnd", + "M7.wcmpyiwc" => "__builtin_HEXAGON_M7_wcmpyiwc", + "M7.wcmpyiwc.rnd" => "__builtin_HEXAGON_M7_wcmpyiwc_rnd", + "M7.wcmpyrw" => "__builtin_HEXAGON_M7_wcmpyrw", + "M7.wcmpyrw.rnd" => "__builtin_HEXAGON_M7_wcmpyrw_rnd", + "M7.wcmpyrwc" => "__builtin_HEXAGON_M7_wcmpyrwc", + "M7.wcmpyrwc.rnd" => "__builtin_HEXAGON_M7_wcmpyrwc_rnd", + "S2.addasl.rrri" => "__builtin_HEXAGON_S2_addasl_rrri", + "S2.asl.i.p" => "__builtin_HEXAGON_S2_asl_i_p", + "S2.asl.i.p.acc" => "__builtin_HEXAGON_S2_asl_i_p_acc", + "S2.asl.i.p.and" => "__builtin_HEXAGON_S2_asl_i_p_and", + "S2.asl.i.p.nac" => "__builtin_HEXAGON_S2_asl_i_p_nac", + "S2.asl.i.p.or" => "__builtin_HEXAGON_S2_asl_i_p_or", + "S2.asl.i.p.xacc" => "__builtin_HEXAGON_S2_asl_i_p_xacc", + "S2.asl.i.r" => "__builtin_HEXAGON_S2_asl_i_r", + "S2.asl.i.r.acc" => "__builtin_HEXAGON_S2_asl_i_r_acc", + "S2.asl.i.r.and" => "__builtin_HEXAGON_S2_asl_i_r_and", + "S2.asl.i.r.nac" => "__builtin_HEXAGON_S2_asl_i_r_nac", + "S2.asl.i.r.or" => "__builtin_HEXAGON_S2_asl_i_r_or", + "S2.asl.i.r.sat" => "__builtin_HEXAGON_S2_asl_i_r_sat", + "S2.asl.i.r.xacc" => "__builtin_HEXAGON_S2_asl_i_r_xacc", + "S2.asl.i.vh" => "__builtin_HEXAGON_S2_asl_i_vh", + "S2.asl.i.vw" => "__builtin_HEXAGON_S2_asl_i_vw", + "S2.asl.r.p" => "__builtin_HEXAGON_S2_asl_r_p", + "S2.asl.r.p.acc" => "__builtin_HEXAGON_S2_asl_r_p_acc", + "S2.asl.r.p.and" => "__builtin_HEXAGON_S2_asl_r_p_and", + "S2.asl.r.p.nac" => "__builtin_HEXAGON_S2_asl_r_p_nac", + "S2.asl.r.p.or" => "__builtin_HEXAGON_S2_asl_r_p_or", + "S2.asl.r.p.xor" => "__builtin_HEXAGON_S2_asl_r_p_xor", + "S2.asl.r.r" => "__builtin_HEXAGON_S2_asl_r_r", + "S2.asl.r.r.acc" => "__builtin_HEXAGON_S2_asl_r_r_acc", + "S2.asl.r.r.and" => "__builtin_HEXAGON_S2_asl_r_r_and", + "S2.asl.r.r.nac" => "__builtin_HEXAGON_S2_asl_r_r_nac", + "S2.asl.r.r.or" => "__builtin_HEXAGON_S2_asl_r_r_or", + "S2.asl.r.r.sat" => "__builtin_HEXAGON_S2_asl_r_r_sat", + "S2.asl.r.vh" => "__builtin_HEXAGON_S2_asl_r_vh", + "S2.asl.r.vw" => "__builtin_HEXAGON_S2_asl_r_vw", + "S2.asr.i.p" => "__builtin_HEXAGON_S2_asr_i_p", + "S2.asr.i.p.acc" => "__builtin_HEXAGON_S2_asr_i_p_acc", + "S2.asr.i.p.and" => "__builtin_HEXAGON_S2_asr_i_p_and", + "S2.asr.i.p.nac" => "__builtin_HEXAGON_S2_asr_i_p_nac", + "S2.asr.i.p.or" => "__builtin_HEXAGON_S2_asr_i_p_or", + "S2.asr.i.p.rnd" => "__builtin_HEXAGON_S2_asr_i_p_rnd", + "S2.asr.i.p.rnd.goodsyntax" => "__builtin_HEXAGON_S2_asr_i_p_rnd_goodsyntax", + "S2.asr.i.r" => "__builtin_HEXAGON_S2_asr_i_r", + "S2.asr.i.r.acc" => "__builtin_HEXAGON_S2_asr_i_r_acc", + "S2.asr.i.r.and" => "__builtin_HEXAGON_S2_asr_i_r_and", + "S2.asr.i.r.nac" => "__builtin_HEXAGON_S2_asr_i_r_nac", + "S2.asr.i.r.or" => "__builtin_HEXAGON_S2_asr_i_r_or", + "S2.asr.i.r.rnd" => "__builtin_HEXAGON_S2_asr_i_r_rnd", + "S2.asr.i.r.rnd.goodsyntax" => "__builtin_HEXAGON_S2_asr_i_r_rnd_goodsyntax", + "S2.asr.i.svw.trun" => "__builtin_HEXAGON_S2_asr_i_svw_trun", + "S2.asr.i.vh" => "__builtin_HEXAGON_S2_asr_i_vh", + "S2.asr.i.vw" => "__builtin_HEXAGON_S2_asr_i_vw", + "S2.asr.r.p" => "__builtin_HEXAGON_S2_asr_r_p", + "S2.asr.r.p.acc" => "__builtin_HEXAGON_S2_asr_r_p_acc", + "S2.asr.r.p.and" 
=> "__builtin_HEXAGON_S2_asr_r_p_and", + "S2.asr.r.p.nac" => "__builtin_HEXAGON_S2_asr_r_p_nac", + "S2.asr.r.p.or" => "__builtin_HEXAGON_S2_asr_r_p_or", + "S2.asr.r.p.xor" => "__builtin_HEXAGON_S2_asr_r_p_xor", + "S2.asr.r.r" => "__builtin_HEXAGON_S2_asr_r_r", + "S2.asr.r.r.acc" => "__builtin_HEXAGON_S2_asr_r_r_acc", + "S2.asr.r.r.and" => "__builtin_HEXAGON_S2_asr_r_r_and", + "S2.asr.r.r.nac" => "__builtin_HEXAGON_S2_asr_r_r_nac", + "S2.asr.r.r.or" => "__builtin_HEXAGON_S2_asr_r_r_or", + "S2.asr.r.r.sat" => "__builtin_HEXAGON_S2_asr_r_r_sat", + "S2.asr.r.svw.trun" => "__builtin_HEXAGON_S2_asr_r_svw_trun", + "S2.asr.r.vh" => "__builtin_HEXAGON_S2_asr_r_vh", + "S2.asr.r.vw" => "__builtin_HEXAGON_S2_asr_r_vw", + "S2.brev" => "__builtin_HEXAGON_S2_brev", + "S2.brevp" => "__builtin_HEXAGON_S2_brevp", + "S2.cabacencbin" => "__builtin_HEXAGON_S2_cabacencbin", + "S2.cl0" => "__builtin_HEXAGON_S2_cl0", + "S2.cl0p" => "__builtin_HEXAGON_S2_cl0p", + "S2.cl1" => "__builtin_HEXAGON_S2_cl1", + "S2.cl1p" => "__builtin_HEXAGON_S2_cl1p", + "S2.clb" => "__builtin_HEXAGON_S2_clb", + "S2.clbnorm" => "__builtin_HEXAGON_S2_clbnorm", + "S2.clbp" => "__builtin_HEXAGON_S2_clbp", + "S2.clrbit.i" => "__builtin_HEXAGON_S2_clrbit_i", + "S2.clrbit.r" => "__builtin_HEXAGON_S2_clrbit_r", + "S2.ct0" => "__builtin_HEXAGON_S2_ct0", + "S2.ct0p" => "__builtin_HEXAGON_S2_ct0p", + "S2.ct1" => "__builtin_HEXAGON_S2_ct1", + "S2.ct1p" => "__builtin_HEXAGON_S2_ct1p", + "S2.deinterleave" => "__builtin_HEXAGON_S2_deinterleave", + "S2.extractu" => "__builtin_HEXAGON_S2_extractu", + "S2.extractu.rp" => "__builtin_HEXAGON_S2_extractu_rp", + "S2.extractup" => "__builtin_HEXAGON_S2_extractup", + "S2.extractup.rp" => "__builtin_HEXAGON_S2_extractup_rp", + "S2.insert" => "__builtin_HEXAGON_S2_insert", + "S2.insert.rp" => "__builtin_HEXAGON_S2_insert_rp", + "S2.insertp" => "__builtin_HEXAGON_S2_insertp", + "S2.insertp.rp" => "__builtin_HEXAGON_S2_insertp_rp", + "S2.interleave" => "__builtin_HEXAGON_S2_interleave", + "S2.lfsp" => "__builtin_HEXAGON_S2_lfsp", + "S2.lsl.r.p" => "__builtin_HEXAGON_S2_lsl_r_p", + "S2.lsl.r.p.acc" => "__builtin_HEXAGON_S2_lsl_r_p_acc", + "S2.lsl.r.p.and" => "__builtin_HEXAGON_S2_lsl_r_p_and", + "S2.lsl.r.p.nac" => "__builtin_HEXAGON_S2_lsl_r_p_nac", + "S2.lsl.r.p.or" => "__builtin_HEXAGON_S2_lsl_r_p_or", + "S2.lsl.r.p.xor" => "__builtin_HEXAGON_S2_lsl_r_p_xor", + "S2.lsl.r.r" => "__builtin_HEXAGON_S2_lsl_r_r", + "S2.lsl.r.r.acc" => "__builtin_HEXAGON_S2_lsl_r_r_acc", + "S2.lsl.r.r.and" => "__builtin_HEXAGON_S2_lsl_r_r_and", + "S2.lsl.r.r.nac" => "__builtin_HEXAGON_S2_lsl_r_r_nac", + "S2.lsl.r.r.or" => "__builtin_HEXAGON_S2_lsl_r_r_or", + "S2.lsl.r.vh" => "__builtin_HEXAGON_S2_lsl_r_vh", + "S2.lsl.r.vw" => "__builtin_HEXAGON_S2_lsl_r_vw", + "S2.lsr.i.p" => "__builtin_HEXAGON_S2_lsr_i_p", + "S2.lsr.i.p.acc" => "__builtin_HEXAGON_S2_lsr_i_p_acc", + "S2.lsr.i.p.and" => "__builtin_HEXAGON_S2_lsr_i_p_and", + "S2.lsr.i.p.nac" => "__builtin_HEXAGON_S2_lsr_i_p_nac", + "S2.lsr.i.p.or" => "__builtin_HEXAGON_S2_lsr_i_p_or", + "S2.lsr.i.p.xacc" => "__builtin_HEXAGON_S2_lsr_i_p_xacc", + "S2.lsr.i.r" => "__builtin_HEXAGON_S2_lsr_i_r", + "S2.lsr.i.r.acc" => "__builtin_HEXAGON_S2_lsr_i_r_acc", + "S2.lsr.i.r.and" => "__builtin_HEXAGON_S2_lsr_i_r_and", + "S2.lsr.i.r.nac" => "__builtin_HEXAGON_S2_lsr_i_r_nac", + "S2.lsr.i.r.or" => "__builtin_HEXAGON_S2_lsr_i_r_or", + "S2.lsr.i.r.xacc" => "__builtin_HEXAGON_S2_lsr_i_r_xacc", + "S2.lsr.i.vh" => "__builtin_HEXAGON_S2_lsr_i_vh", + "S2.lsr.i.vw" => "__builtin_HEXAGON_S2_lsr_i_vw", + 
"S2.lsr.r.p" => "__builtin_HEXAGON_S2_lsr_r_p", + "S2.lsr.r.p.acc" => "__builtin_HEXAGON_S2_lsr_r_p_acc", + "S2.lsr.r.p.and" => "__builtin_HEXAGON_S2_lsr_r_p_and", + "S2.lsr.r.p.nac" => "__builtin_HEXAGON_S2_lsr_r_p_nac", + "S2.lsr.r.p.or" => "__builtin_HEXAGON_S2_lsr_r_p_or", + "S2.lsr.r.p.xor" => "__builtin_HEXAGON_S2_lsr_r_p_xor", + "S2.lsr.r.r" => "__builtin_HEXAGON_S2_lsr_r_r", + "S2.lsr.r.r.acc" => "__builtin_HEXAGON_S2_lsr_r_r_acc", + "S2.lsr.r.r.and" => "__builtin_HEXAGON_S2_lsr_r_r_and", + "S2.lsr.r.r.nac" => "__builtin_HEXAGON_S2_lsr_r_r_nac", + "S2.lsr.r.r.or" => "__builtin_HEXAGON_S2_lsr_r_r_or", + "S2.lsr.r.vh" => "__builtin_HEXAGON_S2_lsr_r_vh", + "S2.lsr.r.vw" => "__builtin_HEXAGON_S2_lsr_r_vw", + "S2.mask" => "__builtin_HEXAGON_S2_mask", + "S2.packhl" => "__builtin_HEXAGON_S2_packhl", + "S2.parityp" => "__builtin_HEXAGON_S2_parityp", + "S2.setbit.i" => "__builtin_HEXAGON_S2_setbit_i", + "S2.setbit.r" => "__builtin_HEXAGON_S2_setbit_r", + "S2.shuffeb" => "__builtin_HEXAGON_S2_shuffeb", + "S2.shuffeh" => "__builtin_HEXAGON_S2_shuffeh", + "S2.shuffob" => "__builtin_HEXAGON_S2_shuffob", + "S2.shuffoh" => "__builtin_HEXAGON_S2_shuffoh", + "S2.storerb.pbr" => "__builtin_brev_stb", + "S2.storerd.pbr" => "__builtin_brev_std", + "S2.storerf.pbr" => "__builtin_brev_sthhi", + "S2.storerh.pbr" => "__builtin_brev_sth", + "S2.storeri.pbr" => "__builtin_brev_stw", + "S2.storew.locked" => "__builtin_HEXAGON_S2_storew_locked", + "S2.svsathb" => "__builtin_HEXAGON_S2_svsathb", + "S2.svsathub" => "__builtin_HEXAGON_S2_svsathub", + "S2.tableidxb.goodsyntax" => "__builtin_HEXAGON_S2_tableidxb_goodsyntax", + "S2.tableidxd.goodsyntax" => "__builtin_HEXAGON_S2_tableidxd_goodsyntax", + "S2.tableidxh.goodsyntax" => "__builtin_HEXAGON_S2_tableidxh_goodsyntax", + "S2.tableidxw.goodsyntax" => "__builtin_HEXAGON_S2_tableidxw_goodsyntax", + "S2.togglebit.i" => "__builtin_HEXAGON_S2_togglebit_i", + "S2.togglebit.r" => "__builtin_HEXAGON_S2_togglebit_r", + "S2.tstbit.i" => "__builtin_HEXAGON_S2_tstbit_i", + "S2.tstbit.r" => "__builtin_HEXAGON_S2_tstbit_r", + "S2.valignib" => "__builtin_HEXAGON_S2_valignib", + "S2.valignrb" => "__builtin_HEXAGON_S2_valignrb", + "S2.vcnegh" => "__builtin_HEXAGON_S2_vcnegh", + "S2.vcrotate" => "__builtin_HEXAGON_S2_vcrotate", + "S2.vrcnegh" => "__builtin_HEXAGON_S2_vrcnegh", + "S2.vrndpackwh" => "__builtin_HEXAGON_S2_vrndpackwh", + "S2.vrndpackwhs" => "__builtin_HEXAGON_S2_vrndpackwhs", + "S2.vsathb" => "__builtin_HEXAGON_S2_vsathb", + "S2.vsathb.nopack" => "__builtin_HEXAGON_S2_vsathb_nopack", + "S2.vsathub" => "__builtin_HEXAGON_S2_vsathub", + "S2.vsathub.nopack" => "__builtin_HEXAGON_S2_vsathub_nopack", + "S2.vsatwh" => "__builtin_HEXAGON_S2_vsatwh", + "S2.vsatwh.nopack" => "__builtin_HEXAGON_S2_vsatwh_nopack", + "S2.vsatwuh" => "__builtin_HEXAGON_S2_vsatwuh", + "S2.vsatwuh.nopack" => "__builtin_HEXAGON_S2_vsatwuh_nopack", + "S2.vsplatrb" => "__builtin_HEXAGON_S2_vsplatrb", + "S2.vsplatrh" => "__builtin_HEXAGON_S2_vsplatrh", + "S2.vspliceib" => "__builtin_HEXAGON_S2_vspliceib", + "S2.vsplicerb" => "__builtin_HEXAGON_S2_vsplicerb", + "S2.vsxtbh" => "__builtin_HEXAGON_S2_vsxtbh", + "S2.vsxthw" => "__builtin_HEXAGON_S2_vsxthw", + "S2.vtrunehb" => "__builtin_HEXAGON_S2_vtrunehb", + "S2.vtrunewh" => "__builtin_HEXAGON_S2_vtrunewh", + "S2.vtrunohb" => "__builtin_HEXAGON_S2_vtrunohb", + "S2.vtrunowh" => "__builtin_HEXAGON_S2_vtrunowh", + "S2.vzxtbh" => "__builtin_HEXAGON_S2_vzxtbh", + "S2.vzxthw" => "__builtin_HEXAGON_S2_vzxthw", + "S4.addaddi" => "__builtin_HEXAGON_S4_addaddi", 
+ "S4.addi.asl.ri" => "__builtin_HEXAGON_S4_addi_asl_ri", + "S4.addi.lsr.ri" => "__builtin_HEXAGON_S4_addi_lsr_ri", + "S4.andi.asl.ri" => "__builtin_HEXAGON_S4_andi_asl_ri", + "S4.andi.lsr.ri" => "__builtin_HEXAGON_S4_andi_lsr_ri", + "S4.clbaddi" => "__builtin_HEXAGON_S4_clbaddi", + "S4.clbpaddi" => "__builtin_HEXAGON_S4_clbpaddi", + "S4.clbpnorm" => "__builtin_HEXAGON_S4_clbpnorm", + "S4.extract" => "__builtin_HEXAGON_S4_extract", + "S4.extract.rp" => "__builtin_HEXAGON_S4_extract_rp", + "S4.extractp" => "__builtin_HEXAGON_S4_extractp", + "S4.extractp.rp" => "__builtin_HEXAGON_S4_extractp_rp", + "S4.lsli" => "__builtin_HEXAGON_S4_lsli", + "S4.ntstbit.i" => "__builtin_HEXAGON_S4_ntstbit_i", + "S4.ntstbit.r" => "__builtin_HEXAGON_S4_ntstbit_r", + "S4.or.andi" => "__builtin_HEXAGON_S4_or_andi", + "S4.or.andix" => "__builtin_HEXAGON_S4_or_andix", + "S4.or.ori" => "__builtin_HEXAGON_S4_or_ori", + "S4.ori.asl.ri" => "__builtin_HEXAGON_S4_ori_asl_ri", + "S4.ori.lsr.ri" => "__builtin_HEXAGON_S4_ori_lsr_ri", + "S4.parity" => "__builtin_HEXAGON_S4_parity", + "S4.stored.locked" => "__builtin_HEXAGON_S4_stored_locked", + "S4.subaddi" => "__builtin_HEXAGON_S4_subaddi", + "S4.subi.asl.ri" => "__builtin_HEXAGON_S4_subi_asl_ri", + "S4.subi.lsr.ri" => "__builtin_HEXAGON_S4_subi_lsr_ri", + "S4.vrcrotate" => "__builtin_HEXAGON_S4_vrcrotate", + "S4.vrcrotate.acc" => "__builtin_HEXAGON_S4_vrcrotate_acc", + "S4.vxaddsubh" => "__builtin_HEXAGON_S4_vxaddsubh", + "S4.vxaddsubhr" => "__builtin_HEXAGON_S4_vxaddsubhr", + "S4.vxaddsubw" => "__builtin_HEXAGON_S4_vxaddsubw", + "S4.vxsubaddh" => "__builtin_HEXAGON_S4_vxsubaddh", + "S4.vxsubaddhr" => "__builtin_HEXAGON_S4_vxsubaddhr", + "S4.vxsubaddw" => "__builtin_HEXAGON_S4_vxsubaddw", + "S5.asrhub.rnd.sat.goodsyntax" => { + "__builtin_HEXAGON_S5_asrhub_rnd_sat_goodsyntax" + } + "S5.asrhub.sat" => "__builtin_HEXAGON_S5_asrhub_sat", + "S5.popcountp" => "__builtin_HEXAGON_S5_popcountp", + "S5.vasrhrnd.goodsyntax" => "__builtin_HEXAGON_S5_vasrhrnd_goodsyntax", + "S6.rol.i.p" => "__builtin_HEXAGON_S6_rol_i_p", + "S6.rol.i.p.acc" => "__builtin_HEXAGON_S6_rol_i_p_acc", + "S6.rol.i.p.and" => "__builtin_HEXAGON_S6_rol_i_p_and", + "S6.rol.i.p.nac" => "__builtin_HEXAGON_S6_rol_i_p_nac", + "S6.rol.i.p.or" => "__builtin_HEXAGON_S6_rol_i_p_or", + "S6.rol.i.p.xacc" => "__builtin_HEXAGON_S6_rol_i_p_xacc", + "S6.rol.i.r" => "__builtin_HEXAGON_S6_rol_i_r", + "S6.rol.i.r.acc" => "__builtin_HEXAGON_S6_rol_i_r_acc", + "S6.rol.i.r.and" => "__builtin_HEXAGON_S6_rol_i_r_and", + "S6.rol.i.r.nac" => "__builtin_HEXAGON_S6_rol_i_r_nac", + "S6.rol.i.r.or" => "__builtin_HEXAGON_S6_rol_i_r_or", + "S6.rol.i.r.xacc" => "__builtin_HEXAGON_S6_rol_i_r_xacc", + "S6.vsplatrbp" => "__builtin_HEXAGON_S6_vsplatrbp", + "S6.vtrunehb.ppp" => "__builtin_HEXAGON_S6_vtrunehb_ppp", + "S6.vtrunohb.ppp" => "__builtin_HEXAGON_S6_vtrunohb_ppp", + "SI.to.SXTHI.asrh" => "__builtin_SI_to_SXTHI_asrh", + "V6.extractw" => "__builtin_HEXAGON_V6_extractw", + "V6.extractw.128B" => "__builtin_HEXAGON_V6_extractw_128B", + "V6.get.qfext" => "__builtin_HEXAGON_V6_get_qfext", + "V6.get.qfext.128B" => "__builtin_HEXAGON_V6_get_qfext_128B", + "V6.get.qfext.oracc" => "__builtin_HEXAGON_V6_get_qfext_oracc", + "V6.get.qfext.oracc.128B" => "__builtin_HEXAGON_V6_get_qfext_oracc_128B", + "V6.hi" => "__builtin_HEXAGON_V6_hi", + "V6.hi.128B" => "__builtin_HEXAGON_V6_hi_128B", + "V6.lo" => "__builtin_HEXAGON_V6_lo", + "V6.lo.128B" => "__builtin_HEXAGON_V6_lo_128B", + "V6.lvsplatb" => "__builtin_HEXAGON_V6_lvsplatb", + "V6.lvsplatb.128B" => 
"__builtin_HEXAGON_V6_lvsplatb_128B", + "V6.lvsplath" => "__builtin_HEXAGON_V6_lvsplath", + "V6.lvsplath.128B" => "__builtin_HEXAGON_V6_lvsplath_128B", + "V6.lvsplatw" => "__builtin_HEXAGON_V6_lvsplatw", + "V6.lvsplatw.128B" => "__builtin_HEXAGON_V6_lvsplatw_128B", + "V6.pred.and" => "__builtin_HEXAGON_V6_pred_and", + "V6.pred.and.128B" => "__builtin_HEXAGON_V6_pred_and_128B", + "V6.pred.and.n" => "__builtin_HEXAGON_V6_pred_and_n", + "V6.pred.and.n.128B" => "__builtin_HEXAGON_V6_pred_and_n_128B", + "V6.pred.not" => "__builtin_HEXAGON_V6_pred_not", + "V6.pred.not.128B" => "__builtin_HEXAGON_V6_pred_not_128B", + "V6.pred.or" => "__builtin_HEXAGON_V6_pred_or", + "V6.pred.or.128B" => "__builtin_HEXAGON_V6_pred_or_128B", + "V6.pred.or.n" => "__builtin_HEXAGON_V6_pred_or_n", + "V6.pred.or.n.128B" => "__builtin_HEXAGON_V6_pred_or_n_128B", + "V6.pred.scalar2" => "__builtin_HEXAGON_V6_pred_scalar2", + "V6.pred.scalar2.128B" => "__builtin_HEXAGON_V6_pred_scalar2_128B", + "V6.pred.scalar2v2" => "__builtin_HEXAGON_V6_pred_scalar2v2", + "V6.pred.scalar2v2.128B" => "__builtin_HEXAGON_V6_pred_scalar2v2_128B", + "V6.pred.xor" => "__builtin_HEXAGON_V6_pred_xor", + "V6.pred.xor.128B" => "__builtin_HEXAGON_V6_pred_xor_128B", + "V6.set.qfext" => "__builtin_HEXAGON_V6_set_qfext", + "V6.set.qfext.128B" => "__builtin_HEXAGON_V6_set_qfext_128B", + "V6.shuffeqh" => "__builtin_HEXAGON_V6_shuffeqh", + "V6.shuffeqh.128B" => "__builtin_HEXAGON_V6_shuffeqh_128B", + "V6.shuffeqw" => "__builtin_HEXAGON_V6_shuffeqw", + "V6.shuffeqw.128B" => "__builtin_HEXAGON_V6_shuffeqw_128B", + "V6.v6mpyhubs10" => "__builtin_HEXAGON_V6_v6mpyhubs10", + "V6.v6mpyhubs10.128B" => "__builtin_HEXAGON_V6_v6mpyhubs10_128B", + "V6.v6mpyhubs10.vxx" => "__builtin_HEXAGON_V6_v6mpyhubs10_vxx", + "V6.v6mpyhubs10.vxx.128B" => "__builtin_HEXAGON_V6_v6mpyhubs10_vxx_128B", + "V6.v6mpyvubs10" => "__builtin_HEXAGON_V6_v6mpyvubs10", + "V6.v6mpyvubs10.128B" => "__builtin_HEXAGON_V6_v6mpyvubs10_128B", + "V6.v6mpyvubs10.vxx" => "__builtin_HEXAGON_V6_v6mpyvubs10_vxx", + "V6.v6mpyvubs10.vxx.128B" => "__builtin_HEXAGON_V6_v6mpyvubs10_vxx_128B", + "V6.vS32b.nqpred.ai" => "__builtin_HEXAGON_V6_vS32b_nqpred_ai", + "V6.vS32b.nqpred.ai.128B" => "__builtin_HEXAGON_V6_vS32b_nqpred_ai_128B", + "V6.vS32b.nt.nqpred.ai" => "__builtin_HEXAGON_V6_vS32b_nt_nqpred_ai", + "V6.vS32b.nt.nqpred.ai.128B" => "__builtin_HEXAGON_V6_vS32b_nt_nqpred_ai_128B", + "V6.vS32b.nt.qpred.ai" => "__builtin_HEXAGON_V6_vS32b_nt_qpred_ai", + "V6.vS32b.nt.qpred.ai.128B" => "__builtin_HEXAGON_V6_vS32b_nt_qpred_ai_128B", + "V6.vS32b.qpred.ai" => "__builtin_HEXAGON_V6_vS32b_qpred_ai", + "V6.vS32b.qpred.ai.128B" => "__builtin_HEXAGON_V6_vS32b_qpred_ai_128B", + "V6.vabs.f8" => "__builtin_HEXAGON_V6_vabs_f8", + "V6.vabs.f8.128B" => "__builtin_HEXAGON_V6_vabs_f8_128B", + "V6.vabs.hf" => "__builtin_HEXAGON_V6_vabs_hf", + "V6.vabs.hf.128B" => "__builtin_HEXAGON_V6_vabs_hf_128B", + "V6.vabs.sf" => "__builtin_HEXAGON_V6_vabs_sf", + "V6.vabs.sf.128B" => "__builtin_HEXAGON_V6_vabs_sf_128B", + "V6.vabsb" => "__builtin_HEXAGON_V6_vabsb", + "V6.vabsb.128B" => "__builtin_HEXAGON_V6_vabsb_128B", + "V6.vabsb.sat" => "__builtin_HEXAGON_V6_vabsb_sat", + "V6.vabsb.sat.128B" => "__builtin_HEXAGON_V6_vabsb_sat_128B", + "V6.vabsdiffh" => "__builtin_HEXAGON_V6_vabsdiffh", + "V6.vabsdiffh.128B" => "__builtin_HEXAGON_V6_vabsdiffh_128B", + "V6.vabsdiffub" => "__builtin_HEXAGON_V6_vabsdiffub", + "V6.vabsdiffub.128B" => "__builtin_HEXAGON_V6_vabsdiffub_128B", + "V6.vabsdiffuh" => "__builtin_HEXAGON_V6_vabsdiffuh", + 
"V6.vabsdiffuh.128B" => "__builtin_HEXAGON_V6_vabsdiffuh_128B", + "V6.vabsdiffw" => "__builtin_HEXAGON_V6_vabsdiffw", + "V6.vabsdiffw.128B" => "__builtin_HEXAGON_V6_vabsdiffw_128B", + "V6.vabsh" => "__builtin_HEXAGON_V6_vabsh", + "V6.vabsh.128B" => "__builtin_HEXAGON_V6_vabsh_128B", + "V6.vabsh.sat" => "__builtin_HEXAGON_V6_vabsh_sat", + "V6.vabsh.sat.128B" => "__builtin_HEXAGON_V6_vabsh_sat_128B", + "V6.vabsw" => "__builtin_HEXAGON_V6_vabsw", + "V6.vabsw.128B" => "__builtin_HEXAGON_V6_vabsw_128B", + "V6.vabsw.sat" => "__builtin_HEXAGON_V6_vabsw_sat", + "V6.vabsw.sat.128B" => "__builtin_HEXAGON_V6_vabsw_sat_128B", + "V6.vadd.hf" => "__builtin_HEXAGON_V6_vadd_hf", + "V6.vadd.hf.128B" => "__builtin_HEXAGON_V6_vadd_hf_128B", + "V6.vadd.hf.f8" => "__builtin_HEXAGON_V6_vadd_hf_f8", + "V6.vadd.hf.f8.128B" => "__builtin_HEXAGON_V6_vadd_hf_f8_128B", + "V6.vadd.hf.hf" => "__builtin_HEXAGON_V6_vadd_hf_hf", + "V6.vadd.hf.hf.128B" => "__builtin_HEXAGON_V6_vadd_hf_hf_128B", + "V6.vadd.qf16" => "__builtin_HEXAGON_V6_vadd_qf16", + "V6.vadd.qf16.128B" => "__builtin_HEXAGON_V6_vadd_qf16_128B", + "V6.vadd.qf16.mix" => "__builtin_HEXAGON_V6_vadd_qf16_mix", + "V6.vadd.qf16.mix.128B" => "__builtin_HEXAGON_V6_vadd_qf16_mix_128B", + "V6.vadd.qf32" => "__builtin_HEXAGON_V6_vadd_qf32", + "V6.vadd.qf32.128B" => "__builtin_HEXAGON_V6_vadd_qf32_128B", + "V6.vadd.qf32.mix" => "__builtin_HEXAGON_V6_vadd_qf32_mix", + "V6.vadd.qf32.mix.128B" => "__builtin_HEXAGON_V6_vadd_qf32_mix_128B", + "V6.vadd.sf" => "__builtin_HEXAGON_V6_vadd_sf", + "V6.vadd.sf.128B" => "__builtin_HEXAGON_V6_vadd_sf_128B", + "V6.vadd.sf.bf" => "__builtin_HEXAGON_V6_vadd_sf_bf", + "V6.vadd.sf.bf.128B" => "__builtin_HEXAGON_V6_vadd_sf_bf_128B", + "V6.vadd.sf.hf" => "__builtin_HEXAGON_V6_vadd_sf_hf", + "V6.vadd.sf.hf.128B" => "__builtin_HEXAGON_V6_vadd_sf_hf_128B", + "V6.vadd.sf.sf" => "__builtin_HEXAGON_V6_vadd_sf_sf", + "V6.vadd.sf.sf.128B" => "__builtin_HEXAGON_V6_vadd_sf_sf_128B", + "V6.vaddb" => "__builtin_HEXAGON_V6_vaddb", + "V6.vaddb.128B" => "__builtin_HEXAGON_V6_vaddb_128B", + "V6.vaddb.dv" => "__builtin_HEXAGON_V6_vaddb_dv", + "V6.vaddb.dv.128B" => "__builtin_HEXAGON_V6_vaddb_dv_128B", + "V6.vaddbnq" => "__builtin_HEXAGON_V6_vaddbnq", + "V6.vaddbnq.128B" => "__builtin_HEXAGON_V6_vaddbnq_128B", + "V6.vaddbq" => "__builtin_HEXAGON_V6_vaddbq", + "V6.vaddbq.128B" => "__builtin_HEXAGON_V6_vaddbq_128B", + "V6.vaddbsat" => "__builtin_HEXAGON_V6_vaddbsat", + "V6.vaddbsat.128B" => "__builtin_HEXAGON_V6_vaddbsat_128B", + "V6.vaddbsat.dv" => "__builtin_HEXAGON_V6_vaddbsat_dv", + "V6.vaddbsat.dv.128B" => "__builtin_HEXAGON_V6_vaddbsat_dv_128B", + "V6.vaddcarrysat" => "__builtin_HEXAGON_V6_vaddcarrysat", + "V6.vaddcarrysat.128B" => "__builtin_HEXAGON_V6_vaddcarrysat_128B", + "V6.vaddclbh" => "__builtin_HEXAGON_V6_vaddclbh", + "V6.vaddclbh.128B" => "__builtin_HEXAGON_V6_vaddclbh_128B", + "V6.vaddclbw" => "__builtin_HEXAGON_V6_vaddclbw", + "V6.vaddclbw.128B" => "__builtin_HEXAGON_V6_vaddclbw_128B", + "V6.vaddh" => "__builtin_HEXAGON_V6_vaddh", + "V6.vaddh.128B" => "__builtin_HEXAGON_V6_vaddh_128B", + "V6.vaddh.dv" => "__builtin_HEXAGON_V6_vaddh_dv", + "V6.vaddh.dv.128B" => "__builtin_HEXAGON_V6_vaddh_dv_128B", + "V6.vaddhnq" => "__builtin_HEXAGON_V6_vaddhnq", + "V6.vaddhnq.128B" => "__builtin_HEXAGON_V6_vaddhnq_128B", + "V6.vaddhq" => "__builtin_HEXAGON_V6_vaddhq", + "V6.vaddhq.128B" => "__builtin_HEXAGON_V6_vaddhq_128B", + "V6.vaddhsat" => "__builtin_HEXAGON_V6_vaddhsat", + "V6.vaddhsat.128B" => "__builtin_HEXAGON_V6_vaddhsat_128B", + "V6.vaddhsat.dv" => 
"__builtin_HEXAGON_V6_vaddhsat_dv", + "V6.vaddhsat.dv.128B" => "__builtin_HEXAGON_V6_vaddhsat_dv_128B", + "V6.vaddhw" => "__builtin_HEXAGON_V6_vaddhw", + "V6.vaddhw.128B" => "__builtin_HEXAGON_V6_vaddhw_128B", + "V6.vaddhw.acc" => "__builtin_HEXAGON_V6_vaddhw_acc", + "V6.vaddhw.acc.128B" => "__builtin_HEXAGON_V6_vaddhw_acc_128B", + "V6.vaddubh" => "__builtin_HEXAGON_V6_vaddubh", + "V6.vaddubh.128B" => "__builtin_HEXAGON_V6_vaddubh_128B", + "V6.vaddubh.acc" => "__builtin_HEXAGON_V6_vaddubh_acc", + "V6.vaddubh.acc.128B" => "__builtin_HEXAGON_V6_vaddubh_acc_128B", + "V6.vaddubsat" => "__builtin_HEXAGON_V6_vaddubsat", + "V6.vaddubsat.128B" => "__builtin_HEXAGON_V6_vaddubsat_128B", + "V6.vaddubsat.dv" => "__builtin_HEXAGON_V6_vaddubsat_dv", + "V6.vaddubsat.dv.128B" => "__builtin_HEXAGON_V6_vaddubsat_dv_128B", + "V6.vaddububb.sat" => "__builtin_HEXAGON_V6_vaddububb_sat", + "V6.vaddububb.sat.128B" => "__builtin_HEXAGON_V6_vaddububb_sat_128B", + "V6.vadduhsat" => "__builtin_HEXAGON_V6_vadduhsat", + "V6.vadduhsat.128B" => "__builtin_HEXAGON_V6_vadduhsat_128B", + "V6.vadduhsat.dv" => "__builtin_HEXAGON_V6_vadduhsat_dv", + "V6.vadduhsat.dv.128B" => "__builtin_HEXAGON_V6_vadduhsat_dv_128B", + "V6.vadduhw" => "__builtin_HEXAGON_V6_vadduhw", + "V6.vadduhw.128B" => "__builtin_HEXAGON_V6_vadduhw_128B", + "V6.vadduhw.acc" => "__builtin_HEXAGON_V6_vadduhw_acc", + "V6.vadduhw.acc.128B" => "__builtin_HEXAGON_V6_vadduhw_acc_128B", + "V6.vadduwsat" => "__builtin_HEXAGON_V6_vadduwsat", + "V6.vadduwsat.128B" => "__builtin_HEXAGON_V6_vadduwsat_128B", + "V6.vadduwsat.dv" => "__builtin_HEXAGON_V6_vadduwsat_dv", + "V6.vadduwsat.dv.128B" => "__builtin_HEXAGON_V6_vadduwsat_dv_128B", + "V6.vaddw" => "__builtin_HEXAGON_V6_vaddw", + "V6.vaddw.128B" => "__builtin_HEXAGON_V6_vaddw_128B", + "V6.vaddw.dv" => "__builtin_HEXAGON_V6_vaddw_dv", + "V6.vaddw.dv.128B" => "__builtin_HEXAGON_V6_vaddw_dv_128B", + "V6.vaddwnq" => "__builtin_HEXAGON_V6_vaddwnq", + "V6.vaddwnq.128B" => "__builtin_HEXAGON_V6_vaddwnq_128B", + "V6.vaddwq" => "__builtin_HEXAGON_V6_vaddwq", + "V6.vaddwq.128B" => "__builtin_HEXAGON_V6_vaddwq_128B", + "V6.vaddwsat" => "__builtin_HEXAGON_V6_vaddwsat", + "V6.vaddwsat.128B" => "__builtin_HEXAGON_V6_vaddwsat_128B", + "V6.vaddwsat.dv" => "__builtin_HEXAGON_V6_vaddwsat_dv", + "V6.vaddwsat.dv.128B" => "__builtin_HEXAGON_V6_vaddwsat_dv_128B", + "V6.valignb" => "__builtin_HEXAGON_V6_valignb", + "V6.valignb.128B" => "__builtin_HEXAGON_V6_valignb_128B", + "V6.valignbi" => "__builtin_HEXAGON_V6_valignbi", + "V6.valignbi.128B" => "__builtin_HEXAGON_V6_valignbi_128B", + "V6.vand" => "__builtin_HEXAGON_V6_vand", + "V6.vand.128B" => "__builtin_HEXAGON_V6_vand_128B", + "V6.vandnqrt" => "__builtin_HEXAGON_V6_vandnqrt", + "V6.vandnqrt.128B" => "__builtin_HEXAGON_V6_vandnqrt_128B", + "V6.vandnqrt.acc" => "__builtin_HEXAGON_V6_vandnqrt_acc", + "V6.vandnqrt.acc.128B" => "__builtin_HEXAGON_V6_vandnqrt_acc_128B", + "V6.vandqrt" => "__builtin_HEXAGON_V6_vandqrt", + "V6.vandqrt.128B" => "__builtin_HEXAGON_V6_vandqrt_128B", + "V6.vandqrt.acc" => "__builtin_HEXAGON_V6_vandqrt_acc", + "V6.vandqrt.acc.128B" => "__builtin_HEXAGON_V6_vandqrt_acc_128B", + "V6.vandvnqv" => "__builtin_HEXAGON_V6_vandvnqv", + "V6.vandvnqv.128B" => "__builtin_HEXAGON_V6_vandvnqv_128B", + "V6.vandvqv" => "__builtin_HEXAGON_V6_vandvqv", + "V6.vandvqv.128B" => "__builtin_HEXAGON_V6_vandvqv_128B", + "V6.vandvrt" => "__builtin_HEXAGON_V6_vandvrt", + "V6.vandvrt.128B" => "__builtin_HEXAGON_V6_vandvrt_128B", + "V6.vandvrt.acc" => "__builtin_HEXAGON_V6_vandvrt_acc", + 
"V6.vandvrt.acc.128B" => "__builtin_HEXAGON_V6_vandvrt_acc_128B", + "V6.vaslh" => "__builtin_HEXAGON_V6_vaslh", + "V6.vaslh.128B" => "__builtin_HEXAGON_V6_vaslh_128B", + "V6.vaslh.acc" => "__builtin_HEXAGON_V6_vaslh_acc", + "V6.vaslh.acc.128B" => "__builtin_HEXAGON_V6_vaslh_acc_128B", + "V6.vaslhv" => "__builtin_HEXAGON_V6_vaslhv", + "V6.vaslhv.128B" => "__builtin_HEXAGON_V6_vaslhv_128B", + "V6.vaslw" => "__builtin_HEXAGON_V6_vaslw", + "V6.vaslw.128B" => "__builtin_HEXAGON_V6_vaslw_128B", + "V6.vaslw.acc" => "__builtin_HEXAGON_V6_vaslw_acc", + "V6.vaslw.acc.128B" => "__builtin_HEXAGON_V6_vaslw_acc_128B", + "V6.vaslwv" => "__builtin_HEXAGON_V6_vaslwv", + "V6.vaslwv.128B" => "__builtin_HEXAGON_V6_vaslwv_128B", + "V6.vasr.into" => "__builtin_HEXAGON_V6_vasr_into", + "V6.vasr.into.128B" => "__builtin_HEXAGON_V6_vasr_into_128B", + "V6.vasrh" => "__builtin_HEXAGON_V6_vasrh", + "V6.vasrh.128B" => "__builtin_HEXAGON_V6_vasrh_128B", + "V6.vasrh.acc" => "__builtin_HEXAGON_V6_vasrh_acc", + "V6.vasrh.acc.128B" => "__builtin_HEXAGON_V6_vasrh_acc_128B", + "V6.vasrhbrndsat" => "__builtin_HEXAGON_V6_vasrhbrndsat", + "V6.vasrhbrndsat.128B" => "__builtin_HEXAGON_V6_vasrhbrndsat_128B", + "V6.vasrhbsat" => "__builtin_HEXAGON_V6_vasrhbsat", + "V6.vasrhbsat.128B" => "__builtin_HEXAGON_V6_vasrhbsat_128B", + "V6.vasrhubrndsat" => "__builtin_HEXAGON_V6_vasrhubrndsat", + "V6.vasrhubrndsat.128B" => "__builtin_HEXAGON_V6_vasrhubrndsat_128B", + "V6.vasrhubsat" => "__builtin_HEXAGON_V6_vasrhubsat", + "V6.vasrhubsat.128B" => "__builtin_HEXAGON_V6_vasrhubsat_128B", + "V6.vasrhv" => "__builtin_HEXAGON_V6_vasrhv", + "V6.vasrhv.128B" => "__builtin_HEXAGON_V6_vasrhv_128B", + "V6.vasruhubrndsat" => "__builtin_HEXAGON_V6_vasruhubrndsat", + "V6.vasruhubrndsat.128B" => "__builtin_HEXAGON_V6_vasruhubrndsat_128B", + "V6.vasruhubsat" => "__builtin_HEXAGON_V6_vasruhubsat", + "V6.vasruhubsat.128B" => "__builtin_HEXAGON_V6_vasruhubsat_128B", + "V6.vasruwuhrndsat" => "__builtin_HEXAGON_V6_vasruwuhrndsat", + "V6.vasruwuhrndsat.128B" => "__builtin_HEXAGON_V6_vasruwuhrndsat_128B", + "V6.vasruwuhsat" => "__builtin_HEXAGON_V6_vasruwuhsat", + "V6.vasruwuhsat.128B" => "__builtin_HEXAGON_V6_vasruwuhsat_128B", + "V6.vasrvuhubrndsat" => "__builtin_HEXAGON_V6_vasrvuhubrndsat", + "V6.vasrvuhubrndsat.128B" => "__builtin_HEXAGON_V6_vasrvuhubrndsat_128B", + "V6.vasrvuhubsat" => "__builtin_HEXAGON_V6_vasrvuhubsat", + "V6.vasrvuhubsat.128B" => "__builtin_HEXAGON_V6_vasrvuhubsat_128B", + "V6.vasrvwuhrndsat" => "__builtin_HEXAGON_V6_vasrvwuhrndsat", + "V6.vasrvwuhrndsat.128B" => "__builtin_HEXAGON_V6_vasrvwuhrndsat_128B", + "V6.vasrvwuhsat" => "__builtin_HEXAGON_V6_vasrvwuhsat", + "V6.vasrvwuhsat.128B" => "__builtin_HEXAGON_V6_vasrvwuhsat_128B", + "V6.vasrw" => "__builtin_HEXAGON_V6_vasrw", + "V6.vasrw.128B" => "__builtin_HEXAGON_V6_vasrw_128B", + "V6.vasrw.acc" => "__builtin_HEXAGON_V6_vasrw_acc", + "V6.vasrw.acc.128B" => "__builtin_HEXAGON_V6_vasrw_acc_128B", + "V6.vasrwh" => "__builtin_HEXAGON_V6_vasrwh", + "V6.vasrwh.128B" => "__builtin_HEXAGON_V6_vasrwh_128B", + "V6.vasrwhrndsat" => "__builtin_HEXAGON_V6_vasrwhrndsat", + "V6.vasrwhrndsat.128B" => "__builtin_HEXAGON_V6_vasrwhrndsat_128B", + "V6.vasrwhsat" => "__builtin_HEXAGON_V6_vasrwhsat", + "V6.vasrwhsat.128B" => "__builtin_HEXAGON_V6_vasrwhsat_128B", + "V6.vasrwuhrndsat" => "__builtin_HEXAGON_V6_vasrwuhrndsat", + "V6.vasrwuhrndsat.128B" => "__builtin_HEXAGON_V6_vasrwuhrndsat_128B", + "V6.vasrwuhsat" => "__builtin_HEXAGON_V6_vasrwuhsat", + "V6.vasrwuhsat.128B" => 
"__builtin_HEXAGON_V6_vasrwuhsat_128B", + "V6.vasrwv" => "__builtin_HEXAGON_V6_vasrwv", + "V6.vasrwv.128B" => "__builtin_HEXAGON_V6_vasrwv_128B", + "V6.vassign" => "__builtin_HEXAGON_V6_vassign", + "V6.vassign.128B" => "__builtin_HEXAGON_V6_vassign_128B", + "V6.vassign.fp" => "__builtin_HEXAGON_V6_vassign_fp", + "V6.vassign.fp.128B" => "__builtin_HEXAGON_V6_vassign_fp_128B", + "V6.vassignp" => "__builtin_HEXAGON_V6_vassignp", + "V6.vassignp.128B" => "__builtin_HEXAGON_V6_vassignp_128B", + "V6.vavgb" => "__builtin_HEXAGON_V6_vavgb", + "V6.vavgb.128B" => "__builtin_HEXAGON_V6_vavgb_128B", + "V6.vavgbrnd" => "__builtin_HEXAGON_V6_vavgbrnd", + "V6.vavgbrnd.128B" => "__builtin_HEXAGON_V6_vavgbrnd_128B", + "V6.vavgh" => "__builtin_HEXAGON_V6_vavgh", + "V6.vavgh.128B" => "__builtin_HEXAGON_V6_vavgh_128B", + "V6.vavghrnd" => "__builtin_HEXAGON_V6_vavghrnd", + "V6.vavghrnd.128B" => "__builtin_HEXAGON_V6_vavghrnd_128B", + "V6.vavgub" => "__builtin_HEXAGON_V6_vavgub", + "V6.vavgub.128B" => "__builtin_HEXAGON_V6_vavgub_128B", + "V6.vavgubrnd" => "__builtin_HEXAGON_V6_vavgubrnd", + "V6.vavgubrnd.128B" => "__builtin_HEXAGON_V6_vavgubrnd_128B", + "V6.vavguh" => "__builtin_HEXAGON_V6_vavguh", + "V6.vavguh.128B" => "__builtin_HEXAGON_V6_vavguh_128B", + "V6.vavguhrnd" => "__builtin_HEXAGON_V6_vavguhrnd", + "V6.vavguhrnd.128B" => "__builtin_HEXAGON_V6_vavguhrnd_128B", + "V6.vavguw" => "__builtin_HEXAGON_V6_vavguw", + "V6.vavguw.128B" => "__builtin_HEXAGON_V6_vavguw_128B", + "V6.vavguwrnd" => "__builtin_HEXAGON_V6_vavguwrnd", + "V6.vavguwrnd.128B" => "__builtin_HEXAGON_V6_vavguwrnd_128B", + "V6.vavgw" => "__builtin_HEXAGON_V6_vavgw", + "V6.vavgw.128B" => "__builtin_HEXAGON_V6_vavgw_128B", + "V6.vavgwrnd" => "__builtin_HEXAGON_V6_vavgwrnd", + "V6.vavgwrnd.128B" => "__builtin_HEXAGON_V6_vavgwrnd_128B", + "V6.vcl0h" => "__builtin_HEXAGON_V6_vcl0h", + "V6.vcl0h.128B" => "__builtin_HEXAGON_V6_vcl0h_128B", + "V6.vcl0w" => "__builtin_HEXAGON_V6_vcl0w", + "V6.vcl0w.128B" => "__builtin_HEXAGON_V6_vcl0w_128B", + "V6.vcombine" => "__builtin_HEXAGON_V6_vcombine", + "V6.vcombine.128B" => "__builtin_HEXAGON_V6_vcombine_128B", + "V6.vconv.h.hf" => "__builtin_HEXAGON_V6_vconv_h_hf", + "V6.vconv.h.hf.128B" => "__builtin_HEXAGON_V6_vconv_h_hf_128B", + "V6.vconv.hf.h" => "__builtin_HEXAGON_V6_vconv_hf_h", + "V6.vconv.hf.h.128B" => "__builtin_HEXAGON_V6_vconv_hf_h_128B", + "V6.vconv.hf.qf16" => "__builtin_HEXAGON_V6_vconv_hf_qf16", + "V6.vconv.hf.qf16.128B" => "__builtin_HEXAGON_V6_vconv_hf_qf16_128B", + "V6.vconv.hf.qf32" => "__builtin_HEXAGON_V6_vconv_hf_qf32", + "V6.vconv.hf.qf32.128B" => "__builtin_HEXAGON_V6_vconv_hf_qf32_128B", + "V6.vconv.sf.qf32" => "__builtin_HEXAGON_V6_vconv_sf_qf32", + "V6.vconv.sf.qf32.128B" => "__builtin_HEXAGON_V6_vconv_sf_qf32_128B", + "V6.vconv.sf.w" => "__builtin_HEXAGON_V6_vconv_sf_w", + "V6.vconv.sf.w.128B" => "__builtin_HEXAGON_V6_vconv_sf_w_128B", + "V6.vconv.w.sf" => "__builtin_HEXAGON_V6_vconv_w_sf", + "V6.vconv.w.sf.128B" => "__builtin_HEXAGON_V6_vconv_w_sf_128B", + "V6.vcvt.b.hf" => "__builtin_HEXAGON_V6_vcvt_b_hf", + "V6.vcvt.b.hf.128B" => "__builtin_HEXAGON_V6_vcvt_b_hf_128B", + "V6.vcvt.bf.sf" => "__builtin_HEXAGON_V6_vcvt_bf_sf", + "V6.vcvt.bf.sf.128B" => "__builtin_HEXAGON_V6_vcvt_bf_sf_128B", + "V6.vcvt.f8.hf" => "__builtin_HEXAGON_V6_vcvt_f8_hf", + "V6.vcvt.f8.hf.128B" => "__builtin_HEXAGON_V6_vcvt_f8_hf_128B", + "V6.vcvt.h.hf" => "__builtin_HEXAGON_V6_vcvt_h_hf", + "V6.vcvt.h.hf.128B" => "__builtin_HEXAGON_V6_vcvt_h_hf_128B", + "V6.vcvt.hf.b" => "__builtin_HEXAGON_V6_vcvt_hf_b", 
+ "V6.vcvt.hf.b.128B" => "__builtin_HEXAGON_V6_vcvt_hf_b_128B", + "V6.vcvt.hf.f8" => "__builtin_HEXAGON_V6_vcvt_hf_f8", + "V6.vcvt.hf.f8.128B" => "__builtin_HEXAGON_V6_vcvt_hf_f8_128B", + "V6.vcvt.hf.h" => "__builtin_HEXAGON_V6_vcvt_hf_h", + "V6.vcvt.hf.h.128B" => "__builtin_HEXAGON_V6_vcvt_hf_h_128B", + "V6.vcvt.hf.sf" => "__builtin_HEXAGON_V6_vcvt_hf_sf", + "V6.vcvt.hf.sf.128B" => "__builtin_HEXAGON_V6_vcvt_hf_sf_128B", + "V6.vcvt.hf.ub" => "__builtin_HEXAGON_V6_vcvt_hf_ub", + "V6.vcvt.hf.ub.128B" => "__builtin_HEXAGON_V6_vcvt_hf_ub_128B", + "V6.vcvt.hf.uh" => "__builtin_HEXAGON_V6_vcvt_hf_uh", + "V6.vcvt.hf.uh.128B" => "__builtin_HEXAGON_V6_vcvt_hf_uh_128B", + "V6.vcvt.sf.hf" => "__builtin_HEXAGON_V6_vcvt_sf_hf", + "V6.vcvt.sf.hf.128B" => "__builtin_HEXAGON_V6_vcvt_sf_hf_128B", + "V6.vcvt.ub.hf" => "__builtin_HEXAGON_V6_vcvt_ub_hf", + "V6.vcvt.ub.hf.128B" => "__builtin_HEXAGON_V6_vcvt_ub_hf_128B", + "V6.vcvt.uh.hf" => "__builtin_HEXAGON_V6_vcvt_uh_hf", + "V6.vcvt.uh.hf.128B" => "__builtin_HEXAGON_V6_vcvt_uh_hf_128B", + "V6.vcvt2.b.hf" => "__builtin_HEXAGON_V6_vcvt2_b_hf", + "V6.vcvt2.b.hf.128B" => "__builtin_HEXAGON_V6_vcvt2_b_hf_128B", + "V6.vcvt2.hf.b" => "__builtin_HEXAGON_V6_vcvt2_hf_b", + "V6.vcvt2.hf.b.128B" => "__builtin_HEXAGON_V6_vcvt2_hf_b_128B", + "V6.vcvt2.hf.ub" => "__builtin_HEXAGON_V6_vcvt2_hf_ub", + "V6.vcvt2.hf.ub.128B" => "__builtin_HEXAGON_V6_vcvt2_hf_ub_128B", + "V6.vcvt2.ub.hf" => "__builtin_HEXAGON_V6_vcvt2_ub_hf", + "V6.vcvt2.ub.hf.128B" => "__builtin_HEXAGON_V6_vcvt2_ub_hf_128B", + "V6.vd0" => "__builtin_HEXAGON_V6_vd0", + "V6.vd0.128B" => "__builtin_HEXAGON_V6_vd0_128B", + "V6.vdd0" => "__builtin_HEXAGON_V6_vdd0", + "V6.vdd0.128B" => "__builtin_HEXAGON_V6_vdd0_128B", + "V6.vdealb" => "__builtin_HEXAGON_V6_vdealb", + "V6.vdealb.128B" => "__builtin_HEXAGON_V6_vdealb_128B", + "V6.vdealb4w" => "__builtin_HEXAGON_V6_vdealb4w", + "V6.vdealb4w.128B" => "__builtin_HEXAGON_V6_vdealb4w_128B", + "V6.vdealh" => "__builtin_HEXAGON_V6_vdealh", + "V6.vdealh.128B" => "__builtin_HEXAGON_V6_vdealh_128B", + "V6.vdealvdd" => "__builtin_HEXAGON_V6_vdealvdd", + "V6.vdealvdd.128B" => "__builtin_HEXAGON_V6_vdealvdd_128B", + "V6.vdelta" => "__builtin_HEXAGON_V6_vdelta", + "V6.vdelta.128B" => "__builtin_HEXAGON_V6_vdelta_128B", + "V6.vdmpy.sf.hf" => "__builtin_HEXAGON_V6_vdmpy_sf_hf", + "V6.vdmpy.sf.hf.128B" => "__builtin_HEXAGON_V6_vdmpy_sf_hf_128B", + "V6.vdmpy.sf.hf.acc" => "__builtin_HEXAGON_V6_vdmpy_sf_hf_acc", + "V6.vdmpy.sf.hf.acc.128B" => "__builtin_HEXAGON_V6_vdmpy_sf_hf_acc_128B", + "V6.vdmpybus" => "__builtin_HEXAGON_V6_vdmpybus", + "V6.vdmpybus.128B" => "__builtin_HEXAGON_V6_vdmpybus_128B", + "V6.vdmpybus.acc" => "__builtin_HEXAGON_V6_vdmpybus_acc", + "V6.vdmpybus.acc.128B" => "__builtin_HEXAGON_V6_vdmpybus_acc_128B", + "V6.vdmpybus.dv" => "__builtin_HEXAGON_V6_vdmpybus_dv", + "V6.vdmpybus.dv.128B" => "__builtin_HEXAGON_V6_vdmpybus_dv_128B", + "V6.vdmpybus.dv.acc" => "__builtin_HEXAGON_V6_vdmpybus_dv_acc", + "V6.vdmpybus.dv.acc.128B" => "__builtin_HEXAGON_V6_vdmpybus_dv_acc_128B", + "V6.vdmpyhb" => "__builtin_HEXAGON_V6_vdmpyhb", + "V6.vdmpyhb.128B" => "__builtin_HEXAGON_V6_vdmpyhb_128B", + "V6.vdmpyhb.acc" => "__builtin_HEXAGON_V6_vdmpyhb_acc", + "V6.vdmpyhb.acc.128B" => "__builtin_HEXAGON_V6_vdmpyhb_acc_128B", + "V6.vdmpyhb.dv" => "__builtin_HEXAGON_V6_vdmpyhb_dv", + "V6.vdmpyhb.dv.128B" => "__builtin_HEXAGON_V6_vdmpyhb_dv_128B", + "V6.vdmpyhb.dv.acc" => "__builtin_HEXAGON_V6_vdmpyhb_dv_acc", + "V6.vdmpyhb.dv.acc.128B" => "__builtin_HEXAGON_V6_vdmpyhb_dv_acc_128B", + 
"V6.vdmpyhisat" => "__builtin_HEXAGON_V6_vdmpyhisat", + "V6.vdmpyhisat.128B" => "__builtin_HEXAGON_V6_vdmpyhisat_128B", + "V6.vdmpyhisat.acc" => "__builtin_HEXAGON_V6_vdmpyhisat_acc", + "V6.vdmpyhisat.acc.128B" => "__builtin_HEXAGON_V6_vdmpyhisat_acc_128B", + "V6.vdmpyhsat" => "__builtin_HEXAGON_V6_vdmpyhsat", + "V6.vdmpyhsat.128B" => "__builtin_HEXAGON_V6_vdmpyhsat_128B", + "V6.vdmpyhsat.acc" => "__builtin_HEXAGON_V6_vdmpyhsat_acc", + "V6.vdmpyhsat.acc.128B" => "__builtin_HEXAGON_V6_vdmpyhsat_acc_128B", + "V6.vdmpyhsuisat" => "__builtin_HEXAGON_V6_vdmpyhsuisat", + "V6.vdmpyhsuisat.128B" => "__builtin_HEXAGON_V6_vdmpyhsuisat_128B", + "V6.vdmpyhsuisat.acc" => "__builtin_HEXAGON_V6_vdmpyhsuisat_acc", + "V6.vdmpyhsuisat.acc.128B" => "__builtin_HEXAGON_V6_vdmpyhsuisat_acc_128B", + "V6.vdmpyhsusat" => "__builtin_HEXAGON_V6_vdmpyhsusat", + "V6.vdmpyhsusat.128B" => "__builtin_HEXAGON_V6_vdmpyhsusat_128B", + "V6.vdmpyhsusat.acc" => "__builtin_HEXAGON_V6_vdmpyhsusat_acc", + "V6.vdmpyhsusat.acc.128B" => "__builtin_HEXAGON_V6_vdmpyhsusat_acc_128B", + "V6.vdmpyhvsat" => "__builtin_HEXAGON_V6_vdmpyhvsat", + "V6.vdmpyhvsat.128B" => "__builtin_HEXAGON_V6_vdmpyhvsat_128B", + "V6.vdmpyhvsat.acc" => "__builtin_HEXAGON_V6_vdmpyhvsat_acc", + "V6.vdmpyhvsat.acc.128B" => "__builtin_HEXAGON_V6_vdmpyhvsat_acc_128B", + "V6.vdsaduh" => "__builtin_HEXAGON_V6_vdsaduh", + "V6.vdsaduh.128B" => "__builtin_HEXAGON_V6_vdsaduh_128B", + "V6.vdsaduh.acc" => "__builtin_HEXAGON_V6_vdsaduh_acc", + "V6.vdsaduh.acc.128B" => "__builtin_HEXAGON_V6_vdsaduh_acc_128B", + "V6.veqb" => "__builtin_HEXAGON_V6_veqb", + "V6.veqb.128B" => "__builtin_HEXAGON_V6_veqb_128B", + "V6.veqb.and" => "__builtin_HEXAGON_V6_veqb_and", + "V6.veqb.and.128B" => "__builtin_HEXAGON_V6_veqb_and_128B", + "V6.veqb.or" => "__builtin_HEXAGON_V6_veqb_or", + "V6.veqb.or.128B" => "__builtin_HEXAGON_V6_veqb_or_128B", + "V6.veqb.xor" => "__builtin_HEXAGON_V6_veqb_xor", + "V6.veqb.xor.128B" => "__builtin_HEXAGON_V6_veqb_xor_128B", + "V6.veqh" => "__builtin_HEXAGON_V6_veqh", + "V6.veqh.128B" => "__builtin_HEXAGON_V6_veqh_128B", + "V6.veqh.and" => "__builtin_HEXAGON_V6_veqh_and", + "V6.veqh.and.128B" => "__builtin_HEXAGON_V6_veqh_and_128B", + "V6.veqh.or" => "__builtin_HEXAGON_V6_veqh_or", + "V6.veqh.or.128B" => "__builtin_HEXAGON_V6_veqh_or_128B", + "V6.veqh.xor" => "__builtin_HEXAGON_V6_veqh_xor", + "V6.veqh.xor.128B" => "__builtin_HEXAGON_V6_veqh_xor_128B", + "V6.veqw" => "__builtin_HEXAGON_V6_veqw", + "V6.veqw.128B" => "__builtin_HEXAGON_V6_veqw_128B", + "V6.veqw.and" => "__builtin_HEXAGON_V6_veqw_and", + "V6.veqw.and.128B" => "__builtin_HEXAGON_V6_veqw_and_128B", + "V6.veqw.or" => "__builtin_HEXAGON_V6_veqw_or", + "V6.veqw.or.128B" => "__builtin_HEXAGON_V6_veqw_or_128B", + "V6.veqw.xor" => "__builtin_HEXAGON_V6_veqw_xor", + "V6.veqw.xor.128B" => "__builtin_HEXAGON_V6_veqw_xor_128B", + "V6.vfmax.f8" => "__builtin_HEXAGON_V6_vfmax_f8", + "V6.vfmax.f8.128B" => "__builtin_HEXAGON_V6_vfmax_f8_128B", + "V6.vfmax.hf" => "__builtin_HEXAGON_V6_vfmax_hf", + "V6.vfmax.hf.128B" => "__builtin_HEXAGON_V6_vfmax_hf_128B", + "V6.vfmax.sf" => "__builtin_HEXAGON_V6_vfmax_sf", + "V6.vfmax.sf.128B" => "__builtin_HEXAGON_V6_vfmax_sf_128B", + "V6.vfmin.f8" => "__builtin_HEXAGON_V6_vfmin_f8", + "V6.vfmin.f8.128B" => "__builtin_HEXAGON_V6_vfmin_f8_128B", + "V6.vfmin.hf" => "__builtin_HEXAGON_V6_vfmin_hf", + "V6.vfmin.hf.128B" => "__builtin_HEXAGON_V6_vfmin_hf_128B", + "V6.vfmin.sf" => "__builtin_HEXAGON_V6_vfmin_sf", + "V6.vfmin.sf.128B" => "__builtin_HEXAGON_V6_vfmin_sf_128B", + 
"V6.vfneg.f8" => "__builtin_HEXAGON_V6_vfneg_f8", + "V6.vfneg.f8.128B" => "__builtin_HEXAGON_V6_vfneg_f8_128B", + "V6.vfneg.hf" => "__builtin_HEXAGON_V6_vfneg_hf", + "V6.vfneg.hf.128B" => "__builtin_HEXAGON_V6_vfneg_hf_128B", + "V6.vfneg.sf" => "__builtin_HEXAGON_V6_vfneg_sf", + "V6.vfneg.sf.128B" => "__builtin_HEXAGON_V6_vfneg_sf_128B", + "V6.vgathermh" => "__builtin_HEXAGON_V6_vgathermh", + "V6.vgathermh.128B" => "__builtin_HEXAGON_V6_vgathermh_128B", + "V6.vgathermhq" => "__builtin_HEXAGON_V6_vgathermhq", + "V6.vgathermhq.128B" => "__builtin_HEXAGON_V6_vgathermhq_128B", + "V6.vgathermhw" => "__builtin_HEXAGON_V6_vgathermhw", + "V6.vgathermhw.128B" => "__builtin_HEXAGON_V6_vgathermhw_128B", + "V6.vgathermhwq" => "__builtin_HEXAGON_V6_vgathermhwq", + "V6.vgathermhwq.128B" => "__builtin_HEXAGON_V6_vgathermhwq_128B", + "V6.vgathermw" => "__builtin_HEXAGON_V6_vgathermw", + "V6.vgathermw.128B" => "__builtin_HEXAGON_V6_vgathermw_128B", + "V6.vgathermwq" => "__builtin_HEXAGON_V6_vgathermwq", + "V6.vgathermwq.128B" => "__builtin_HEXAGON_V6_vgathermwq_128B", + "V6.vgtb" => "__builtin_HEXAGON_V6_vgtb", + "V6.vgtb.128B" => "__builtin_HEXAGON_V6_vgtb_128B", + "V6.vgtb.and" => "__builtin_HEXAGON_V6_vgtb_and", + "V6.vgtb.and.128B" => "__builtin_HEXAGON_V6_vgtb_and_128B", + "V6.vgtb.or" => "__builtin_HEXAGON_V6_vgtb_or", + "V6.vgtb.or.128B" => "__builtin_HEXAGON_V6_vgtb_or_128B", + "V6.vgtb.xor" => "__builtin_HEXAGON_V6_vgtb_xor", + "V6.vgtb.xor.128B" => "__builtin_HEXAGON_V6_vgtb_xor_128B", + "V6.vgtbf" => "__builtin_HEXAGON_V6_vgtbf", + "V6.vgtbf.128B" => "__builtin_HEXAGON_V6_vgtbf_128B", + "V6.vgtbf.and" => "__builtin_HEXAGON_V6_vgtbf_and", + "V6.vgtbf.and.128B" => "__builtin_HEXAGON_V6_vgtbf_and_128B", + "V6.vgtbf.or" => "__builtin_HEXAGON_V6_vgtbf_or", + "V6.vgtbf.or.128B" => "__builtin_HEXAGON_V6_vgtbf_or_128B", + "V6.vgtbf.xor" => "__builtin_HEXAGON_V6_vgtbf_xor", + "V6.vgtbf.xor.128B" => "__builtin_HEXAGON_V6_vgtbf_xor_128B", + "V6.vgth" => "__builtin_HEXAGON_V6_vgth", + "V6.vgth.128B" => "__builtin_HEXAGON_V6_vgth_128B", + "V6.vgth.and" => "__builtin_HEXAGON_V6_vgth_and", + "V6.vgth.and.128B" => "__builtin_HEXAGON_V6_vgth_and_128B", + "V6.vgth.or" => "__builtin_HEXAGON_V6_vgth_or", + "V6.vgth.or.128B" => "__builtin_HEXAGON_V6_vgth_or_128B", + "V6.vgth.xor" => "__builtin_HEXAGON_V6_vgth_xor", + "V6.vgth.xor.128B" => "__builtin_HEXAGON_V6_vgth_xor_128B", + "V6.vgthf" => "__builtin_HEXAGON_V6_vgthf", + "V6.vgthf.128B" => "__builtin_HEXAGON_V6_vgthf_128B", + "V6.vgthf.and" => "__builtin_HEXAGON_V6_vgthf_and", + "V6.vgthf.and.128B" => "__builtin_HEXAGON_V6_vgthf_and_128B", + "V6.vgthf.or" => "__builtin_HEXAGON_V6_vgthf_or", + "V6.vgthf.or.128B" => "__builtin_HEXAGON_V6_vgthf_or_128B", + "V6.vgthf.xor" => "__builtin_HEXAGON_V6_vgthf_xor", + "V6.vgthf.xor.128B" => "__builtin_HEXAGON_V6_vgthf_xor_128B", + "V6.vgtsf" => "__builtin_HEXAGON_V6_vgtsf", + "V6.vgtsf.128B" => "__builtin_HEXAGON_V6_vgtsf_128B", + "V6.vgtsf.and" => "__builtin_HEXAGON_V6_vgtsf_and", + "V6.vgtsf.and.128B" => "__builtin_HEXAGON_V6_vgtsf_and_128B", + "V6.vgtsf.or" => "__builtin_HEXAGON_V6_vgtsf_or", + "V6.vgtsf.or.128B" => "__builtin_HEXAGON_V6_vgtsf_or_128B", + "V6.vgtsf.xor" => "__builtin_HEXAGON_V6_vgtsf_xor", + "V6.vgtsf.xor.128B" => "__builtin_HEXAGON_V6_vgtsf_xor_128B", + "V6.vgtub" => "__builtin_HEXAGON_V6_vgtub", + "V6.vgtub.128B" => "__builtin_HEXAGON_V6_vgtub_128B", + "V6.vgtub.and" => "__builtin_HEXAGON_V6_vgtub_and", + "V6.vgtub.and.128B" => "__builtin_HEXAGON_V6_vgtub_and_128B", + "V6.vgtub.or" => 
"__builtin_HEXAGON_V6_vgtub_or", + "V6.vgtub.or.128B" => "__builtin_HEXAGON_V6_vgtub_or_128B", + "V6.vgtub.xor" => "__builtin_HEXAGON_V6_vgtub_xor", + "V6.vgtub.xor.128B" => "__builtin_HEXAGON_V6_vgtub_xor_128B", + "V6.vgtuh" => "__builtin_HEXAGON_V6_vgtuh", + "V6.vgtuh.128B" => "__builtin_HEXAGON_V6_vgtuh_128B", + "V6.vgtuh.and" => "__builtin_HEXAGON_V6_vgtuh_and", + "V6.vgtuh.and.128B" => "__builtin_HEXAGON_V6_vgtuh_and_128B", + "V6.vgtuh.or" => "__builtin_HEXAGON_V6_vgtuh_or", + "V6.vgtuh.or.128B" => "__builtin_HEXAGON_V6_vgtuh_or_128B", + "V6.vgtuh.xor" => "__builtin_HEXAGON_V6_vgtuh_xor", + "V6.vgtuh.xor.128B" => "__builtin_HEXAGON_V6_vgtuh_xor_128B", + "V6.vgtuw" => "__builtin_HEXAGON_V6_vgtuw", + "V6.vgtuw.128B" => "__builtin_HEXAGON_V6_vgtuw_128B", + "V6.vgtuw.and" => "__builtin_HEXAGON_V6_vgtuw_and", + "V6.vgtuw.and.128B" => "__builtin_HEXAGON_V6_vgtuw_and_128B", + "V6.vgtuw.or" => "__builtin_HEXAGON_V6_vgtuw_or", + "V6.vgtuw.or.128B" => "__builtin_HEXAGON_V6_vgtuw_or_128B", + "V6.vgtuw.xor" => "__builtin_HEXAGON_V6_vgtuw_xor", + "V6.vgtuw.xor.128B" => "__builtin_HEXAGON_V6_vgtuw_xor_128B", + "V6.vgtw" => "__builtin_HEXAGON_V6_vgtw", + "V6.vgtw.128B" => "__builtin_HEXAGON_V6_vgtw_128B", + "V6.vgtw.and" => "__builtin_HEXAGON_V6_vgtw_and", + "V6.vgtw.and.128B" => "__builtin_HEXAGON_V6_vgtw_and_128B", + "V6.vgtw.or" => "__builtin_HEXAGON_V6_vgtw_or", + "V6.vgtw.or.128B" => "__builtin_HEXAGON_V6_vgtw_or_128B", + "V6.vgtw.xor" => "__builtin_HEXAGON_V6_vgtw_xor", + "V6.vgtw.xor.128B" => "__builtin_HEXAGON_V6_vgtw_xor_128B", + "V6.vinsertwr" => "__builtin_HEXAGON_V6_vinsertwr", + "V6.vinsertwr.128B" => "__builtin_HEXAGON_V6_vinsertwr_128B", + "V6.vlalignb" => "__builtin_HEXAGON_V6_vlalignb", + "V6.vlalignb.128B" => "__builtin_HEXAGON_V6_vlalignb_128B", + "V6.vlalignbi" => "__builtin_HEXAGON_V6_vlalignbi", + "V6.vlalignbi.128B" => "__builtin_HEXAGON_V6_vlalignbi_128B", + "V6.vlsrb" => "__builtin_HEXAGON_V6_vlsrb", + "V6.vlsrb.128B" => "__builtin_HEXAGON_V6_vlsrb_128B", + "V6.vlsrh" => "__builtin_HEXAGON_V6_vlsrh", + "V6.vlsrh.128B" => "__builtin_HEXAGON_V6_vlsrh_128B", + "V6.vlsrhv" => "__builtin_HEXAGON_V6_vlsrhv", + "V6.vlsrhv.128B" => "__builtin_HEXAGON_V6_vlsrhv_128B", + "V6.vlsrw" => "__builtin_HEXAGON_V6_vlsrw", + "V6.vlsrw.128B" => "__builtin_HEXAGON_V6_vlsrw_128B", + "V6.vlsrwv" => "__builtin_HEXAGON_V6_vlsrwv", + "V6.vlsrwv.128B" => "__builtin_HEXAGON_V6_vlsrwv_128B", + "V6.vlut4" => "__builtin_HEXAGON_V6_vlut4", + "V6.vlut4.128B" => "__builtin_HEXAGON_V6_vlut4_128B", + "V6.vlutb" => "__builtin_HEXAGON_V6_vlutb", + "V6.vlutb.128B" => "__builtin_HEXAGON_V6_vlutb_128B", + "V6.vlutb.acc" => "__builtin_HEXAGON_V6_vlutb_acc", + "V6.vlutb.acc.128B" => "__builtin_HEXAGON_V6_vlutb_acc_128B", + "V6.vlutb.dv" => "__builtin_HEXAGON_V6_vlutb_dv", + "V6.vlutb.dv.128B" => "__builtin_HEXAGON_V6_vlutb_dv_128B", + "V6.vlutb.dv.acc" => "__builtin_HEXAGON_V6_vlutb_dv_acc", + "V6.vlutb.dv.acc.128B" => "__builtin_HEXAGON_V6_vlutb_dv_acc_128B", + "V6.vlutvvb" => "__builtin_HEXAGON_V6_vlutvvb", + "V6.vlutvvb.128B" => "__builtin_HEXAGON_V6_vlutvvb_128B", + "V6.vlutvvb.nm" => "__builtin_HEXAGON_V6_vlutvvb_nm", + "V6.vlutvvb.nm.128B" => "__builtin_HEXAGON_V6_vlutvvb_nm_128B", + "V6.vlutvvb.oracc" => "__builtin_HEXAGON_V6_vlutvvb_oracc", + "V6.vlutvvb.oracc.128B" => "__builtin_HEXAGON_V6_vlutvvb_oracc_128B", + "V6.vlutvvb.oracci" => "__builtin_HEXAGON_V6_vlutvvb_oracci", + "V6.vlutvvb.oracci.128B" => "__builtin_HEXAGON_V6_vlutvvb_oracci_128B", + "V6.vlutvvbi" => "__builtin_HEXAGON_V6_vlutvvbi", + 
"V6.vlutvvbi.128B" => "__builtin_HEXAGON_V6_vlutvvbi_128B", + "V6.vlutvwh" => "__builtin_HEXAGON_V6_vlutvwh", + "V6.vlutvwh.128B" => "__builtin_HEXAGON_V6_vlutvwh_128B", + "V6.vlutvwh.nm" => "__builtin_HEXAGON_V6_vlutvwh_nm", + "V6.vlutvwh.nm.128B" => "__builtin_HEXAGON_V6_vlutvwh_nm_128B", + "V6.vlutvwh.oracc" => "__builtin_HEXAGON_V6_vlutvwh_oracc", + "V6.vlutvwh.oracc.128B" => "__builtin_HEXAGON_V6_vlutvwh_oracc_128B", + "V6.vlutvwh.oracci" => "__builtin_HEXAGON_V6_vlutvwh_oracci", + "V6.vlutvwh.oracci.128B" => "__builtin_HEXAGON_V6_vlutvwh_oracci_128B", + "V6.vlutvwhi" => "__builtin_HEXAGON_V6_vlutvwhi", + "V6.vlutvwhi.128B" => "__builtin_HEXAGON_V6_vlutvwhi_128B", + "V6.vmax.bf" => "__builtin_HEXAGON_V6_vmax_bf", + "V6.vmax.bf.128B" => "__builtin_HEXAGON_V6_vmax_bf_128B", + "V6.vmax.hf" => "__builtin_HEXAGON_V6_vmax_hf", + "V6.vmax.hf.128B" => "__builtin_HEXAGON_V6_vmax_hf_128B", + "V6.vmax.sf" => "__builtin_HEXAGON_V6_vmax_sf", + "V6.vmax.sf.128B" => "__builtin_HEXAGON_V6_vmax_sf_128B", + "V6.vmaxb" => "__builtin_HEXAGON_V6_vmaxb", + "V6.vmaxb.128B" => "__builtin_HEXAGON_V6_vmaxb_128B", + "V6.vmaxh" => "__builtin_HEXAGON_V6_vmaxh", + "V6.vmaxh.128B" => "__builtin_HEXAGON_V6_vmaxh_128B", + "V6.vmaxub" => "__builtin_HEXAGON_V6_vmaxub", + "V6.vmaxub.128B" => "__builtin_HEXAGON_V6_vmaxub_128B", + "V6.vmaxuh" => "__builtin_HEXAGON_V6_vmaxuh", + "V6.vmaxuh.128B" => "__builtin_HEXAGON_V6_vmaxuh_128B", + "V6.vmaxw" => "__builtin_HEXAGON_V6_vmaxw", + "V6.vmaxw.128B" => "__builtin_HEXAGON_V6_vmaxw_128B", + "V6.vmerge.qf" => "__builtin_HEXAGON_V6_vmerge_qf", + "V6.vmerge.qf.128B" => "__builtin_HEXAGON_V6_vmerge_qf_128B", + "V6.vmin.bf" => "__builtin_HEXAGON_V6_vmin_bf", + "V6.vmin.bf.128B" => "__builtin_HEXAGON_V6_vmin_bf_128B", + "V6.vmin.hf" => "__builtin_HEXAGON_V6_vmin_hf", + "V6.vmin.hf.128B" => "__builtin_HEXAGON_V6_vmin_hf_128B", + "V6.vmin.sf" => "__builtin_HEXAGON_V6_vmin_sf", + "V6.vmin.sf.128B" => "__builtin_HEXAGON_V6_vmin_sf_128B", + "V6.vminb" => "__builtin_HEXAGON_V6_vminb", + "V6.vminb.128B" => "__builtin_HEXAGON_V6_vminb_128B", + "V6.vminh" => "__builtin_HEXAGON_V6_vminh", + "V6.vminh.128B" => "__builtin_HEXAGON_V6_vminh_128B", + "V6.vminub" => "__builtin_HEXAGON_V6_vminub", + "V6.vminub.128B" => "__builtin_HEXAGON_V6_vminub_128B", + "V6.vminuh" => "__builtin_HEXAGON_V6_vminuh", + "V6.vminuh.128B" => "__builtin_HEXAGON_V6_vminuh_128B", + "V6.vminw" => "__builtin_HEXAGON_V6_vminw", + "V6.vminw.128B" => "__builtin_HEXAGON_V6_vminw_128B", + "V6.vmpabus" => "__builtin_HEXAGON_V6_vmpabus", + "V6.vmpabus.128B" => "__builtin_HEXAGON_V6_vmpabus_128B", + "V6.vmpabus.acc" => "__builtin_HEXAGON_V6_vmpabus_acc", + "V6.vmpabus.acc.128B" => "__builtin_HEXAGON_V6_vmpabus_acc_128B", + "V6.vmpabusv" => "__builtin_HEXAGON_V6_vmpabusv", + "V6.vmpabusv.128B" => "__builtin_HEXAGON_V6_vmpabusv_128B", + "V6.vmpabuu" => "__builtin_HEXAGON_V6_vmpabuu", + "V6.vmpabuu.128B" => "__builtin_HEXAGON_V6_vmpabuu_128B", + "V6.vmpabuu.acc" => "__builtin_HEXAGON_V6_vmpabuu_acc", + "V6.vmpabuu.acc.128B" => "__builtin_HEXAGON_V6_vmpabuu_acc_128B", + "V6.vmpabuuv" => "__builtin_HEXAGON_V6_vmpabuuv", + "V6.vmpabuuv.128B" => "__builtin_HEXAGON_V6_vmpabuuv_128B", + "V6.vmpahb" => "__builtin_HEXAGON_V6_vmpahb", + "V6.vmpahb.128B" => "__builtin_HEXAGON_V6_vmpahb_128B", + "V6.vmpahb.acc" => "__builtin_HEXAGON_V6_vmpahb_acc", + "V6.vmpahb.acc.128B" => "__builtin_HEXAGON_V6_vmpahb_acc_128B", + "V6.vmpahhsat" => "__builtin_HEXAGON_V6_vmpahhsat", + "V6.vmpahhsat.128B" => "__builtin_HEXAGON_V6_vmpahhsat_128B", + "V6.vmpauhb" 
=> "__builtin_HEXAGON_V6_vmpauhb", + "V6.vmpauhb.128B" => "__builtin_HEXAGON_V6_vmpauhb_128B", + "V6.vmpauhb.acc" => "__builtin_HEXAGON_V6_vmpauhb_acc", + "V6.vmpauhb.acc.128B" => "__builtin_HEXAGON_V6_vmpauhb_acc_128B", + "V6.vmpauhuhsat" => "__builtin_HEXAGON_V6_vmpauhuhsat", + "V6.vmpauhuhsat.128B" => "__builtin_HEXAGON_V6_vmpauhuhsat_128B", + "V6.vmpsuhuhsat" => "__builtin_HEXAGON_V6_vmpsuhuhsat", + "V6.vmpsuhuhsat.128B" => "__builtin_HEXAGON_V6_vmpsuhuhsat_128B", + "V6.vmpy.hf.f8" => "__builtin_HEXAGON_V6_vmpy_hf_f8", + "V6.vmpy.hf.f8.128B" => "__builtin_HEXAGON_V6_vmpy_hf_f8_128B", + "V6.vmpy.hf.f8.acc" => "__builtin_HEXAGON_V6_vmpy_hf_f8_acc", + "V6.vmpy.hf.f8.acc.128B" => "__builtin_HEXAGON_V6_vmpy_hf_f8_acc_128B", + "V6.vmpy.hf.hf" => "__builtin_HEXAGON_V6_vmpy_hf_hf", + "V6.vmpy.hf.hf.128B" => "__builtin_HEXAGON_V6_vmpy_hf_hf_128B", + "V6.vmpy.hf.hf.acc" => "__builtin_HEXAGON_V6_vmpy_hf_hf_acc", + "V6.vmpy.hf.hf.acc.128B" => "__builtin_HEXAGON_V6_vmpy_hf_hf_acc_128B", + "V6.vmpy.qf16" => "__builtin_HEXAGON_V6_vmpy_qf16", + "V6.vmpy.qf16.128B" => "__builtin_HEXAGON_V6_vmpy_qf16_128B", + "V6.vmpy.qf16.hf" => "__builtin_HEXAGON_V6_vmpy_qf16_hf", + "V6.vmpy.qf16.hf.128B" => "__builtin_HEXAGON_V6_vmpy_qf16_hf_128B", + "V6.vmpy.qf16.mix.hf" => "__builtin_HEXAGON_V6_vmpy_qf16_mix_hf", + "V6.vmpy.qf16.mix.hf.128B" => "__builtin_HEXAGON_V6_vmpy_qf16_mix_hf_128B", + "V6.vmpy.qf32" => "__builtin_HEXAGON_V6_vmpy_qf32", + "V6.vmpy.qf32.128B" => "__builtin_HEXAGON_V6_vmpy_qf32_128B", + "V6.vmpy.qf32.hf" => "__builtin_HEXAGON_V6_vmpy_qf32_hf", + "V6.vmpy.qf32.hf.128B" => "__builtin_HEXAGON_V6_vmpy_qf32_hf_128B", + "V6.vmpy.qf32.mix.hf" => "__builtin_HEXAGON_V6_vmpy_qf32_mix_hf", + "V6.vmpy.qf32.mix.hf.128B" => "__builtin_HEXAGON_V6_vmpy_qf32_mix_hf_128B", + "V6.vmpy.qf32.qf16" => "__builtin_HEXAGON_V6_vmpy_qf32_qf16", + "V6.vmpy.qf32.qf16.128B" => "__builtin_HEXAGON_V6_vmpy_qf32_qf16_128B", + "V6.vmpy.qf32.sf" => "__builtin_HEXAGON_V6_vmpy_qf32_sf", + "V6.vmpy.qf32.sf.128B" => "__builtin_HEXAGON_V6_vmpy_qf32_sf_128B", + "V6.vmpy.rt.hf" => "__builtin_HEXAGON_V6_vmpy_rt_hf", + "V6.vmpy.rt.hf.128B" => "__builtin_HEXAGON_V6_vmpy_rt_hf_128B", + "V6.vmpy.rt.qf16" => "__builtin_HEXAGON_V6_vmpy_rt_qf16", + "V6.vmpy.rt.qf16.128B" => "__builtin_HEXAGON_V6_vmpy_rt_qf16_128B", + "V6.vmpy.rt.sf" => "__builtin_HEXAGON_V6_vmpy_rt_sf", + "V6.vmpy.rt.sf.128B" => "__builtin_HEXAGON_V6_vmpy_rt_sf_128B", + "V6.vmpy.sf.bf" => "__builtin_HEXAGON_V6_vmpy_sf_bf", + "V6.vmpy.sf.bf.128B" => "__builtin_HEXAGON_V6_vmpy_sf_bf_128B", + "V6.vmpy.sf.bf.acc" => "__builtin_HEXAGON_V6_vmpy_sf_bf_acc", + "V6.vmpy.sf.bf.acc.128B" => "__builtin_HEXAGON_V6_vmpy_sf_bf_acc_128B", + "V6.vmpy.sf.hf" => "__builtin_HEXAGON_V6_vmpy_sf_hf", + "V6.vmpy.sf.hf.128B" => "__builtin_HEXAGON_V6_vmpy_sf_hf_128B", + "V6.vmpy.sf.hf.acc" => "__builtin_HEXAGON_V6_vmpy_sf_hf_acc", + "V6.vmpy.sf.hf.acc.128B" => "__builtin_HEXAGON_V6_vmpy_sf_hf_acc_128B", + "V6.vmpy.sf.sf" => "__builtin_HEXAGON_V6_vmpy_sf_sf", + "V6.vmpy.sf.sf.128B" => "__builtin_HEXAGON_V6_vmpy_sf_sf_128B", + "V6.vmpybus" => "__builtin_HEXAGON_V6_vmpybus", + "V6.vmpybus.128B" => "__builtin_HEXAGON_V6_vmpybus_128B", + "V6.vmpybus.acc" => "__builtin_HEXAGON_V6_vmpybus_acc", + "V6.vmpybus.acc.128B" => "__builtin_HEXAGON_V6_vmpybus_acc_128B", + "V6.vmpybusv" => "__builtin_HEXAGON_V6_vmpybusv", + "V6.vmpybusv.128B" => "__builtin_HEXAGON_V6_vmpybusv_128B", + "V6.vmpybusv.acc" => "__builtin_HEXAGON_V6_vmpybusv_acc", + "V6.vmpybusv.acc.128B" => "__builtin_HEXAGON_V6_vmpybusv_acc_128B", + 
"V6.vmpybv" => "__builtin_HEXAGON_V6_vmpybv", + "V6.vmpybv.128B" => "__builtin_HEXAGON_V6_vmpybv_128B", + "V6.vmpybv.acc" => "__builtin_HEXAGON_V6_vmpybv_acc", + "V6.vmpybv.acc.128B" => "__builtin_HEXAGON_V6_vmpybv_acc_128B", + "V6.vmpyewuh" => "__builtin_HEXAGON_V6_vmpyewuh", + "V6.vmpyewuh.128B" => "__builtin_HEXAGON_V6_vmpyewuh_128B", + "V6.vmpyewuh.64" => "__builtin_HEXAGON_V6_vmpyewuh_64", + "V6.vmpyewuh.64.128B" => "__builtin_HEXAGON_V6_vmpyewuh_64_128B", + "V6.vmpyh" => "__builtin_HEXAGON_V6_vmpyh", + "V6.vmpyh.128B" => "__builtin_HEXAGON_V6_vmpyh_128B", + "V6.vmpyh.acc" => "__builtin_HEXAGON_V6_vmpyh_acc", + "V6.vmpyh.acc.128B" => "__builtin_HEXAGON_V6_vmpyh_acc_128B", + "V6.vmpyhsat.acc" => "__builtin_HEXAGON_V6_vmpyhsat_acc", + "V6.vmpyhsat.acc.128B" => "__builtin_HEXAGON_V6_vmpyhsat_acc_128B", + "V6.vmpyhsrs" => "__builtin_HEXAGON_V6_vmpyhsrs", + "V6.vmpyhsrs.128B" => "__builtin_HEXAGON_V6_vmpyhsrs_128B", + "V6.vmpyhss" => "__builtin_HEXAGON_V6_vmpyhss", + "V6.vmpyhss.128B" => "__builtin_HEXAGON_V6_vmpyhss_128B", + "V6.vmpyhus" => "__builtin_HEXAGON_V6_vmpyhus", + "V6.vmpyhus.128B" => "__builtin_HEXAGON_V6_vmpyhus_128B", + "V6.vmpyhus.acc" => "__builtin_HEXAGON_V6_vmpyhus_acc", + "V6.vmpyhus.acc.128B" => "__builtin_HEXAGON_V6_vmpyhus_acc_128B", + "V6.vmpyhv" => "__builtin_HEXAGON_V6_vmpyhv", + "V6.vmpyhv.128B" => "__builtin_HEXAGON_V6_vmpyhv_128B", + "V6.vmpyhv.acc" => "__builtin_HEXAGON_V6_vmpyhv_acc", + "V6.vmpyhv.acc.128B" => "__builtin_HEXAGON_V6_vmpyhv_acc_128B", + "V6.vmpyhvsrs" => "__builtin_HEXAGON_V6_vmpyhvsrs", + "V6.vmpyhvsrs.128B" => "__builtin_HEXAGON_V6_vmpyhvsrs_128B", + "V6.vmpyieoh" => "__builtin_HEXAGON_V6_vmpyieoh", + "V6.vmpyieoh.128B" => "__builtin_HEXAGON_V6_vmpyieoh_128B", + "V6.vmpyiewh.acc" => "__builtin_HEXAGON_V6_vmpyiewh_acc", + "V6.vmpyiewh.acc.128B" => "__builtin_HEXAGON_V6_vmpyiewh_acc_128B", + "V6.vmpyiewuh" => "__builtin_HEXAGON_V6_vmpyiewuh", + "V6.vmpyiewuh.128B" => "__builtin_HEXAGON_V6_vmpyiewuh_128B", + "V6.vmpyiewuh.acc" => "__builtin_HEXAGON_V6_vmpyiewuh_acc", + "V6.vmpyiewuh.acc.128B" => "__builtin_HEXAGON_V6_vmpyiewuh_acc_128B", + "V6.vmpyih" => "__builtin_HEXAGON_V6_vmpyih", + "V6.vmpyih.128B" => "__builtin_HEXAGON_V6_vmpyih_128B", + "V6.vmpyih.acc" => "__builtin_HEXAGON_V6_vmpyih_acc", + "V6.vmpyih.acc.128B" => "__builtin_HEXAGON_V6_vmpyih_acc_128B", + "V6.vmpyihb" => "__builtin_HEXAGON_V6_vmpyihb", + "V6.vmpyihb.128B" => "__builtin_HEXAGON_V6_vmpyihb_128B", + "V6.vmpyihb.acc" => "__builtin_HEXAGON_V6_vmpyihb_acc", + "V6.vmpyihb.acc.128B" => "__builtin_HEXAGON_V6_vmpyihb_acc_128B", + "V6.vmpyiowh" => "__builtin_HEXAGON_V6_vmpyiowh", + "V6.vmpyiowh.128B" => "__builtin_HEXAGON_V6_vmpyiowh_128B", + "V6.vmpyiwb" => "__builtin_HEXAGON_V6_vmpyiwb", + "V6.vmpyiwb.128B" => "__builtin_HEXAGON_V6_vmpyiwb_128B", + "V6.vmpyiwb.acc" => "__builtin_HEXAGON_V6_vmpyiwb_acc", + "V6.vmpyiwb.acc.128B" => "__builtin_HEXAGON_V6_vmpyiwb_acc_128B", + "V6.vmpyiwh" => "__builtin_HEXAGON_V6_vmpyiwh", + "V6.vmpyiwh.128B" => "__builtin_HEXAGON_V6_vmpyiwh_128B", + "V6.vmpyiwh.acc" => "__builtin_HEXAGON_V6_vmpyiwh_acc", + "V6.vmpyiwh.acc.128B" => "__builtin_HEXAGON_V6_vmpyiwh_acc_128B", + "V6.vmpyiwub" => "__builtin_HEXAGON_V6_vmpyiwub", + "V6.vmpyiwub.128B" => "__builtin_HEXAGON_V6_vmpyiwub_128B", + "V6.vmpyiwub.acc" => "__builtin_HEXAGON_V6_vmpyiwub_acc", + "V6.vmpyiwub.acc.128B" => "__builtin_HEXAGON_V6_vmpyiwub_acc_128B", + "V6.vmpyowh" => "__builtin_HEXAGON_V6_vmpyowh", + "V6.vmpyowh.128B" => "__builtin_HEXAGON_V6_vmpyowh_128B", + "V6.vmpyowh.64.acc" => 
"__builtin_HEXAGON_V6_vmpyowh_64_acc", + "V6.vmpyowh.64.acc.128B" => "__builtin_HEXAGON_V6_vmpyowh_64_acc_128B", + "V6.vmpyowh.rnd" => "__builtin_HEXAGON_V6_vmpyowh_rnd", + "V6.vmpyowh.rnd.128B" => "__builtin_HEXAGON_V6_vmpyowh_rnd_128B", + "V6.vmpyowh.rnd.sacc" => "__builtin_HEXAGON_V6_vmpyowh_rnd_sacc", + "V6.vmpyowh.rnd.sacc.128B" => "__builtin_HEXAGON_V6_vmpyowh_rnd_sacc_128B", + "V6.vmpyowh.sacc" => "__builtin_HEXAGON_V6_vmpyowh_sacc", + "V6.vmpyowh.sacc.128B" => "__builtin_HEXAGON_V6_vmpyowh_sacc_128B", + "V6.vmpyub" => "__builtin_HEXAGON_V6_vmpyub", + "V6.vmpyub.128B" => "__builtin_HEXAGON_V6_vmpyub_128B", + "V6.vmpyub.acc" => "__builtin_HEXAGON_V6_vmpyub_acc", + "V6.vmpyub.acc.128B" => "__builtin_HEXAGON_V6_vmpyub_acc_128B", + "V6.vmpyubv" => "__builtin_HEXAGON_V6_vmpyubv", + "V6.vmpyubv.128B" => "__builtin_HEXAGON_V6_vmpyubv_128B", + "V6.vmpyubv.acc" => "__builtin_HEXAGON_V6_vmpyubv_acc", + "V6.vmpyubv.acc.128B" => "__builtin_HEXAGON_V6_vmpyubv_acc_128B", + "V6.vmpyuh" => "__builtin_HEXAGON_V6_vmpyuh", + "V6.vmpyuh.128B" => "__builtin_HEXAGON_V6_vmpyuh_128B", + "V6.vmpyuh.acc" => "__builtin_HEXAGON_V6_vmpyuh_acc", + "V6.vmpyuh.acc.128B" => "__builtin_HEXAGON_V6_vmpyuh_acc_128B", + "V6.vmpyuhe" => "__builtin_HEXAGON_V6_vmpyuhe", + "V6.vmpyuhe.128B" => "__builtin_HEXAGON_V6_vmpyuhe_128B", + "V6.vmpyuhe.acc" => "__builtin_HEXAGON_V6_vmpyuhe_acc", + "V6.vmpyuhe.acc.128B" => "__builtin_HEXAGON_V6_vmpyuhe_acc_128B", + "V6.vmpyuhv" => "__builtin_HEXAGON_V6_vmpyuhv", + "V6.vmpyuhv.128B" => "__builtin_HEXAGON_V6_vmpyuhv_128B", + "V6.vmpyuhv.acc" => "__builtin_HEXAGON_V6_vmpyuhv_acc", + "V6.vmpyuhv.acc.128B" => "__builtin_HEXAGON_V6_vmpyuhv_acc_128B", + "V6.vmpyuhvs" => "__builtin_HEXAGON_V6_vmpyuhvs", + "V6.vmpyuhvs.128B" => "__builtin_HEXAGON_V6_vmpyuhvs_128B", + "V6.vmux" => "__builtin_HEXAGON_V6_vmux", + "V6.vmux.128B" => "__builtin_HEXAGON_V6_vmux_128B", + "V6.vnavgb" => "__builtin_HEXAGON_V6_vnavgb", + "V6.vnavgb.128B" => "__builtin_HEXAGON_V6_vnavgb_128B", + "V6.vnavgh" => "__builtin_HEXAGON_V6_vnavgh", + "V6.vnavgh.128B" => "__builtin_HEXAGON_V6_vnavgh_128B", + "V6.vnavgub" => "__builtin_HEXAGON_V6_vnavgub", + "V6.vnavgub.128B" => "__builtin_HEXAGON_V6_vnavgub_128B", + "V6.vnavgw" => "__builtin_HEXAGON_V6_vnavgw", + "V6.vnavgw.128B" => "__builtin_HEXAGON_V6_vnavgw_128B", + "V6.vnormamth" => "__builtin_HEXAGON_V6_vnormamth", + "V6.vnormamth.128B" => "__builtin_HEXAGON_V6_vnormamth_128B", + "V6.vnormamtw" => "__builtin_HEXAGON_V6_vnormamtw", + "V6.vnormamtw.128B" => "__builtin_HEXAGON_V6_vnormamtw_128B", + "V6.vnot" => "__builtin_HEXAGON_V6_vnot", + "V6.vnot.128B" => "__builtin_HEXAGON_V6_vnot_128B", + "V6.vor" => "__builtin_HEXAGON_V6_vor", + "V6.vor.128B" => "__builtin_HEXAGON_V6_vor_128B", + "V6.vpackeb" => "__builtin_HEXAGON_V6_vpackeb", + "V6.vpackeb.128B" => "__builtin_HEXAGON_V6_vpackeb_128B", + "V6.vpackeh" => "__builtin_HEXAGON_V6_vpackeh", + "V6.vpackeh.128B" => "__builtin_HEXAGON_V6_vpackeh_128B", + "V6.vpackhb.sat" => "__builtin_HEXAGON_V6_vpackhb_sat", + "V6.vpackhb.sat.128B" => "__builtin_HEXAGON_V6_vpackhb_sat_128B", + "V6.vpackhub.sat" => "__builtin_HEXAGON_V6_vpackhub_sat", + "V6.vpackhub.sat.128B" => "__builtin_HEXAGON_V6_vpackhub_sat_128B", + "V6.vpackob" => "__builtin_HEXAGON_V6_vpackob", + "V6.vpackob.128B" => "__builtin_HEXAGON_V6_vpackob_128B", + "V6.vpackoh" => "__builtin_HEXAGON_V6_vpackoh", + "V6.vpackoh.128B" => "__builtin_HEXAGON_V6_vpackoh_128B", + "V6.vpackwh.sat" => "__builtin_HEXAGON_V6_vpackwh_sat", + "V6.vpackwh.sat.128B" => 
"__builtin_HEXAGON_V6_vpackwh_sat_128B", + "V6.vpackwuh.sat" => "__builtin_HEXAGON_V6_vpackwuh_sat", + "V6.vpackwuh.sat.128B" => "__builtin_HEXAGON_V6_vpackwuh_sat_128B", + "V6.vpopcounth" => "__builtin_HEXAGON_V6_vpopcounth", + "V6.vpopcounth.128B" => "__builtin_HEXAGON_V6_vpopcounth_128B", + "V6.vprefixqb" => "__builtin_HEXAGON_V6_vprefixqb", + "V6.vprefixqb.128B" => "__builtin_HEXAGON_V6_vprefixqb_128B", + "V6.vprefixqh" => "__builtin_HEXAGON_V6_vprefixqh", + "V6.vprefixqh.128B" => "__builtin_HEXAGON_V6_vprefixqh_128B", + "V6.vprefixqw" => "__builtin_HEXAGON_V6_vprefixqw", + "V6.vprefixqw.128B" => "__builtin_HEXAGON_V6_vprefixqw_128B", + "V6.vrdelta" => "__builtin_HEXAGON_V6_vrdelta", + "V6.vrdelta.128B" => "__builtin_HEXAGON_V6_vrdelta_128B", + "V6.vrmpybub.rtt" => "__builtin_HEXAGON_V6_vrmpybub_rtt", + "V6.vrmpybub.rtt.128B" => "__builtin_HEXAGON_V6_vrmpybub_rtt_128B", + "V6.vrmpybub.rtt.acc" => "__builtin_HEXAGON_V6_vrmpybub_rtt_acc", + "V6.vrmpybub.rtt.acc.128B" => "__builtin_HEXAGON_V6_vrmpybub_rtt_acc_128B", + "V6.vrmpybus" => "__builtin_HEXAGON_V6_vrmpybus", + "V6.vrmpybus.128B" => "__builtin_HEXAGON_V6_vrmpybus_128B", + "V6.vrmpybus.acc" => "__builtin_HEXAGON_V6_vrmpybus_acc", + "V6.vrmpybus.acc.128B" => "__builtin_HEXAGON_V6_vrmpybus_acc_128B", + "V6.vrmpybusi" => "__builtin_HEXAGON_V6_vrmpybusi", + "V6.vrmpybusi.128B" => "__builtin_HEXAGON_V6_vrmpybusi_128B", + "V6.vrmpybusi.acc" => "__builtin_HEXAGON_V6_vrmpybusi_acc", + "V6.vrmpybusi.acc.128B" => "__builtin_HEXAGON_V6_vrmpybusi_acc_128B", + "V6.vrmpybusv" => "__builtin_HEXAGON_V6_vrmpybusv", + "V6.vrmpybusv.128B" => "__builtin_HEXAGON_V6_vrmpybusv_128B", + "V6.vrmpybusv.acc" => "__builtin_HEXAGON_V6_vrmpybusv_acc", + "V6.vrmpybusv.acc.128B" => "__builtin_HEXAGON_V6_vrmpybusv_acc_128B", + "V6.vrmpybv" => "__builtin_HEXAGON_V6_vrmpybv", + "V6.vrmpybv.128B" => "__builtin_HEXAGON_V6_vrmpybv_128B", + "V6.vrmpybv.acc" => "__builtin_HEXAGON_V6_vrmpybv_acc", + "V6.vrmpybv.acc.128B" => "__builtin_HEXAGON_V6_vrmpybv_acc_128B", + "V6.vrmpyub" => "__builtin_HEXAGON_V6_vrmpyub", + "V6.vrmpyub.128B" => "__builtin_HEXAGON_V6_vrmpyub_128B", + "V6.vrmpyub.acc" => "__builtin_HEXAGON_V6_vrmpyub_acc", + "V6.vrmpyub.acc.128B" => "__builtin_HEXAGON_V6_vrmpyub_acc_128B", + "V6.vrmpyub.rtt" => "__builtin_HEXAGON_V6_vrmpyub_rtt", + "V6.vrmpyub.rtt.128B" => "__builtin_HEXAGON_V6_vrmpyub_rtt_128B", + "V6.vrmpyub.rtt.acc" => "__builtin_HEXAGON_V6_vrmpyub_rtt_acc", + "V6.vrmpyub.rtt.acc.128B" => "__builtin_HEXAGON_V6_vrmpyub_rtt_acc_128B", + "V6.vrmpyubi" => "__builtin_HEXAGON_V6_vrmpyubi", + "V6.vrmpyubi.128B" => "__builtin_HEXAGON_V6_vrmpyubi_128B", + "V6.vrmpyubi.acc" => "__builtin_HEXAGON_V6_vrmpyubi_acc", + "V6.vrmpyubi.acc.128B" => "__builtin_HEXAGON_V6_vrmpyubi_acc_128B", + "V6.vrmpyubv" => "__builtin_HEXAGON_V6_vrmpyubv", + "V6.vrmpyubv.128B" => "__builtin_HEXAGON_V6_vrmpyubv_128B", + "V6.vrmpyubv.acc" => "__builtin_HEXAGON_V6_vrmpyubv_acc", + "V6.vrmpyubv.acc.128B" => "__builtin_HEXAGON_V6_vrmpyubv_acc_128B", + "V6.vror" => "__builtin_HEXAGON_V6_vror", + "V6.vror.128B" => "__builtin_HEXAGON_V6_vror_128B", + "V6.vrotr" => "__builtin_HEXAGON_V6_vrotr", + "V6.vrotr.128B" => "__builtin_HEXAGON_V6_vrotr_128B", + "V6.vroundhb" => "__builtin_HEXAGON_V6_vroundhb", + "V6.vroundhb.128B" => "__builtin_HEXAGON_V6_vroundhb_128B", + "V6.vroundhub" => "__builtin_HEXAGON_V6_vroundhub", + "V6.vroundhub.128B" => "__builtin_HEXAGON_V6_vroundhub_128B", + "V6.vrounduhub" => "__builtin_HEXAGON_V6_vrounduhub", + "V6.vrounduhub.128B" => 
"__builtin_HEXAGON_V6_vrounduhub_128B", + "V6.vrounduwuh" => "__builtin_HEXAGON_V6_vrounduwuh", + "V6.vrounduwuh.128B" => "__builtin_HEXAGON_V6_vrounduwuh_128B", + "V6.vroundwh" => "__builtin_HEXAGON_V6_vroundwh", + "V6.vroundwh.128B" => "__builtin_HEXAGON_V6_vroundwh_128B", + "V6.vroundwuh" => "__builtin_HEXAGON_V6_vroundwuh", + "V6.vroundwuh.128B" => "__builtin_HEXAGON_V6_vroundwuh_128B", + "V6.vrsadubi" => "__builtin_HEXAGON_V6_vrsadubi", + "V6.vrsadubi.128B" => "__builtin_HEXAGON_V6_vrsadubi_128B", + "V6.vrsadubi.acc" => "__builtin_HEXAGON_V6_vrsadubi_acc", + "V6.vrsadubi.acc.128B" => "__builtin_HEXAGON_V6_vrsadubi_acc_128B", + "V6.vsatdw" => "__builtin_HEXAGON_V6_vsatdw", + "V6.vsatdw.128B" => "__builtin_HEXAGON_V6_vsatdw_128B", + "V6.vsathub" => "__builtin_HEXAGON_V6_vsathub", + "V6.vsathub.128B" => "__builtin_HEXAGON_V6_vsathub_128B", + "V6.vsatuwuh" => "__builtin_HEXAGON_V6_vsatuwuh", + "V6.vsatuwuh.128B" => "__builtin_HEXAGON_V6_vsatuwuh_128B", + "V6.vsatwh" => "__builtin_HEXAGON_V6_vsatwh", + "V6.vsatwh.128B" => "__builtin_HEXAGON_V6_vsatwh_128B", + "V6.vsb" => "__builtin_HEXAGON_V6_vsb", + "V6.vsb.128B" => "__builtin_HEXAGON_V6_vsb_128B", + "V6.vscattermh" => "__builtin_HEXAGON_V6_vscattermh", + "V6.vscattermh.128B" => "__builtin_HEXAGON_V6_vscattermh_128B", + "V6.vscattermh.add" => "__builtin_HEXAGON_V6_vscattermh_add", + "V6.vscattermh.add.128B" => "__builtin_HEXAGON_V6_vscattermh_add_128B", + "V6.vscattermhq" => "__builtin_HEXAGON_V6_vscattermhq", + "V6.vscattermhq.128B" => "__builtin_HEXAGON_V6_vscattermhq_128B", + "V6.vscattermhw" => "__builtin_HEXAGON_V6_vscattermhw", + "V6.vscattermhw.128B" => "__builtin_HEXAGON_V6_vscattermhw_128B", + "V6.vscattermhw.add" => "__builtin_HEXAGON_V6_vscattermhw_add", + "V6.vscattermhw.add.128B" => "__builtin_HEXAGON_V6_vscattermhw_add_128B", + "V6.vscattermhwq" => "__builtin_HEXAGON_V6_vscattermhwq", + "V6.vscattermhwq.128B" => "__builtin_HEXAGON_V6_vscattermhwq_128B", + "V6.vscattermw" => "__builtin_HEXAGON_V6_vscattermw", + "V6.vscattermw.128B" => "__builtin_HEXAGON_V6_vscattermw_128B", + "V6.vscattermw.add" => "__builtin_HEXAGON_V6_vscattermw_add", + "V6.vscattermw.add.128B" => "__builtin_HEXAGON_V6_vscattermw_add_128B", + "V6.vscattermwq" => "__builtin_HEXAGON_V6_vscattermwq", + "V6.vscattermwq.128B" => "__builtin_HEXAGON_V6_vscattermwq_128B", + "V6.vsh" => "__builtin_HEXAGON_V6_vsh", + "V6.vsh.128B" => "__builtin_HEXAGON_V6_vsh_128B", + "V6.vshufeh" => "__builtin_HEXAGON_V6_vshufeh", + "V6.vshufeh.128B" => "__builtin_HEXAGON_V6_vshufeh_128B", + "V6.vshuffb" => "__builtin_HEXAGON_V6_vshuffb", + "V6.vshuffb.128B" => "__builtin_HEXAGON_V6_vshuffb_128B", + "V6.vshuffeb" => "__builtin_HEXAGON_V6_vshuffeb", + "V6.vshuffeb.128B" => "__builtin_HEXAGON_V6_vshuffeb_128B", + "V6.vshuffh" => "__builtin_HEXAGON_V6_vshuffh", + "V6.vshuffh.128B" => "__builtin_HEXAGON_V6_vshuffh_128B", + "V6.vshuffob" => "__builtin_HEXAGON_V6_vshuffob", + "V6.vshuffob.128B" => "__builtin_HEXAGON_V6_vshuffob_128B", + "V6.vshuffvdd" => "__builtin_HEXAGON_V6_vshuffvdd", + "V6.vshuffvdd.128B" => "__builtin_HEXAGON_V6_vshuffvdd_128B", + "V6.vshufoeb" => "__builtin_HEXAGON_V6_vshufoeb", + "V6.vshufoeb.128B" => "__builtin_HEXAGON_V6_vshufoeb_128B", + "V6.vshufoeh" => "__builtin_HEXAGON_V6_vshufoeh", + "V6.vshufoeh.128B" => "__builtin_HEXAGON_V6_vshufoeh_128B", + "V6.vshufoh" => "__builtin_HEXAGON_V6_vshufoh", + "V6.vshufoh.128B" => "__builtin_HEXAGON_V6_vshufoh_128B", + "V6.vsub.hf" => "__builtin_HEXAGON_V6_vsub_hf", + "V6.vsub.hf.128B" => 
"__builtin_HEXAGON_V6_vsub_hf_128B", + "V6.vsub.hf.f8" => "__builtin_HEXAGON_V6_vsub_hf_f8", + "V6.vsub.hf.f8.128B" => "__builtin_HEXAGON_V6_vsub_hf_f8_128B", + "V6.vsub.hf.hf" => "__builtin_HEXAGON_V6_vsub_hf_hf", + "V6.vsub.hf.hf.128B" => "__builtin_HEXAGON_V6_vsub_hf_hf_128B", + "V6.vsub.qf16" => "__builtin_HEXAGON_V6_vsub_qf16", + "V6.vsub.qf16.128B" => "__builtin_HEXAGON_V6_vsub_qf16_128B", + "V6.vsub.qf16.mix" => "__builtin_HEXAGON_V6_vsub_qf16_mix", + "V6.vsub.qf16.mix.128B" => "__builtin_HEXAGON_V6_vsub_qf16_mix_128B", + "V6.vsub.qf32" => "__builtin_HEXAGON_V6_vsub_qf32", + "V6.vsub.qf32.128B" => "__builtin_HEXAGON_V6_vsub_qf32_128B", + "V6.vsub.qf32.mix" => "__builtin_HEXAGON_V6_vsub_qf32_mix", + "V6.vsub.qf32.mix.128B" => "__builtin_HEXAGON_V6_vsub_qf32_mix_128B", + "V6.vsub.sf" => "__builtin_HEXAGON_V6_vsub_sf", + "V6.vsub.sf.128B" => "__builtin_HEXAGON_V6_vsub_sf_128B", + "V6.vsub.sf.bf" => "__builtin_HEXAGON_V6_vsub_sf_bf", + "V6.vsub.sf.bf.128B" => "__builtin_HEXAGON_V6_vsub_sf_bf_128B", + "V6.vsub.sf.hf" => "__builtin_HEXAGON_V6_vsub_sf_hf", + "V6.vsub.sf.hf.128B" => "__builtin_HEXAGON_V6_vsub_sf_hf_128B", + "V6.vsub.sf.sf" => "__builtin_HEXAGON_V6_vsub_sf_sf", + "V6.vsub.sf.sf.128B" => "__builtin_HEXAGON_V6_vsub_sf_sf_128B", + "V6.vsubb" => "__builtin_HEXAGON_V6_vsubb", + "V6.vsubb.128B" => "__builtin_HEXAGON_V6_vsubb_128B", + "V6.vsubb.dv" => "__builtin_HEXAGON_V6_vsubb_dv", + "V6.vsubb.dv.128B" => "__builtin_HEXAGON_V6_vsubb_dv_128B", + "V6.vsubbnq" => "__builtin_HEXAGON_V6_vsubbnq", + "V6.vsubbnq.128B" => "__builtin_HEXAGON_V6_vsubbnq_128B", + "V6.vsubbq" => "__builtin_HEXAGON_V6_vsubbq", + "V6.vsubbq.128B" => "__builtin_HEXAGON_V6_vsubbq_128B", + "V6.vsubbsat" => "__builtin_HEXAGON_V6_vsubbsat", + "V6.vsubbsat.128B" => "__builtin_HEXAGON_V6_vsubbsat_128B", + "V6.vsubbsat.dv" => "__builtin_HEXAGON_V6_vsubbsat_dv", + "V6.vsubbsat.dv.128B" => "__builtin_HEXAGON_V6_vsubbsat_dv_128B", + "V6.vsubh" => "__builtin_HEXAGON_V6_vsubh", + "V6.vsubh.128B" => "__builtin_HEXAGON_V6_vsubh_128B", + "V6.vsubh.dv" => "__builtin_HEXAGON_V6_vsubh_dv", + "V6.vsubh.dv.128B" => "__builtin_HEXAGON_V6_vsubh_dv_128B", + "V6.vsubhnq" => "__builtin_HEXAGON_V6_vsubhnq", + "V6.vsubhnq.128B" => "__builtin_HEXAGON_V6_vsubhnq_128B", + "V6.vsubhq" => "__builtin_HEXAGON_V6_vsubhq", + "V6.vsubhq.128B" => "__builtin_HEXAGON_V6_vsubhq_128B", + "V6.vsubhsat" => "__builtin_HEXAGON_V6_vsubhsat", + "V6.vsubhsat.128B" => "__builtin_HEXAGON_V6_vsubhsat_128B", + "V6.vsubhsat.dv" => "__builtin_HEXAGON_V6_vsubhsat_dv", + "V6.vsubhsat.dv.128B" => "__builtin_HEXAGON_V6_vsubhsat_dv_128B", + "V6.vsubhw" => "__builtin_HEXAGON_V6_vsubhw", + "V6.vsubhw.128B" => "__builtin_HEXAGON_V6_vsubhw_128B", + "V6.vsububh" => "__builtin_HEXAGON_V6_vsububh", + "V6.vsububh.128B" => "__builtin_HEXAGON_V6_vsububh_128B", + "V6.vsububsat" => "__builtin_HEXAGON_V6_vsububsat", + "V6.vsububsat.128B" => "__builtin_HEXAGON_V6_vsububsat_128B", + "V6.vsububsat.dv" => "__builtin_HEXAGON_V6_vsububsat_dv", + "V6.vsububsat.dv.128B" => "__builtin_HEXAGON_V6_vsububsat_dv_128B", + "V6.vsubububb.sat" => "__builtin_HEXAGON_V6_vsubububb_sat", + "V6.vsubububb.sat.128B" => "__builtin_HEXAGON_V6_vsubububb_sat_128B", + "V6.vsubuhsat" => "__builtin_HEXAGON_V6_vsubuhsat", + "V6.vsubuhsat.128B" => "__builtin_HEXAGON_V6_vsubuhsat_128B", + "V6.vsubuhsat.dv" => "__builtin_HEXAGON_V6_vsubuhsat_dv", + "V6.vsubuhsat.dv.128B" => "__builtin_HEXAGON_V6_vsubuhsat_dv_128B", + "V6.vsubuhw" => "__builtin_HEXAGON_V6_vsubuhw", + "V6.vsubuhw.128B" => 
"__builtin_HEXAGON_V6_vsubuhw_128B", + "V6.vsubuwsat" => "__builtin_HEXAGON_V6_vsubuwsat", + "V6.vsubuwsat.128B" => "__builtin_HEXAGON_V6_vsubuwsat_128B", + "V6.vsubuwsat.dv" => "__builtin_HEXAGON_V6_vsubuwsat_dv", + "V6.vsubuwsat.dv.128B" => "__builtin_HEXAGON_V6_vsubuwsat_dv_128B", + "V6.vsubw" => "__builtin_HEXAGON_V6_vsubw", + "V6.vsubw.128B" => "__builtin_HEXAGON_V6_vsubw_128B", + "V6.vsubw.dv" => "__builtin_HEXAGON_V6_vsubw_dv", + "V6.vsubw.dv.128B" => "__builtin_HEXAGON_V6_vsubw_dv_128B", + "V6.vsubwnq" => "__builtin_HEXAGON_V6_vsubwnq", + "V6.vsubwnq.128B" => "__builtin_HEXAGON_V6_vsubwnq_128B", + "V6.vsubwq" => "__builtin_HEXAGON_V6_vsubwq", + "V6.vsubwq.128B" => "__builtin_HEXAGON_V6_vsubwq_128B", + "V6.vsubwsat" => "__builtin_HEXAGON_V6_vsubwsat", + "V6.vsubwsat.128B" => "__builtin_HEXAGON_V6_vsubwsat_128B", + "V6.vsubwsat.dv" => "__builtin_HEXAGON_V6_vsubwsat_dv", + "V6.vsubwsat.dv.128B" => "__builtin_HEXAGON_V6_vsubwsat_dv_128B", + "V6.vswap" => "__builtin_HEXAGON_V6_vswap", + "V6.vswap.128B" => "__builtin_HEXAGON_V6_vswap_128B", + "V6.vtmpyb" => "__builtin_HEXAGON_V6_vtmpyb", + "V6.vtmpyb.128B" => "__builtin_HEXAGON_V6_vtmpyb_128B", + "V6.vtmpyb.acc" => "__builtin_HEXAGON_V6_vtmpyb_acc", + "V6.vtmpyb.acc.128B" => "__builtin_HEXAGON_V6_vtmpyb_acc_128B", + "V6.vtmpybus" => "__builtin_HEXAGON_V6_vtmpybus", + "V6.vtmpybus.128B" => "__builtin_HEXAGON_V6_vtmpybus_128B", + "V6.vtmpybus.acc" => "__builtin_HEXAGON_V6_vtmpybus_acc", + "V6.vtmpybus.acc.128B" => "__builtin_HEXAGON_V6_vtmpybus_acc_128B", + "V6.vtmpyhb" => "__builtin_HEXAGON_V6_vtmpyhb", + "V6.vtmpyhb.128B" => "__builtin_HEXAGON_V6_vtmpyhb_128B", + "V6.vtmpyhb.acc" => "__builtin_HEXAGON_V6_vtmpyhb_acc", + "V6.vtmpyhb.acc.128B" => "__builtin_HEXAGON_V6_vtmpyhb_acc_128B", + "V6.vunpackb" => "__builtin_HEXAGON_V6_vunpackb", + "V6.vunpackb.128B" => "__builtin_HEXAGON_V6_vunpackb_128B", + "V6.vunpackh" => "__builtin_HEXAGON_V6_vunpackh", + "V6.vunpackh.128B" => "__builtin_HEXAGON_V6_vunpackh_128B", + "V6.vunpackob" => "__builtin_HEXAGON_V6_vunpackob", + "V6.vunpackob.128B" => "__builtin_HEXAGON_V6_vunpackob_128B", + "V6.vunpackoh" => "__builtin_HEXAGON_V6_vunpackoh", + "V6.vunpackoh.128B" => "__builtin_HEXAGON_V6_vunpackoh_128B", + "V6.vunpackub" => "__builtin_HEXAGON_V6_vunpackub", + "V6.vunpackub.128B" => "__builtin_HEXAGON_V6_vunpackub_128B", + "V6.vunpackuh" => "__builtin_HEXAGON_V6_vunpackuh", + "V6.vunpackuh.128B" => "__builtin_HEXAGON_V6_vunpackuh_128B", + "V6.vxor" => "__builtin_HEXAGON_V6_vxor", + "V6.vxor.128B" => "__builtin_HEXAGON_V6_vxor_128B", + "V6.vzb" => "__builtin_HEXAGON_V6_vzb", + "V6.vzb.128B" => "__builtin_HEXAGON_V6_vzb_128B", + "V6.vzh" => "__builtin_HEXAGON_V6_vzh", + "V6.vzh.128B" => "__builtin_HEXAGON_V6_vzh_128B", + "Y2.dccleana" => "__builtin_HEXAGON_Y2_dccleana", + "Y2.dccleaninva" => "__builtin_HEXAGON_Y2_dccleaninva", + "Y2.dcfetch" => "__builtin_HEXAGON_Y2_dcfetch", + "Y2.dcinva" => "__builtin_HEXAGON_Y2_dcinva", + "Y2.dczeroa" => "__builtin_HEXAGON_Y2_dczeroa", + "Y4.l2fetch" => "__builtin_HEXAGON_Y4_l2fetch", + "Y5.l2fetch" => "__builtin_HEXAGON_Y5_l2fetch", + "Y6.dmlink" => "__builtin_HEXAGON_Y6_dmlink", + "Y6.dmpause" => "__builtin_HEXAGON_Y6_dmpause", + "Y6.dmpoll" => "__builtin_HEXAGON_Y6_dmpoll", + "Y6.dmresume" => "__builtin_HEXAGON_Y6_dmresume", + "Y6.dmstart" => "__builtin_HEXAGON_Y6_dmstart", + "Y6.dmwait" => "__builtin_HEXAGON_Y6_dmwait", + "brev.ldb" => "__builtin_brev_ldb", + "brev.ldd" => "__builtin_brev_ldd", + "brev.ldh" => "__builtin_brev_ldh", + "brev.ldub" => 
"__builtin_brev_ldub", + "brev.lduh" => "__builtin_brev_lduh", + "brev.ldw" => "__builtin_brev_ldw", + "brev.stb" => "__builtin_brev_stb", + "brev.std" => "__builtin_brev_std", + "brev.sth" => "__builtin_brev_sth", + "brev.sthhi" => "__builtin_brev_sthhi", + "brev.stw" => "__builtin_brev_stw", + "circ.ldb" => "__builtin_circ_ldb", + "circ.ldd" => "__builtin_circ_ldd", + "circ.ldh" => "__builtin_circ_ldh", + "circ.ldub" => "__builtin_circ_ldub", + "circ.lduh" => "__builtin_circ_lduh", + "circ.ldw" => "__builtin_circ_ldw", + "circ.stb" => "__builtin_circ_stb", + "circ.std" => "__builtin_circ_std", + "circ.sth" => "__builtin_circ_sth", + "circ.sthhi" => "__builtin_circ_sthhi", + "circ.stw" => "__builtin_circ_stw", + "mm256i.vaddw" => "__builtin__mm256i_vaddw", + "prefetch" => "__builtin_HEXAGON_prefetch", + "vmemcpy" => "__builtin_hexagon_vmemcpy", + "vmemset" => "__builtin_hexagon_vmemset", + _ => unimplemented!("***** unsupported LLVM intrinsic {}", name), + } + } + hexagon(name) + } + "loongarch" => { + #[allow(non_snake_case)] + fn loongarch(name: &str) -> &str { + match name { + // loongarch + "asrtgt.d" => "__builtin_loongarch_asrtgt_d", + "asrtle.d" => "__builtin_loongarch_asrtle_d", + "break" => "__builtin_loongarch_break", + "cacop.d" => "__builtin_loongarch_cacop_d", + "cacop.w" => "__builtin_loongarch_cacop_w", + "cpucfg" => "__builtin_loongarch_cpucfg", + "crc.w.b.w" => "__builtin_loongarch_crc_w_b_w", + "crc.w.d.w" => "__builtin_loongarch_crc_w_d_w", + "crc.w.h.w" => "__builtin_loongarch_crc_w_h_w", + "crc.w.w.w" => "__builtin_loongarch_crc_w_w_w", + "crcc.w.b.w" => "__builtin_loongarch_crcc_w_b_w", + "crcc.w.d.w" => "__builtin_loongarch_crcc_w_d_w", + "crcc.w.h.w" => "__builtin_loongarch_crcc_w_h_w", + "crcc.w.w.w" => "__builtin_loongarch_crcc_w_w_w", + "csrrd.d" => "__builtin_loongarch_csrrd_d", + "csrrd.w" => "__builtin_loongarch_csrrd_w", + "csrwr.d" => "__builtin_loongarch_csrwr_d", + "csrwr.w" => "__builtin_loongarch_csrwr_w", + "csrxchg.d" => "__builtin_loongarch_csrxchg_d", + "csrxchg.w" => "__builtin_loongarch_csrxchg_w", + "dbar" => "__builtin_loongarch_dbar", + "frecipe.d" => "__builtin_loongarch_frecipe_d", + "frecipe.s" => "__builtin_loongarch_frecipe_s", + "frsqrte.d" => "__builtin_loongarch_frsqrte_d", + "frsqrte.s" => "__builtin_loongarch_frsqrte_s", + "ibar" => "__builtin_loongarch_ibar", + "iocsrrd.b" => "__builtin_loongarch_iocsrrd_b", + "iocsrrd.d" => "__builtin_loongarch_iocsrrd_d", + "iocsrrd.h" => "__builtin_loongarch_iocsrrd_h", + "iocsrrd.w" => "__builtin_loongarch_iocsrrd_w", + "iocsrwr.b" => "__builtin_loongarch_iocsrwr_b", + "iocsrwr.d" => "__builtin_loongarch_iocsrwr_d", + "iocsrwr.h" => "__builtin_loongarch_iocsrwr_h", + "iocsrwr.w" => "__builtin_loongarch_iocsrwr_w", + "lasx.vext2xv.d.b" => "__builtin_lasx_vext2xv_d_b", + "lasx.vext2xv.d.h" => "__builtin_lasx_vext2xv_d_h", + "lasx.vext2xv.d.w" => "__builtin_lasx_vext2xv_d_w", + "lasx.vext2xv.du.bu" => "__builtin_lasx_vext2xv_du_bu", + "lasx.vext2xv.du.hu" => "__builtin_lasx_vext2xv_du_hu", + "lasx.vext2xv.du.wu" => "__builtin_lasx_vext2xv_du_wu", + "lasx.vext2xv.h.b" => "__builtin_lasx_vext2xv_h_b", + "lasx.vext2xv.hu.bu" => "__builtin_lasx_vext2xv_hu_bu", + "lasx.vext2xv.w.b" => "__builtin_lasx_vext2xv_w_b", + "lasx.vext2xv.w.h" => "__builtin_lasx_vext2xv_w_h", + "lasx.vext2xv.wu.bu" => "__builtin_lasx_vext2xv_wu_bu", + "lasx.vext2xv.wu.hu" => "__builtin_lasx_vext2xv_wu_hu", + "lasx.xbnz.b" => "__builtin_lasx_xbnz_b", + "lasx.xbnz.d" => "__builtin_lasx_xbnz_d", + "lasx.xbnz.h" => 
"__builtin_lasx_xbnz_h", + "lasx.xbnz.v" => "__builtin_lasx_xbnz_v", + "lasx.xbnz.w" => "__builtin_lasx_xbnz_w", + "lasx.xbz.b" => "__builtin_lasx_xbz_b", + "lasx.xbz.d" => "__builtin_lasx_xbz_d", + "lasx.xbz.h" => "__builtin_lasx_xbz_h", + "lasx.xbz.v" => "__builtin_lasx_xbz_v", + "lasx.xbz.w" => "__builtin_lasx_xbz_w", + "lasx.xvabsd.b" => "__builtin_lasx_xvabsd_b", + "lasx.xvabsd.bu" => "__builtin_lasx_xvabsd_bu", + "lasx.xvabsd.d" => "__builtin_lasx_xvabsd_d", + "lasx.xvabsd.du" => "__builtin_lasx_xvabsd_du", + "lasx.xvabsd.h" => "__builtin_lasx_xvabsd_h", + "lasx.xvabsd.hu" => "__builtin_lasx_xvabsd_hu", + "lasx.xvabsd.w" => "__builtin_lasx_xvabsd_w", + "lasx.xvabsd.wu" => "__builtin_lasx_xvabsd_wu", + "lasx.xvadd.b" => "__builtin_lasx_xvadd_b", + "lasx.xvadd.d" => "__builtin_lasx_xvadd_d", + "lasx.xvadd.h" => "__builtin_lasx_xvadd_h", + "lasx.xvadd.q" => "__builtin_lasx_xvadd_q", + "lasx.xvadd.w" => "__builtin_lasx_xvadd_w", + "lasx.xvadda.b" => "__builtin_lasx_xvadda_b", + "lasx.xvadda.d" => "__builtin_lasx_xvadda_d", + "lasx.xvadda.h" => "__builtin_lasx_xvadda_h", + "lasx.xvadda.w" => "__builtin_lasx_xvadda_w", + "lasx.xvaddi.bu" => "__builtin_lasx_xvaddi_bu", + "lasx.xvaddi.du" => "__builtin_lasx_xvaddi_du", + "lasx.xvaddi.hu" => "__builtin_lasx_xvaddi_hu", + "lasx.xvaddi.wu" => "__builtin_lasx_xvaddi_wu", + "lasx.xvaddwev.d.w" => "__builtin_lasx_xvaddwev_d_w", + "lasx.xvaddwev.d.wu" => "__builtin_lasx_xvaddwev_d_wu", + "lasx.xvaddwev.d.wu.w" => "__builtin_lasx_xvaddwev_d_wu_w", + "lasx.xvaddwev.h.b" => "__builtin_lasx_xvaddwev_h_b", + "lasx.xvaddwev.h.bu" => "__builtin_lasx_xvaddwev_h_bu", + "lasx.xvaddwev.h.bu.b" => "__builtin_lasx_xvaddwev_h_bu_b", + "lasx.xvaddwev.q.d" => "__builtin_lasx_xvaddwev_q_d", + "lasx.xvaddwev.q.du" => "__builtin_lasx_xvaddwev_q_du", + "lasx.xvaddwev.q.du.d" => "__builtin_lasx_xvaddwev_q_du_d", + "lasx.xvaddwev.w.h" => "__builtin_lasx_xvaddwev_w_h", + "lasx.xvaddwev.w.hu" => "__builtin_lasx_xvaddwev_w_hu", + "lasx.xvaddwev.w.hu.h" => "__builtin_lasx_xvaddwev_w_hu_h", + "lasx.xvaddwod.d.w" => "__builtin_lasx_xvaddwod_d_w", + "lasx.xvaddwod.d.wu" => "__builtin_lasx_xvaddwod_d_wu", + "lasx.xvaddwod.d.wu.w" => "__builtin_lasx_xvaddwod_d_wu_w", + "lasx.xvaddwod.h.b" => "__builtin_lasx_xvaddwod_h_b", + "lasx.xvaddwod.h.bu" => "__builtin_lasx_xvaddwod_h_bu", + "lasx.xvaddwod.h.bu.b" => "__builtin_lasx_xvaddwod_h_bu_b", + "lasx.xvaddwod.q.d" => "__builtin_lasx_xvaddwod_q_d", + "lasx.xvaddwod.q.du" => "__builtin_lasx_xvaddwod_q_du", + "lasx.xvaddwod.q.du.d" => "__builtin_lasx_xvaddwod_q_du_d", + "lasx.xvaddwod.w.h" => "__builtin_lasx_xvaddwod_w_h", + "lasx.xvaddwod.w.hu" => "__builtin_lasx_xvaddwod_w_hu", + "lasx.xvaddwod.w.hu.h" => "__builtin_lasx_xvaddwod_w_hu_h", + "lasx.xvand.v" => "__builtin_lasx_xvand_v", + "lasx.xvandi.b" => "__builtin_lasx_xvandi_b", + "lasx.xvandn.v" => "__builtin_lasx_xvandn_v", + "lasx.xvavg.b" => "__builtin_lasx_xvavg_b", + "lasx.xvavg.bu" => "__builtin_lasx_xvavg_bu", + "lasx.xvavg.d" => "__builtin_lasx_xvavg_d", + "lasx.xvavg.du" => "__builtin_lasx_xvavg_du", + "lasx.xvavg.h" => "__builtin_lasx_xvavg_h", + "lasx.xvavg.hu" => "__builtin_lasx_xvavg_hu", + "lasx.xvavg.w" => "__builtin_lasx_xvavg_w", + "lasx.xvavg.wu" => "__builtin_lasx_xvavg_wu", + "lasx.xvavgr.b" => "__builtin_lasx_xvavgr_b", + "lasx.xvavgr.bu" => "__builtin_lasx_xvavgr_bu", + "lasx.xvavgr.d" => "__builtin_lasx_xvavgr_d", + "lasx.xvavgr.du" => "__builtin_lasx_xvavgr_du", + "lasx.xvavgr.h" => "__builtin_lasx_xvavgr_h", + "lasx.xvavgr.hu" => 
"__builtin_lasx_xvavgr_hu", + "lasx.xvavgr.w" => "__builtin_lasx_xvavgr_w", + "lasx.xvavgr.wu" => "__builtin_lasx_xvavgr_wu", + "lasx.xvbitclr.b" => "__builtin_lasx_xvbitclr_b", + "lasx.xvbitclr.d" => "__builtin_lasx_xvbitclr_d", + "lasx.xvbitclr.h" => "__builtin_lasx_xvbitclr_h", + "lasx.xvbitclr.w" => "__builtin_lasx_xvbitclr_w", + "lasx.xvbitclri.b" => "__builtin_lasx_xvbitclri_b", + "lasx.xvbitclri.d" => "__builtin_lasx_xvbitclri_d", + "lasx.xvbitclri.h" => "__builtin_lasx_xvbitclri_h", + "lasx.xvbitclri.w" => "__builtin_lasx_xvbitclri_w", + "lasx.xvbitrev.b" => "__builtin_lasx_xvbitrev_b", + "lasx.xvbitrev.d" => "__builtin_lasx_xvbitrev_d", + "lasx.xvbitrev.h" => "__builtin_lasx_xvbitrev_h", + "lasx.xvbitrev.w" => "__builtin_lasx_xvbitrev_w", + "lasx.xvbitrevi.b" => "__builtin_lasx_xvbitrevi_b", + "lasx.xvbitrevi.d" => "__builtin_lasx_xvbitrevi_d", + "lasx.xvbitrevi.h" => "__builtin_lasx_xvbitrevi_h", + "lasx.xvbitrevi.w" => "__builtin_lasx_xvbitrevi_w", + "lasx.xvbitsel.v" => "__builtin_lasx_xvbitsel_v", + "lasx.xvbitseli.b" => "__builtin_lasx_xvbitseli_b", + "lasx.xvbitset.b" => "__builtin_lasx_xvbitset_b", + "lasx.xvbitset.d" => "__builtin_lasx_xvbitset_d", + "lasx.xvbitset.h" => "__builtin_lasx_xvbitset_h", + "lasx.xvbitset.w" => "__builtin_lasx_xvbitset_w", + "lasx.xvbitseti.b" => "__builtin_lasx_xvbitseti_b", + "lasx.xvbitseti.d" => "__builtin_lasx_xvbitseti_d", + "lasx.xvbitseti.h" => "__builtin_lasx_xvbitseti_h", + "lasx.xvbitseti.w" => "__builtin_lasx_xvbitseti_w", + "lasx.xvbsll.v" => "__builtin_lasx_xvbsll_v", + "lasx.xvbsrl.v" => "__builtin_lasx_xvbsrl_v", + "lasx.xvclo.b" => "__builtin_lasx_xvclo_b", + "lasx.xvclo.d" => "__builtin_lasx_xvclo_d", + "lasx.xvclo.h" => "__builtin_lasx_xvclo_h", + "lasx.xvclo.w" => "__builtin_lasx_xvclo_w", + "lasx.xvclz.b" => "__builtin_lasx_xvclz_b", + "lasx.xvclz.d" => "__builtin_lasx_xvclz_d", + "lasx.xvclz.h" => "__builtin_lasx_xvclz_h", + "lasx.xvclz.w" => "__builtin_lasx_xvclz_w", + "lasx.xvdiv.b" => "__builtin_lasx_xvdiv_b", + "lasx.xvdiv.bu" => "__builtin_lasx_xvdiv_bu", + "lasx.xvdiv.d" => "__builtin_lasx_xvdiv_d", + "lasx.xvdiv.du" => "__builtin_lasx_xvdiv_du", + "lasx.xvdiv.h" => "__builtin_lasx_xvdiv_h", + "lasx.xvdiv.hu" => "__builtin_lasx_xvdiv_hu", + "lasx.xvdiv.w" => "__builtin_lasx_xvdiv_w", + "lasx.xvdiv.wu" => "__builtin_lasx_xvdiv_wu", + "lasx.xvexth.d.w" => "__builtin_lasx_xvexth_d_w", + "lasx.xvexth.du.wu" => "__builtin_lasx_xvexth_du_wu", + "lasx.xvexth.h.b" => "__builtin_lasx_xvexth_h_b", + "lasx.xvexth.hu.bu" => "__builtin_lasx_xvexth_hu_bu", + "lasx.xvexth.q.d" => "__builtin_lasx_xvexth_q_d", + "lasx.xvexth.qu.du" => "__builtin_lasx_xvexth_qu_du", + "lasx.xvexth.w.h" => "__builtin_lasx_xvexth_w_h", + "lasx.xvexth.wu.hu" => "__builtin_lasx_xvexth_wu_hu", + "lasx.xvextl.q.d" => "__builtin_lasx_xvextl_q_d", + "lasx.xvextl.qu.du" => "__builtin_lasx_xvextl_qu_du", + "lasx.xvextrins.b" => "__builtin_lasx_xvextrins_b", + "lasx.xvextrins.d" => "__builtin_lasx_xvextrins_d", + "lasx.xvextrins.h" => "__builtin_lasx_xvextrins_h", + "lasx.xvextrins.w" => "__builtin_lasx_xvextrins_w", + "lasx.xvfadd.d" => "__builtin_lasx_xvfadd_d", + "lasx.xvfadd.s" => "__builtin_lasx_xvfadd_s", + "lasx.xvfclass.d" => "__builtin_lasx_xvfclass_d", + "lasx.xvfclass.s" => "__builtin_lasx_xvfclass_s", + "lasx.xvfcmp.caf.d" => "__builtin_lasx_xvfcmp_caf_d", + "lasx.xvfcmp.caf.s" => "__builtin_lasx_xvfcmp_caf_s", + "lasx.xvfcmp.ceq.d" => "__builtin_lasx_xvfcmp_ceq_d", + "lasx.xvfcmp.ceq.s" => "__builtin_lasx_xvfcmp_ceq_s", + "lasx.xvfcmp.cle.d" => 
"__builtin_lasx_xvfcmp_cle_d", + "lasx.xvfcmp.cle.s" => "__builtin_lasx_xvfcmp_cle_s", + "lasx.xvfcmp.clt.d" => "__builtin_lasx_xvfcmp_clt_d", + "lasx.xvfcmp.clt.s" => "__builtin_lasx_xvfcmp_clt_s", + "lasx.xvfcmp.cne.d" => "__builtin_lasx_xvfcmp_cne_d", + "lasx.xvfcmp.cne.s" => "__builtin_lasx_xvfcmp_cne_s", + "lasx.xvfcmp.cor.d" => "__builtin_lasx_xvfcmp_cor_d", + "lasx.xvfcmp.cor.s" => "__builtin_lasx_xvfcmp_cor_s", + "lasx.xvfcmp.cueq.d" => "__builtin_lasx_xvfcmp_cueq_d", + "lasx.xvfcmp.cueq.s" => "__builtin_lasx_xvfcmp_cueq_s", + "lasx.xvfcmp.cule.d" => "__builtin_lasx_xvfcmp_cule_d", + "lasx.xvfcmp.cule.s" => "__builtin_lasx_xvfcmp_cule_s", + "lasx.xvfcmp.cult.d" => "__builtin_lasx_xvfcmp_cult_d", + "lasx.xvfcmp.cult.s" => "__builtin_lasx_xvfcmp_cult_s", + "lasx.xvfcmp.cun.d" => "__builtin_lasx_xvfcmp_cun_d", + "lasx.xvfcmp.cun.s" => "__builtin_lasx_xvfcmp_cun_s", + "lasx.xvfcmp.cune.d" => "__builtin_lasx_xvfcmp_cune_d", + "lasx.xvfcmp.cune.s" => "__builtin_lasx_xvfcmp_cune_s", + "lasx.xvfcmp.saf.d" => "__builtin_lasx_xvfcmp_saf_d", + "lasx.xvfcmp.saf.s" => "__builtin_lasx_xvfcmp_saf_s", + "lasx.xvfcmp.seq.d" => "__builtin_lasx_xvfcmp_seq_d", + "lasx.xvfcmp.seq.s" => "__builtin_lasx_xvfcmp_seq_s", + "lasx.xvfcmp.sle.d" => "__builtin_lasx_xvfcmp_sle_d", + "lasx.xvfcmp.sle.s" => "__builtin_lasx_xvfcmp_sle_s", + "lasx.xvfcmp.slt.d" => "__builtin_lasx_xvfcmp_slt_d", + "lasx.xvfcmp.slt.s" => "__builtin_lasx_xvfcmp_slt_s", + "lasx.xvfcmp.sne.d" => "__builtin_lasx_xvfcmp_sne_d", + "lasx.xvfcmp.sne.s" => "__builtin_lasx_xvfcmp_sne_s", + "lasx.xvfcmp.sor.d" => "__builtin_lasx_xvfcmp_sor_d", + "lasx.xvfcmp.sor.s" => "__builtin_lasx_xvfcmp_sor_s", + "lasx.xvfcmp.sueq.d" => "__builtin_lasx_xvfcmp_sueq_d", + "lasx.xvfcmp.sueq.s" => "__builtin_lasx_xvfcmp_sueq_s", + "lasx.xvfcmp.sule.d" => "__builtin_lasx_xvfcmp_sule_d", + "lasx.xvfcmp.sule.s" => "__builtin_lasx_xvfcmp_sule_s", + "lasx.xvfcmp.sult.d" => "__builtin_lasx_xvfcmp_sult_d", + "lasx.xvfcmp.sult.s" => "__builtin_lasx_xvfcmp_sult_s", + "lasx.xvfcmp.sun.d" => "__builtin_lasx_xvfcmp_sun_d", + "lasx.xvfcmp.sun.s" => "__builtin_lasx_xvfcmp_sun_s", + "lasx.xvfcmp.sune.d" => "__builtin_lasx_xvfcmp_sune_d", + "lasx.xvfcmp.sune.s" => "__builtin_lasx_xvfcmp_sune_s", + "lasx.xvfcvt.h.s" => "__builtin_lasx_xvfcvt_h_s", + "lasx.xvfcvt.s.d" => "__builtin_lasx_xvfcvt_s_d", + "lasx.xvfcvth.d.s" => "__builtin_lasx_xvfcvth_d_s", + "lasx.xvfcvth.s.h" => "__builtin_lasx_xvfcvth_s_h", + "lasx.xvfcvtl.d.s" => "__builtin_lasx_xvfcvtl_d_s", + "lasx.xvfcvtl.s.h" => "__builtin_lasx_xvfcvtl_s_h", + "lasx.xvfdiv.d" => "__builtin_lasx_xvfdiv_d", + "lasx.xvfdiv.s" => "__builtin_lasx_xvfdiv_s", + "lasx.xvffint.d.l" => "__builtin_lasx_xvffint_d_l", + "lasx.xvffint.d.lu" => "__builtin_lasx_xvffint_d_lu", + "lasx.xvffint.s.l" => "__builtin_lasx_xvffint_s_l", + "lasx.xvffint.s.w" => "__builtin_lasx_xvffint_s_w", + "lasx.xvffint.s.wu" => "__builtin_lasx_xvffint_s_wu", + "lasx.xvffinth.d.w" => "__builtin_lasx_xvffinth_d_w", + "lasx.xvffintl.d.w" => "__builtin_lasx_xvffintl_d_w", + "lasx.xvflogb.d" => "__builtin_lasx_xvflogb_d", + "lasx.xvflogb.s" => "__builtin_lasx_xvflogb_s", + "lasx.xvfmadd.d" => "__builtin_lasx_xvfmadd_d", + "lasx.xvfmadd.s" => "__builtin_lasx_xvfmadd_s", + "lasx.xvfmax.d" => "__builtin_lasx_xvfmax_d", + "lasx.xvfmax.s" => "__builtin_lasx_xvfmax_s", + "lasx.xvfmaxa.d" => "__builtin_lasx_xvfmaxa_d", + "lasx.xvfmaxa.s" => "__builtin_lasx_xvfmaxa_s", + "lasx.xvfmin.d" => "__builtin_lasx_xvfmin_d", + "lasx.xvfmin.s" => "__builtin_lasx_xvfmin_s", + 
"lasx.xvfmina.d" => "__builtin_lasx_xvfmina_d", + "lasx.xvfmina.s" => "__builtin_lasx_xvfmina_s", + "lasx.xvfmsub.d" => "__builtin_lasx_xvfmsub_d", + "lasx.xvfmsub.s" => "__builtin_lasx_xvfmsub_s", + "lasx.xvfmul.d" => "__builtin_lasx_xvfmul_d", + "lasx.xvfmul.s" => "__builtin_lasx_xvfmul_s", + "lasx.xvfnmadd.d" => "__builtin_lasx_xvfnmadd_d", + "lasx.xvfnmadd.s" => "__builtin_lasx_xvfnmadd_s", + "lasx.xvfnmsub.d" => "__builtin_lasx_xvfnmsub_d", + "lasx.xvfnmsub.s" => "__builtin_lasx_xvfnmsub_s", + "lasx.xvfrecip.d" => "__builtin_lasx_xvfrecip_d", + "lasx.xvfrecip.s" => "__builtin_lasx_xvfrecip_s", + "lasx.xvfrecipe.d" => "__builtin_lasx_xvfrecipe_d", + "lasx.xvfrecipe.s" => "__builtin_lasx_xvfrecipe_s", + "lasx.xvfrint.d" => "__builtin_lasx_xvfrint_d", + "lasx.xvfrint.s" => "__builtin_lasx_xvfrint_s", + "lasx.xvfrintrm.d" => "__builtin_lasx_xvfrintrm_d", + "lasx.xvfrintrm.s" => "__builtin_lasx_xvfrintrm_s", + "lasx.xvfrintrne.d" => "__builtin_lasx_xvfrintrne_d", + "lasx.xvfrintrne.s" => "__builtin_lasx_xvfrintrne_s", + "lasx.xvfrintrp.d" => "__builtin_lasx_xvfrintrp_d", + "lasx.xvfrintrp.s" => "__builtin_lasx_xvfrintrp_s", + "lasx.xvfrintrz.d" => "__builtin_lasx_xvfrintrz_d", + "lasx.xvfrintrz.s" => "__builtin_lasx_xvfrintrz_s", + "lasx.xvfrsqrt.d" => "__builtin_lasx_xvfrsqrt_d", + "lasx.xvfrsqrt.s" => "__builtin_lasx_xvfrsqrt_s", + "lasx.xvfrsqrte.d" => "__builtin_lasx_xvfrsqrte_d", + "lasx.xvfrsqrte.s" => "__builtin_lasx_xvfrsqrte_s", + "lasx.xvfrstp.b" => "__builtin_lasx_xvfrstp_b", + "lasx.xvfrstp.h" => "__builtin_lasx_xvfrstp_h", + "lasx.xvfrstpi.b" => "__builtin_lasx_xvfrstpi_b", + "lasx.xvfrstpi.h" => "__builtin_lasx_xvfrstpi_h", + "lasx.xvfsqrt.d" => "__builtin_lasx_xvfsqrt_d", + "lasx.xvfsqrt.s" => "__builtin_lasx_xvfsqrt_s", + "lasx.xvfsub.d" => "__builtin_lasx_xvfsub_d", + "lasx.xvfsub.s" => "__builtin_lasx_xvfsub_s", + "lasx.xvftint.l.d" => "__builtin_lasx_xvftint_l_d", + "lasx.xvftint.lu.d" => "__builtin_lasx_xvftint_lu_d", + "lasx.xvftint.w.d" => "__builtin_lasx_xvftint_w_d", + "lasx.xvftint.w.s" => "__builtin_lasx_xvftint_w_s", + "lasx.xvftint.wu.s" => "__builtin_lasx_xvftint_wu_s", + "lasx.xvftinth.l.s" => "__builtin_lasx_xvftinth_l_s", + "lasx.xvftintl.l.s" => "__builtin_lasx_xvftintl_l_s", + "lasx.xvftintrm.l.d" => "__builtin_lasx_xvftintrm_l_d", + "lasx.xvftintrm.w.d" => "__builtin_lasx_xvftintrm_w_d", + "lasx.xvftintrm.w.s" => "__builtin_lasx_xvftintrm_w_s", + "lasx.xvftintrmh.l.s" => "__builtin_lasx_xvftintrmh_l_s", + "lasx.xvftintrml.l.s" => "__builtin_lasx_xvftintrml_l_s", + "lasx.xvftintrne.l.d" => "__builtin_lasx_xvftintrne_l_d", + "lasx.xvftintrne.w.d" => "__builtin_lasx_xvftintrne_w_d", + "lasx.xvftintrne.w.s" => "__builtin_lasx_xvftintrne_w_s", + "lasx.xvftintrneh.l.s" => "__builtin_lasx_xvftintrneh_l_s", + "lasx.xvftintrnel.l.s" => "__builtin_lasx_xvftintrnel_l_s", + "lasx.xvftintrp.l.d" => "__builtin_lasx_xvftintrp_l_d", + "lasx.xvftintrp.w.d" => "__builtin_lasx_xvftintrp_w_d", + "lasx.xvftintrp.w.s" => "__builtin_lasx_xvftintrp_w_s", + "lasx.xvftintrph.l.s" => "__builtin_lasx_xvftintrph_l_s", + "lasx.xvftintrpl.l.s" => "__builtin_lasx_xvftintrpl_l_s", + "lasx.xvftintrz.l.d" => "__builtin_lasx_xvftintrz_l_d", + "lasx.xvftintrz.lu.d" => "__builtin_lasx_xvftintrz_lu_d", + "lasx.xvftintrz.w.d" => "__builtin_lasx_xvftintrz_w_d", + "lasx.xvftintrz.w.s" => "__builtin_lasx_xvftintrz_w_s", + "lasx.xvftintrz.wu.s" => "__builtin_lasx_xvftintrz_wu_s", + "lasx.xvftintrzh.l.s" => "__builtin_lasx_xvftintrzh_l_s", + "lasx.xvftintrzl.l.s" => "__builtin_lasx_xvftintrzl_l_s", 
+ "lasx.xvhaddw.d.w" => "__builtin_lasx_xvhaddw_d_w", + "lasx.xvhaddw.du.wu" => "__builtin_lasx_xvhaddw_du_wu", + "lasx.xvhaddw.h.b" => "__builtin_lasx_xvhaddw_h_b", + "lasx.xvhaddw.hu.bu" => "__builtin_lasx_xvhaddw_hu_bu", + "lasx.xvhaddw.q.d" => "__builtin_lasx_xvhaddw_q_d", + "lasx.xvhaddw.qu.du" => "__builtin_lasx_xvhaddw_qu_du", + "lasx.xvhaddw.w.h" => "__builtin_lasx_xvhaddw_w_h", + "lasx.xvhaddw.wu.hu" => "__builtin_lasx_xvhaddw_wu_hu", + "lasx.xvhsubw.d.w" => "__builtin_lasx_xvhsubw_d_w", + "lasx.xvhsubw.du.wu" => "__builtin_lasx_xvhsubw_du_wu", + "lasx.xvhsubw.h.b" => "__builtin_lasx_xvhsubw_h_b", + "lasx.xvhsubw.hu.bu" => "__builtin_lasx_xvhsubw_hu_bu", + "lasx.xvhsubw.q.d" => "__builtin_lasx_xvhsubw_q_d", + "lasx.xvhsubw.qu.du" => "__builtin_lasx_xvhsubw_qu_du", + "lasx.xvhsubw.w.h" => "__builtin_lasx_xvhsubw_w_h", + "lasx.xvhsubw.wu.hu" => "__builtin_lasx_xvhsubw_wu_hu", + "lasx.xvilvh.b" => "__builtin_lasx_xvilvh_b", + "lasx.xvilvh.d" => "__builtin_lasx_xvilvh_d", + "lasx.xvilvh.h" => "__builtin_lasx_xvilvh_h", + "lasx.xvilvh.w" => "__builtin_lasx_xvilvh_w", + "lasx.xvilvl.b" => "__builtin_lasx_xvilvl_b", + "lasx.xvilvl.d" => "__builtin_lasx_xvilvl_d", + "lasx.xvilvl.h" => "__builtin_lasx_xvilvl_h", + "lasx.xvilvl.w" => "__builtin_lasx_xvilvl_w", + "lasx.xvinsgr2vr.d" => "__builtin_lasx_xvinsgr2vr_d", + "lasx.xvinsgr2vr.w" => "__builtin_lasx_xvinsgr2vr_w", + "lasx.xvinsve0.d" => "__builtin_lasx_xvinsve0_d", + "lasx.xvinsve0.w" => "__builtin_lasx_xvinsve0_w", + "lasx.xvld" => "__builtin_lasx_xvld", + "lasx.xvldi" => "__builtin_lasx_xvldi", + "lasx.xvldrepl.b" => "__builtin_lasx_xvldrepl_b", + "lasx.xvldrepl.d" => "__builtin_lasx_xvldrepl_d", + "lasx.xvldrepl.h" => "__builtin_lasx_xvldrepl_h", + "lasx.xvldrepl.w" => "__builtin_lasx_xvldrepl_w", + "lasx.xvldx" => "__builtin_lasx_xvldx", + "lasx.xvmadd.b" => "__builtin_lasx_xvmadd_b", + "lasx.xvmadd.d" => "__builtin_lasx_xvmadd_d", + "lasx.xvmadd.h" => "__builtin_lasx_xvmadd_h", + "lasx.xvmadd.w" => "__builtin_lasx_xvmadd_w", + "lasx.xvmaddwev.d.w" => "__builtin_lasx_xvmaddwev_d_w", + "lasx.xvmaddwev.d.wu" => "__builtin_lasx_xvmaddwev_d_wu", + "lasx.xvmaddwev.d.wu.w" => "__builtin_lasx_xvmaddwev_d_wu_w", + "lasx.xvmaddwev.h.b" => "__builtin_lasx_xvmaddwev_h_b", + "lasx.xvmaddwev.h.bu" => "__builtin_lasx_xvmaddwev_h_bu", + "lasx.xvmaddwev.h.bu.b" => "__builtin_lasx_xvmaddwev_h_bu_b", + "lasx.xvmaddwev.q.d" => "__builtin_lasx_xvmaddwev_q_d", + "lasx.xvmaddwev.q.du" => "__builtin_lasx_xvmaddwev_q_du", + "lasx.xvmaddwev.q.du.d" => "__builtin_lasx_xvmaddwev_q_du_d", + "lasx.xvmaddwev.w.h" => "__builtin_lasx_xvmaddwev_w_h", + "lasx.xvmaddwev.w.hu" => "__builtin_lasx_xvmaddwev_w_hu", + "lasx.xvmaddwev.w.hu.h" => "__builtin_lasx_xvmaddwev_w_hu_h", + "lasx.xvmaddwod.d.w" => "__builtin_lasx_xvmaddwod_d_w", + "lasx.xvmaddwod.d.wu" => "__builtin_lasx_xvmaddwod_d_wu", + "lasx.xvmaddwod.d.wu.w" => "__builtin_lasx_xvmaddwod_d_wu_w", + "lasx.xvmaddwod.h.b" => "__builtin_lasx_xvmaddwod_h_b", + "lasx.xvmaddwod.h.bu" => "__builtin_lasx_xvmaddwod_h_bu", + "lasx.xvmaddwod.h.bu.b" => "__builtin_lasx_xvmaddwod_h_bu_b", + "lasx.xvmaddwod.q.d" => "__builtin_lasx_xvmaddwod_q_d", + "lasx.xvmaddwod.q.du" => "__builtin_lasx_xvmaddwod_q_du", + "lasx.xvmaddwod.q.du.d" => "__builtin_lasx_xvmaddwod_q_du_d", + "lasx.xvmaddwod.w.h" => "__builtin_lasx_xvmaddwod_w_h", + "lasx.xvmaddwod.w.hu" => "__builtin_lasx_xvmaddwod_w_hu", + "lasx.xvmaddwod.w.hu.h" => "__builtin_lasx_xvmaddwod_w_hu_h", + "lasx.xvmax.b" => "__builtin_lasx_xvmax_b", + "lasx.xvmax.bu" => 
"__builtin_lasx_xvmax_bu", + "lasx.xvmax.d" => "__builtin_lasx_xvmax_d", + "lasx.xvmax.du" => "__builtin_lasx_xvmax_du", + "lasx.xvmax.h" => "__builtin_lasx_xvmax_h", + "lasx.xvmax.hu" => "__builtin_lasx_xvmax_hu", + "lasx.xvmax.w" => "__builtin_lasx_xvmax_w", + "lasx.xvmax.wu" => "__builtin_lasx_xvmax_wu", + "lasx.xvmaxi.b" => "__builtin_lasx_xvmaxi_b", + "lasx.xvmaxi.bu" => "__builtin_lasx_xvmaxi_bu", + "lasx.xvmaxi.d" => "__builtin_lasx_xvmaxi_d", + "lasx.xvmaxi.du" => "__builtin_lasx_xvmaxi_du", + "lasx.xvmaxi.h" => "__builtin_lasx_xvmaxi_h", + "lasx.xvmaxi.hu" => "__builtin_lasx_xvmaxi_hu", + "lasx.xvmaxi.w" => "__builtin_lasx_xvmaxi_w", + "lasx.xvmaxi.wu" => "__builtin_lasx_xvmaxi_wu", + "lasx.xvmin.b" => "__builtin_lasx_xvmin_b", + "lasx.xvmin.bu" => "__builtin_lasx_xvmin_bu", + "lasx.xvmin.d" => "__builtin_lasx_xvmin_d", + "lasx.xvmin.du" => "__builtin_lasx_xvmin_du", + "lasx.xvmin.h" => "__builtin_lasx_xvmin_h", + "lasx.xvmin.hu" => "__builtin_lasx_xvmin_hu", + "lasx.xvmin.w" => "__builtin_lasx_xvmin_w", + "lasx.xvmin.wu" => "__builtin_lasx_xvmin_wu", + "lasx.xvmini.b" => "__builtin_lasx_xvmini_b", + "lasx.xvmini.bu" => "__builtin_lasx_xvmini_bu", + "lasx.xvmini.d" => "__builtin_lasx_xvmini_d", + "lasx.xvmini.du" => "__builtin_lasx_xvmini_du", + "lasx.xvmini.h" => "__builtin_lasx_xvmini_h", + "lasx.xvmini.hu" => "__builtin_lasx_xvmini_hu", + "lasx.xvmini.w" => "__builtin_lasx_xvmini_w", + "lasx.xvmini.wu" => "__builtin_lasx_xvmini_wu", + "lasx.xvmod.b" => "__builtin_lasx_xvmod_b", + "lasx.xvmod.bu" => "__builtin_lasx_xvmod_bu", + "lasx.xvmod.d" => "__builtin_lasx_xvmod_d", + "lasx.xvmod.du" => "__builtin_lasx_xvmod_du", + "lasx.xvmod.h" => "__builtin_lasx_xvmod_h", + "lasx.xvmod.hu" => "__builtin_lasx_xvmod_hu", + "lasx.xvmod.w" => "__builtin_lasx_xvmod_w", + "lasx.xvmod.wu" => "__builtin_lasx_xvmod_wu", + "lasx.xvmskgez.b" => "__builtin_lasx_xvmskgez_b", + "lasx.xvmskltz.b" => "__builtin_lasx_xvmskltz_b", + "lasx.xvmskltz.d" => "__builtin_lasx_xvmskltz_d", + "lasx.xvmskltz.h" => "__builtin_lasx_xvmskltz_h", + "lasx.xvmskltz.w" => "__builtin_lasx_xvmskltz_w", + "lasx.xvmsknz.b" => "__builtin_lasx_xvmsknz_b", + "lasx.xvmsub.b" => "__builtin_lasx_xvmsub_b", + "lasx.xvmsub.d" => "__builtin_lasx_xvmsub_d", + "lasx.xvmsub.h" => "__builtin_lasx_xvmsub_h", + "lasx.xvmsub.w" => "__builtin_lasx_xvmsub_w", + "lasx.xvmuh.b" => "__builtin_lasx_xvmuh_b", + "lasx.xvmuh.bu" => "__builtin_lasx_xvmuh_bu", + "lasx.xvmuh.d" => "__builtin_lasx_xvmuh_d", + "lasx.xvmuh.du" => "__builtin_lasx_xvmuh_du", + "lasx.xvmuh.h" => "__builtin_lasx_xvmuh_h", + "lasx.xvmuh.hu" => "__builtin_lasx_xvmuh_hu", + "lasx.xvmuh.w" => "__builtin_lasx_xvmuh_w", + "lasx.xvmuh.wu" => "__builtin_lasx_xvmuh_wu", + "lasx.xvmul.b" => "__builtin_lasx_xvmul_b", + "lasx.xvmul.d" => "__builtin_lasx_xvmul_d", + "lasx.xvmul.h" => "__builtin_lasx_xvmul_h", + "lasx.xvmul.w" => "__builtin_lasx_xvmul_w", + "lasx.xvmulwev.d.w" => "__builtin_lasx_xvmulwev_d_w", + "lasx.xvmulwev.d.wu" => "__builtin_lasx_xvmulwev_d_wu", + "lasx.xvmulwev.d.wu.w" => "__builtin_lasx_xvmulwev_d_wu_w", + "lasx.xvmulwev.h.b" => "__builtin_lasx_xvmulwev_h_b", + "lasx.xvmulwev.h.bu" => "__builtin_lasx_xvmulwev_h_bu", + "lasx.xvmulwev.h.bu.b" => "__builtin_lasx_xvmulwev_h_bu_b", + "lasx.xvmulwev.q.d" => "__builtin_lasx_xvmulwev_q_d", + "lasx.xvmulwev.q.du" => "__builtin_lasx_xvmulwev_q_du", + "lasx.xvmulwev.q.du.d" => "__builtin_lasx_xvmulwev_q_du_d", + "lasx.xvmulwev.w.h" => "__builtin_lasx_xvmulwev_w_h", + "lasx.xvmulwev.w.hu" => "__builtin_lasx_xvmulwev_w_hu", + 
"lasx.xvmulwev.w.hu.h" => "__builtin_lasx_xvmulwev_w_hu_h", + "lasx.xvmulwod.d.w" => "__builtin_lasx_xvmulwod_d_w", + "lasx.xvmulwod.d.wu" => "__builtin_lasx_xvmulwod_d_wu", + "lasx.xvmulwod.d.wu.w" => "__builtin_lasx_xvmulwod_d_wu_w", + "lasx.xvmulwod.h.b" => "__builtin_lasx_xvmulwod_h_b", + "lasx.xvmulwod.h.bu" => "__builtin_lasx_xvmulwod_h_bu", + "lasx.xvmulwod.h.bu.b" => "__builtin_lasx_xvmulwod_h_bu_b", + "lasx.xvmulwod.q.d" => "__builtin_lasx_xvmulwod_q_d", + "lasx.xvmulwod.q.du" => "__builtin_lasx_xvmulwod_q_du", + "lasx.xvmulwod.q.du.d" => "__builtin_lasx_xvmulwod_q_du_d", + "lasx.xvmulwod.w.h" => "__builtin_lasx_xvmulwod_w_h", + "lasx.xvmulwod.w.hu" => "__builtin_lasx_xvmulwod_w_hu", + "lasx.xvmulwod.w.hu.h" => "__builtin_lasx_xvmulwod_w_hu_h", + "lasx.xvneg.b" => "__builtin_lasx_xvneg_b", + "lasx.xvneg.d" => "__builtin_lasx_xvneg_d", + "lasx.xvneg.h" => "__builtin_lasx_xvneg_h", + "lasx.xvneg.w" => "__builtin_lasx_xvneg_w", + "lasx.xvnor.v" => "__builtin_lasx_xvnor_v", + "lasx.xvnori.b" => "__builtin_lasx_xvnori_b", + "lasx.xvor.v" => "__builtin_lasx_xvor_v", + "lasx.xvori.b" => "__builtin_lasx_xvori_b", + "lasx.xvorn.v" => "__builtin_lasx_xvorn_v", + "lasx.xvpackev.b" => "__builtin_lasx_xvpackev_b", + "lasx.xvpackev.d" => "__builtin_lasx_xvpackev_d", + "lasx.xvpackev.h" => "__builtin_lasx_xvpackev_h", + "lasx.xvpackev.w" => "__builtin_lasx_xvpackev_w", + "lasx.xvpackod.b" => "__builtin_lasx_xvpackod_b", + "lasx.xvpackod.d" => "__builtin_lasx_xvpackod_d", + "lasx.xvpackod.h" => "__builtin_lasx_xvpackod_h", + "lasx.xvpackod.w" => "__builtin_lasx_xvpackod_w", + "lasx.xvpcnt.b" => "__builtin_lasx_xvpcnt_b", + "lasx.xvpcnt.d" => "__builtin_lasx_xvpcnt_d", + "lasx.xvpcnt.h" => "__builtin_lasx_xvpcnt_h", + "lasx.xvpcnt.w" => "__builtin_lasx_xvpcnt_w", + "lasx.xvperm.w" => "__builtin_lasx_xvperm_w", + "lasx.xvpermi.d" => "__builtin_lasx_xvpermi_d", + "lasx.xvpermi.q" => "__builtin_lasx_xvpermi_q", + "lasx.xvpermi.w" => "__builtin_lasx_xvpermi_w", + "lasx.xvpickev.b" => "__builtin_lasx_xvpickev_b", + "lasx.xvpickev.d" => "__builtin_lasx_xvpickev_d", + "lasx.xvpickev.h" => "__builtin_lasx_xvpickev_h", + "lasx.xvpickev.w" => "__builtin_lasx_xvpickev_w", + "lasx.xvpickod.b" => "__builtin_lasx_xvpickod_b", + "lasx.xvpickod.d" => "__builtin_lasx_xvpickod_d", + "lasx.xvpickod.h" => "__builtin_lasx_xvpickod_h", + "lasx.xvpickod.w" => "__builtin_lasx_xvpickod_w", + "lasx.xvpickve.d" => "__builtin_lasx_xvpickve_d", + "lasx.xvpickve.d.f" => "__builtin_lasx_xvpickve_d_f", + "lasx.xvpickve.w" => "__builtin_lasx_xvpickve_w", + "lasx.xvpickve.w.f" => "__builtin_lasx_xvpickve_w_f", + "lasx.xvpickve2gr.d" => "__builtin_lasx_xvpickve2gr_d", + "lasx.xvpickve2gr.du" => "__builtin_lasx_xvpickve2gr_du", + "lasx.xvpickve2gr.w" => "__builtin_lasx_xvpickve2gr_w", + "lasx.xvpickve2gr.wu" => "__builtin_lasx_xvpickve2gr_wu", + "lasx.xvrepl128vei.b" => "__builtin_lasx_xvrepl128vei_b", + "lasx.xvrepl128vei.d" => "__builtin_lasx_xvrepl128vei_d", + "lasx.xvrepl128vei.h" => "__builtin_lasx_xvrepl128vei_h", + "lasx.xvrepl128vei.w" => "__builtin_lasx_xvrepl128vei_w", + "lasx.xvreplgr2vr.b" => "__builtin_lasx_xvreplgr2vr_b", + "lasx.xvreplgr2vr.d" => "__builtin_lasx_xvreplgr2vr_d", + "lasx.xvreplgr2vr.h" => "__builtin_lasx_xvreplgr2vr_h", + "lasx.xvreplgr2vr.w" => "__builtin_lasx_xvreplgr2vr_w", + "lasx.xvrepli.b" => "__builtin_lasx_xvrepli_b", + "lasx.xvrepli.d" => "__builtin_lasx_xvrepli_d", + "lasx.xvrepli.h" => "__builtin_lasx_xvrepli_h", + "lasx.xvrepli.w" => "__builtin_lasx_xvrepli_w", + "lasx.xvreplve.b" => 
"__builtin_lasx_xvreplve_b", + "lasx.xvreplve.d" => "__builtin_lasx_xvreplve_d", + "lasx.xvreplve.h" => "__builtin_lasx_xvreplve_h", + "lasx.xvreplve.w" => "__builtin_lasx_xvreplve_w", + "lasx.xvreplve0.b" => "__builtin_lasx_xvreplve0_b", + "lasx.xvreplve0.d" => "__builtin_lasx_xvreplve0_d", + "lasx.xvreplve0.h" => "__builtin_lasx_xvreplve0_h", + "lasx.xvreplve0.q" => "__builtin_lasx_xvreplve0_q", + "lasx.xvreplve0.w" => "__builtin_lasx_xvreplve0_w", + "lasx.xvrotr.b" => "__builtin_lasx_xvrotr_b", + "lasx.xvrotr.d" => "__builtin_lasx_xvrotr_d", + "lasx.xvrotr.h" => "__builtin_lasx_xvrotr_h", + "lasx.xvrotr.w" => "__builtin_lasx_xvrotr_w", + "lasx.xvrotri.b" => "__builtin_lasx_xvrotri_b", + "lasx.xvrotri.d" => "__builtin_lasx_xvrotri_d", + "lasx.xvrotri.h" => "__builtin_lasx_xvrotri_h", + "lasx.xvrotri.w" => "__builtin_lasx_xvrotri_w", + "lasx.xvsadd.b" => "__builtin_lasx_xvsadd_b", + "lasx.xvsadd.bu" => "__builtin_lasx_xvsadd_bu", + "lasx.xvsadd.d" => "__builtin_lasx_xvsadd_d", + "lasx.xvsadd.du" => "__builtin_lasx_xvsadd_du", + "lasx.xvsadd.h" => "__builtin_lasx_xvsadd_h", + "lasx.xvsadd.hu" => "__builtin_lasx_xvsadd_hu", + "lasx.xvsadd.w" => "__builtin_lasx_xvsadd_w", + "lasx.xvsadd.wu" => "__builtin_lasx_xvsadd_wu", + "lasx.xvsat.b" => "__builtin_lasx_xvsat_b", + "lasx.xvsat.bu" => "__builtin_lasx_xvsat_bu", + "lasx.xvsat.d" => "__builtin_lasx_xvsat_d", + "lasx.xvsat.du" => "__builtin_lasx_xvsat_du", + "lasx.xvsat.h" => "__builtin_lasx_xvsat_h", + "lasx.xvsat.hu" => "__builtin_lasx_xvsat_hu", + "lasx.xvsat.w" => "__builtin_lasx_xvsat_w", + "lasx.xvsat.wu" => "__builtin_lasx_xvsat_wu", + "lasx.xvseq.b" => "__builtin_lasx_xvseq_b", + "lasx.xvseq.d" => "__builtin_lasx_xvseq_d", + "lasx.xvseq.h" => "__builtin_lasx_xvseq_h", + "lasx.xvseq.w" => "__builtin_lasx_xvseq_w", + "lasx.xvseqi.b" => "__builtin_lasx_xvseqi_b", + "lasx.xvseqi.d" => "__builtin_lasx_xvseqi_d", + "lasx.xvseqi.h" => "__builtin_lasx_xvseqi_h", + "lasx.xvseqi.w" => "__builtin_lasx_xvseqi_w", + "lasx.xvshuf.b" => "__builtin_lasx_xvshuf_b", + "lasx.xvshuf.d" => "__builtin_lasx_xvshuf_d", + "lasx.xvshuf.h" => "__builtin_lasx_xvshuf_h", + "lasx.xvshuf.w" => "__builtin_lasx_xvshuf_w", + "lasx.xvshuf4i.b" => "__builtin_lasx_xvshuf4i_b", + "lasx.xvshuf4i.d" => "__builtin_lasx_xvshuf4i_d", + "lasx.xvshuf4i.h" => "__builtin_lasx_xvshuf4i_h", + "lasx.xvshuf4i.w" => "__builtin_lasx_xvshuf4i_w", + "lasx.xvsigncov.b" => "__builtin_lasx_xvsigncov_b", + "lasx.xvsigncov.d" => "__builtin_lasx_xvsigncov_d", + "lasx.xvsigncov.h" => "__builtin_lasx_xvsigncov_h", + "lasx.xvsigncov.w" => "__builtin_lasx_xvsigncov_w", + "lasx.xvsle.b" => "__builtin_lasx_xvsle_b", + "lasx.xvsle.bu" => "__builtin_lasx_xvsle_bu", + "lasx.xvsle.d" => "__builtin_lasx_xvsle_d", + "lasx.xvsle.du" => "__builtin_lasx_xvsle_du", + "lasx.xvsle.h" => "__builtin_lasx_xvsle_h", + "lasx.xvsle.hu" => "__builtin_lasx_xvsle_hu", + "lasx.xvsle.w" => "__builtin_lasx_xvsle_w", + "lasx.xvsle.wu" => "__builtin_lasx_xvsle_wu", + "lasx.xvslei.b" => "__builtin_lasx_xvslei_b", + "lasx.xvslei.bu" => "__builtin_lasx_xvslei_bu", + "lasx.xvslei.d" => "__builtin_lasx_xvslei_d", + "lasx.xvslei.du" => "__builtin_lasx_xvslei_du", + "lasx.xvslei.h" => "__builtin_lasx_xvslei_h", + "lasx.xvslei.hu" => "__builtin_lasx_xvslei_hu", + "lasx.xvslei.w" => "__builtin_lasx_xvslei_w", + "lasx.xvslei.wu" => "__builtin_lasx_xvslei_wu", + "lasx.xvsll.b" => "__builtin_lasx_xvsll_b", + "lasx.xvsll.d" => "__builtin_lasx_xvsll_d", + "lasx.xvsll.h" => "__builtin_lasx_xvsll_h", + "lasx.xvsll.w" => 
"__builtin_lasx_xvsll_w", + "lasx.xvslli.b" => "__builtin_lasx_xvslli_b", + "lasx.xvslli.d" => "__builtin_lasx_xvslli_d", + "lasx.xvslli.h" => "__builtin_lasx_xvslli_h", + "lasx.xvslli.w" => "__builtin_lasx_xvslli_w", + "lasx.xvsllwil.d.w" => "__builtin_lasx_xvsllwil_d_w", + "lasx.xvsllwil.du.wu" => "__builtin_lasx_xvsllwil_du_wu", + "lasx.xvsllwil.h.b" => "__builtin_lasx_xvsllwil_h_b", + "lasx.xvsllwil.hu.bu" => "__builtin_lasx_xvsllwil_hu_bu", + "lasx.xvsllwil.w.h" => "__builtin_lasx_xvsllwil_w_h", + "lasx.xvsllwil.wu.hu" => "__builtin_lasx_xvsllwil_wu_hu", + "lasx.xvslt.b" => "__builtin_lasx_xvslt_b", + "lasx.xvslt.bu" => "__builtin_lasx_xvslt_bu", + "lasx.xvslt.d" => "__builtin_lasx_xvslt_d", + "lasx.xvslt.du" => "__builtin_lasx_xvslt_du", + "lasx.xvslt.h" => "__builtin_lasx_xvslt_h", + "lasx.xvslt.hu" => "__builtin_lasx_xvslt_hu", + "lasx.xvslt.w" => "__builtin_lasx_xvslt_w", + "lasx.xvslt.wu" => "__builtin_lasx_xvslt_wu", + "lasx.xvslti.b" => "__builtin_lasx_xvslti_b", + "lasx.xvslti.bu" => "__builtin_lasx_xvslti_bu", + "lasx.xvslti.d" => "__builtin_lasx_xvslti_d", + "lasx.xvslti.du" => "__builtin_lasx_xvslti_du", + "lasx.xvslti.h" => "__builtin_lasx_xvslti_h", + "lasx.xvslti.hu" => "__builtin_lasx_xvslti_hu", + "lasx.xvslti.w" => "__builtin_lasx_xvslti_w", + "lasx.xvslti.wu" => "__builtin_lasx_xvslti_wu", + "lasx.xvsra.b" => "__builtin_lasx_xvsra_b", + "lasx.xvsra.d" => "__builtin_lasx_xvsra_d", + "lasx.xvsra.h" => "__builtin_lasx_xvsra_h", + "lasx.xvsra.w" => "__builtin_lasx_xvsra_w", + "lasx.xvsrai.b" => "__builtin_lasx_xvsrai_b", + "lasx.xvsrai.d" => "__builtin_lasx_xvsrai_d", + "lasx.xvsrai.h" => "__builtin_lasx_xvsrai_h", + "lasx.xvsrai.w" => "__builtin_lasx_xvsrai_w", + "lasx.xvsran.b.h" => "__builtin_lasx_xvsran_b_h", + "lasx.xvsran.h.w" => "__builtin_lasx_xvsran_h_w", + "lasx.xvsran.w.d" => "__builtin_lasx_xvsran_w_d", + "lasx.xvsrani.b.h" => "__builtin_lasx_xvsrani_b_h", + "lasx.xvsrani.d.q" => "__builtin_lasx_xvsrani_d_q", + "lasx.xvsrani.h.w" => "__builtin_lasx_xvsrani_h_w", + "lasx.xvsrani.w.d" => "__builtin_lasx_xvsrani_w_d", + "lasx.xvsrar.b" => "__builtin_lasx_xvsrar_b", + "lasx.xvsrar.d" => "__builtin_lasx_xvsrar_d", + "lasx.xvsrar.h" => "__builtin_lasx_xvsrar_h", + "lasx.xvsrar.w" => "__builtin_lasx_xvsrar_w", + "lasx.xvsrari.b" => "__builtin_lasx_xvsrari_b", + "lasx.xvsrari.d" => "__builtin_lasx_xvsrari_d", + "lasx.xvsrari.h" => "__builtin_lasx_xvsrari_h", + "lasx.xvsrari.w" => "__builtin_lasx_xvsrari_w", + "lasx.xvsrarn.b.h" => "__builtin_lasx_xvsrarn_b_h", + "lasx.xvsrarn.h.w" => "__builtin_lasx_xvsrarn_h_w", + "lasx.xvsrarn.w.d" => "__builtin_lasx_xvsrarn_w_d", + "lasx.xvsrarni.b.h" => "__builtin_lasx_xvsrarni_b_h", + "lasx.xvsrarni.d.q" => "__builtin_lasx_xvsrarni_d_q", + "lasx.xvsrarni.h.w" => "__builtin_lasx_xvsrarni_h_w", + "lasx.xvsrarni.w.d" => "__builtin_lasx_xvsrarni_w_d", + "lasx.xvsrl.b" => "__builtin_lasx_xvsrl_b", + "lasx.xvsrl.d" => "__builtin_lasx_xvsrl_d", + "lasx.xvsrl.h" => "__builtin_lasx_xvsrl_h", + "lasx.xvsrl.w" => "__builtin_lasx_xvsrl_w", + "lasx.xvsrli.b" => "__builtin_lasx_xvsrli_b", + "lasx.xvsrli.d" => "__builtin_lasx_xvsrli_d", + "lasx.xvsrli.h" => "__builtin_lasx_xvsrli_h", + "lasx.xvsrli.w" => "__builtin_lasx_xvsrli_w", + "lasx.xvsrln.b.h" => "__builtin_lasx_xvsrln_b_h", + "lasx.xvsrln.h.w" => "__builtin_lasx_xvsrln_h_w", + "lasx.xvsrln.w.d" => "__builtin_lasx_xvsrln_w_d", + "lasx.xvsrlni.b.h" => "__builtin_lasx_xvsrlni_b_h", + "lasx.xvsrlni.d.q" => "__builtin_lasx_xvsrlni_d_q", + "lasx.xvsrlni.h.w" => "__builtin_lasx_xvsrlni_h_w", 
+ "lasx.xvsrlni.w.d" => "__builtin_lasx_xvsrlni_w_d", + "lasx.xvsrlr.b" => "__builtin_lasx_xvsrlr_b", + "lasx.xvsrlr.d" => "__builtin_lasx_xvsrlr_d", + "lasx.xvsrlr.h" => "__builtin_lasx_xvsrlr_h", + "lasx.xvsrlr.w" => "__builtin_lasx_xvsrlr_w", + "lasx.xvsrlri.b" => "__builtin_lasx_xvsrlri_b", + "lasx.xvsrlri.d" => "__builtin_lasx_xvsrlri_d", + "lasx.xvsrlri.h" => "__builtin_lasx_xvsrlri_h", + "lasx.xvsrlri.w" => "__builtin_lasx_xvsrlri_w", + "lasx.xvsrlrn.b.h" => "__builtin_lasx_xvsrlrn_b_h", + "lasx.xvsrlrn.h.w" => "__builtin_lasx_xvsrlrn_h_w", + "lasx.xvsrlrn.w.d" => "__builtin_lasx_xvsrlrn_w_d", + "lasx.xvsrlrni.b.h" => "__builtin_lasx_xvsrlrni_b_h", + "lasx.xvsrlrni.d.q" => "__builtin_lasx_xvsrlrni_d_q", + "lasx.xvsrlrni.h.w" => "__builtin_lasx_xvsrlrni_h_w", + "lasx.xvsrlrni.w.d" => "__builtin_lasx_xvsrlrni_w_d", + "lasx.xvssran.b.h" => "__builtin_lasx_xvssran_b_h", + "lasx.xvssran.bu.h" => "__builtin_lasx_xvssran_bu_h", + "lasx.xvssran.h.w" => "__builtin_lasx_xvssran_h_w", + "lasx.xvssran.hu.w" => "__builtin_lasx_xvssran_hu_w", + "lasx.xvssran.w.d" => "__builtin_lasx_xvssran_w_d", + "lasx.xvssran.wu.d" => "__builtin_lasx_xvssran_wu_d", + "lasx.xvssrani.b.h" => "__builtin_lasx_xvssrani_b_h", + "lasx.xvssrani.bu.h" => "__builtin_lasx_xvssrani_bu_h", + "lasx.xvssrani.d.q" => "__builtin_lasx_xvssrani_d_q", + "lasx.xvssrani.du.q" => "__builtin_lasx_xvssrani_du_q", + "lasx.xvssrani.h.w" => "__builtin_lasx_xvssrani_h_w", + "lasx.xvssrani.hu.w" => "__builtin_lasx_xvssrani_hu_w", + "lasx.xvssrani.w.d" => "__builtin_lasx_xvssrani_w_d", + "lasx.xvssrani.wu.d" => "__builtin_lasx_xvssrani_wu_d", + "lasx.xvssrarn.b.h" => "__builtin_lasx_xvssrarn_b_h", + "lasx.xvssrarn.bu.h" => "__builtin_lasx_xvssrarn_bu_h", + "lasx.xvssrarn.h.w" => "__builtin_lasx_xvssrarn_h_w", + "lasx.xvssrarn.hu.w" => "__builtin_lasx_xvssrarn_hu_w", + "lasx.xvssrarn.w.d" => "__builtin_lasx_xvssrarn_w_d", + "lasx.xvssrarn.wu.d" => "__builtin_lasx_xvssrarn_wu_d", + "lasx.xvssrarni.b.h" => "__builtin_lasx_xvssrarni_b_h", + "lasx.xvssrarni.bu.h" => "__builtin_lasx_xvssrarni_bu_h", + "lasx.xvssrarni.d.q" => "__builtin_lasx_xvssrarni_d_q", + "lasx.xvssrarni.du.q" => "__builtin_lasx_xvssrarni_du_q", + "lasx.xvssrarni.h.w" => "__builtin_lasx_xvssrarni_h_w", + "lasx.xvssrarni.hu.w" => "__builtin_lasx_xvssrarni_hu_w", + "lasx.xvssrarni.w.d" => "__builtin_lasx_xvssrarni_w_d", + "lasx.xvssrarni.wu.d" => "__builtin_lasx_xvssrarni_wu_d", + "lasx.xvssrln.b.h" => "__builtin_lasx_xvssrln_b_h", + "lasx.xvssrln.bu.h" => "__builtin_lasx_xvssrln_bu_h", + "lasx.xvssrln.h.w" => "__builtin_lasx_xvssrln_h_w", + "lasx.xvssrln.hu.w" => "__builtin_lasx_xvssrln_hu_w", + "lasx.xvssrln.w.d" => "__builtin_lasx_xvssrln_w_d", + "lasx.xvssrln.wu.d" => "__builtin_lasx_xvssrln_wu_d", + "lasx.xvssrlni.b.h" => "__builtin_lasx_xvssrlni_b_h", + "lasx.xvssrlni.bu.h" => "__builtin_lasx_xvssrlni_bu_h", + "lasx.xvssrlni.d.q" => "__builtin_lasx_xvssrlni_d_q", + "lasx.xvssrlni.du.q" => "__builtin_lasx_xvssrlni_du_q", + "lasx.xvssrlni.h.w" => "__builtin_lasx_xvssrlni_h_w", + "lasx.xvssrlni.hu.w" => "__builtin_lasx_xvssrlni_hu_w", + "lasx.xvssrlni.w.d" => "__builtin_lasx_xvssrlni_w_d", + "lasx.xvssrlni.wu.d" => "__builtin_lasx_xvssrlni_wu_d", + "lasx.xvssrlrn.b.h" => "__builtin_lasx_xvssrlrn_b_h", + "lasx.xvssrlrn.bu.h" => "__builtin_lasx_xvssrlrn_bu_h", + "lasx.xvssrlrn.h.w" => "__builtin_lasx_xvssrlrn_h_w", + "lasx.xvssrlrn.hu.w" => "__builtin_lasx_xvssrlrn_hu_w", + "lasx.xvssrlrn.w.d" => "__builtin_lasx_xvssrlrn_w_d", + "lasx.xvssrlrn.wu.d" => 
"__builtin_lasx_xvssrlrn_wu_d", + "lasx.xvssrlrni.b.h" => "__builtin_lasx_xvssrlrni_b_h", + "lasx.xvssrlrni.bu.h" => "__builtin_lasx_xvssrlrni_bu_h", + "lasx.xvssrlrni.d.q" => "__builtin_lasx_xvssrlrni_d_q", + "lasx.xvssrlrni.du.q" => "__builtin_lasx_xvssrlrni_du_q", + "lasx.xvssrlrni.h.w" => "__builtin_lasx_xvssrlrni_h_w", + "lasx.xvssrlrni.hu.w" => "__builtin_lasx_xvssrlrni_hu_w", + "lasx.xvssrlrni.w.d" => "__builtin_lasx_xvssrlrni_w_d", + "lasx.xvssrlrni.wu.d" => "__builtin_lasx_xvssrlrni_wu_d", + "lasx.xvssub.b" => "__builtin_lasx_xvssub_b", + "lasx.xvssub.bu" => "__builtin_lasx_xvssub_bu", + "lasx.xvssub.d" => "__builtin_lasx_xvssub_d", + "lasx.xvssub.du" => "__builtin_lasx_xvssub_du", + "lasx.xvssub.h" => "__builtin_lasx_xvssub_h", + "lasx.xvssub.hu" => "__builtin_lasx_xvssub_hu", + "lasx.xvssub.w" => "__builtin_lasx_xvssub_w", + "lasx.xvssub.wu" => "__builtin_lasx_xvssub_wu", + "lasx.xvst" => "__builtin_lasx_xvst", + "lasx.xvstelm.b" => "__builtin_lasx_xvstelm_b", + "lasx.xvstelm.d" => "__builtin_lasx_xvstelm_d", + "lasx.xvstelm.h" => "__builtin_lasx_xvstelm_h", + "lasx.xvstelm.w" => "__builtin_lasx_xvstelm_w", + "lasx.xvstx" => "__builtin_lasx_xvstx", + "lasx.xvsub.b" => "__builtin_lasx_xvsub_b", + "lasx.xvsub.d" => "__builtin_lasx_xvsub_d", + "lasx.xvsub.h" => "__builtin_lasx_xvsub_h", + "lasx.xvsub.q" => "__builtin_lasx_xvsub_q", + "lasx.xvsub.w" => "__builtin_lasx_xvsub_w", + "lasx.xvsubi.bu" => "__builtin_lasx_xvsubi_bu", + "lasx.xvsubi.du" => "__builtin_lasx_xvsubi_du", + "lasx.xvsubi.hu" => "__builtin_lasx_xvsubi_hu", + "lasx.xvsubi.wu" => "__builtin_lasx_xvsubi_wu", + "lasx.xvsubwev.d.w" => "__builtin_lasx_xvsubwev_d_w", + "lasx.xvsubwev.d.wu" => "__builtin_lasx_xvsubwev_d_wu", + "lasx.xvsubwev.h.b" => "__builtin_lasx_xvsubwev_h_b", + "lasx.xvsubwev.h.bu" => "__builtin_lasx_xvsubwev_h_bu", + "lasx.xvsubwev.q.d" => "__builtin_lasx_xvsubwev_q_d", + "lasx.xvsubwev.q.du" => "__builtin_lasx_xvsubwev_q_du", + "lasx.xvsubwev.w.h" => "__builtin_lasx_xvsubwev_w_h", + "lasx.xvsubwev.w.hu" => "__builtin_lasx_xvsubwev_w_hu", + "lasx.xvsubwod.d.w" => "__builtin_lasx_xvsubwod_d_w", + "lasx.xvsubwod.d.wu" => "__builtin_lasx_xvsubwod_d_wu", + "lasx.xvsubwod.h.b" => "__builtin_lasx_xvsubwod_h_b", + "lasx.xvsubwod.h.bu" => "__builtin_lasx_xvsubwod_h_bu", + "lasx.xvsubwod.q.d" => "__builtin_lasx_xvsubwod_q_d", + "lasx.xvsubwod.q.du" => "__builtin_lasx_xvsubwod_q_du", + "lasx.xvsubwod.w.h" => "__builtin_lasx_xvsubwod_w_h", + "lasx.xvsubwod.w.hu" => "__builtin_lasx_xvsubwod_w_hu", + "lasx.xvxor.v" => "__builtin_lasx_xvxor_v", + "lasx.xvxori.b" => "__builtin_lasx_xvxori_b", + "lddir.d" => "__builtin_loongarch_lddir_d", + "ldpte.d" => "__builtin_loongarch_ldpte_d", + "lsx.bnz.b" => "__builtin_lsx_bnz_b", + "lsx.bnz.d" => "__builtin_lsx_bnz_d", + "lsx.bnz.h" => "__builtin_lsx_bnz_h", + "lsx.bnz.v" => "__builtin_lsx_bnz_v", + "lsx.bnz.w" => "__builtin_lsx_bnz_w", + "lsx.bz.b" => "__builtin_lsx_bz_b", + "lsx.bz.d" => "__builtin_lsx_bz_d", + "lsx.bz.h" => "__builtin_lsx_bz_h", + "lsx.bz.v" => "__builtin_lsx_bz_v", + "lsx.bz.w" => "__builtin_lsx_bz_w", + "lsx.vabsd.b" => "__builtin_lsx_vabsd_b", + "lsx.vabsd.bu" => "__builtin_lsx_vabsd_bu", + "lsx.vabsd.d" => "__builtin_lsx_vabsd_d", + "lsx.vabsd.du" => "__builtin_lsx_vabsd_du", + "lsx.vabsd.h" => "__builtin_lsx_vabsd_h", + "lsx.vabsd.hu" => "__builtin_lsx_vabsd_hu", + "lsx.vabsd.w" => "__builtin_lsx_vabsd_w", + "lsx.vabsd.wu" => "__builtin_lsx_vabsd_wu", + "lsx.vadd.b" => "__builtin_lsx_vadd_b", + "lsx.vadd.d" => "__builtin_lsx_vadd_d", + "lsx.vadd.h" 
=> "__builtin_lsx_vadd_h", + "lsx.vadd.q" => "__builtin_lsx_vadd_q", + "lsx.vadd.w" => "__builtin_lsx_vadd_w", + "lsx.vadda.b" => "__builtin_lsx_vadda_b", + "lsx.vadda.d" => "__builtin_lsx_vadda_d", + "lsx.vadda.h" => "__builtin_lsx_vadda_h", + "lsx.vadda.w" => "__builtin_lsx_vadda_w", + "lsx.vaddi.bu" => "__builtin_lsx_vaddi_bu", + "lsx.vaddi.du" => "__builtin_lsx_vaddi_du", + "lsx.vaddi.hu" => "__builtin_lsx_vaddi_hu", + "lsx.vaddi.wu" => "__builtin_lsx_vaddi_wu", + "lsx.vaddwev.d.w" => "__builtin_lsx_vaddwev_d_w", + "lsx.vaddwev.d.wu" => "__builtin_lsx_vaddwev_d_wu", + "lsx.vaddwev.d.wu.w" => "__builtin_lsx_vaddwev_d_wu_w", + "lsx.vaddwev.h.b" => "__builtin_lsx_vaddwev_h_b", + "lsx.vaddwev.h.bu" => "__builtin_lsx_vaddwev_h_bu", + "lsx.vaddwev.h.bu.b" => "__builtin_lsx_vaddwev_h_bu_b", + "lsx.vaddwev.q.d" => "__builtin_lsx_vaddwev_q_d", + "lsx.vaddwev.q.du" => "__builtin_lsx_vaddwev_q_du", + "lsx.vaddwev.q.du.d" => "__builtin_lsx_vaddwev_q_du_d", + "lsx.vaddwev.w.h" => "__builtin_lsx_vaddwev_w_h", + "lsx.vaddwev.w.hu" => "__builtin_lsx_vaddwev_w_hu", + "lsx.vaddwev.w.hu.h" => "__builtin_lsx_vaddwev_w_hu_h", + "lsx.vaddwod.d.w" => "__builtin_lsx_vaddwod_d_w", + "lsx.vaddwod.d.wu" => "__builtin_lsx_vaddwod_d_wu", + "lsx.vaddwod.d.wu.w" => "__builtin_lsx_vaddwod_d_wu_w", + "lsx.vaddwod.h.b" => "__builtin_lsx_vaddwod_h_b", + "lsx.vaddwod.h.bu" => "__builtin_lsx_vaddwod_h_bu", + "lsx.vaddwod.h.bu.b" => "__builtin_lsx_vaddwod_h_bu_b", + "lsx.vaddwod.q.d" => "__builtin_lsx_vaddwod_q_d", + "lsx.vaddwod.q.du" => "__builtin_lsx_vaddwod_q_du", + "lsx.vaddwod.q.du.d" => "__builtin_lsx_vaddwod_q_du_d", + "lsx.vaddwod.w.h" => "__builtin_lsx_vaddwod_w_h", + "lsx.vaddwod.w.hu" => "__builtin_lsx_vaddwod_w_hu", + "lsx.vaddwod.w.hu.h" => "__builtin_lsx_vaddwod_w_hu_h", + "lsx.vand.v" => "__builtin_lsx_vand_v", + "lsx.vandi.b" => "__builtin_lsx_vandi_b", + "lsx.vandn.v" => "__builtin_lsx_vandn_v", + "lsx.vavg.b" => "__builtin_lsx_vavg_b", + "lsx.vavg.bu" => "__builtin_lsx_vavg_bu", + "lsx.vavg.d" => "__builtin_lsx_vavg_d", + "lsx.vavg.du" => "__builtin_lsx_vavg_du", + "lsx.vavg.h" => "__builtin_lsx_vavg_h", + "lsx.vavg.hu" => "__builtin_lsx_vavg_hu", + "lsx.vavg.w" => "__builtin_lsx_vavg_w", + "lsx.vavg.wu" => "__builtin_lsx_vavg_wu", + "lsx.vavgr.b" => "__builtin_lsx_vavgr_b", + "lsx.vavgr.bu" => "__builtin_lsx_vavgr_bu", + "lsx.vavgr.d" => "__builtin_lsx_vavgr_d", + "lsx.vavgr.du" => "__builtin_lsx_vavgr_du", + "lsx.vavgr.h" => "__builtin_lsx_vavgr_h", + "lsx.vavgr.hu" => "__builtin_lsx_vavgr_hu", + "lsx.vavgr.w" => "__builtin_lsx_vavgr_w", + "lsx.vavgr.wu" => "__builtin_lsx_vavgr_wu", + "lsx.vbitclr.b" => "__builtin_lsx_vbitclr_b", + "lsx.vbitclr.d" => "__builtin_lsx_vbitclr_d", + "lsx.vbitclr.h" => "__builtin_lsx_vbitclr_h", + "lsx.vbitclr.w" => "__builtin_lsx_vbitclr_w", + "lsx.vbitclri.b" => "__builtin_lsx_vbitclri_b", + "lsx.vbitclri.d" => "__builtin_lsx_vbitclri_d", + "lsx.vbitclri.h" => "__builtin_lsx_vbitclri_h", + "lsx.vbitclri.w" => "__builtin_lsx_vbitclri_w", + "lsx.vbitrev.b" => "__builtin_lsx_vbitrev_b", + "lsx.vbitrev.d" => "__builtin_lsx_vbitrev_d", + "lsx.vbitrev.h" => "__builtin_lsx_vbitrev_h", + "lsx.vbitrev.w" => "__builtin_lsx_vbitrev_w", + "lsx.vbitrevi.b" => "__builtin_lsx_vbitrevi_b", + "lsx.vbitrevi.d" => "__builtin_lsx_vbitrevi_d", + "lsx.vbitrevi.h" => "__builtin_lsx_vbitrevi_h", + "lsx.vbitrevi.w" => "__builtin_lsx_vbitrevi_w", + "lsx.vbitsel.v" => "__builtin_lsx_vbitsel_v", + "lsx.vbitseli.b" => "__builtin_lsx_vbitseli_b", + "lsx.vbitset.b" => "__builtin_lsx_vbitset_b", + 
"lsx.vbitset.d" => "__builtin_lsx_vbitset_d", + "lsx.vbitset.h" => "__builtin_lsx_vbitset_h", + "lsx.vbitset.w" => "__builtin_lsx_vbitset_w", + "lsx.vbitseti.b" => "__builtin_lsx_vbitseti_b", + "lsx.vbitseti.d" => "__builtin_lsx_vbitseti_d", + "lsx.vbitseti.h" => "__builtin_lsx_vbitseti_h", + "lsx.vbitseti.w" => "__builtin_lsx_vbitseti_w", + "lsx.vbsll.v" => "__builtin_lsx_vbsll_v", + "lsx.vbsrl.v" => "__builtin_lsx_vbsrl_v", + "lsx.vclo.b" => "__builtin_lsx_vclo_b", + "lsx.vclo.d" => "__builtin_lsx_vclo_d", + "lsx.vclo.h" => "__builtin_lsx_vclo_h", + "lsx.vclo.w" => "__builtin_lsx_vclo_w", + "lsx.vclz.b" => "__builtin_lsx_vclz_b", + "lsx.vclz.d" => "__builtin_lsx_vclz_d", + "lsx.vclz.h" => "__builtin_lsx_vclz_h", + "lsx.vclz.w" => "__builtin_lsx_vclz_w", + "lsx.vdiv.b" => "__builtin_lsx_vdiv_b", + "lsx.vdiv.bu" => "__builtin_lsx_vdiv_bu", + "lsx.vdiv.d" => "__builtin_lsx_vdiv_d", + "lsx.vdiv.du" => "__builtin_lsx_vdiv_du", + "lsx.vdiv.h" => "__builtin_lsx_vdiv_h", + "lsx.vdiv.hu" => "__builtin_lsx_vdiv_hu", + "lsx.vdiv.w" => "__builtin_lsx_vdiv_w", + "lsx.vdiv.wu" => "__builtin_lsx_vdiv_wu", + "lsx.vexth.d.w" => "__builtin_lsx_vexth_d_w", + "lsx.vexth.du.wu" => "__builtin_lsx_vexth_du_wu", + "lsx.vexth.h.b" => "__builtin_lsx_vexth_h_b", + "lsx.vexth.hu.bu" => "__builtin_lsx_vexth_hu_bu", + "lsx.vexth.q.d" => "__builtin_lsx_vexth_q_d", + "lsx.vexth.qu.du" => "__builtin_lsx_vexth_qu_du", + "lsx.vexth.w.h" => "__builtin_lsx_vexth_w_h", + "lsx.vexth.wu.hu" => "__builtin_lsx_vexth_wu_hu", + "lsx.vextl.q.d" => "__builtin_lsx_vextl_q_d", + "lsx.vextl.qu.du" => "__builtin_lsx_vextl_qu_du", + "lsx.vextrins.b" => "__builtin_lsx_vextrins_b", + "lsx.vextrins.d" => "__builtin_lsx_vextrins_d", + "lsx.vextrins.h" => "__builtin_lsx_vextrins_h", + "lsx.vextrins.w" => "__builtin_lsx_vextrins_w", + "lsx.vfadd.d" => "__builtin_lsx_vfadd_d", + "lsx.vfadd.s" => "__builtin_lsx_vfadd_s", + "lsx.vfclass.d" => "__builtin_lsx_vfclass_d", + "lsx.vfclass.s" => "__builtin_lsx_vfclass_s", + "lsx.vfcmp.caf.d" => "__builtin_lsx_vfcmp_caf_d", + "lsx.vfcmp.caf.s" => "__builtin_lsx_vfcmp_caf_s", + "lsx.vfcmp.ceq.d" => "__builtin_lsx_vfcmp_ceq_d", + "lsx.vfcmp.ceq.s" => "__builtin_lsx_vfcmp_ceq_s", + "lsx.vfcmp.cle.d" => "__builtin_lsx_vfcmp_cle_d", + "lsx.vfcmp.cle.s" => "__builtin_lsx_vfcmp_cle_s", + "lsx.vfcmp.clt.d" => "__builtin_lsx_vfcmp_clt_d", + "lsx.vfcmp.clt.s" => "__builtin_lsx_vfcmp_clt_s", + "lsx.vfcmp.cne.d" => "__builtin_lsx_vfcmp_cne_d", + "lsx.vfcmp.cne.s" => "__builtin_lsx_vfcmp_cne_s", + "lsx.vfcmp.cor.d" => "__builtin_lsx_vfcmp_cor_d", + "lsx.vfcmp.cor.s" => "__builtin_lsx_vfcmp_cor_s", + "lsx.vfcmp.cueq.d" => "__builtin_lsx_vfcmp_cueq_d", + "lsx.vfcmp.cueq.s" => "__builtin_lsx_vfcmp_cueq_s", + "lsx.vfcmp.cule.d" => "__builtin_lsx_vfcmp_cule_d", + "lsx.vfcmp.cule.s" => "__builtin_lsx_vfcmp_cule_s", + "lsx.vfcmp.cult.d" => "__builtin_lsx_vfcmp_cult_d", + "lsx.vfcmp.cult.s" => "__builtin_lsx_vfcmp_cult_s", + "lsx.vfcmp.cun.d" => "__builtin_lsx_vfcmp_cun_d", + "lsx.vfcmp.cun.s" => "__builtin_lsx_vfcmp_cun_s", + "lsx.vfcmp.cune.d" => "__builtin_lsx_vfcmp_cune_d", + "lsx.vfcmp.cune.s" => "__builtin_lsx_vfcmp_cune_s", + "lsx.vfcmp.saf.d" => "__builtin_lsx_vfcmp_saf_d", + "lsx.vfcmp.saf.s" => "__builtin_lsx_vfcmp_saf_s", + "lsx.vfcmp.seq.d" => "__builtin_lsx_vfcmp_seq_d", + "lsx.vfcmp.seq.s" => "__builtin_lsx_vfcmp_seq_s", + "lsx.vfcmp.sle.d" => "__builtin_lsx_vfcmp_sle_d", + "lsx.vfcmp.sle.s" => "__builtin_lsx_vfcmp_sle_s", + "lsx.vfcmp.slt.d" => "__builtin_lsx_vfcmp_slt_d", + "lsx.vfcmp.slt.s" => 
"__builtin_lsx_vfcmp_slt_s", + "lsx.vfcmp.sne.d" => "__builtin_lsx_vfcmp_sne_d", + "lsx.vfcmp.sne.s" => "__builtin_lsx_vfcmp_sne_s", + "lsx.vfcmp.sor.d" => "__builtin_lsx_vfcmp_sor_d", + "lsx.vfcmp.sor.s" => "__builtin_lsx_vfcmp_sor_s", + "lsx.vfcmp.sueq.d" => "__builtin_lsx_vfcmp_sueq_d", + "lsx.vfcmp.sueq.s" => "__builtin_lsx_vfcmp_sueq_s", + "lsx.vfcmp.sule.d" => "__builtin_lsx_vfcmp_sule_d", + "lsx.vfcmp.sule.s" => "__builtin_lsx_vfcmp_sule_s", + "lsx.vfcmp.sult.d" => "__builtin_lsx_vfcmp_sult_d", + "lsx.vfcmp.sult.s" => "__builtin_lsx_vfcmp_sult_s", + "lsx.vfcmp.sun.d" => "__builtin_lsx_vfcmp_sun_d", + "lsx.vfcmp.sun.s" => "__builtin_lsx_vfcmp_sun_s", + "lsx.vfcmp.sune.d" => "__builtin_lsx_vfcmp_sune_d", + "lsx.vfcmp.sune.s" => "__builtin_lsx_vfcmp_sune_s", + "lsx.vfcvt.h.s" => "__builtin_lsx_vfcvt_h_s", + "lsx.vfcvt.s.d" => "__builtin_lsx_vfcvt_s_d", + "lsx.vfcvth.d.s" => "__builtin_lsx_vfcvth_d_s", + "lsx.vfcvth.s.h" => "__builtin_lsx_vfcvth_s_h", + "lsx.vfcvtl.d.s" => "__builtin_lsx_vfcvtl_d_s", + "lsx.vfcvtl.s.h" => "__builtin_lsx_vfcvtl_s_h", + "lsx.vfdiv.d" => "__builtin_lsx_vfdiv_d", + "lsx.vfdiv.s" => "__builtin_lsx_vfdiv_s", + "lsx.vffint.d.l" => "__builtin_lsx_vffint_d_l", + "lsx.vffint.d.lu" => "__builtin_lsx_vffint_d_lu", + "lsx.vffint.s.l" => "__builtin_lsx_vffint_s_l", + "lsx.vffint.s.w" => "__builtin_lsx_vffint_s_w", + "lsx.vffint.s.wu" => "__builtin_lsx_vffint_s_wu", + "lsx.vffinth.d.w" => "__builtin_lsx_vffinth_d_w", + "lsx.vffintl.d.w" => "__builtin_lsx_vffintl_d_w", + "lsx.vflogb.d" => "__builtin_lsx_vflogb_d", + "lsx.vflogb.s" => "__builtin_lsx_vflogb_s", + "lsx.vfmadd.d" => "__builtin_lsx_vfmadd_d", + "lsx.vfmadd.s" => "__builtin_lsx_vfmadd_s", + "lsx.vfmax.d" => "__builtin_lsx_vfmax_d", + "lsx.vfmax.s" => "__builtin_lsx_vfmax_s", + "lsx.vfmaxa.d" => "__builtin_lsx_vfmaxa_d", + "lsx.vfmaxa.s" => "__builtin_lsx_vfmaxa_s", + "lsx.vfmin.d" => "__builtin_lsx_vfmin_d", + "lsx.vfmin.s" => "__builtin_lsx_vfmin_s", + "lsx.vfmina.d" => "__builtin_lsx_vfmina_d", + "lsx.vfmina.s" => "__builtin_lsx_vfmina_s", + "lsx.vfmsub.d" => "__builtin_lsx_vfmsub_d", + "lsx.vfmsub.s" => "__builtin_lsx_vfmsub_s", + "lsx.vfmul.d" => "__builtin_lsx_vfmul_d", + "lsx.vfmul.s" => "__builtin_lsx_vfmul_s", + "lsx.vfnmadd.d" => "__builtin_lsx_vfnmadd_d", + "lsx.vfnmadd.s" => "__builtin_lsx_vfnmadd_s", + "lsx.vfnmsub.d" => "__builtin_lsx_vfnmsub_d", + "lsx.vfnmsub.s" => "__builtin_lsx_vfnmsub_s", + "lsx.vfrecip.d" => "__builtin_lsx_vfrecip_d", + "lsx.vfrecip.s" => "__builtin_lsx_vfrecip_s", + "lsx.vfrecipe.d" => "__builtin_lsx_vfrecipe_d", + "lsx.vfrecipe.s" => "__builtin_lsx_vfrecipe_s", + "lsx.vfrint.d" => "__builtin_lsx_vfrint_d", + "lsx.vfrint.s" => "__builtin_lsx_vfrint_s", + "lsx.vfrintrm.d" => "__builtin_lsx_vfrintrm_d", + "lsx.vfrintrm.s" => "__builtin_lsx_vfrintrm_s", + "lsx.vfrintrne.d" => "__builtin_lsx_vfrintrne_d", + "lsx.vfrintrne.s" => "__builtin_lsx_vfrintrne_s", + "lsx.vfrintrp.d" => "__builtin_lsx_vfrintrp_d", + "lsx.vfrintrp.s" => "__builtin_lsx_vfrintrp_s", + "lsx.vfrintrz.d" => "__builtin_lsx_vfrintrz_d", + "lsx.vfrintrz.s" => "__builtin_lsx_vfrintrz_s", + "lsx.vfrsqrt.d" => "__builtin_lsx_vfrsqrt_d", + "lsx.vfrsqrt.s" => "__builtin_lsx_vfrsqrt_s", + "lsx.vfrsqrte.d" => "__builtin_lsx_vfrsqrte_d", + "lsx.vfrsqrte.s" => "__builtin_lsx_vfrsqrte_s", + "lsx.vfrstp.b" => "__builtin_lsx_vfrstp_b", + "lsx.vfrstp.h" => "__builtin_lsx_vfrstp_h", + "lsx.vfrstpi.b" => "__builtin_lsx_vfrstpi_b", + "lsx.vfrstpi.h" => "__builtin_lsx_vfrstpi_h", + "lsx.vfsqrt.d" => "__builtin_lsx_vfsqrt_d", + 
"lsx.vfsqrt.s" => "__builtin_lsx_vfsqrt_s", + "lsx.vfsub.d" => "__builtin_lsx_vfsub_d", + "lsx.vfsub.s" => "__builtin_lsx_vfsub_s", + "lsx.vftint.l.d" => "__builtin_lsx_vftint_l_d", + "lsx.vftint.lu.d" => "__builtin_lsx_vftint_lu_d", + "lsx.vftint.w.d" => "__builtin_lsx_vftint_w_d", + "lsx.vftint.w.s" => "__builtin_lsx_vftint_w_s", + "lsx.vftint.wu.s" => "__builtin_lsx_vftint_wu_s", + "lsx.vftinth.l.s" => "__builtin_lsx_vftinth_l_s", + "lsx.vftintl.l.s" => "__builtin_lsx_vftintl_l_s", + "lsx.vftintrm.l.d" => "__builtin_lsx_vftintrm_l_d", + "lsx.vftintrm.w.d" => "__builtin_lsx_vftintrm_w_d", + "lsx.vftintrm.w.s" => "__builtin_lsx_vftintrm_w_s", + "lsx.vftintrmh.l.s" => "__builtin_lsx_vftintrmh_l_s", + "lsx.vftintrml.l.s" => "__builtin_lsx_vftintrml_l_s", + "lsx.vftintrne.l.d" => "__builtin_lsx_vftintrne_l_d", + "lsx.vftintrne.w.d" => "__builtin_lsx_vftintrne_w_d", + "lsx.vftintrne.w.s" => "__builtin_lsx_vftintrne_w_s", + "lsx.vftintrneh.l.s" => "__builtin_lsx_vftintrneh_l_s", + "lsx.vftintrnel.l.s" => "__builtin_lsx_vftintrnel_l_s", + "lsx.vftintrp.l.d" => "__builtin_lsx_vftintrp_l_d", + "lsx.vftintrp.w.d" => "__builtin_lsx_vftintrp_w_d", + "lsx.vftintrp.w.s" => "__builtin_lsx_vftintrp_w_s", + "lsx.vftintrph.l.s" => "__builtin_lsx_vftintrph_l_s", + "lsx.vftintrpl.l.s" => "__builtin_lsx_vftintrpl_l_s", + "lsx.vftintrz.l.d" => "__builtin_lsx_vftintrz_l_d", + "lsx.vftintrz.lu.d" => "__builtin_lsx_vftintrz_lu_d", + "lsx.vftintrz.w.d" => "__builtin_lsx_vftintrz_w_d", + "lsx.vftintrz.w.s" => "__builtin_lsx_vftintrz_w_s", + "lsx.vftintrz.wu.s" => "__builtin_lsx_vftintrz_wu_s", + "lsx.vftintrzh.l.s" => "__builtin_lsx_vftintrzh_l_s", + "lsx.vftintrzl.l.s" => "__builtin_lsx_vftintrzl_l_s", + "lsx.vhaddw.d.w" => "__builtin_lsx_vhaddw_d_w", + "lsx.vhaddw.du.wu" => "__builtin_lsx_vhaddw_du_wu", + "lsx.vhaddw.h.b" => "__builtin_lsx_vhaddw_h_b", + "lsx.vhaddw.hu.bu" => "__builtin_lsx_vhaddw_hu_bu", + "lsx.vhaddw.q.d" => "__builtin_lsx_vhaddw_q_d", + "lsx.vhaddw.qu.du" => "__builtin_lsx_vhaddw_qu_du", + "lsx.vhaddw.w.h" => "__builtin_lsx_vhaddw_w_h", + "lsx.vhaddw.wu.hu" => "__builtin_lsx_vhaddw_wu_hu", + "lsx.vhsubw.d.w" => "__builtin_lsx_vhsubw_d_w", + "lsx.vhsubw.du.wu" => "__builtin_lsx_vhsubw_du_wu", + "lsx.vhsubw.h.b" => "__builtin_lsx_vhsubw_h_b", + "lsx.vhsubw.hu.bu" => "__builtin_lsx_vhsubw_hu_bu", + "lsx.vhsubw.q.d" => "__builtin_lsx_vhsubw_q_d", + "lsx.vhsubw.qu.du" => "__builtin_lsx_vhsubw_qu_du", + "lsx.vhsubw.w.h" => "__builtin_lsx_vhsubw_w_h", + "lsx.vhsubw.wu.hu" => "__builtin_lsx_vhsubw_wu_hu", + "lsx.vilvh.b" => "__builtin_lsx_vilvh_b", + "lsx.vilvh.d" => "__builtin_lsx_vilvh_d", + "lsx.vilvh.h" => "__builtin_lsx_vilvh_h", + "lsx.vilvh.w" => "__builtin_lsx_vilvh_w", + "lsx.vilvl.b" => "__builtin_lsx_vilvl_b", + "lsx.vilvl.d" => "__builtin_lsx_vilvl_d", + "lsx.vilvl.h" => "__builtin_lsx_vilvl_h", + "lsx.vilvl.w" => "__builtin_lsx_vilvl_w", + "lsx.vinsgr2vr.b" => "__builtin_lsx_vinsgr2vr_b", + "lsx.vinsgr2vr.d" => "__builtin_lsx_vinsgr2vr_d", + "lsx.vinsgr2vr.h" => "__builtin_lsx_vinsgr2vr_h", + "lsx.vinsgr2vr.w" => "__builtin_lsx_vinsgr2vr_w", + "lsx.vld" => "__builtin_lsx_vld", + "lsx.vldi" => "__builtin_lsx_vldi", + "lsx.vldrepl.b" => "__builtin_lsx_vldrepl_b", + "lsx.vldrepl.d" => "__builtin_lsx_vldrepl_d", + "lsx.vldrepl.h" => "__builtin_lsx_vldrepl_h", + "lsx.vldrepl.w" => "__builtin_lsx_vldrepl_w", + "lsx.vldx" => "__builtin_lsx_vldx", + "lsx.vmadd.b" => "__builtin_lsx_vmadd_b", + "lsx.vmadd.d" => "__builtin_lsx_vmadd_d", + "lsx.vmadd.h" => "__builtin_lsx_vmadd_h", + "lsx.vmadd.w" => 
"__builtin_lsx_vmadd_w", + "lsx.vmaddwev.d.w" => "__builtin_lsx_vmaddwev_d_w", + "lsx.vmaddwev.d.wu" => "__builtin_lsx_vmaddwev_d_wu", + "lsx.vmaddwev.d.wu.w" => "__builtin_lsx_vmaddwev_d_wu_w", + "lsx.vmaddwev.h.b" => "__builtin_lsx_vmaddwev_h_b", + "lsx.vmaddwev.h.bu" => "__builtin_lsx_vmaddwev_h_bu", + "lsx.vmaddwev.h.bu.b" => "__builtin_lsx_vmaddwev_h_bu_b", + "lsx.vmaddwev.q.d" => "__builtin_lsx_vmaddwev_q_d", + "lsx.vmaddwev.q.du" => "__builtin_lsx_vmaddwev_q_du", + "lsx.vmaddwev.q.du.d" => "__builtin_lsx_vmaddwev_q_du_d", + "lsx.vmaddwev.w.h" => "__builtin_lsx_vmaddwev_w_h", + "lsx.vmaddwev.w.hu" => "__builtin_lsx_vmaddwev_w_hu", + "lsx.vmaddwev.w.hu.h" => "__builtin_lsx_vmaddwev_w_hu_h", + "lsx.vmaddwod.d.w" => "__builtin_lsx_vmaddwod_d_w", + "lsx.vmaddwod.d.wu" => "__builtin_lsx_vmaddwod_d_wu", + "lsx.vmaddwod.d.wu.w" => "__builtin_lsx_vmaddwod_d_wu_w", + "lsx.vmaddwod.h.b" => "__builtin_lsx_vmaddwod_h_b", + "lsx.vmaddwod.h.bu" => "__builtin_lsx_vmaddwod_h_bu", + "lsx.vmaddwod.h.bu.b" => "__builtin_lsx_vmaddwod_h_bu_b", + "lsx.vmaddwod.q.d" => "__builtin_lsx_vmaddwod_q_d", + "lsx.vmaddwod.q.du" => "__builtin_lsx_vmaddwod_q_du", + "lsx.vmaddwod.q.du.d" => "__builtin_lsx_vmaddwod_q_du_d", + "lsx.vmaddwod.w.h" => "__builtin_lsx_vmaddwod_w_h", + "lsx.vmaddwod.w.hu" => "__builtin_lsx_vmaddwod_w_hu", + "lsx.vmaddwod.w.hu.h" => "__builtin_lsx_vmaddwod_w_hu_h", + "lsx.vmax.b" => "__builtin_lsx_vmax_b", + "lsx.vmax.bu" => "__builtin_lsx_vmax_bu", + "lsx.vmax.d" => "__builtin_lsx_vmax_d", + "lsx.vmax.du" => "__builtin_lsx_vmax_du", + "lsx.vmax.h" => "__builtin_lsx_vmax_h", + "lsx.vmax.hu" => "__builtin_lsx_vmax_hu", + "lsx.vmax.w" => "__builtin_lsx_vmax_w", + "lsx.vmax.wu" => "__builtin_lsx_vmax_wu", + "lsx.vmaxi.b" => "__builtin_lsx_vmaxi_b", + "lsx.vmaxi.bu" => "__builtin_lsx_vmaxi_bu", + "lsx.vmaxi.d" => "__builtin_lsx_vmaxi_d", + "lsx.vmaxi.du" => "__builtin_lsx_vmaxi_du", + "lsx.vmaxi.h" => "__builtin_lsx_vmaxi_h", + "lsx.vmaxi.hu" => "__builtin_lsx_vmaxi_hu", + "lsx.vmaxi.w" => "__builtin_lsx_vmaxi_w", + "lsx.vmaxi.wu" => "__builtin_lsx_vmaxi_wu", + "lsx.vmin.b" => "__builtin_lsx_vmin_b", + "lsx.vmin.bu" => "__builtin_lsx_vmin_bu", + "lsx.vmin.d" => "__builtin_lsx_vmin_d", + "lsx.vmin.du" => "__builtin_lsx_vmin_du", + "lsx.vmin.h" => "__builtin_lsx_vmin_h", + "lsx.vmin.hu" => "__builtin_lsx_vmin_hu", + "lsx.vmin.w" => "__builtin_lsx_vmin_w", + "lsx.vmin.wu" => "__builtin_lsx_vmin_wu", + "lsx.vmini.b" => "__builtin_lsx_vmini_b", + "lsx.vmini.bu" => "__builtin_lsx_vmini_bu", + "lsx.vmini.d" => "__builtin_lsx_vmini_d", + "lsx.vmini.du" => "__builtin_lsx_vmini_du", + "lsx.vmini.h" => "__builtin_lsx_vmini_h", + "lsx.vmini.hu" => "__builtin_lsx_vmini_hu", + "lsx.vmini.w" => "__builtin_lsx_vmini_w", + "lsx.vmini.wu" => "__builtin_lsx_vmini_wu", + "lsx.vmod.b" => "__builtin_lsx_vmod_b", + "lsx.vmod.bu" => "__builtin_lsx_vmod_bu", + "lsx.vmod.d" => "__builtin_lsx_vmod_d", + "lsx.vmod.du" => "__builtin_lsx_vmod_du", + "lsx.vmod.h" => "__builtin_lsx_vmod_h", + "lsx.vmod.hu" => "__builtin_lsx_vmod_hu", + "lsx.vmod.w" => "__builtin_lsx_vmod_w", + "lsx.vmod.wu" => "__builtin_lsx_vmod_wu", + "lsx.vmskgez.b" => "__builtin_lsx_vmskgez_b", + "lsx.vmskltz.b" => "__builtin_lsx_vmskltz_b", + "lsx.vmskltz.d" => "__builtin_lsx_vmskltz_d", + "lsx.vmskltz.h" => "__builtin_lsx_vmskltz_h", + "lsx.vmskltz.w" => "__builtin_lsx_vmskltz_w", + "lsx.vmsknz.b" => "__builtin_lsx_vmsknz_b", + "lsx.vmsub.b" => "__builtin_lsx_vmsub_b", + "lsx.vmsub.d" => "__builtin_lsx_vmsub_d", + "lsx.vmsub.h" => "__builtin_lsx_vmsub_h", 
+ "lsx.vmsub.w" => "__builtin_lsx_vmsub_w", + "lsx.vmuh.b" => "__builtin_lsx_vmuh_b", + "lsx.vmuh.bu" => "__builtin_lsx_vmuh_bu", + "lsx.vmuh.d" => "__builtin_lsx_vmuh_d", + "lsx.vmuh.du" => "__builtin_lsx_vmuh_du", + "lsx.vmuh.h" => "__builtin_lsx_vmuh_h", + "lsx.vmuh.hu" => "__builtin_lsx_vmuh_hu", + "lsx.vmuh.w" => "__builtin_lsx_vmuh_w", + "lsx.vmuh.wu" => "__builtin_lsx_vmuh_wu", + "lsx.vmul.b" => "__builtin_lsx_vmul_b", + "lsx.vmul.d" => "__builtin_lsx_vmul_d", + "lsx.vmul.h" => "__builtin_lsx_vmul_h", + "lsx.vmul.w" => "__builtin_lsx_vmul_w", + "lsx.vmulwev.d.w" => "__builtin_lsx_vmulwev_d_w", + "lsx.vmulwev.d.wu" => "__builtin_lsx_vmulwev_d_wu", + "lsx.vmulwev.d.wu.w" => "__builtin_lsx_vmulwev_d_wu_w", + "lsx.vmulwev.h.b" => "__builtin_lsx_vmulwev_h_b", + "lsx.vmulwev.h.bu" => "__builtin_lsx_vmulwev_h_bu", + "lsx.vmulwev.h.bu.b" => "__builtin_lsx_vmulwev_h_bu_b", + "lsx.vmulwev.q.d" => "__builtin_lsx_vmulwev_q_d", + "lsx.vmulwev.q.du" => "__builtin_lsx_vmulwev_q_du", + "lsx.vmulwev.q.du.d" => "__builtin_lsx_vmulwev_q_du_d", + "lsx.vmulwev.w.h" => "__builtin_lsx_vmulwev_w_h", + "lsx.vmulwev.w.hu" => "__builtin_lsx_vmulwev_w_hu", + "lsx.vmulwev.w.hu.h" => "__builtin_lsx_vmulwev_w_hu_h", + "lsx.vmulwod.d.w" => "__builtin_lsx_vmulwod_d_w", + "lsx.vmulwod.d.wu" => "__builtin_lsx_vmulwod_d_wu", + "lsx.vmulwod.d.wu.w" => "__builtin_lsx_vmulwod_d_wu_w", + "lsx.vmulwod.h.b" => "__builtin_lsx_vmulwod_h_b", + "lsx.vmulwod.h.bu" => "__builtin_lsx_vmulwod_h_bu", + "lsx.vmulwod.h.bu.b" => "__builtin_lsx_vmulwod_h_bu_b", + "lsx.vmulwod.q.d" => "__builtin_lsx_vmulwod_q_d", + "lsx.vmulwod.q.du" => "__builtin_lsx_vmulwod_q_du", + "lsx.vmulwod.q.du.d" => "__builtin_lsx_vmulwod_q_du_d", + "lsx.vmulwod.w.h" => "__builtin_lsx_vmulwod_w_h", + "lsx.vmulwod.w.hu" => "__builtin_lsx_vmulwod_w_hu", + "lsx.vmulwod.w.hu.h" => "__builtin_lsx_vmulwod_w_hu_h", + "lsx.vneg.b" => "__builtin_lsx_vneg_b", + "lsx.vneg.d" => "__builtin_lsx_vneg_d", + "lsx.vneg.h" => "__builtin_lsx_vneg_h", + "lsx.vneg.w" => "__builtin_lsx_vneg_w", + "lsx.vnor.v" => "__builtin_lsx_vnor_v", + "lsx.vnori.b" => "__builtin_lsx_vnori_b", + "lsx.vor.v" => "__builtin_lsx_vor_v", + "lsx.vori.b" => "__builtin_lsx_vori_b", + "lsx.vorn.v" => "__builtin_lsx_vorn_v", + "lsx.vpackev.b" => "__builtin_lsx_vpackev_b", + "lsx.vpackev.d" => "__builtin_lsx_vpackev_d", + "lsx.vpackev.h" => "__builtin_lsx_vpackev_h", + "lsx.vpackev.w" => "__builtin_lsx_vpackev_w", + "lsx.vpackod.b" => "__builtin_lsx_vpackod_b", + "lsx.vpackod.d" => "__builtin_lsx_vpackod_d", + "lsx.vpackod.h" => "__builtin_lsx_vpackod_h", + "lsx.vpackod.w" => "__builtin_lsx_vpackod_w", + "lsx.vpcnt.b" => "__builtin_lsx_vpcnt_b", + "lsx.vpcnt.d" => "__builtin_lsx_vpcnt_d", + "lsx.vpcnt.h" => "__builtin_lsx_vpcnt_h", + "lsx.vpcnt.w" => "__builtin_lsx_vpcnt_w", + "lsx.vpermi.w" => "__builtin_lsx_vpermi_w", + "lsx.vpickev.b" => "__builtin_lsx_vpickev_b", + "lsx.vpickev.d" => "__builtin_lsx_vpickev_d", + "lsx.vpickev.h" => "__builtin_lsx_vpickev_h", + "lsx.vpickev.w" => "__builtin_lsx_vpickev_w", + "lsx.vpickod.b" => "__builtin_lsx_vpickod_b", + "lsx.vpickod.d" => "__builtin_lsx_vpickod_d", + "lsx.vpickod.h" => "__builtin_lsx_vpickod_h", + "lsx.vpickod.w" => "__builtin_lsx_vpickod_w", + "lsx.vpickve2gr.b" => "__builtin_lsx_vpickve2gr_b", + "lsx.vpickve2gr.bu" => "__builtin_lsx_vpickve2gr_bu", + "lsx.vpickve2gr.d" => "__builtin_lsx_vpickve2gr_d", + "lsx.vpickve2gr.du" => "__builtin_lsx_vpickve2gr_du", + "lsx.vpickve2gr.h" => "__builtin_lsx_vpickve2gr_h", + "lsx.vpickve2gr.hu" => 
"__builtin_lsx_vpickve2gr_hu", + "lsx.vpickve2gr.w" => "__builtin_lsx_vpickve2gr_w", + "lsx.vpickve2gr.wu" => "__builtin_lsx_vpickve2gr_wu", + "lsx.vreplgr2vr.b" => "__builtin_lsx_vreplgr2vr_b", + "lsx.vreplgr2vr.d" => "__builtin_lsx_vreplgr2vr_d", + "lsx.vreplgr2vr.h" => "__builtin_lsx_vreplgr2vr_h", + "lsx.vreplgr2vr.w" => "__builtin_lsx_vreplgr2vr_w", + "lsx.vrepli.b" => "__builtin_lsx_vrepli_b", + "lsx.vrepli.d" => "__builtin_lsx_vrepli_d", + "lsx.vrepli.h" => "__builtin_lsx_vrepli_h", + "lsx.vrepli.w" => "__builtin_lsx_vrepli_w", + "lsx.vreplve.b" => "__builtin_lsx_vreplve_b", + "lsx.vreplve.d" => "__builtin_lsx_vreplve_d", + "lsx.vreplve.h" => "__builtin_lsx_vreplve_h", + "lsx.vreplve.w" => "__builtin_lsx_vreplve_w", + "lsx.vreplvei.b" => "__builtin_lsx_vreplvei_b", + "lsx.vreplvei.d" => "__builtin_lsx_vreplvei_d", + "lsx.vreplvei.h" => "__builtin_lsx_vreplvei_h", + "lsx.vreplvei.w" => "__builtin_lsx_vreplvei_w", + "lsx.vrotr.b" => "__builtin_lsx_vrotr_b", + "lsx.vrotr.d" => "__builtin_lsx_vrotr_d", + "lsx.vrotr.h" => "__builtin_lsx_vrotr_h", + "lsx.vrotr.w" => "__builtin_lsx_vrotr_w", + "lsx.vrotri.b" => "__builtin_lsx_vrotri_b", + "lsx.vrotri.d" => "__builtin_lsx_vrotri_d", + "lsx.vrotri.h" => "__builtin_lsx_vrotri_h", + "lsx.vrotri.w" => "__builtin_lsx_vrotri_w", + "lsx.vsadd.b" => "__builtin_lsx_vsadd_b", + "lsx.vsadd.bu" => "__builtin_lsx_vsadd_bu", + "lsx.vsadd.d" => "__builtin_lsx_vsadd_d", + "lsx.vsadd.du" => "__builtin_lsx_vsadd_du", + "lsx.vsadd.h" => "__builtin_lsx_vsadd_h", + "lsx.vsadd.hu" => "__builtin_lsx_vsadd_hu", + "lsx.vsadd.w" => "__builtin_lsx_vsadd_w", + "lsx.vsadd.wu" => "__builtin_lsx_vsadd_wu", + "lsx.vsat.b" => "__builtin_lsx_vsat_b", + "lsx.vsat.bu" => "__builtin_lsx_vsat_bu", + "lsx.vsat.d" => "__builtin_lsx_vsat_d", + "lsx.vsat.du" => "__builtin_lsx_vsat_du", + "lsx.vsat.h" => "__builtin_lsx_vsat_h", + "lsx.vsat.hu" => "__builtin_lsx_vsat_hu", + "lsx.vsat.w" => "__builtin_lsx_vsat_w", + "lsx.vsat.wu" => "__builtin_lsx_vsat_wu", + "lsx.vseq.b" => "__builtin_lsx_vseq_b", + "lsx.vseq.d" => "__builtin_lsx_vseq_d", + "lsx.vseq.h" => "__builtin_lsx_vseq_h", + "lsx.vseq.w" => "__builtin_lsx_vseq_w", + "lsx.vseqi.b" => "__builtin_lsx_vseqi_b", + "lsx.vseqi.d" => "__builtin_lsx_vseqi_d", + "lsx.vseqi.h" => "__builtin_lsx_vseqi_h", + "lsx.vseqi.w" => "__builtin_lsx_vseqi_w", + "lsx.vshuf.b" => "__builtin_lsx_vshuf_b", + "lsx.vshuf.d" => "__builtin_lsx_vshuf_d", + "lsx.vshuf.h" => "__builtin_lsx_vshuf_h", + "lsx.vshuf.w" => "__builtin_lsx_vshuf_w", + "lsx.vshuf4i.b" => "__builtin_lsx_vshuf4i_b", + "lsx.vshuf4i.d" => "__builtin_lsx_vshuf4i_d", + "lsx.vshuf4i.h" => "__builtin_lsx_vshuf4i_h", + "lsx.vshuf4i.w" => "__builtin_lsx_vshuf4i_w", + "lsx.vsigncov.b" => "__builtin_lsx_vsigncov_b", + "lsx.vsigncov.d" => "__builtin_lsx_vsigncov_d", + "lsx.vsigncov.h" => "__builtin_lsx_vsigncov_h", + "lsx.vsigncov.w" => "__builtin_lsx_vsigncov_w", + "lsx.vsle.b" => "__builtin_lsx_vsle_b", + "lsx.vsle.bu" => "__builtin_lsx_vsle_bu", + "lsx.vsle.d" => "__builtin_lsx_vsle_d", + "lsx.vsle.du" => "__builtin_lsx_vsle_du", + "lsx.vsle.h" => "__builtin_lsx_vsle_h", + "lsx.vsle.hu" => "__builtin_lsx_vsle_hu", + "lsx.vsle.w" => "__builtin_lsx_vsle_w", + "lsx.vsle.wu" => "__builtin_lsx_vsle_wu", + "lsx.vslei.b" => "__builtin_lsx_vslei_b", + "lsx.vslei.bu" => "__builtin_lsx_vslei_bu", + "lsx.vslei.d" => "__builtin_lsx_vslei_d", + "lsx.vslei.du" => "__builtin_lsx_vslei_du", + "lsx.vslei.h" => "__builtin_lsx_vslei_h", + "lsx.vslei.hu" => "__builtin_lsx_vslei_hu", + "lsx.vslei.w" => 
"__builtin_lsx_vslei_w", + "lsx.vslei.wu" => "__builtin_lsx_vslei_wu", + "lsx.vsll.b" => "__builtin_lsx_vsll_b", + "lsx.vsll.d" => "__builtin_lsx_vsll_d", + "lsx.vsll.h" => "__builtin_lsx_vsll_h", + "lsx.vsll.w" => "__builtin_lsx_vsll_w", + "lsx.vslli.b" => "__builtin_lsx_vslli_b", + "lsx.vslli.d" => "__builtin_lsx_vslli_d", + "lsx.vslli.h" => "__builtin_lsx_vslli_h", + "lsx.vslli.w" => "__builtin_lsx_vslli_w", + "lsx.vsllwil.d.w" => "__builtin_lsx_vsllwil_d_w", + "lsx.vsllwil.du.wu" => "__builtin_lsx_vsllwil_du_wu", + "lsx.vsllwil.h.b" => "__builtin_lsx_vsllwil_h_b", + "lsx.vsllwil.hu.bu" => "__builtin_lsx_vsllwil_hu_bu", + "lsx.vsllwil.w.h" => "__builtin_lsx_vsllwil_w_h", + "lsx.vsllwil.wu.hu" => "__builtin_lsx_vsllwil_wu_hu", + "lsx.vslt.b" => "__builtin_lsx_vslt_b", + "lsx.vslt.bu" => "__builtin_lsx_vslt_bu", + "lsx.vslt.d" => "__builtin_lsx_vslt_d", + "lsx.vslt.du" => "__builtin_lsx_vslt_du", + "lsx.vslt.h" => "__builtin_lsx_vslt_h", + "lsx.vslt.hu" => "__builtin_lsx_vslt_hu", + "lsx.vslt.w" => "__builtin_lsx_vslt_w", + "lsx.vslt.wu" => "__builtin_lsx_vslt_wu", + "lsx.vslti.b" => "__builtin_lsx_vslti_b", + "lsx.vslti.bu" => "__builtin_lsx_vslti_bu", + "lsx.vslti.d" => "__builtin_lsx_vslti_d", + "lsx.vslti.du" => "__builtin_lsx_vslti_du", + "lsx.vslti.h" => "__builtin_lsx_vslti_h", + "lsx.vslti.hu" => "__builtin_lsx_vslti_hu", + "lsx.vslti.w" => "__builtin_lsx_vslti_w", + "lsx.vslti.wu" => "__builtin_lsx_vslti_wu", + "lsx.vsra.b" => "__builtin_lsx_vsra_b", + "lsx.vsra.d" => "__builtin_lsx_vsra_d", + "lsx.vsra.h" => "__builtin_lsx_vsra_h", + "lsx.vsra.w" => "__builtin_lsx_vsra_w", + "lsx.vsrai.b" => "__builtin_lsx_vsrai_b", + "lsx.vsrai.d" => "__builtin_lsx_vsrai_d", + "lsx.vsrai.h" => "__builtin_lsx_vsrai_h", + "lsx.vsrai.w" => "__builtin_lsx_vsrai_w", + "lsx.vsran.b.h" => "__builtin_lsx_vsran_b_h", + "lsx.vsran.h.w" => "__builtin_lsx_vsran_h_w", + "lsx.vsran.w.d" => "__builtin_lsx_vsran_w_d", + "lsx.vsrani.b.h" => "__builtin_lsx_vsrani_b_h", + "lsx.vsrani.d.q" => "__builtin_lsx_vsrani_d_q", + "lsx.vsrani.h.w" => "__builtin_lsx_vsrani_h_w", + "lsx.vsrani.w.d" => "__builtin_lsx_vsrani_w_d", + "lsx.vsrar.b" => "__builtin_lsx_vsrar_b", + "lsx.vsrar.d" => "__builtin_lsx_vsrar_d", + "lsx.vsrar.h" => "__builtin_lsx_vsrar_h", + "lsx.vsrar.w" => "__builtin_lsx_vsrar_w", + "lsx.vsrari.b" => "__builtin_lsx_vsrari_b", + "lsx.vsrari.d" => "__builtin_lsx_vsrari_d", + "lsx.vsrari.h" => "__builtin_lsx_vsrari_h", + "lsx.vsrari.w" => "__builtin_lsx_vsrari_w", + "lsx.vsrarn.b.h" => "__builtin_lsx_vsrarn_b_h", + "lsx.vsrarn.h.w" => "__builtin_lsx_vsrarn_h_w", + "lsx.vsrarn.w.d" => "__builtin_lsx_vsrarn_w_d", + "lsx.vsrarni.b.h" => "__builtin_lsx_vsrarni_b_h", + "lsx.vsrarni.d.q" => "__builtin_lsx_vsrarni_d_q", + "lsx.vsrarni.h.w" => "__builtin_lsx_vsrarni_h_w", + "lsx.vsrarni.w.d" => "__builtin_lsx_vsrarni_w_d", + "lsx.vsrl.b" => "__builtin_lsx_vsrl_b", + "lsx.vsrl.d" => "__builtin_lsx_vsrl_d", + "lsx.vsrl.h" => "__builtin_lsx_vsrl_h", + "lsx.vsrl.w" => "__builtin_lsx_vsrl_w", + "lsx.vsrli.b" => "__builtin_lsx_vsrli_b", + "lsx.vsrli.d" => "__builtin_lsx_vsrli_d", + "lsx.vsrli.h" => "__builtin_lsx_vsrli_h", + "lsx.vsrli.w" => "__builtin_lsx_vsrli_w", + "lsx.vsrln.b.h" => "__builtin_lsx_vsrln_b_h", + "lsx.vsrln.h.w" => "__builtin_lsx_vsrln_h_w", + "lsx.vsrln.w.d" => "__builtin_lsx_vsrln_w_d", + "lsx.vsrlni.b.h" => "__builtin_lsx_vsrlni_b_h", + "lsx.vsrlni.d.q" => "__builtin_lsx_vsrlni_d_q", + "lsx.vsrlni.h.w" => "__builtin_lsx_vsrlni_h_w", + "lsx.vsrlni.w.d" => "__builtin_lsx_vsrlni_w_d", + "lsx.vsrlr.b" 
=> "__builtin_lsx_vsrlr_b", + "lsx.vsrlr.d" => "__builtin_lsx_vsrlr_d", + "lsx.vsrlr.h" => "__builtin_lsx_vsrlr_h", + "lsx.vsrlr.w" => "__builtin_lsx_vsrlr_w", + "lsx.vsrlri.b" => "__builtin_lsx_vsrlri_b", + "lsx.vsrlri.d" => "__builtin_lsx_vsrlri_d", + "lsx.vsrlri.h" => "__builtin_lsx_vsrlri_h", + "lsx.vsrlri.w" => "__builtin_lsx_vsrlri_w", + "lsx.vsrlrn.b.h" => "__builtin_lsx_vsrlrn_b_h", + "lsx.vsrlrn.h.w" => "__builtin_lsx_vsrlrn_h_w", + "lsx.vsrlrn.w.d" => "__builtin_lsx_vsrlrn_w_d", + "lsx.vsrlrni.b.h" => "__builtin_lsx_vsrlrni_b_h", + "lsx.vsrlrni.d.q" => "__builtin_lsx_vsrlrni_d_q", + "lsx.vsrlrni.h.w" => "__builtin_lsx_vsrlrni_h_w", + "lsx.vsrlrni.w.d" => "__builtin_lsx_vsrlrni_w_d", + "lsx.vssran.b.h" => "__builtin_lsx_vssran_b_h", + "lsx.vssran.bu.h" => "__builtin_lsx_vssran_bu_h", + "lsx.vssran.h.w" => "__builtin_lsx_vssran_h_w", + "lsx.vssran.hu.w" => "__builtin_lsx_vssran_hu_w", + "lsx.vssran.w.d" => "__builtin_lsx_vssran_w_d", + "lsx.vssran.wu.d" => "__builtin_lsx_vssran_wu_d", + "lsx.vssrani.b.h" => "__builtin_lsx_vssrani_b_h", + "lsx.vssrani.bu.h" => "__builtin_lsx_vssrani_bu_h", + "lsx.vssrani.d.q" => "__builtin_lsx_vssrani_d_q", + "lsx.vssrani.du.q" => "__builtin_lsx_vssrani_du_q", + "lsx.vssrani.h.w" => "__builtin_lsx_vssrani_h_w", + "lsx.vssrani.hu.w" => "__builtin_lsx_vssrani_hu_w", + "lsx.vssrani.w.d" => "__builtin_lsx_vssrani_w_d", + "lsx.vssrani.wu.d" => "__builtin_lsx_vssrani_wu_d", + "lsx.vssrarn.b.h" => "__builtin_lsx_vssrarn_b_h", + "lsx.vssrarn.bu.h" => "__builtin_lsx_vssrarn_bu_h", + "lsx.vssrarn.h.w" => "__builtin_lsx_vssrarn_h_w", + "lsx.vssrarn.hu.w" => "__builtin_lsx_vssrarn_hu_w", + "lsx.vssrarn.w.d" => "__builtin_lsx_vssrarn_w_d", + "lsx.vssrarn.wu.d" => "__builtin_lsx_vssrarn_wu_d", + "lsx.vssrarni.b.h" => "__builtin_lsx_vssrarni_b_h", + "lsx.vssrarni.bu.h" => "__builtin_lsx_vssrarni_bu_h", + "lsx.vssrarni.d.q" => "__builtin_lsx_vssrarni_d_q", + "lsx.vssrarni.du.q" => "__builtin_lsx_vssrarni_du_q", + "lsx.vssrarni.h.w" => "__builtin_lsx_vssrarni_h_w", + "lsx.vssrarni.hu.w" => "__builtin_lsx_vssrarni_hu_w", + "lsx.vssrarni.w.d" => "__builtin_lsx_vssrarni_w_d", + "lsx.vssrarni.wu.d" => "__builtin_lsx_vssrarni_wu_d", + "lsx.vssrln.b.h" => "__builtin_lsx_vssrln_b_h", + "lsx.vssrln.bu.h" => "__builtin_lsx_vssrln_bu_h", + "lsx.vssrln.h.w" => "__builtin_lsx_vssrln_h_w", + "lsx.vssrln.hu.w" => "__builtin_lsx_vssrln_hu_w", + "lsx.vssrln.w.d" => "__builtin_lsx_vssrln_w_d", + "lsx.vssrln.wu.d" => "__builtin_lsx_vssrln_wu_d", + "lsx.vssrlni.b.h" => "__builtin_lsx_vssrlni_b_h", + "lsx.vssrlni.bu.h" => "__builtin_lsx_vssrlni_bu_h", + "lsx.vssrlni.d.q" => "__builtin_lsx_vssrlni_d_q", + "lsx.vssrlni.du.q" => "__builtin_lsx_vssrlni_du_q", + "lsx.vssrlni.h.w" => "__builtin_lsx_vssrlni_h_w", + "lsx.vssrlni.hu.w" => "__builtin_lsx_vssrlni_hu_w", + "lsx.vssrlni.w.d" => "__builtin_lsx_vssrlni_w_d", + "lsx.vssrlni.wu.d" => "__builtin_lsx_vssrlni_wu_d", + "lsx.vssrlrn.b.h" => "__builtin_lsx_vssrlrn_b_h", + "lsx.vssrlrn.bu.h" => "__builtin_lsx_vssrlrn_bu_h", + "lsx.vssrlrn.h.w" => "__builtin_lsx_vssrlrn_h_w", + "lsx.vssrlrn.hu.w" => "__builtin_lsx_vssrlrn_hu_w", + "lsx.vssrlrn.w.d" => "__builtin_lsx_vssrlrn_w_d", + "lsx.vssrlrn.wu.d" => "__builtin_lsx_vssrlrn_wu_d", + "lsx.vssrlrni.b.h" => "__builtin_lsx_vssrlrni_b_h", + "lsx.vssrlrni.bu.h" => "__builtin_lsx_vssrlrni_bu_h", + "lsx.vssrlrni.d.q" => "__builtin_lsx_vssrlrni_d_q", + "lsx.vssrlrni.du.q" => "__builtin_lsx_vssrlrni_du_q", + "lsx.vssrlrni.h.w" => "__builtin_lsx_vssrlrni_h_w", + "lsx.vssrlrni.hu.w" => 
"__builtin_lsx_vssrlrni_hu_w", + "lsx.vssrlrni.w.d" => "__builtin_lsx_vssrlrni_w_d", + "lsx.vssrlrni.wu.d" => "__builtin_lsx_vssrlrni_wu_d", + "lsx.vssub.b" => "__builtin_lsx_vssub_b", + "lsx.vssub.bu" => "__builtin_lsx_vssub_bu", + "lsx.vssub.d" => "__builtin_lsx_vssub_d", + "lsx.vssub.du" => "__builtin_lsx_vssub_du", + "lsx.vssub.h" => "__builtin_lsx_vssub_h", + "lsx.vssub.hu" => "__builtin_lsx_vssub_hu", + "lsx.vssub.w" => "__builtin_lsx_vssub_w", + "lsx.vssub.wu" => "__builtin_lsx_vssub_wu", + "lsx.vst" => "__builtin_lsx_vst", + "lsx.vstelm.b" => "__builtin_lsx_vstelm_b", + "lsx.vstelm.d" => "__builtin_lsx_vstelm_d", + "lsx.vstelm.h" => "__builtin_lsx_vstelm_h", + "lsx.vstelm.w" => "__builtin_lsx_vstelm_w", + "lsx.vstx" => "__builtin_lsx_vstx", + "lsx.vsub.b" => "__builtin_lsx_vsub_b", + "lsx.vsub.d" => "__builtin_lsx_vsub_d", + "lsx.vsub.h" => "__builtin_lsx_vsub_h", + "lsx.vsub.q" => "__builtin_lsx_vsub_q", + "lsx.vsub.w" => "__builtin_lsx_vsub_w", + "lsx.vsubi.bu" => "__builtin_lsx_vsubi_bu", + "lsx.vsubi.du" => "__builtin_lsx_vsubi_du", + "lsx.vsubi.hu" => "__builtin_lsx_vsubi_hu", + "lsx.vsubi.wu" => "__builtin_lsx_vsubi_wu", + "lsx.vsubwev.d.w" => "__builtin_lsx_vsubwev_d_w", + "lsx.vsubwev.d.wu" => "__builtin_lsx_vsubwev_d_wu", + "lsx.vsubwev.h.b" => "__builtin_lsx_vsubwev_h_b", + "lsx.vsubwev.h.bu" => "__builtin_lsx_vsubwev_h_bu", + "lsx.vsubwev.q.d" => "__builtin_lsx_vsubwev_q_d", + "lsx.vsubwev.q.du" => "__builtin_lsx_vsubwev_q_du", + "lsx.vsubwev.w.h" => "__builtin_lsx_vsubwev_w_h", + "lsx.vsubwev.w.hu" => "__builtin_lsx_vsubwev_w_hu", + "lsx.vsubwod.d.w" => "__builtin_lsx_vsubwod_d_w", + "lsx.vsubwod.d.wu" => "__builtin_lsx_vsubwod_d_wu", + "lsx.vsubwod.h.b" => "__builtin_lsx_vsubwod_h_b", + "lsx.vsubwod.h.bu" => "__builtin_lsx_vsubwod_h_bu", + "lsx.vsubwod.q.d" => "__builtin_lsx_vsubwod_q_d", + "lsx.vsubwod.q.du" => "__builtin_lsx_vsubwod_q_du", + "lsx.vsubwod.w.h" => "__builtin_lsx_vsubwod_w_h", + "lsx.vsubwod.w.hu" => "__builtin_lsx_vsubwod_w_hu", + "lsx.vxor.v" => "__builtin_lsx_vxor_v", + "lsx.vxori.b" => "__builtin_lsx_vxori_b", + "movfcsr2gr" => "__builtin_loongarch_movfcsr2gr", + "movgr2fcsr" => "__builtin_loongarch_movgr2fcsr", + "syscall" => "__builtin_loongarch_syscall", + _ => unimplemented!("***** unsupported LLVM intrinsic {}", name), + } + } + loongarch(name) + } + "mips" => { + #[allow(non_snake_case)] + fn mips(name: &str) -> &str { + match name { + // mips + "absq.s.ph" => "__builtin_mips_absq_s_ph", + "absq.s.qb" => "__builtin_mips_absq_s_qb", + "absq.s.w" => "__builtin_mips_absq_s_w", + "add.a.b" => "__builtin_msa_add_a_b", + "add.a.d" => "__builtin_msa_add_a_d", + "add.a.h" => "__builtin_msa_add_a_h", + "add.a.w" => "__builtin_msa_add_a_w", + "addq.ph" => "__builtin_mips_addq_ph", + "addq.s.ph" => "__builtin_mips_addq_s_ph", + "addq.s.w" => "__builtin_mips_addq_s_w", + "addqh.ph" => "__builtin_mips_addqh_ph", + "addqh.r.ph" => "__builtin_mips_addqh_r_ph", + "addqh.r.w" => "__builtin_mips_addqh_r_w", + "addqh.w" => "__builtin_mips_addqh_w", + "adds.a.b" => "__builtin_msa_adds_a_b", + "adds.a.d" => "__builtin_msa_adds_a_d", + "adds.a.h" => "__builtin_msa_adds_a_h", + "adds.a.w" => "__builtin_msa_adds_a_w", + "adds.s.b" => "__builtin_msa_adds_s_b", + "adds.s.d" => "__builtin_msa_adds_s_d", + "adds.s.h" => "__builtin_msa_adds_s_h", + "adds.s.w" => "__builtin_msa_adds_s_w", + "adds.u.b" => "__builtin_msa_adds_u_b", + "adds.u.d" => "__builtin_msa_adds_u_d", + "adds.u.h" => "__builtin_msa_adds_u_h", + "adds.u.w" => "__builtin_msa_adds_u_w", + "addsc" => 
"__builtin_mips_addsc", + "addu.ph" => "__builtin_mips_addu_ph", + "addu.qb" => "__builtin_mips_addu_qb", + "addu.s.ph" => "__builtin_mips_addu_s_ph", + "addu.s.qb" => "__builtin_mips_addu_s_qb", + "adduh.qb" => "__builtin_mips_adduh_qb", + "adduh.r.qb" => "__builtin_mips_adduh_r_qb", + "addv.b" => "__builtin_msa_addv_b", + "addv.d" => "__builtin_msa_addv_d", + "addv.h" => "__builtin_msa_addv_h", + "addv.w" => "__builtin_msa_addv_w", + "addvi.b" => "__builtin_msa_addvi_b", + "addvi.d" => "__builtin_msa_addvi_d", + "addvi.h" => "__builtin_msa_addvi_h", + "addvi.w" => "__builtin_msa_addvi_w", + "addwc" => "__builtin_mips_addwc", + "and.v" => "__builtin_msa_and_v", + "andi.b" => "__builtin_msa_andi_b", + "append" => "__builtin_mips_append", + "asub.s.b" => "__builtin_msa_asub_s_b", + "asub.s.d" => "__builtin_msa_asub_s_d", + "asub.s.h" => "__builtin_msa_asub_s_h", + "asub.s.w" => "__builtin_msa_asub_s_w", + "asub.u.b" => "__builtin_msa_asub_u_b", + "asub.u.d" => "__builtin_msa_asub_u_d", + "asub.u.h" => "__builtin_msa_asub_u_h", + "asub.u.w" => "__builtin_msa_asub_u_w", + "ave.s.b" => "__builtin_msa_ave_s_b", + "ave.s.d" => "__builtin_msa_ave_s_d", + "ave.s.h" => "__builtin_msa_ave_s_h", + "ave.s.w" => "__builtin_msa_ave_s_w", + "ave.u.b" => "__builtin_msa_ave_u_b", + "ave.u.d" => "__builtin_msa_ave_u_d", + "ave.u.h" => "__builtin_msa_ave_u_h", + "ave.u.w" => "__builtin_msa_ave_u_w", + "aver.s.b" => "__builtin_msa_aver_s_b", + "aver.s.d" => "__builtin_msa_aver_s_d", + "aver.s.h" => "__builtin_msa_aver_s_h", + "aver.s.w" => "__builtin_msa_aver_s_w", + "aver.u.b" => "__builtin_msa_aver_u_b", + "aver.u.d" => "__builtin_msa_aver_u_d", + "aver.u.h" => "__builtin_msa_aver_u_h", + "aver.u.w" => "__builtin_msa_aver_u_w", + "balign" => "__builtin_mips_balign", + "bclr.b" => "__builtin_msa_bclr_b", + "bclr.d" => "__builtin_msa_bclr_d", + "bclr.h" => "__builtin_msa_bclr_h", + "bclr.w" => "__builtin_msa_bclr_w", + "bclri.b" => "__builtin_msa_bclri_b", + "bclri.d" => "__builtin_msa_bclri_d", + "bclri.h" => "__builtin_msa_bclri_h", + "bclri.w" => "__builtin_msa_bclri_w", + "binsl.b" => "__builtin_msa_binsl_b", + "binsl.d" => "__builtin_msa_binsl_d", + "binsl.h" => "__builtin_msa_binsl_h", + "binsl.w" => "__builtin_msa_binsl_w", + "binsli.b" => "__builtin_msa_binsli_b", + "binsli.d" => "__builtin_msa_binsli_d", + "binsli.h" => "__builtin_msa_binsli_h", + "binsli.w" => "__builtin_msa_binsli_w", + "binsr.b" => "__builtin_msa_binsr_b", + "binsr.d" => "__builtin_msa_binsr_d", + "binsr.h" => "__builtin_msa_binsr_h", + "binsr.w" => "__builtin_msa_binsr_w", + "binsri.b" => "__builtin_msa_binsri_b", + "binsri.d" => "__builtin_msa_binsri_d", + "binsri.h" => "__builtin_msa_binsri_h", + "binsri.w" => "__builtin_msa_binsri_w", + "bitrev" => "__builtin_mips_bitrev", + "bmnz.v" => "__builtin_msa_bmnz_v", + "bmnzi.b" => "__builtin_msa_bmnzi_b", + "bmz.v" => "__builtin_msa_bmz_v", + "bmzi.b" => "__builtin_msa_bmzi_b", + "bneg.b" => "__builtin_msa_bneg_b", + "bneg.d" => "__builtin_msa_bneg_d", + "bneg.h" => "__builtin_msa_bneg_h", + "bneg.w" => "__builtin_msa_bneg_w", + "bnegi.b" => "__builtin_msa_bnegi_b", + "bnegi.d" => "__builtin_msa_bnegi_d", + "bnegi.h" => "__builtin_msa_bnegi_h", + "bnegi.w" => "__builtin_msa_bnegi_w", + "bnz.b" => "__builtin_msa_bnz_b", + "bnz.d" => "__builtin_msa_bnz_d", + "bnz.h" => "__builtin_msa_bnz_h", + "bnz.v" => "__builtin_msa_bnz_v", + "bnz.w" => "__builtin_msa_bnz_w", + "bposge32" => "__builtin_mips_bposge32", + "bsel.v" => "__builtin_msa_bsel_v", + "bseli.b" => "__builtin_msa_bseli_b", + 
"bset.b" => "__builtin_msa_bset_b", + "bset.d" => "__builtin_msa_bset_d", + "bset.h" => "__builtin_msa_bset_h", + "bset.w" => "__builtin_msa_bset_w", + "bseti.b" => "__builtin_msa_bseti_b", + "bseti.d" => "__builtin_msa_bseti_d", + "bseti.h" => "__builtin_msa_bseti_h", + "bseti.w" => "__builtin_msa_bseti_w", + "bz.b" => "__builtin_msa_bz_b", + "bz.d" => "__builtin_msa_bz_d", + "bz.h" => "__builtin_msa_bz_h", + "bz.v" => "__builtin_msa_bz_v", + "bz.w" => "__builtin_msa_bz_w", + "ceq.b" => "__builtin_msa_ceq_b", + "ceq.d" => "__builtin_msa_ceq_d", + "ceq.h" => "__builtin_msa_ceq_h", + "ceq.w" => "__builtin_msa_ceq_w", + "ceqi.b" => "__builtin_msa_ceqi_b", + "ceqi.d" => "__builtin_msa_ceqi_d", + "ceqi.h" => "__builtin_msa_ceqi_h", + "ceqi.w" => "__builtin_msa_ceqi_w", + "cfcmsa" => "__builtin_msa_cfcmsa", + "cle.s.b" => "__builtin_msa_cle_s_b", + "cle.s.d" => "__builtin_msa_cle_s_d", + "cle.s.h" => "__builtin_msa_cle_s_h", + "cle.s.w" => "__builtin_msa_cle_s_w", + "cle.u.b" => "__builtin_msa_cle_u_b", + "cle.u.d" => "__builtin_msa_cle_u_d", + "cle.u.h" => "__builtin_msa_cle_u_h", + "cle.u.w" => "__builtin_msa_cle_u_w", + "clei.s.b" => "__builtin_msa_clei_s_b", + "clei.s.d" => "__builtin_msa_clei_s_d", + "clei.s.h" => "__builtin_msa_clei_s_h", + "clei.s.w" => "__builtin_msa_clei_s_w", + "clei.u.b" => "__builtin_msa_clei_u_b", + "clei.u.d" => "__builtin_msa_clei_u_d", + "clei.u.h" => "__builtin_msa_clei_u_h", + "clei.u.w" => "__builtin_msa_clei_u_w", + "clt.s.b" => "__builtin_msa_clt_s_b", + "clt.s.d" => "__builtin_msa_clt_s_d", + "clt.s.h" => "__builtin_msa_clt_s_h", + "clt.s.w" => "__builtin_msa_clt_s_w", + "clt.u.b" => "__builtin_msa_clt_u_b", + "clt.u.d" => "__builtin_msa_clt_u_d", + "clt.u.h" => "__builtin_msa_clt_u_h", + "clt.u.w" => "__builtin_msa_clt_u_w", + "clti.s.b" => "__builtin_msa_clti_s_b", + "clti.s.d" => "__builtin_msa_clti_s_d", + "clti.s.h" => "__builtin_msa_clti_s_h", + "clti.s.w" => "__builtin_msa_clti_s_w", + "clti.u.b" => "__builtin_msa_clti_u_b", + "clti.u.d" => "__builtin_msa_clti_u_d", + "clti.u.h" => "__builtin_msa_clti_u_h", + "clti.u.w" => "__builtin_msa_clti_u_w", + "cmp.eq.ph" => "__builtin_mips_cmp_eq_ph", + "cmp.le.ph" => "__builtin_mips_cmp_le_ph", + "cmp.lt.ph" => "__builtin_mips_cmp_lt_ph", + "cmpgdu.eq.qb" => "__builtin_mips_cmpgdu_eq_qb", + "cmpgdu.le.qb" => "__builtin_mips_cmpgdu_le_qb", + "cmpgdu.lt.qb" => "__builtin_mips_cmpgdu_lt_qb", + "cmpgu.eq.qb" => "__builtin_mips_cmpgu_eq_qb", + "cmpgu.le.qb" => "__builtin_mips_cmpgu_le_qb", + "cmpgu.lt.qb" => "__builtin_mips_cmpgu_lt_qb", + "cmpu.eq.qb" => "__builtin_mips_cmpu_eq_qb", + "cmpu.le.qb" => "__builtin_mips_cmpu_le_qb", + "cmpu.lt.qb" => "__builtin_mips_cmpu_lt_qb", + "copy.s.b" => "__builtin_msa_copy_s_b", + "copy.s.d" => "__builtin_msa_copy_s_d", + "copy.s.h" => "__builtin_msa_copy_s_h", + "copy.s.w" => "__builtin_msa_copy_s_w", + "copy.u.b" => "__builtin_msa_copy_u_b", + "copy.u.d" => "__builtin_msa_copy_u_d", + "copy.u.h" => "__builtin_msa_copy_u_h", + "copy.u.w" => "__builtin_msa_copy_u_w", + "ctcmsa" => "__builtin_msa_ctcmsa", + "div.s.b" => "__builtin_msa_div_s_b", + "div.s.d" => "__builtin_msa_div_s_d", + "div.s.h" => "__builtin_msa_div_s_h", + "div.s.w" => "__builtin_msa_div_s_w", + "div.u.b" => "__builtin_msa_div_u_b", + "div.u.d" => "__builtin_msa_div_u_d", + "div.u.h" => "__builtin_msa_div_u_h", + "div.u.w" => "__builtin_msa_div_u_w", + "dlsa" => "__builtin_mips_dlsa", + "dotp.s.d" => "__builtin_msa_dotp_s_d", + "dotp.s.h" => "__builtin_msa_dotp_s_h", + "dotp.s.w" => 
"__builtin_msa_dotp_s_w", + "dotp.u.d" => "__builtin_msa_dotp_u_d", + "dotp.u.h" => "__builtin_msa_dotp_u_h", + "dotp.u.w" => "__builtin_msa_dotp_u_w", + "dpa.w.ph" => "__builtin_mips_dpa_w_ph", + "dpadd.s.d" => "__builtin_msa_dpadd_s_d", + "dpadd.s.h" => "__builtin_msa_dpadd_s_h", + "dpadd.s.w" => "__builtin_msa_dpadd_s_w", + "dpadd.u.d" => "__builtin_msa_dpadd_u_d", + "dpadd.u.h" => "__builtin_msa_dpadd_u_h", + "dpadd.u.w" => "__builtin_msa_dpadd_u_w", + "dpaq.s.w.ph" => "__builtin_mips_dpaq_s_w_ph", + "dpaq.sa.l.w" => "__builtin_mips_dpaq_sa_l_w", + "dpaqx.s.w.ph" => "__builtin_mips_dpaqx_s_w_ph", + "dpaqx.sa.w.ph" => "__builtin_mips_dpaqx_sa_w_ph", + "dpau.h.qbl" => "__builtin_mips_dpau_h_qbl", + "dpau.h.qbr" => "__builtin_mips_dpau_h_qbr", + "dpax.w.ph" => "__builtin_mips_dpax_w_ph", + "dps.w.ph" => "__builtin_mips_dps_w_ph", + "dpsq.s.w.ph" => "__builtin_mips_dpsq_s_w_ph", + "dpsq.sa.l.w" => "__builtin_mips_dpsq_sa_l_w", + "dpsqx.s.w.ph" => "__builtin_mips_dpsqx_s_w_ph", + "dpsqx.sa.w.ph" => "__builtin_mips_dpsqx_sa_w_ph", + "dpsu.h.qbl" => "__builtin_mips_dpsu_h_qbl", + "dpsu.h.qbr" => "__builtin_mips_dpsu_h_qbr", + "dpsub.s.d" => "__builtin_msa_dpsub_s_d", + "dpsub.s.h" => "__builtin_msa_dpsub_s_h", + "dpsub.s.w" => "__builtin_msa_dpsub_s_w", + "dpsub.u.d" => "__builtin_msa_dpsub_u_d", + "dpsub.u.h" => "__builtin_msa_dpsub_u_h", + "dpsub.u.w" => "__builtin_msa_dpsub_u_w", + "dpsx.w.ph" => "__builtin_mips_dpsx_w_ph", + "extp" => "__builtin_mips_extp", + "extpdp" => "__builtin_mips_extpdp", + "extr.r.w" => "__builtin_mips_extr_r_w", + "extr.rs.w" => "__builtin_mips_extr_rs_w", + "extr.s.h" => "__builtin_mips_extr_s_h", + "extr.w" => "__builtin_mips_extr_w", + "fadd.d" => "__builtin_msa_fadd_d", + "fadd.w" => "__builtin_msa_fadd_w", + "fcaf.d" => "__builtin_msa_fcaf_d", + "fcaf.w" => "__builtin_msa_fcaf_w", + "fceq.d" => "__builtin_msa_fceq_d", + "fceq.w" => "__builtin_msa_fceq_w", + "fclass.d" => "__builtin_msa_fclass_d", + "fclass.w" => "__builtin_msa_fclass_w", + "fcle.d" => "__builtin_msa_fcle_d", + "fcle.w" => "__builtin_msa_fcle_w", + "fclt.d" => "__builtin_msa_fclt_d", + "fclt.w" => "__builtin_msa_fclt_w", + "fcne.d" => "__builtin_msa_fcne_d", + "fcne.w" => "__builtin_msa_fcne_w", + "fcor.d" => "__builtin_msa_fcor_d", + "fcor.w" => "__builtin_msa_fcor_w", + "fcueq.d" => "__builtin_msa_fcueq_d", + "fcueq.w" => "__builtin_msa_fcueq_w", + "fcule.d" => "__builtin_msa_fcule_d", + "fcule.w" => "__builtin_msa_fcule_w", + "fcult.d" => "__builtin_msa_fcult_d", + "fcult.w" => "__builtin_msa_fcult_w", + "fcun.d" => "__builtin_msa_fcun_d", + "fcun.w" => "__builtin_msa_fcun_w", + "fcune.d" => "__builtin_msa_fcune_d", + "fcune.w" => "__builtin_msa_fcune_w", + "fdiv.d" => "__builtin_msa_fdiv_d", + "fdiv.w" => "__builtin_msa_fdiv_w", + "fexdo.h" => "__builtin_msa_fexdo_h", + "fexdo.w" => "__builtin_msa_fexdo_w", + "fexp2.d" => "__builtin_msa_fexp2_d", + "fexp2.w" => "__builtin_msa_fexp2_w", + "fexupl.d" => "__builtin_msa_fexupl_d", + "fexupl.w" => "__builtin_msa_fexupl_w", + "fexupr.d" => "__builtin_msa_fexupr_d", + "fexupr.w" => "__builtin_msa_fexupr_w", + "ffint.s.d" => "__builtin_msa_ffint_s_d", + "ffint.s.w" => "__builtin_msa_ffint_s_w", + "ffint.u.d" => "__builtin_msa_ffint_u_d", + "ffint.u.w" => "__builtin_msa_ffint_u_w", + "ffql.d" => "__builtin_msa_ffql_d", + "ffql.w" => "__builtin_msa_ffql_w", + "ffqr.d" => "__builtin_msa_ffqr_d", + "ffqr.w" => "__builtin_msa_ffqr_w", + "fill.b" => "__builtin_msa_fill_b", + "fill.d" => "__builtin_msa_fill_d", + "fill.h" => "__builtin_msa_fill_h", + 
"fill.w" => "__builtin_msa_fill_w", + "flog2.d" => "__builtin_msa_flog2_d", + "flog2.w" => "__builtin_msa_flog2_w", + "fmadd.d" => "__builtin_msa_fmadd_d", + "fmadd.w" => "__builtin_msa_fmadd_w", + "fmax.a.d" => "__builtin_msa_fmax_a_d", + "fmax.a.w" => "__builtin_msa_fmax_a_w", + "fmax.d" => "__builtin_msa_fmax_d", + "fmax.w" => "__builtin_msa_fmax_w", + "fmin.a.d" => "__builtin_msa_fmin_a_d", + "fmin.a.w" => "__builtin_msa_fmin_a_w", + "fmin.d" => "__builtin_msa_fmin_d", + "fmin.w" => "__builtin_msa_fmin_w", + "fmsub.d" => "__builtin_msa_fmsub_d", + "fmsub.w" => "__builtin_msa_fmsub_w", + "fmul.d" => "__builtin_msa_fmul_d", + "fmul.w" => "__builtin_msa_fmul_w", + "frcp.d" => "__builtin_msa_frcp_d", + "frcp.w" => "__builtin_msa_frcp_w", + "frint.d" => "__builtin_msa_frint_d", + "frint.w" => "__builtin_msa_frint_w", + "frsqrt.d" => "__builtin_msa_frsqrt_d", + "frsqrt.w" => "__builtin_msa_frsqrt_w", + "fsaf.d" => "__builtin_msa_fsaf_d", + "fsaf.w" => "__builtin_msa_fsaf_w", + "fseq.d" => "__builtin_msa_fseq_d", + "fseq.w" => "__builtin_msa_fseq_w", + "fsle.d" => "__builtin_msa_fsle_d", + "fsle.w" => "__builtin_msa_fsle_w", + "fslt.d" => "__builtin_msa_fslt_d", + "fslt.w" => "__builtin_msa_fslt_w", + "fsne.d" => "__builtin_msa_fsne_d", + "fsne.w" => "__builtin_msa_fsne_w", + "fsor.d" => "__builtin_msa_fsor_d", + "fsor.w" => "__builtin_msa_fsor_w", + "fsqrt.d" => "__builtin_msa_fsqrt_d", + "fsqrt.w" => "__builtin_msa_fsqrt_w", + "fsub.d" => "__builtin_msa_fsub_d", + "fsub.w" => "__builtin_msa_fsub_w", + "fsueq.d" => "__builtin_msa_fsueq_d", + "fsueq.w" => "__builtin_msa_fsueq_w", + "fsule.d" => "__builtin_msa_fsule_d", + "fsule.w" => "__builtin_msa_fsule_w", + "fsult.d" => "__builtin_msa_fsult_d", + "fsult.w" => "__builtin_msa_fsult_w", + "fsun.d" => "__builtin_msa_fsun_d", + "fsun.w" => "__builtin_msa_fsun_w", + "fsune.d" => "__builtin_msa_fsune_d", + "fsune.w" => "__builtin_msa_fsune_w", + "ftint.s.d" => "__builtin_msa_ftint_s_d", + "ftint.s.w" => "__builtin_msa_ftint_s_w", + "ftint.u.d" => "__builtin_msa_ftint_u_d", + "ftint.u.w" => "__builtin_msa_ftint_u_w", + "ftq.h" => "__builtin_msa_ftq_h", + "ftq.w" => "__builtin_msa_ftq_w", + "ftrunc.s.d" => "__builtin_msa_ftrunc_s_d", + "ftrunc.s.w" => "__builtin_msa_ftrunc_s_w", + "ftrunc.u.d" => "__builtin_msa_ftrunc_u_d", + "ftrunc.u.w" => "__builtin_msa_ftrunc_u_w", + "hadd.s.d" => "__builtin_msa_hadd_s_d", + "hadd.s.h" => "__builtin_msa_hadd_s_h", + "hadd.s.w" => "__builtin_msa_hadd_s_w", + "hadd.u.d" => "__builtin_msa_hadd_u_d", + "hadd.u.h" => "__builtin_msa_hadd_u_h", + "hadd.u.w" => "__builtin_msa_hadd_u_w", + "hsub.s.d" => "__builtin_msa_hsub_s_d", + "hsub.s.h" => "__builtin_msa_hsub_s_h", + "hsub.s.w" => "__builtin_msa_hsub_s_w", + "hsub.u.d" => "__builtin_msa_hsub_u_d", + "hsub.u.h" => "__builtin_msa_hsub_u_h", + "hsub.u.w" => "__builtin_msa_hsub_u_w", + "ilvev.b" => "__builtin_msa_ilvev_b", + "ilvev.d" => "__builtin_msa_ilvev_d", + "ilvev.h" => "__builtin_msa_ilvev_h", + "ilvev.w" => "__builtin_msa_ilvev_w", + "ilvl.b" => "__builtin_msa_ilvl_b", + "ilvl.d" => "__builtin_msa_ilvl_d", + "ilvl.h" => "__builtin_msa_ilvl_h", + "ilvl.w" => "__builtin_msa_ilvl_w", + "ilvod.b" => "__builtin_msa_ilvod_b", + "ilvod.d" => "__builtin_msa_ilvod_d", + "ilvod.h" => "__builtin_msa_ilvod_h", + "ilvod.w" => "__builtin_msa_ilvod_w", + "ilvr.b" => "__builtin_msa_ilvr_b", + "ilvr.d" => "__builtin_msa_ilvr_d", + "ilvr.h" => "__builtin_msa_ilvr_h", + "ilvr.w" => "__builtin_msa_ilvr_w", + "insert.b" => "__builtin_msa_insert_b", + "insert.d" => 
"__builtin_msa_insert_d", + "insert.h" => "__builtin_msa_insert_h", + "insert.w" => "__builtin_msa_insert_w", + "insv" => "__builtin_mips_insv", + "insve.b" => "__builtin_msa_insve_b", + "insve.d" => "__builtin_msa_insve_d", + "insve.h" => "__builtin_msa_insve_h", + "insve.w" => "__builtin_msa_insve_w", + "lbux" => "__builtin_mips_lbux", + "ld.b" => "__builtin_msa_ld_b", + "ld.d" => "__builtin_msa_ld_d", + "ld.h" => "__builtin_msa_ld_h", + "ld.w" => "__builtin_msa_ld_w", + "ldi.b" => "__builtin_msa_ldi_b", + "ldi.d" => "__builtin_msa_ldi_d", + "ldi.h" => "__builtin_msa_ldi_h", + "ldi.w" => "__builtin_msa_ldi_w", + "ldr.d" => "__builtin_msa_ldr_d", + "ldr.w" => "__builtin_msa_ldr_w", + "lhx" => "__builtin_mips_lhx", + "lsa" => "__builtin_mips_lsa", + "lwx" => "__builtin_mips_lwx", + "madd" => "__builtin_mips_madd", + "madd.q.h" => "__builtin_msa_madd_q_h", + "madd.q.w" => "__builtin_msa_madd_q_w", + "maddr.q.h" => "__builtin_msa_maddr_q_h", + "maddr.q.w" => "__builtin_msa_maddr_q_w", + "maddu" => "__builtin_mips_maddu", + "maddv.b" => "__builtin_msa_maddv_b", + "maddv.d" => "__builtin_msa_maddv_d", + "maddv.h" => "__builtin_msa_maddv_h", + "maddv.w" => "__builtin_msa_maddv_w", + "maq.s.w.phl" => "__builtin_mips_maq_s_w_phl", + "maq.s.w.phr" => "__builtin_mips_maq_s_w_phr", + "maq.sa.w.phl" => "__builtin_mips_maq_sa_w_phl", + "maq.sa.w.phr" => "__builtin_mips_maq_sa_w_phr", + "max.a.b" => "__builtin_msa_max_a_b", + "max.a.d" => "__builtin_msa_max_a_d", + "max.a.h" => "__builtin_msa_max_a_h", + "max.a.w" => "__builtin_msa_max_a_w", + "max.s.b" => "__builtin_msa_max_s_b", + "max.s.d" => "__builtin_msa_max_s_d", + "max.s.h" => "__builtin_msa_max_s_h", + "max.s.w" => "__builtin_msa_max_s_w", + "max.u.b" => "__builtin_msa_max_u_b", + "max.u.d" => "__builtin_msa_max_u_d", + "max.u.h" => "__builtin_msa_max_u_h", + "max.u.w" => "__builtin_msa_max_u_w", + "maxi.s.b" => "__builtin_msa_maxi_s_b", + "maxi.s.d" => "__builtin_msa_maxi_s_d", + "maxi.s.h" => "__builtin_msa_maxi_s_h", + "maxi.s.w" => "__builtin_msa_maxi_s_w", + "maxi.u.b" => "__builtin_msa_maxi_u_b", + "maxi.u.d" => "__builtin_msa_maxi_u_d", + "maxi.u.h" => "__builtin_msa_maxi_u_h", + "maxi.u.w" => "__builtin_msa_maxi_u_w", + "min.a.b" => "__builtin_msa_min_a_b", + "min.a.d" => "__builtin_msa_min_a_d", + "min.a.h" => "__builtin_msa_min_a_h", + "min.a.w" => "__builtin_msa_min_a_w", + "min.s.b" => "__builtin_msa_min_s_b", + "min.s.d" => "__builtin_msa_min_s_d", + "min.s.h" => "__builtin_msa_min_s_h", + "min.s.w" => "__builtin_msa_min_s_w", + "min.u.b" => "__builtin_msa_min_u_b", + "min.u.d" => "__builtin_msa_min_u_d", + "min.u.h" => "__builtin_msa_min_u_h", + "min.u.w" => "__builtin_msa_min_u_w", + "mini.s.b" => "__builtin_msa_mini_s_b", + "mini.s.d" => "__builtin_msa_mini_s_d", + "mini.s.h" => "__builtin_msa_mini_s_h", + "mini.s.w" => "__builtin_msa_mini_s_w", + "mini.u.b" => "__builtin_msa_mini_u_b", + "mini.u.d" => "__builtin_msa_mini_u_d", + "mini.u.h" => "__builtin_msa_mini_u_h", + "mini.u.w" => "__builtin_msa_mini_u_w", + "mod.s.b" => "__builtin_msa_mod_s_b", + "mod.s.d" => "__builtin_msa_mod_s_d", + "mod.s.h" => "__builtin_msa_mod_s_h", + "mod.s.w" => "__builtin_msa_mod_s_w", + "mod.u.b" => "__builtin_msa_mod_u_b", + "mod.u.d" => "__builtin_msa_mod_u_d", + "mod.u.h" => "__builtin_msa_mod_u_h", + "mod.u.w" => "__builtin_msa_mod_u_w", + "modsub" => "__builtin_mips_modsub", + "move.v" => "__builtin_msa_move_v", + "msub" => "__builtin_mips_msub", + "msub.q.h" => "__builtin_msa_msub_q_h", + "msub.q.w" => "__builtin_msa_msub_q_w", + 
"msubr.q.h" => "__builtin_msa_msubr_q_h", + "msubr.q.w" => "__builtin_msa_msubr_q_w", + "msubu" => "__builtin_mips_msubu", + "msubv.b" => "__builtin_msa_msubv_b", + "msubv.d" => "__builtin_msa_msubv_d", + "msubv.h" => "__builtin_msa_msubv_h", + "msubv.w" => "__builtin_msa_msubv_w", + "mthlip" => "__builtin_mips_mthlip", + "mul.ph" => "__builtin_mips_mul_ph", + "mul.q.h" => "__builtin_msa_mul_q_h", + "mul.q.w" => "__builtin_msa_mul_q_w", + "mul.s.ph" => "__builtin_mips_mul_s_ph", + "muleq.s.w.phl" => "__builtin_mips_muleq_s_w_phl", + "muleq.s.w.phr" => "__builtin_mips_muleq_s_w_phr", + "muleu.s.ph.qbl" => "__builtin_mips_muleu_s_ph_qbl", + "muleu.s.ph.qbr" => "__builtin_mips_muleu_s_ph_qbr", + "mulq.rs.ph" => "__builtin_mips_mulq_rs_ph", + "mulq.rs.w" => "__builtin_mips_mulq_rs_w", + "mulq.s.ph" => "__builtin_mips_mulq_s_ph", + "mulq.s.w" => "__builtin_mips_mulq_s_w", + "mulr.q.h" => "__builtin_msa_mulr_q_h", + "mulr.q.w" => "__builtin_msa_mulr_q_w", + "mulsa.w.ph" => "__builtin_mips_mulsa_w_ph", + "mulsaq.s.w.ph" => "__builtin_mips_mulsaq_s_w_ph", + "mult" => "__builtin_mips_mult", + "multu" => "__builtin_mips_multu", + "mulv.b" => "__builtin_msa_mulv_b", + "mulv.d" => "__builtin_msa_mulv_d", + "mulv.h" => "__builtin_msa_mulv_h", + "mulv.w" => "__builtin_msa_mulv_w", + "nloc.b" => "__builtin_msa_nloc_b", + "nloc.d" => "__builtin_msa_nloc_d", + "nloc.h" => "__builtin_msa_nloc_h", + "nloc.w" => "__builtin_msa_nloc_w", + "nlzc.b" => "__builtin_msa_nlzc_b", + "nlzc.d" => "__builtin_msa_nlzc_d", + "nlzc.h" => "__builtin_msa_nlzc_h", + "nlzc.w" => "__builtin_msa_nlzc_w", + "nor.v" => "__builtin_msa_nor_v", + "nori.b" => "__builtin_msa_nori_b", + "or.v" => "__builtin_msa_or_v", + "ori.b" => "__builtin_msa_ori_b", + "packrl.ph" => "__builtin_mips_packrl_ph", + "pckev.b" => "__builtin_msa_pckev_b", + "pckev.d" => "__builtin_msa_pckev_d", + "pckev.h" => "__builtin_msa_pckev_h", + "pckev.w" => "__builtin_msa_pckev_w", + "pckod.b" => "__builtin_msa_pckod_b", + "pckod.d" => "__builtin_msa_pckod_d", + "pckod.h" => "__builtin_msa_pckod_h", + "pckod.w" => "__builtin_msa_pckod_w", + "pcnt.b" => "__builtin_msa_pcnt_b", + "pcnt.d" => "__builtin_msa_pcnt_d", + "pcnt.h" => "__builtin_msa_pcnt_h", + "pcnt.w" => "__builtin_msa_pcnt_w", + "pick.ph" => "__builtin_mips_pick_ph", + "pick.qb" => "__builtin_mips_pick_qb", + "preceq.w.phl" => "__builtin_mips_preceq_w_phl", + "preceq.w.phr" => "__builtin_mips_preceq_w_phr", + "precequ.ph.qbl" => "__builtin_mips_precequ_ph_qbl", + "precequ.ph.qbla" => "__builtin_mips_precequ_ph_qbla", + "precequ.ph.qbr" => "__builtin_mips_precequ_ph_qbr", + "precequ.ph.qbra" => "__builtin_mips_precequ_ph_qbra", + "preceu.ph.qbl" => "__builtin_mips_preceu_ph_qbl", + "preceu.ph.qbla" => "__builtin_mips_preceu_ph_qbla", + "preceu.ph.qbr" => "__builtin_mips_preceu_ph_qbr", + "preceu.ph.qbra" => "__builtin_mips_preceu_ph_qbra", + "precr.qb.ph" => "__builtin_mips_precr_qb_ph", + "precr.sra.ph.w" => "__builtin_mips_precr_sra_ph_w", + "precr.sra.r.ph.w" => "__builtin_mips_precr_sra_r_ph_w", + "precrq.ph.w" => "__builtin_mips_precrq_ph_w", + "precrq.qb.ph" => "__builtin_mips_precrq_qb_ph", + "precrq.rs.ph.w" => "__builtin_mips_precrq_rs_ph_w", + "precrqu.s.qb.ph" => "__builtin_mips_precrqu_s_qb_ph", + "prepend" => "__builtin_mips_prepend", + "raddu.w.qb" => "__builtin_mips_raddu_w_qb", + "rddsp" => "__builtin_mips_rddsp", + "repl.ph" => "__builtin_mips_repl_ph", + "repl.qb" => "__builtin_mips_repl_qb", + "sat.s.b" => "__builtin_msa_sat_s_b", + "sat.s.d" => "__builtin_msa_sat_s_d", + "sat.s.h" => 
"__builtin_msa_sat_s_h", + "sat.s.w" => "__builtin_msa_sat_s_w", + "sat.u.b" => "__builtin_msa_sat_u_b", + "sat.u.d" => "__builtin_msa_sat_u_d", + "sat.u.h" => "__builtin_msa_sat_u_h", + "sat.u.w" => "__builtin_msa_sat_u_w", + "shf.b" => "__builtin_msa_shf_b", + "shf.h" => "__builtin_msa_shf_h", + "shf.w" => "__builtin_msa_shf_w", + "shilo" => "__builtin_mips_shilo", + "shll.ph" => "__builtin_mips_shll_ph", + "shll.qb" => "__builtin_mips_shll_qb", + "shll.s.ph" => "__builtin_mips_shll_s_ph", + "shll.s.w" => "__builtin_mips_shll_s_w", + "shra.ph" => "__builtin_mips_shra_ph", + "shra.qb" => "__builtin_mips_shra_qb", + "shra.r.ph" => "__builtin_mips_shra_r_ph", + "shra.r.qb" => "__builtin_mips_shra_r_qb", + "shra.r.w" => "__builtin_mips_shra_r_w", + "shrl.ph" => "__builtin_mips_shrl_ph", + "shrl.qb" => "__builtin_mips_shrl_qb", + "sld.b" => "__builtin_msa_sld_b", + "sld.d" => "__builtin_msa_sld_d", + "sld.h" => "__builtin_msa_sld_h", + "sld.w" => "__builtin_msa_sld_w", + "sldi.b" => "__builtin_msa_sldi_b", + "sldi.d" => "__builtin_msa_sldi_d", + "sldi.h" => "__builtin_msa_sldi_h", + "sldi.w" => "__builtin_msa_sldi_w", + "sll.b" => "__builtin_msa_sll_b", + "sll.d" => "__builtin_msa_sll_d", + "sll.h" => "__builtin_msa_sll_h", + "sll.w" => "__builtin_msa_sll_w", + "slli.b" => "__builtin_msa_slli_b", + "slli.d" => "__builtin_msa_slli_d", + "slli.h" => "__builtin_msa_slli_h", + "slli.w" => "__builtin_msa_slli_w", + "splat.b" => "__builtin_msa_splat_b", + "splat.d" => "__builtin_msa_splat_d", + "splat.h" => "__builtin_msa_splat_h", + "splat.w" => "__builtin_msa_splat_w", + "splati.b" => "__builtin_msa_splati_b", + "splati.d" => "__builtin_msa_splati_d", + "splati.h" => "__builtin_msa_splati_h", + "splati.w" => "__builtin_msa_splati_w", + "sra.b" => "__builtin_msa_sra_b", + "sra.d" => "__builtin_msa_sra_d", + "sra.h" => "__builtin_msa_sra_h", + "sra.w" => "__builtin_msa_sra_w", + "srai.b" => "__builtin_msa_srai_b", + "srai.d" => "__builtin_msa_srai_d", + "srai.h" => "__builtin_msa_srai_h", + "srai.w" => "__builtin_msa_srai_w", + "srar.b" => "__builtin_msa_srar_b", + "srar.d" => "__builtin_msa_srar_d", + "srar.h" => "__builtin_msa_srar_h", + "srar.w" => "__builtin_msa_srar_w", + "srari.b" => "__builtin_msa_srari_b", + "srari.d" => "__builtin_msa_srari_d", + "srari.h" => "__builtin_msa_srari_h", + "srari.w" => "__builtin_msa_srari_w", + "srl.b" => "__builtin_msa_srl_b", + "srl.d" => "__builtin_msa_srl_d", + "srl.h" => "__builtin_msa_srl_h", + "srl.w" => "__builtin_msa_srl_w", + "srli.b" => "__builtin_msa_srli_b", + "srli.d" => "__builtin_msa_srli_d", + "srli.h" => "__builtin_msa_srli_h", + "srli.w" => "__builtin_msa_srli_w", + "srlr.b" => "__builtin_msa_srlr_b", + "srlr.d" => "__builtin_msa_srlr_d", + "srlr.h" => "__builtin_msa_srlr_h", + "srlr.w" => "__builtin_msa_srlr_w", + "srlri.b" => "__builtin_msa_srlri_b", + "srlri.d" => "__builtin_msa_srlri_d", + "srlri.h" => "__builtin_msa_srlri_h", + "srlri.w" => "__builtin_msa_srlri_w", + "st.b" => "__builtin_msa_st_b", + "st.d" => "__builtin_msa_st_d", + "st.h" => "__builtin_msa_st_h", + "st.w" => "__builtin_msa_st_w", + "str.d" => "__builtin_msa_str_d", + "str.w" => "__builtin_msa_str_w", + "subq.ph" => "__builtin_mips_subq_ph", + "subq.s.ph" => "__builtin_mips_subq_s_ph", + "subq.s.w" => "__builtin_mips_subq_s_w", + "subqh.ph" => "__builtin_mips_subqh_ph", + "subqh.r.ph" => "__builtin_mips_subqh_r_ph", + "subqh.r.w" => "__builtin_mips_subqh_r_w", + "subqh.w" => "__builtin_mips_subqh_w", + "subs.s.b" => "__builtin_msa_subs_s_b", + "subs.s.d" => 
"__builtin_msa_subs_s_d", + "subs.s.h" => "__builtin_msa_subs_s_h", + "subs.s.w" => "__builtin_msa_subs_s_w", + "subs.u.b" => "__builtin_msa_subs_u_b", + "subs.u.d" => "__builtin_msa_subs_u_d", + "subs.u.h" => "__builtin_msa_subs_u_h", + "subs.u.w" => "__builtin_msa_subs_u_w", + "subsus.u.b" => "__builtin_msa_subsus_u_b", + "subsus.u.d" => "__builtin_msa_subsus_u_d", + "subsus.u.h" => "__builtin_msa_subsus_u_h", + "subsus.u.w" => "__builtin_msa_subsus_u_w", + "subsuu.s.b" => "__builtin_msa_subsuu_s_b", + "subsuu.s.d" => "__builtin_msa_subsuu_s_d", + "subsuu.s.h" => "__builtin_msa_subsuu_s_h", + "subsuu.s.w" => "__builtin_msa_subsuu_s_w", + "subu.ph" => "__builtin_mips_subu_ph", + "subu.qb" => "__builtin_mips_subu_qb", + "subu.s.ph" => "__builtin_mips_subu_s_ph", + "subu.s.qb" => "__builtin_mips_subu_s_qb", + "subuh.qb" => "__builtin_mips_subuh_qb", + "subuh.r.qb" => "__builtin_mips_subuh_r_qb", + "subv.b" => "__builtin_msa_subv_b", + "subv.d" => "__builtin_msa_subv_d", + "subv.h" => "__builtin_msa_subv_h", + "subv.w" => "__builtin_msa_subv_w", + "subvi.b" => "__builtin_msa_subvi_b", + "subvi.d" => "__builtin_msa_subvi_d", + "subvi.h" => "__builtin_msa_subvi_h", + "subvi.w" => "__builtin_msa_subvi_w", + "vshf.b" => "__builtin_msa_vshf_b", + "vshf.d" => "__builtin_msa_vshf_d", + "vshf.h" => "__builtin_msa_vshf_h", + "vshf.w" => "__builtin_msa_vshf_w", + "wrdsp" => "__builtin_mips_wrdsp", + "xor.v" => "__builtin_msa_xor_v", + "xori.b" => "__builtin_msa_xori_b", + _ => unimplemented!("***** unsupported LLVM intrinsic {}", name), + } + } + mips(name) + } + "nvvm" => { + #[allow(non_snake_case)] + fn nvvm(name: &str) -> &str { + match name { + // nvvm + "abs.i" => "__nvvm_abs_i", + "abs.ll" => "__nvvm_abs_ll", + "activemask" => "__nvvm_activemask", + "add.rm.d" => "__nvvm_add_rm_d", + "add.rm.f" => "__nvvm_add_rm_f", + "add.rm.ftz.f" => "__nvvm_add_rm_ftz_f", + "add.rn.d" => "__nvvm_add_rn_d", + "add.rn.f" => "__nvvm_add_rn_f", + "add.rn.ftz.f" => "__nvvm_add_rn_ftz_f", + "add.rp.d" => "__nvvm_add_rp_d", + "add.rp.f" => "__nvvm_add_rp_f", + "add.rp.ftz.f" => "__nvvm_add_rp_ftz_f", + "add.rz.d" => "__nvvm_add_rz_d", + "add.rz.f" => "__nvvm_add_rz_f", + "add.rz.ftz.f" => "__nvvm_add_rz_ftz_f", + "bar.sync" => "__nvvm_bar_sync", + "bar.warp.sync" => "__nvvm_bar_warp_sync", + "barrier0" => "__nvvm_bar0", + // [DUPLICATE]: "barrier0" => "__syncthreads", + "barrier0.and" => "__nvvm_bar0_and", + "barrier0.or" => "__nvvm_bar0_or", + "barrier0.popc" => "__nvvm_bar0_popc", + "bf16x2.to.ue8m0x2.rp" => "__nvvm_bf16x2_to_ue8m0x2_rp", + "bf16x2.to.ue8m0x2.rp.satfinite" => "__nvvm_bf16x2_to_ue8m0x2_rp_satfinite", + "bf16x2.to.ue8m0x2.rz" => "__nvvm_bf16x2_to_ue8m0x2_rz", + "bf16x2.to.ue8m0x2.rz.satfinite" => "__nvvm_bf16x2_to_ue8m0x2_rz_satfinite", + "bf2h.rn" => "__nvvm_bf2h_rn", + "bf2h.rn.ftz" => "__nvvm_bf2h_rn_ftz", + "bitcast.d2ll" => "__nvvm_bitcast_d2ll", + "bitcast.f2i" => "__nvvm_bitcast_f2i", + "bitcast.i2f" => "__nvvm_bitcast_i2f", + "bitcast.ll2d" => "__nvvm_bitcast_ll2d", + "brev32" => "__nvvm_brev32", + "brev64" => "__nvvm_brev64", + "ceil.d" => "__nvvm_ceil_d", + "ceil.f" => "__nvvm_ceil_f", + "ceil.ftz.f" => "__nvvm_ceil_ftz_f", + "clz.i" => "__nvvm_clz_i", + "clz.ll" => "__nvvm_clz_ll", + "cos.approx.f" => "__nvvm_cos_approx_f", + "cos.approx.ftz.f" => "__nvvm_cos_approx_ftz_f", + "cp.async.commit.group" => "__nvvm_cp_async_commit_group", + "cp.async.mbarrier.arrive" => "__nvvm_cp_async_mbarrier_arrive", + "cp.async.mbarrier.arrive.noinc" => "__nvvm_cp_async_mbarrier_arrive_noinc", + 
"cp.async.mbarrier.arrive.noinc.shared" => { + "__nvvm_cp_async_mbarrier_arrive_noinc_shared" + } + "cp.async.mbarrier.arrive.shared" => "__nvvm_cp_async_mbarrier_arrive_shared", + "cp.async.wait.all" => "__nvvm_cp_async_wait_all", + "cp.async.wait.group" => "__nvvm_cp_async_wait_group", + "d2f.rm" => "__nvvm_d2f_rm", + "d2f.rm.ftz" => "__nvvm_d2f_rm_ftz", + "d2f.rn" => "__nvvm_d2f_rn", + "d2f.rn.ftz" => "__nvvm_d2f_rn_ftz", + "d2f.rp" => "__nvvm_d2f_rp", + "d2f.rp.ftz" => "__nvvm_d2f_rp_ftz", + "d2f.rz" => "__nvvm_d2f_rz", + "d2f.rz.ftz" => "__nvvm_d2f_rz_ftz", + "d2i.hi" => "__nvvm_d2i_hi", + "d2i.lo" => "__nvvm_d2i_lo", + "d2i.rm" => "__nvvm_d2i_rm", + "d2i.rn" => "__nvvm_d2i_rn", + "d2i.rp" => "__nvvm_d2i_rp", + "d2i.rz" => "__nvvm_d2i_rz", + "d2ll.rm" => "__nvvm_d2ll_rm", + "d2ll.rn" => "__nvvm_d2ll_rn", + "d2ll.rp" => "__nvvm_d2ll_rp", + "d2ll.rz" => "__nvvm_d2ll_rz", + "d2ui.rm" => "__nvvm_d2ui_rm", + "d2ui.rn" => "__nvvm_d2ui_rn", + "d2ui.rp" => "__nvvm_d2ui_rp", + "d2ui.rz" => "__nvvm_d2ui_rz", + "d2ull.rm" => "__nvvm_d2ull_rm", + "d2ull.rn" => "__nvvm_d2ull_rn", + "d2ull.rp" => "__nvvm_d2ull_rp", + "d2ull.rz" => "__nvvm_d2ull_rz", + "div.approx.f" => "__nvvm_div_approx_f", + "div.approx.ftz.f" => "__nvvm_div_approx_ftz_f", + "div.full" => "__nvvm_div_full", + "div.full.ftz" => "__nvvm_div_full_ftz", + "div.rm.d" => "__nvvm_div_rm_d", + "div.rm.f" => "__nvvm_div_rm_f", + "div.rm.ftz.f" => "__nvvm_div_rm_ftz_f", + "div.rn.d" => "__nvvm_div_rn_d", + "div.rn.f" => "__nvvm_div_rn_f", + "div.rn.ftz.f" => "__nvvm_div_rn_ftz_f", + "div.rp.d" => "__nvvm_div_rp_d", + "div.rp.f" => "__nvvm_div_rp_f", + "div.rp.ftz.f" => "__nvvm_div_rp_ftz_f", + "div.rz.d" => "__nvvm_div_rz_d", + "div.rz.f" => "__nvvm_div_rz_f", + "div.rz.ftz.f" => "__nvvm_div_rz_ftz_f", + "e2m1x2.to.f16x2.rn" => "__nvvm_e2m1x2_to_f16x2_rn", + "e2m1x2.to.f16x2.rn.relu" => "__nvvm_e2m1x2_to_f16x2_rn_relu", + "e2m3x2.to.f16x2.rn" => "__nvvm_e2m3x2_to_f16x2_rn", + "e2m3x2.to.f16x2.rn.relu" => "__nvvm_e2m3x2_to_f16x2_rn_relu", + "e3m2x2.to.f16x2.rn" => "__nvvm_e3m2x2_to_f16x2_rn", + "e3m2x2.to.f16x2.rn.relu" => "__nvvm_e3m2x2_to_f16x2_rn_relu", + "e4m3x2.to.f16x2.rn" => "__nvvm_e4m3x2_to_f16x2_rn", + "e4m3x2.to.f16x2.rn.relu" => "__nvvm_e4m3x2_to_f16x2_rn_relu", + "e5m2x2.to.f16x2.rn" => "__nvvm_e5m2x2_to_f16x2_rn", + "e5m2x2.to.f16x2.rn.relu" => "__nvvm_e5m2x2_to_f16x2_rn_relu", + "ex2.approx.d" => "__nvvm_ex2_approx_d", + "ex2.approx.f" => "__nvvm_ex2_approx_f", + "ex2.approx.ftz.f" => "__nvvm_ex2_approx_ftz_f", + "exit" => "__nvvm_exit", + "f16x2.to.e4m3x2.rn" => "__nvvm_f16x2_to_e4m3x2_rn", + "f16x2.to.e4m3x2.rn.relu" => "__nvvm_f16x2_to_e4m3x2_rn_relu", + "f16x2.to.e5m2x2.rn" => "__nvvm_f16x2_to_e5m2x2_rn", + "f16x2.to.e5m2x2.rn.relu" => "__nvvm_f16x2_to_e5m2x2_rn_relu", + "f2bf16.rn" => "__nvvm_f2bf16_rn", + "f2bf16.rn.relu" => "__nvvm_f2bf16_rn_relu", + "f2bf16.rz" => "__nvvm_f2bf16_rz", + "f2bf16.rz.relu" => "__nvvm_f2bf16_rz_relu", + "f2h.rn" => "__nvvm_f2h_rn", + "f2h.rn.ftz" => "__nvvm_f2h_rn_ftz", + "f2i.rm" => "__nvvm_f2i_rm", + "f2i.rm.ftz" => "__nvvm_f2i_rm_ftz", + "f2i.rn" => "__nvvm_f2i_rn", + "f2i.rn.ftz" => "__nvvm_f2i_rn_ftz", + "f2i.rp" => "__nvvm_f2i_rp", + "f2i.rp.ftz" => "__nvvm_f2i_rp_ftz", + "f2i.rz" => "__nvvm_f2i_rz", + "f2i.rz.ftz" => "__nvvm_f2i_rz_ftz", + "f2ll.rm" => "__nvvm_f2ll_rm", + "f2ll.rm.ftz" => "__nvvm_f2ll_rm_ftz", + "f2ll.rn" => "__nvvm_f2ll_rn", + "f2ll.rn.ftz" => "__nvvm_f2ll_rn_ftz", + "f2ll.rp" => "__nvvm_f2ll_rp", + "f2ll.rp.ftz" => "__nvvm_f2ll_rp_ftz", + "f2ll.rz" => 
"__nvvm_f2ll_rz", + "f2ll.rz.ftz" => "__nvvm_f2ll_rz_ftz", + "f2tf32.rn" => "__nvvm_f2tf32_rn", + "f2tf32.rn.relu" => "__nvvm_f2tf32_rn_relu", + "f2tf32.rn.relu.satfinite" => "__nvvm_f2tf32_rn_relu_satfinite", + "f2tf32.rn.satfinite" => "__nvvm_f2tf32_rn_satfinite", + "f2tf32.rna" => "__nvvm_f2tf32_rna", + "f2tf32.rna.satfinite" => "__nvvm_f2tf32_rna_satfinite", + "f2tf32.rz" => "__nvvm_f2tf32_rz", + "f2tf32.rz.relu" => "__nvvm_f2tf32_rz_relu", + "f2tf32.rz.relu.satfinite" => "__nvvm_f2tf32_rz_relu_satfinite", + "f2tf32.rz.satfinite" => "__nvvm_f2tf32_rz_satfinite", + "f2ui.rm" => "__nvvm_f2ui_rm", + "f2ui.rm.ftz" => "__nvvm_f2ui_rm_ftz", + "f2ui.rn" => "__nvvm_f2ui_rn", + "f2ui.rn.ftz" => "__nvvm_f2ui_rn_ftz", + "f2ui.rp" => "__nvvm_f2ui_rp", + "f2ui.rp.ftz" => "__nvvm_f2ui_rp_ftz", + "f2ui.rz" => "__nvvm_f2ui_rz", + "f2ui.rz.ftz" => "__nvvm_f2ui_rz_ftz", + "f2ull.rm" => "__nvvm_f2ull_rm", + "f2ull.rm.ftz" => "__nvvm_f2ull_rm_ftz", + "f2ull.rn" => "__nvvm_f2ull_rn", + "f2ull.rn.ftz" => "__nvvm_f2ull_rn_ftz", + "f2ull.rp" => "__nvvm_f2ull_rp", + "f2ull.rp.ftz" => "__nvvm_f2ull_rp_ftz", + "f2ull.rz" => "__nvvm_f2ull_rz", + "f2ull.rz.ftz" => "__nvvm_f2ull_rz_ftz", + "fabs.d" => "__nvvm_fabs_d", + "fabs.f" => "__nvvm_fabs_f", + "fabs.ftz.f" => "__nvvm_fabs_ftz_f", + "ff.to.e2m1x2.rn.relu.satfinite" => "__nvvm_ff_to_e2m1x2_rn_relu_satfinite", + "ff.to.e2m1x2.rn.satfinite" => "__nvvm_ff_to_e2m1x2_rn_satfinite", + "ff.to.e2m3x2.rn.relu.satfinite" => "__nvvm_ff_to_e2m3x2_rn_relu_satfinite", + "ff.to.e2m3x2.rn.satfinite" => "__nvvm_ff_to_e2m3x2_rn_satfinite", + "ff.to.e3m2x2.rn.relu.satfinite" => "__nvvm_ff_to_e3m2x2_rn_relu_satfinite", + "ff.to.e3m2x2.rn.satfinite" => "__nvvm_ff_to_e3m2x2_rn_satfinite", + "ff.to.e4m3x2.rn" => "__nvvm_ff_to_e4m3x2_rn", + "ff.to.e4m3x2.rn.relu" => "__nvvm_ff_to_e4m3x2_rn_relu", + "ff.to.e5m2x2.rn" => "__nvvm_ff_to_e5m2x2_rn", + "ff.to.e5m2x2.rn.relu" => "__nvvm_ff_to_e5m2x2_rn_relu", + "ff.to.ue8m0x2.rp" => "__nvvm_ff_to_ue8m0x2_rp", + "ff.to.ue8m0x2.rp.satfinite" => "__nvvm_ff_to_ue8m0x2_rp_satfinite", + "ff.to.ue8m0x2.rz" => "__nvvm_ff_to_ue8m0x2_rz", + "ff.to.ue8m0x2.rz.satfinite" => "__nvvm_ff_to_ue8m0x2_rz_satfinite", + "ff2bf16x2.rn" => "__nvvm_ff2bf16x2_rn", + "ff2bf16x2.rn.relu" => "__nvvm_ff2bf16x2_rn_relu", + "ff2bf16x2.rz" => "__nvvm_ff2bf16x2_rz", + "ff2bf16x2.rz.relu" => "__nvvm_ff2bf16x2_rz_relu", + "ff2f16x2.rn" => "__nvvm_ff2f16x2_rn", + "ff2f16x2.rn.relu" => "__nvvm_ff2f16x2_rn_relu", + "ff2f16x2.rz" => "__nvvm_ff2f16x2_rz", + "ff2f16x2.rz.relu" => "__nvvm_ff2f16x2_rz_relu", + "floor.d" => "__nvvm_floor_d", + "floor.f" => "__nvvm_floor_f", + "floor.ftz.f" => "__nvvm_floor_ftz_f", + "fma.rm.d" => "__nvvm_fma_rm_d", + "fma.rm.f" => "__nvvm_fma_rm_f", + "fma.rm.ftz.f" => "__nvvm_fma_rm_ftz_f", + "fma.rn.bf16" => "__nvvm_fma_rn_bf16", + "fma.rn.bf16x2" => "__nvvm_fma_rn_bf16x2", + "fma.rn.d" => "__nvvm_fma_rn_d", + "fma.rn.f" => "__nvvm_fma_rn_f", + "fma.rn.ftz.bf16" => "__nvvm_fma_rn_ftz_bf16", + "fma.rn.ftz.bf16x2" => "__nvvm_fma_rn_ftz_bf16x2", + "fma.rn.ftz.f" => "__nvvm_fma_rn_ftz_f", + "fma.rn.ftz.relu.bf16" => "__nvvm_fma_rn_ftz_relu_bf16", + "fma.rn.ftz.relu.bf16x2" => "__nvvm_fma_rn_ftz_relu_bf16x2", + "fma.rn.ftz.sat.bf16" => "__nvvm_fma_rn_ftz_sat_bf16", + "fma.rn.ftz.sat.bf16x2" => "__nvvm_fma_rn_ftz_sat_bf16x2", + "fma.rn.relu.bf16" => "__nvvm_fma_rn_relu_bf16", + "fma.rn.relu.bf16x2" => "__nvvm_fma_rn_relu_bf16x2", + "fma.rn.sat.bf16" => "__nvvm_fma_rn_sat_bf16", + "fma.rn.sat.bf16x2" => "__nvvm_fma_rn_sat_bf16x2", + "fma.rp.d" => 
"__nvvm_fma_rp_d", + "fma.rp.f" => "__nvvm_fma_rp_f", + "fma.rp.ftz.f" => "__nvvm_fma_rp_ftz_f", + "fma.rz.d" => "__nvvm_fma_rz_d", + "fma.rz.f" => "__nvvm_fma_rz_f", + "fma.rz.ftz.f" => "__nvvm_fma_rz_ftz_f", + "fmax.bf16" => "__nvvm_fmax_bf16", + "fmax.bf16x2" => "__nvvm_fmax_bf16x2", + "fmax.d" => "__nvvm_fmax_d", + "fmax.f" => "__nvvm_fmax_f", + "fmax.ftz.bf16" => "__nvvm_fmax_ftz_bf16", + "fmax.ftz.bf16x2" => "__nvvm_fmax_ftz_bf16x2", + "fmax.ftz.f" => "__nvvm_fmax_ftz_f", + "fmax.ftz.nan.bf16" => "__nvvm_fmax_ftz_nan_bf16", + "fmax.ftz.nan.bf16x2" => "__nvvm_fmax_ftz_nan_bf16x2", + "fmax.ftz.nan.f" => "__nvvm_fmax_ftz_nan_f", + "fmax.ftz.nan.xorsign.abs.bf16" => "__nvvm_fmax_ftz_nan_xorsign_abs_bf16", + "fmax.ftz.nan.xorsign.abs.bf16x2" => "__nvvm_fmax_ftz_nan_xorsign_abs_bf16x2", + "fmax.ftz.nan.xorsign.abs.f" => "__nvvm_fmax_ftz_nan_xorsign_abs_f", + "fmax.ftz.xorsign.abs.bf16" => "__nvvm_fmax_ftz_xorsign_abs_bf16", + "fmax.ftz.xorsign.abs.bf16x2" => "__nvvm_fmax_ftz_xorsign_abs_bf16x2", + "fmax.ftz.xorsign.abs.f" => "__nvvm_fmax_ftz_xorsign_abs_f", + "fmax.nan.bf16" => "__nvvm_fmax_nan_bf16", + "fmax.nan.bf16x2" => "__nvvm_fmax_nan_bf16x2", + "fmax.nan.f" => "__nvvm_fmax_nan_f", + "fmax.nan.xorsign.abs.bf16" => "__nvvm_fmax_nan_xorsign_abs_bf16", + "fmax.nan.xorsign.abs.bf16x2" => "__nvvm_fmax_nan_xorsign_abs_bf16x2", + "fmax.nan.xorsign.abs.f" => "__nvvm_fmax_nan_xorsign_abs_f", + "fmax.xorsign.abs.bf16" => "__nvvm_fmax_xorsign_abs_bf16", + "fmax.xorsign.abs.bf16x2" => "__nvvm_fmax_xorsign_abs_bf16x2", + "fmax.xorsign.abs.f" => "__nvvm_fmax_xorsign_abs_f", + "fmin.bf16" => "__nvvm_fmin_bf16", + "fmin.bf16x2" => "__nvvm_fmin_bf16x2", + "fmin.d" => "__nvvm_fmin_d", + "fmin.f" => "__nvvm_fmin_f", + "fmin.ftz.bf16" => "__nvvm_fmin_ftz_bf16", + "fmin.ftz.bf16x2" => "__nvvm_fmin_ftz_bf16x2", + "fmin.ftz.f" => "__nvvm_fmin_ftz_f", + "fmin.ftz.nan.bf16" => "__nvvm_fmin_ftz_nan_bf16", + "fmin.ftz.nan.bf16x2" => "__nvvm_fmin_ftz_nan_bf16x2", + "fmin.ftz.nan.f" => "__nvvm_fmin_ftz_nan_f", + "fmin.ftz.nan.xorsign.abs.bf16" => "__nvvm_fmin_ftz_nan_xorsign_abs_bf16", + "fmin.ftz.nan.xorsign.abs.bf16x2" => "__nvvm_fmin_ftz_nan_xorsign_abs_bf16x2", + "fmin.ftz.nan.xorsign.abs.f" => "__nvvm_fmin_ftz_nan_xorsign_abs_f", + "fmin.ftz.xorsign.abs.bf16" => "__nvvm_fmin_ftz_xorsign_abs_bf16", + "fmin.ftz.xorsign.abs.bf16x2" => "__nvvm_fmin_ftz_xorsign_abs_bf16x2", + "fmin.ftz.xorsign.abs.f" => "__nvvm_fmin_ftz_xorsign_abs_f", + "fmin.nan.bf16" => "__nvvm_fmin_nan_bf16", + "fmin.nan.bf16x2" => "__nvvm_fmin_nan_bf16x2", + "fmin.nan.f" => "__nvvm_fmin_nan_f", + "fmin.nan.xorsign.abs.bf16" => "__nvvm_fmin_nan_xorsign_abs_bf16", + "fmin.nan.xorsign.abs.bf16x2" => "__nvvm_fmin_nan_xorsign_abs_bf16x2", + "fmin.nan.xorsign.abs.f" => "__nvvm_fmin_nan_xorsign_abs_f", + "fmin.xorsign.abs.bf16" => "__nvvm_fmin_xorsign_abs_bf16", + "fmin.xorsign.abs.bf16x2" => "__nvvm_fmin_xorsign_abs_bf16x2", + "fmin.xorsign.abs.f" => "__nvvm_fmin_xorsign_abs_f", + "fns" => "__nvvm_fns", + "h2f" => "__nvvm_h2f", + "i2d.rm" => "__nvvm_i2d_rm", + "i2d.rn" => "__nvvm_i2d_rn", + "i2d.rp" => "__nvvm_i2d_rp", + "i2d.rz" => "__nvvm_i2d_rz", + "i2f.rm" => "__nvvm_i2f_rm", + "i2f.rn" => "__nvvm_i2f_rn", + "i2f.rp" => "__nvvm_i2f_rp", + "i2f.rz" => "__nvvm_i2f_rz", + "isspacep.const" => "__nvvm_isspacep_const", + "isspacep.global" => "__nvvm_isspacep_global", + "isspacep.local" => "__nvvm_isspacep_local", + "isspacep.shared" => "__nvvm_isspacep_shared", + "isspacep.shared.cluster" => "__nvvm_isspacep_shared_cluster", + "istypep.sampler" => 
"__nvvm_istypep_sampler", + "istypep.surface" => "__nvvm_istypep_surface", + "istypep.texture" => "__nvvm_istypep_texture", + "lg2.approx.d" => "__nvvm_lg2_approx_d", + "lg2.approx.f" => "__nvvm_lg2_approx_f", + "lg2.approx.ftz.f" => "__nvvm_lg2_approx_ftz_f", + "ll2d.rm" => "__nvvm_ll2d_rm", + "ll2d.rn" => "__nvvm_ll2d_rn", + "ll2d.rp" => "__nvvm_ll2d_rp", + "ll2d.rz" => "__nvvm_ll2d_rz", + "ll2f.rm" => "__nvvm_ll2f_rm", + "ll2f.rn" => "__nvvm_ll2f_rn", + "ll2f.rp" => "__nvvm_ll2f_rp", + "ll2f.rz" => "__nvvm_ll2f_rz", + "lohi.i2d" => "__nvvm_lohi_i2d", + "match.any.sync.i32" => "__nvvm_match_any_sync_i32", + "match.any.sync.i64" => "__nvvm_match_any_sync_i64", + "max.i" => "__nvvm_max_i", + "max.ll" => "__nvvm_max_ll", + "max.ui" => "__nvvm_max_ui", + "max.ull" => "__nvvm_max_ull", + "mbarrier.arrive" => "__nvvm_mbarrier_arrive", + "mbarrier.arrive.drop" => "__nvvm_mbarrier_arrive_drop", + "mbarrier.arrive.drop.noComplete" => "__nvvm_mbarrier_arrive_drop_noComplete", + "mbarrier.arrive.drop.noComplete.shared" => { + "__nvvm_mbarrier_arrive_drop_noComplete_shared" + } + "mbarrier.arrive.drop.shared" => "__nvvm_mbarrier_arrive_drop_shared", + "mbarrier.arrive.noComplete" => "__nvvm_mbarrier_arrive_noComplete", + "mbarrier.arrive.noComplete.shared" => { + "__nvvm_mbarrier_arrive_noComplete_shared" + } + "mbarrier.arrive.shared" => "__nvvm_mbarrier_arrive_shared", + "mbarrier.init" => "__nvvm_mbarrier_init", + "mbarrier.init.shared" => "__nvvm_mbarrier_init_shared", + "mbarrier.inval" => "__nvvm_mbarrier_inval", + "mbarrier.inval.shared" => "__nvvm_mbarrier_inval_shared", + "mbarrier.pending.count" => "__nvvm_mbarrier_pending_count", + "mbarrier.test.wait" => "__nvvm_mbarrier_test_wait", + "mbarrier.test.wait.shared" => "__nvvm_mbarrier_test_wait_shared", + "membar.cta" => "__nvvm_membar_cta", + "membar.gl" => "__nvvm_membar_gl", + "membar.sys" => "__nvvm_membar_sys", + "min.i" => "__nvvm_min_i", + "min.ll" => "__nvvm_min_ll", + "min.ui" => "__nvvm_min_ui", + "min.ull" => "__nvvm_min_ull", + "mul.rm.d" => "__nvvm_mul_rm_d", + "mul.rm.f" => "__nvvm_mul_rm_f", + "mul.rm.ftz.f" => "__nvvm_mul_rm_ftz_f", + "mul.rn.d" => "__nvvm_mul_rn_d", + "mul.rn.f" => "__nvvm_mul_rn_f", + "mul.rn.ftz.f" => "__nvvm_mul_rn_ftz_f", + "mul.rp.d" => "__nvvm_mul_rp_d", + "mul.rp.f" => "__nvvm_mul_rp_f", + "mul.rp.ftz.f" => "__nvvm_mul_rp_ftz_f", + "mul.rz.d" => "__nvvm_mul_rz_d", + "mul.rz.f" => "__nvvm_mul_rz_f", + "mul.rz.ftz.f" => "__nvvm_mul_rz_ftz_f", + "mul24.i" => "__nvvm_mul24_i", + "mul24.ui" => "__nvvm_mul24_ui", + "mulhi.i" => "__nvvm_mulhi_i", + "mulhi.ll" => "__nvvm_mulhi_ll", + "mulhi.s" => "__nvvm_mulhi_s", + "mulhi.ui" => "__nvvm_mulhi_ui", + "mulhi.ull" => "__nvvm_mulhi_ull", + "mulhi.us" => "__nvvm_mulhi_us", + "nanosleep" => "__nvvm_nanosleep", + "neg.bf16" => "__nvvm_neg_bf16", + "neg.bf16x2" => "__nvvm_neg_bf16x2", + "popc.i" => "__nvvm_popc_i", + "popc.ll" => "__nvvm_popc_ll", + "prmt" => "__nvvm_prmt", + "rcp.approx.ftz.d" => "__nvvm_rcp_approx_ftz_d", + "rcp.approx.ftz.f" => "__nvvm_rcp_approx_ftz_f", + "rcp.rm.d" => "__nvvm_rcp_rm_d", + "rcp.rm.f" => "__nvvm_rcp_rm_f", + "rcp.rm.ftz.f" => "__nvvm_rcp_rm_ftz_f", + "rcp.rn.d" => "__nvvm_rcp_rn_d", + "rcp.rn.f" => "__nvvm_rcp_rn_f", + "rcp.rn.ftz.f" => "__nvvm_rcp_rn_ftz_f", + "rcp.rp.d" => "__nvvm_rcp_rp_d", + "rcp.rp.f" => "__nvvm_rcp_rp_f", + "rcp.rp.ftz.f" => "__nvvm_rcp_rp_ftz_f", + "rcp.rz.d" => "__nvvm_rcp_rz_d", + "rcp.rz.f" => "__nvvm_rcp_rz_f", + "rcp.rz.ftz.f" => "__nvvm_rcp_rz_ftz_f", + "read.ptx.sreg.clock" => 
"__nvvm_read_ptx_sreg_clock", + // [DUPLICATE]: "read.ptx.sreg.clock" => "__nvvm_read_ptx_sreg_", + "read.ptx.sreg.clock64" => "__nvvm_read_ptx_sreg_clock64", + // [DUPLICATE]: "read.ptx.sreg.clock64" => "__nvvm_read_ptx_sreg_", + "read.ptx.sreg.ctaid.w" => "__nvvm_read_ptx_sreg_ctaid_w", + "read.ptx.sreg.ctaid.x" => "__nvvm_read_ptx_sreg_ctaid_x", + "read.ptx.sreg.ctaid.y" => "__nvvm_read_ptx_sreg_ctaid_y", + "read.ptx.sreg.ctaid.z" => "__nvvm_read_ptx_sreg_ctaid_z", + "read.ptx.sreg.envreg0" => "__nvvm_read_ptx_sreg_envreg0", + "read.ptx.sreg.envreg1" => "__nvvm_read_ptx_sreg_envreg1", + "read.ptx.sreg.envreg10" => "__nvvm_read_ptx_sreg_envreg10", + "read.ptx.sreg.envreg11" => "__nvvm_read_ptx_sreg_envreg11", + "read.ptx.sreg.envreg12" => "__nvvm_read_ptx_sreg_envreg12", + "read.ptx.sreg.envreg13" => "__nvvm_read_ptx_sreg_envreg13", + "read.ptx.sreg.envreg14" => "__nvvm_read_ptx_sreg_envreg14", + "read.ptx.sreg.envreg15" => "__nvvm_read_ptx_sreg_envreg15", + "read.ptx.sreg.envreg16" => "__nvvm_read_ptx_sreg_envreg16", + "read.ptx.sreg.envreg17" => "__nvvm_read_ptx_sreg_envreg17", + "read.ptx.sreg.envreg18" => "__nvvm_read_ptx_sreg_envreg18", + "read.ptx.sreg.envreg19" => "__nvvm_read_ptx_sreg_envreg19", + "read.ptx.sreg.envreg2" => "__nvvm_read_ptx_sreg_envreg2", + "read.ptx.sreg.envreg20" => "__nvvm_read_ptx_sreg_envreg20", + "read.ptx.sreg.envreg21" => "__nvvm_read_ptx_sreg_envreg21", + "read.ptx.sreg.envreg22" => "__nvvm_read_ptx_sreg_envreg22", + "read.ptx.sreg.envreg23" => "__nvvm_read_ptx_sreg_envreg23", + "read.ptx.sreg.envreg24" => "__nvvm_read_ptx_sreg_envreg24", + "read.ptx.sreg.envreg25" => "__nvvm_read_ptx_sreg_envreg25", + "read.ptx.sreg.envreg26" => "__nvvm_read_ptx_sreg_envreg26", + "read.ptx.sreg.envreg27" => "__nvvm_read_ptx_sreg_envreg27", + "read.ptx.sreg.envreg28" => "__nvvm_read_ptx_sreg_envreg28", + "read.ptx.sreg.envreg29" => "__nvvm_read_ptx_sreg_envreg29", + "read.ptx.sreg.envreg3" => "__nvvm_read_ptx_sreg_envreg3", + "read.ptx.sreg.envreg30" => "__nvvm_read_ptx_sreg_envreg30", + "read.ptx.sreg.envreg31" => "__nvvm_read_ptx_sreg_envreg31", + "read.ptx.sreg.envreg4" => "__nvvm_read_ptx_sreg_envreg4", + "read.ptx.sreg.envreg5" => "__nvvm_read_ptx_sreg_envreg5", + "read.ptx.sreg.envreg6" => "__nvvm_read_ptx_sreg_envreg6", + "read.ptx.sreg.envreg7" => "__nvvm_read_ptx_sreg_envreg7", + "read.ptx.sreg.envreg8" => "__nvvm_read_ptx_sreg_envreg8", + "read.ptx.sreg.envreg9" => "__nvvm_read_ptx_sreg_envreg9", + "read.ptx.sreg.globaltimer" => "__nvvm_read_ptx_sreg_globaltimer", + "read.ptx.sreg.gridid" => "__nvvm_read_ptx_sreg_gridid", + // [DUPLICATE]: "read.ptx.sreg.gridid" => "__nvvm_read_ptx_sreg_", + "read.ptx.sreg.laneid" => "__nvvm_read_ptx_sreg_laneid", + // [DUPLICATE]: "read.ptx.sreg.laneid" => "__nvvm_read_ptx_sreg_", + "read.ptx.sreg.lanemask.eq" => "__nvvm_read_ptx_sreg_lanemask_eq", + // [DUPLICATE]: "read.ptx.sreg.lanemask.eq" => "__nvvm_read_ptx_sreg_", + "read.ptx.sreg.lanemask.ge" => "__nvvm_read_ptx_sreg_lanemask_ge", + // [DUPLICATE]: "read.ptx.sreg.lanemask.ge" => "__nvvm_read_ptx_sreg_", + "read.ptx.sreg.lanemask.gt" => "__nvvm_read_ptx_sreg_lanemask_gt", + // [DUPLICATE]: "read.ptx.sreg.lanemask.gt" => "__nvvm_read_ptx_sreg_", + "read.ptx.sreg.lanemask.le" => "__nvvm_read_ptx_sreg_lanemask_le", + // [DUPLICATE]: "read.ptx.sreg.lanemask.le" => "__nvvm_read_ptx_sreg_", + "read.ptx.sreg.lanemask.lt" => "__nvvm_read_ptx_sreg_lanemask_lt", + // [DUPLICATE]: "read.ptx.sreg.lanemask.lt" => "__nvvm_read_ptx_sreg_", + "read.ptx.sreg.nctaid.w" => 
"__nvvm_read_ptx_sreg_nctaid_w", + "read.ptx.sreg.nctaid.x" => "__nvvm_read_ptx_sreg_nctaid_x", + "read.ptx.sreg.nctaid.y" => "__nvvm_read_ptx_sreg_nctaid_y", + "read.ptx.sreg.nctaid.z" => "__nvvm_read_ptx_sreg_nctaid_z", + "read.ptx.sreg.nsmid" => "__nvvm_read_ptx_sreg_nsmid", + // [DUPLICATE]: "read.ptx.sreg.nsmid" => "__nvvm_read_ptx_sreg_", + "read.ptx.sreg.ntid.w" => "__nvvm_read_ptx_sreg_ntid_w", + "read.ptx.sreg.ntid.x" => "__nvvm_read_ptx_sreg_ntid_x", + "read.ptx.sreg.ntid.y" => "__nvvm_read_ptx_sreg_ntid_y", + "read.ptx.sreg.ntid.z" => "__nvvm_read_ptx_sreg_ntid_z", + "read.ptx.sreg.nwarpid" => "__nvvm_read_ptx_sreg_nwarpid", + // [DUPLICATE]: "read.ptx.sreg.nwarpid" => "__nvvm_read_ptx_sreg_", + "read.ptx.sreg.pm0" => "__nvvm_read_ptx_sreg_pm0", + // [DUPLICATE]: "read.ptx.sreg.pm0" => "__nvvm_read_ptx_sreg_", + "read.ptx.sreg.pm1" => "__nvvm_read_ptx_sreg_pm1", + // [DUPLICATE]: "read.ptx.sreg.pm1" => "__nvvm_read_ptx_sreg_", + "read.ptx.sreg.pm2" => "__nvvm_read_ptx_sreg_pm2", + // [DUPLICATE]: "read.ptx.sreg.pm2" => "__nvvm_read_ptx_sreg_", + "read.ptx.sreg.pm3" => "__nvvm_read_ptx_sreg_pm3", + // [DUPLICATE]: "read.ptx.sreg.pm3" => "__nvvm_read_ptx_sreg_", + "read.ptx.sreg.smid" => "__nvvm_read_ptx_sreg_smid", + // [DUPLICATE]: "read.ptx.sreg.smid" => "__nvvm_read_ptx_sreg_", + "read.ptx.sreg.tid.w" => "__nvvm_read_ptx_sreg_tid_w", + "read.ptx.sreg.tid.x" => "__nvvm_read_ptx_sreg_tid_x", + "read.ptx.sreg.tid.y" => "__nvvm_read_ptx_sreg_tid_y", + "read.ptx.sreg.tid.z" => "__nvvm_read_ptx_sreg_tid_z", + "read.ptx.sreg.warpid" => "__nvvm_read_ptx_sreg_warpid", + // [DUPLICATE]: "read.ptx.sreg.warpid" => "__nvvm_read_ptx_sreg_", + "read.ptx.sreg.warpsize" => "__nvvm_read_ptx_sreg_warpsize", + // [DUPLICATE]: "read.ptx.sreg.warpsize" => "__nvvm_read_ptx_sreg_", + "redux.sync.add" => "__nvvm_redux_sync_add", + "redux.sync.and" => "__nvvm_redux_sync_and", + "redux.sync.fmax" => "__nvvm_redux_sync_fmax", + "redux.sync.fmax.NaN" => "__nvvm_redux_sync_fmax_NaN", + "redux.sync.fmax.abs" => "__nvvm_redux_sync_fmax_abs", + "redux.sync.fmax.abs.NaN" => "__nvvm_redux_sync_fmax_abs_NaN", + "redux.sync.fmin" => "__nvvm_redux_sync_fmin", + "redux.sync.fmin.NaN" => "__nvvm_redux_sync_fmin_NaN", + "redux.sync.fmin.abs" => "__nvvm_redux_sync_fmin_abs", + "redux.sync.fmin.abs.NaN" => "__nvvm_redux_sync_fmin_abs_NaN", + "redux.sync.max" => "__nvvm_redux_sync_max", + "redux.sync.min" => "__nvvm_redux_sync_min", + "redux.sync.or" => "__nvvm_redux_sync_or", + "redux.sync.umax" => "__nvvm_redux_sync_umax", + "redux.sync.umin" => "__nvvm_redux_sync_umin", + "redux.sync.xor" => "__nvvm_redux_sync_xor", + "reflect" => "__nvvm_reflect", + "rotate.b32" => "__nvvm_rotate_b32", + "rotate.b64" => "__nvvm_rotate_b64", + "rotate.right.b64" => "__nvvm_rotate_right_b64", + "round.d" => "__nvvm_round_d", + "round.f" => "__nvvm_round_f", + "round.ftz.f" => "__nvvm_round_ftz_f", + "rsqrt.approx.d" => "__nvvm_rsqrt_approx_d", + "rsqrt.approx.f" => "__nvvm_rsqrt_approx_f", + "rsqrt.approx.ftz.d" => "__nvvm_rsqrt_approx_ftz_d", + "rsqrt.approx.ftz.f" => "__nvvm_rsqrt_approx_ftz_f", + "sad.i" => "__nvvm_sad_i", + "sad.ll" => "__nvvm_sad_ll", + "sad.s" => "__nvvm_sad_s", + "sad.ui" => "__nvvm_sad_ui", + "sad.ull" => "__nvvm_sad_ull", + "sad.us" => "__nvvm_sad_us", + "saturate.d" => "__nvvm_saturate_d", + "saturate.f" => "__nvvm_saturate_f", + "saturate.ftz.f" => "__nvvm_saturate_ftz_f", + "shfl.bfly.f32" => "__nvvm_shfl_bfly_f32", + "shfl.bfly.i32" => "__nvvm_shfl_bfly_i32", + "shfl.down.f32" => "__nvvm_shfl_down_f32", + 
"shfl.down.i32" => "__nvvm_shfl_down_i32", + "shfl.idx.f32" => "__nvvm_shfl_idx_f32", + "shfl.idx.i32" => "__nvvm_shfl_idx_i32", + "shfl.sync.bfly.f32" => "__nvvm_shfl_sync_bfly_f32", + "shfl.sync.bfly.i32" => "__nvvm_shfl_sync_bfly_i32", + "shfl.sync.down.f32" => "__nvvm_shfl_sync_down_f32", + "shfl.sync.down.i32" => "__nvvm_shfl_sync_down_i32", + "shfl.sync.idx.f32" => "__nvvm_shfl_sync_idx_f32", + "shfl.sync.idx.i32" => "__nvvm_shfl_sync_idx_i32", + "shfl.sync.up.f32" => "__nvvm_shfl_sync_up_f32", + "shfl.sync.up.i32" => "__nvvm_shfl_sync_up_i32", + "shfl.up.f32" => "__nvvm_shfl_up_f32", + "shfl.up.i32" => "__nvvm_shfl_up_i32", + "sin.approx.f" => "__nvvm_sin_approx_f", + "sin.approx.ftz.f" => "__nvvm_sin_approx_ftz_f", + "sqrt.approx.f" => "__nvvm_sqrt_approx_f", + "sqrt.approx.ftz.f" => "__nvvm_sqrt_approx_ftz_f", + "sqrt.f" => "__nvvm_sqrt_f", + "sqrt.rm.d" => "__nvvm_sqrt_rm_d", + "sqrt.rm.f" => "__nvvm_sqrt_rm_f", + "sqrt.rm.ftz.f" => "__nvvm_sqrt_rm_ftz_f", + "sqrt.rn.d" => "__nvvm_sqrt_rn_d", + "sqrt.rn.f" => "__nvvm_sqrt_rn_f", + "sqrt.rn.ftz.f" => "__nvvm_sqrt_rn_ftz_f", + "sqrt.rp.d" => "__nvvm_sqrt_rp_d", + "sqrt.rp.f" => "__nvvm_sqrt_rp_f", + "sqrt.rp.ftz.f" => "__nvvm_sqrt_rp_ftz_f", + "sqrt.rz.d" => "__nvvm_sqrt_rz_d", + "sqrt.rz.f" => "__nvvm_sqrt_rz_f", + "sqrt.rz.ftz.f" => "__nvvm_sqrt_rz_ftz_f", + "suq.array.size" => "__nvvm_suq_array_size", + "suq.channel.data.type" => "__nvvm_suq_channel_data_type", + "suq.channel.order" => "__nvvm_suq_channel_order", + "suq.depth" => "__nvvm_suq_depth", + "suq.height" => "__nvvm_suq_height", + "suq.width" => "__nvvm_suq_width", + "sust.b.1d.array.i16.clamp" => "__nvvm_sust_b_1d_array_i16_clamp", + "sust.b.1d.array.i16.trap" => "__nvvm_sust_b_1d_array_i16_trap", + "sust.b.1d.array.i16.zero" => "__nvvm_sust_b_1d_array_i16_zero", + "sust.b.1d.array.i32.clamp" => "__nvvm_sust_b_1d_array_i32_clamp", + "sust.b.1d.array.i32.trap" => "__nvvm_sust_b_1d_array_i32_trap", + "sust.b.1d.array.i32.zero" => "__nvvm_sust_b_1d_array_i32_zero", + "sust.b.1d.array.i64.clamp" => "__nvvm_sust_b_1d_array_i64_clamp", + "sust.b.1d.array.i64.trap" => "__nvvm_sust_b_1d_array_i64_trap", + "sust.b.1d.array.i64.zero" => "__nvvm_sust_b_1d_array_i64_zero", + "sust.b.1d.array.i8.clamp" => "__nvvm_sust_b_1d_array_i8_clamp", + "sust.b.1d.array.i8.trap" => "__nvvm_sust_b_1d_array_i8_trap", + "sust.b.1d.array.i8.zero" => "__nvvm_sust_b_1d_array_i8_zero", + "sust.b.1d.array.v2i16.clamp" => "__nvvm_sust_b_1d_array_v2i16_clamp", + "sust.b.1d.array.v2i16.trap" => "__nvvm_sust_b_1d_array_v2i16_trap", + "sust.b.1d.array.v2i16.zero" => "__nvvm_sust_b_1d_array_v2i16_zero", + "sust.b.1d.array.v2i32.clamp" => "__nvvm_sust_b_1d_array_v2i32_clamp", + "sust.b.1d.array.v2i32.trap" => "__nvvm_sust_b_1d_array_v2i32_trap", + "sust.b.1d.array.v2i32.zero" => "__nvvm_sust_b_1d_array_v2i32_zero", + "sust.b.1d.array.v2i64.clamp" => "__nvvm_sust_b_1d_array_v2i64_clamp", + "sust.b.1d.array.v2i64.trap" => "__nvvm_sust_b_1d_array_v2i64_trap", + "sust.b.1d.array.v2i64.zero" => "__nvvm_sust_b_1d_array_v2i64_zero", + "sust.b.1d.array.v2i8.clamp" => "__nvvm_sust_b_1d_array_v2i8_clamp", + "sust.b.1d.array.v2i8.trap" => "__nvvm_sust_b_1d_array_v2i8_trap", + "sust.b.1d.array.v2i8.zero" => "__nvvm_sust_b_1d_array_v2i8_zero", + "sust.b.1d.array.v4i16.clamp" => "__nvvm_sust_b_1d_array_v4i16_clamp", + "sust.b.1d.array.v4i16.trap" => "__nvvm_sust_b_1d_array_v4i16_trap", + "sust.b.1d.array.v4i16.zero" => "__nvvm_sust_b_1d_array_v4i16_zero", + "sust.b.1d.array.v4i32.clamp" => 
"__nvvm_sust_b_1d_array_v4i32_clamp", + "sust.b.1d.array.v4i32.trap" => "__nvvm_sust_b_1d_array_v4i32_trap", + "sust.b.1d.array.v4i32.zero" => "__nvvm_sust_b_1d_array_v4i32_zero", + "sust.b.1d.array.v4i8.clamp" => "__nvvm_sust_b_1d_array_v4i8_clamp", + "sust.b.1d.array.v4i8.trap" => "__nvvm_sust_b_1d_array_v4i8_trap", + "sust.b.1d.array.v4i8.zero" => "__nvvm_sust_b_1d_array_v4i8_zero", + "sust.b.1d.i16.clamp" => "__nvvm_sust_b_1d_i16_clamp", + "sust.b.1d.i16.trap" => "__nvvm_sust_b_1d_i16_trap", + "sust.b.1d.i16.zero" => "__nvvm_sust_b_1d_i16_zero", + "sust.b.1d.i32.clamp" => "__nvvm_sust_b_1d_i32_clamp", + "sust.b.1d.i32.trap" => "__nvvm_sust_b_1d_i32_trap", + "sust.b.1d.i32.zero" => "__nvvm_sust_b_1d_i32_zero", + "sust.b.1d.i64.clamp" => "__nvvm_sust_b_1d_i64_clamp", + "sust.b.1d.i64.trap" => "__nvvm_sust_b_1d_i64_trap", + "sust.b.1d.i64.zero" => "__nvvm_sust_b_1d_i64_zero", + "sust.b.1d.i8.clamp" => "__nvvm_sust_b_1d_i8_clamp", + "sust.b.1d.i8.trap" => "__nvvm_sust_b_1d_i8_trap", + "sust.b.1d.i8.zero" => "__nvvm_sust_b_1d_i8_zero", + "sust.b.1d.v2i16.clamp" => "__nvvm_sust_b_1d_v2i16_clamp", + "sust.b.1d.v2i16.trap" => "__nvvm_sust_b_1d_v2i16_trap", + "sust.b.1d.v2i16.zero" => "__nvvm_sust_b_1d_v2i16_zero", + "sust.b.1d.v2i32.clamp" => "__nvvm_sust_b_1d_v2i32_clamp", + "sust.b.1d.v2i32.trap" => "__nvvm_sust_b_1d_v2i32_trap", + "sust.b.1d.v2i32.zero" => "__nvvm_sust_b_1d_v2i32_zero", + "sust.b.1d.v2i64.clamp" => "__nvvm_sust_b_1d_v2i64_clamp", + "sust.b.1d.v2i64.trap" => "__nvvm_sust_b_1d_v2i64_trap", + "sust.b.1d.v2i64.zero" => "__nvvm_sust_b_1d_v2i64_zero", + "sust.b.1d.v2i8.clamp" => "__nvvm_sust_b_1d_v2i8_clamp", + "sust.b.1d.v2i8.trap" => "__nvvm_sust_b_1d_v2i8_trap", + "sust.b.1d.v2i8.zero" => "__nvvm_sust_b_1d_v2i8_zero", + "sust.b.1d.v4i16.clamp" => "__nvvm_sust_b_1d_v4i16_clamp", + "sust.b.1d.v4i16.trap" => "__nvvm_sust_b_1d_v4i16_trap", + "sust.b.1d.v4i16.zero" => "__nvvm_sust_b_1d_v4i16_zero", + "sust.b.1d.v4i32.clamp" => "__nvvm_sust_b_1d_v4i32_clamp", + "sust.b.1d.v4i32.trap" => "__nvvm_sust_b_1d_v4i32_trap", + "sust.b.1d.v4i32.zero" => "__nvvm_sust_b_1d_v4i32_zero", + "sust.b.1d.v4i8.clamp" => "__nvvm_sust_b_1d_v4i8_clamp", + "sust.b.1d.v4i8.trap" => "__nvvm_sust_b_1d_v4i8_trap", + "sust.b.1d.v4i8.zero" => "__nvvm_sust_b_1d_v4i8_zero", + "sust.b.2d.array.i16.clamp" => "__nvvm_sust_b_2d_array_i16_clamp", + "sust.b.2d.array.i16.trap" => "__nvvm_sust_b_2d_array_i16_trap", + "sust.b.2d.array.i16.zero" => "__nvvm_sust_b_2d_array_i16_zero", + "sust.b.2d.array.i32.clamp" => "__nvvm_sust_b_2d_array_i32_clamp", + "sust.b.2d.array.i32.trap" => "__nvvm_sust_b_2d_array_i32_trap", + "sust.b.2d.array.i32.zero" => "__nvvm_sust_b_2d_array_i32_zero", + "sust.b.2d.array.i64.clamp" => "__nvvm_sust_b_2d_array_i64_clamp", + "sust.b.2d.array.i64.trap" => "__nvvm_sust_b_2d_array_i64_trap", + "sust.b.2d.array.i64.zero" => "__nvvm_sust_b_2d_array_i64_zero", + "sust.b.2d.array.i8.clamp" => "__nvvm_sust_b_2d_array_i8_clamp", + "sust.b.2d.array.i8.trap" => "__nvvm_sust_b_2d_array_i8_trap", + "sust.b.2d.array.i8.zero" => "__nvvm_sust_b_2d_array_i8_zero", + "sust.b.2d.array.v2i16.clamp" => "__nvvm_sust_b_2d_array_v2i16_clamp", + "sust.b.2d.array.v2i16.trap" => "__nvvm_sust_b_2d_array_v2i16_trap", + "sust.b.2d.array.v2i16.zero" => "__nvvm_sust_b_2d_array_v2i16_zero", + "sust.b.2d.array.v2i32.clamp" => "__nvvm_sust_b_2d_array_v2i32_clamp", + "sust.b.2d.array.v2i32.trap" => "__nvvm_sust_b_2d_array_v2i32_trap", + "sust.b.2d.array.v2i32.zero" => "__nvvm_sust_b_2d_array_v2i32_zero", + 
"sust.b.2d.array.v2i64.clamp" => "__nvvm_sust_b_2d_array_v2i64_clamp", + "sust.b.2d.array.v2i64.trap" => "__nvvm_sust_b_2d_array_v2i64_trap", + "sust.b.2d.array.v2i64.zero" => "__nvvm_sust_b_2d_array_v2i64_zero", + "sust.b.2d.array.v2i8.clamp" => "__nvvm_sust_b_2d_array_v2i8_clamp", + "sust.b.2d.array.v2i8.trap" => "__nvvm_sust_b_2d_array_v2i8_trap", + "sust.b.2d.array.v2i8.zero" => "__nvvm_sust_b_2d_array_v2i8_zero", + "sust.b.2d.array.v4i16.clamp" => "__nvvm_sust_b_2d_array_v4i16_clamp", + "sust.b.2d.array.v4i16.trap" => "__nvvm_sust_b_2d_array_v4i16_trap", + "sust.b.2d.array.v4i16.zero" => "__nvvm_sust_b_2d_array_v4i16_zero", + "sust.b.2d.array.v4i32.clamp" => "__nvvm_sust_b_2d_array_v4i32_clamp", + "sust.b.2d.array.v4i32.trap" => "__nvvm_sust_b_2d_array_v4i32_trap", + "sust.b.2d.array.v4i32.zero" => "__nvvm_sust_b_2d_array_v4i32_zero", + "sust.b.2d.array.v4i8.clamp" => "__nvvm_sust_b_2d_array_v4i8_clamp", + "sust.b.2d.array.v4i8.trap" => "__nvvm_sust_b_2d_array_v4i8_trap", + "sust.b.2d.array.v4i8.zero" => "__nvvm_sust_b_2d_array_v4i8_zero", + "sust.b.2d.i16.clamp" => "__nvvm_sust_b_2d_i16_clamp", + "sust.b.2d.i16.trap" => "__nvvm_sust_b_2d_i16_trap", + "sust.b.2d.i16.zero" => "__nvvm_sust_b_2d_i16_zero", + "sust.b.2d.i32.clamp" => "__nvvm_sust_b_2d_i32_clamp", + "sust.b.2d.i32.trap" => "__nvvm_sust_b_2d_i32_trap", + "sust.b.2d.i32.zero" => "__nvvm_sust_b_2d_i32_zero", + "sust.b.2d.i64.clamp" => "__nvvm_sust_b_2d_i64_clamp", + "sust.b.2d.i64.trap" => "__nvvm_sust_b_2d_i64_trap", + "sust.b.2d.i64.zero" => "__nvvm_sust_b_2d_i64_zero", + "sust.b.2d.i8.clamp" => "__nvvm_sust_b_2d_i8_clamp", + "sust.b.2d.i8.trap" => "__nvvm_sust_b_2d_i8_trap", + "sust.b.2d.i8.zero" => "__nvvm_sust_b_2d_i8_zero", + "sust.b.2d.v2i16.clamp" => "__nvvm_sust_b_2d_v2i16_clamp", + "sust.b.2d.v2i16.trap" => "__nvvm_sust_b_2d_v2i16_trap", + "sust.b.2d.v2i16.zero" => "__nvvm_sust_b_2d_v2i16_zero", + "sust.b.2d.v2i32.clamp" => "__nvvm_sust_b_2d_v2i32_clamp", + "sust.b.2d.v2i32.trap" => "__nvvm_sust_b_2d_v2i32_trap", + "sust.b.2d.v2i32.zero" => "__nvvm_sust_b_2d_v2i32_zero", + "sust.b.2d.v2i64.clamp" => "__nvvm_sust_b_2d_v2i64_clamp", + "sust.b.2d.v2i64.trap" => "__nvvm_sust_b_2d_v2i64_trap", + "sust.b.2d.v2i64.zero" => "__nvvm_sust_b_2d_v2i64_zero", + "sust.b.2d.v2i8.clamp" => "__nvvm_sust_b_2d_v2i8_clamp", + "sust.b.2d.v2i8.trap" => "__nvvm_sust_b_2d_v2i8_trap", + "sust.b.2d.v2i8.zero" => "__nvvm_sust_b_2d_v2i8_zero", + "sust.b.2d.v4i16.clamp" => "__nvvm_sust_b_2d_v4i16_clamp", + "sust.b.2d.v4i16.trap" => "__nvvm_sust_b_2d_v4i16_trap", + "sust.b.2d.v4i16.zero" => "__nvvm_sust_b_2d_v4i16_zero", + "sust.b.2d.v4i32.clamp" => "__nvvm_sust_b_2d_v4i32_clamp", + "sust.b.2d.v4i32.trap" => "__nvvm_sust_b_2d_v4i32_trap", + "sust.b.2d.v4i32.zero" => "__nvvm_sust_b_2d_v4i32_zero", + "sust.b.2d.v4i8.clamp" => "__nvvm_sust_b_2d_v4i8_clamp", + "sust.b.2d.v4i8.trap" => "__nvvm_sust_b_2d_v4i8_trap", + "sust.b.2d.v4i8.zero" => "__nvvm_sust_b_2d_v4i8_zero", + "sust.b.3d.i16.clamp" => "__nvvm_sust_b_3d_i16_clamp", + "sust.b.3d.i16.trap" => "__nvvm_sust_b_3d_i16_trap", + "sust.b.3d.i16.zero" => "__nvvm_sust_b_3d_i16_zero", + "sust.b.3d.i32.clamp" => "__nvvm_sust_b_3d_i32_clamp", + "sust.b.3d.i32.trap" => "__nvvm_sust_b_3d_i32_trap", + "sust.b.3d.i32.zero" => "__nvvm_sust_b_3d_i32_zero", + "sust.b.3d.i64.clamp" => "__nvvm_sust_b_3d_i64_clamp", + "sust.b.3d.i64.trap" => "__nvvm_sust_b_3d_i64_trap", + "sust.b.3d.i64.zero" => "__nvvm_sust_b_3d_i64_zero", + "sust.b.3d.i8.clamp" => "__nvvm_sust_b_3d_i8_clamp", + "sust.b.3d.i8.trap" => 
"__nvvm_sust_b_3d_i8_trap", + "sust.b.3d.i8.zero" => "__nvvm_sust_b_3d_i8_zero", + "sust.b.3d.v2i16.clamp" => "__nvvm_sust_b_3d_v2i16_clamp", + "sust.b.3d.v2i16.trap" => "__nvvm_sust_b_3d_v2i16_trap", + "sust.b.3d.v2i16.zero" => "__nvvm_sust_b_3d_v2i16_zero", + "sust.b.3d.v2i32.clamp" => "__nvvm_sust_b_3d_v2i32_clamp", + "sust.b.3d.v2i32.trap" => "__nvvm_sust_b_3d_v2i32_trap", + "sust.b.3d.v2i32.zero" => "__nvvm_sust_b_3d_v2i32_zero", + "sust.b.3d.v2i64.clamp" => "__nvvm_sust_b_3d_v2i64_clamp", + "sust.b.3d.v2i64.trap" => "__nvvm_sust_b_3d_v2i64_trap", + "sust.b.3d.v2i64.zero" => "__nvvm_sust_b_3d_v2i64_zero", + "sust.b.3d.v2i8.clamp" => "__nvvm_sust_b_3d_v2i8_clamp", + "sust.b.3d.v2i8.trap" => "__nvvm_sust_b_3d_v2i8_trap", + "sust.b.3d.v2i8.zero" => "__nvvm_sust_b_3d_v2i8_zero", + "sust.b.3d.v4i16.clamp" => "__nvvm_sust_b_3d_v4i16_clamp", + "sust.b.3d.v4i16.trap" => "__nvvm_sust_b_3d_v4i16_trap", + "sust.b.3d.v4i16.zero" => "__nvvm_sust_b_3d_v4i16_zero", + "sust.b.3d.v4i32.clamp" => "__nvvm_sust_b_3d_v4i32_clamp", + "sust.b.3d.v4i32.trap" => "__nvvm_sust_b_3d_v4i32_trap", + "sust.b.3d.v4i32.zero" => "__nvvm_sust_b_3d_v4i32_zero", + "sust.b.3d.v4i8.clamp" => "__nvvm_sust_b_3d_v4i8_clamp", + "sust.b.3d.v4i8.trap" => "__nvvm_sust_b_3d_v4i8_trap", + "sust.b.3d.v4i8.zero" => "__nvvm_sust_b_3d_v4i8_zero", + "sust.p.1d.array.i16.trap" => "__nvvm_sust_p_1d_array_i16_trap", + "sust.p.1d.array.i32.trap" => "__nvvm_sust_p_1d_array_i32_trap", + "sust.p.1d.array.i8.trap" => "__nvvm_sust_p_1d_array_i8_trap", + "sust.p.1d.array.v2i16.trap" => "__nvvm_sust_p_1d_array_v2i16_trap", + "sust.p.1d.array.v2i32.trap" => "__nvvm_sust_p_1d_array_v2i32_trap", + "sust.p.1d.array.v2i8.trap" => "__nvvm_sust_p_1d_array_v2i8_trap", + "sust.p.1d.array.v4i16.trap" => "__nvvm_sust_p_1d_array_v4i16_trap", + "sust.p.1d.array.v4i32.trap" => "__nvvm_sust_p_1d_array_v4i32_trap", + "sust.p.1d.array.v4i8.trap" => "__nvvm_sust_p_1d_array_v4i8_trap", + "sust.p.1d.i16.trap" => "__nvvm_sust_p_1d_i16_trap", + "sust.p.1d.i32.trap" => "__nvvm_sust_p_1d_i32_trap", + "sust.p.1d.i8.trap" => "__nvvm_sust_p_1d_i8_trap", + "sust.p.1d.v2i16.trap" => "__nvvm_sust_p_1d_v2i16_trap", + "sust.p.1d.v2i32.trap" => "__nvvm_sust_p_1d_v2i32_trap", + "sust.p.1d.v2i8.trap" => "__nvvm_sust_p_1d_v2i8_trap", + "sust.p.1d.v4i16.trap" => "__nvvm_sust_p_1d_v4i16_trap", + "sust.p.1d.v4i32.trap" => "__nvvm_sust_p_1d_v4i32_trap", + "sust.p.1d.v4i8.trap" => "__nvvm_sust_p_1d_v4i8_trap", + "sust.p.2d.array.i16.trap" => "__nvvm_sust_p_2d_array_i16_trap", + "sust.p.2d.array.i32.trap" => "__nvvm_sust_p_2d_array_i32_trap", + "sust.p.2d.array.i8.trap" => "__nvvm_sust_p_2d_array_i8_trap", + "sust.p.2d.array.v2i16.trap" => "__nvvm_sust_p_2d_array_v2i16_trap", + "sust.p.2d.array.v2i32.trap" => "__nvvm_sust_p_2d_array_v2i32_trap", + "sust.p.2d.array.v2i8.trap" => "__nvvm_sust_p_2d_array_v2i8_trap", + "sust.p.2d.array.v4i16.trap" => "__nvvm_sust_p_2d_array_v4i16_trap", + "sust.p.2d.array.v4i32.trap" => "__nvvm_sust_p_2d_array_v4i32_trap", + "sust.p.2d.array.v4i8.trap" => "__nvvm_sust_p_2d_array_v4i8_trap", + "sust.p.2d.i16.trap" => "__nvvm_sust_p_2d_i16_trap", + "sust.p.2d.i32.trap" => "__nvvm_sust_p_2d_i32_trap", + "sust.p.2d.i8.trap" => "__nvvm_sust_p_2d_i8_trap", + "sust.p.2d.v2i16.trap" => "__nvvm_sust_p_2d_v2i16_trap", + "sust.p.2d.v2i32.trap" => "__nvvm_sust_p_2d_v2i32_trap", + "sust.p.2d.v2i8.trap" => "__nvvm_sust_p_2d_v2i8_trap", + "sust.p.2d.v4i16.trap" => "__nvvm_sust_p_2d_v4i16_trap", + "sust.p.2d.v4i32.trap" => "__nvvm_sust_p_2d_v4i32_trap", + 
"sust.p.2d.v4i8.trap" => "__nvvm_sust_p_2d_v4i8_trap", + "sust.p.3d.i16.trap" => "__nvvm_sust_p_3d_i16_trap", + "sust.p.3d.i32.trap" => "__nvvm_sust_p_3d_i32_trap", + "sust.p.3d.i8.trap" => "__nvvm_sust_p_3d_i8_trap", + "sust.p.3d.v2i16.trap" => "__nvvm_sust_p_3d_v2i16_trap", + "sust.p.3d.v2i32.trap" => "__nvvm_sust_p_3d_v2i32_trap", + "sust.p.3d.v2i8.trap" => "__nvvm_sust_p_3d_v2i8_trap", + "sust.p.3d.v4i16.trap" => "__nvvm_sust_p_3d_v4i16_trap", + "sust.p.3d.v4i32.trap" => "__nvvm_sust_p_3d_v4i32_trap", + "sust.p.3d.v4i8.trap" => "__nvvm_sust_p_3d_v4i8_trap", + "swap.lo.hi.b64" => "__nvvm_swap_lo_hi_b64", + "trunc.d" => "__nvvm_trunc_d", + "trunc.f" => "__nvvm_trunc_f", + "trunc.ftz.f" => "__nvvm_trunc_ftz_f", + "txq.array.size" => "__nvvm_txq_array_size", + "txq.channel.data.type" => "__nvvm_txq_channel_data_type", + "txq.channel.order" => "__nvvm_txq_channel_order", + "txq.depth" => "__nvvm_txq_depth", + "txq.height" => "__nvvm_txq_height", + "txq.num.mipmap.levels" => "__nvvm_txq_num_mipmap_levels", + "txq.num.samples" => "__nvvm_txq_num_samples", + "txq.width" => "__nvvm_txq_width", + "ue8m0x2.to.bf16x2" => "__nvvm_ue8m0x2_to_bf16x2", + "ui2d.rm" => "__nvvm_ui2d_rm", + "ui2d.rn" => "__nvvm_ui2d_rn", + "ui2d.rp" => "__nvvm_ui2d_rp", + "ui2d.rz" => "__nvvm_ui2d_rz", + "ui2f.rm" => "__nvvm_ui2f_rm", + "ui2f.rn" => "__nvvm_ui2f_rn", + "ui2f.rp" => "__nvvm_ui2f_rp", + "ui2f.rz" => "__nvvm_ui2f_rz", + "ull2d.rm" => "__nvvm_ull2d_rm", + "ull2d.rn" => "__nvvm_ull2d_rn", + "ull2d.rp" => "__nvvm_ull2d_rp", + "ull2d.rz" => "__nvvm_ull2d_rz", + "ull2f.rm" => "__nvvm_ull2f_rm", + "ull2f.rn" => "__nvvm_ull2f_rn", + "ull2f.rp" => "__nvvm_ull2f_rp", + "ull2f.rz" => "__nvvm_ull2f_rz", + "vote.all" => "__nvvm_vote_all", + "vote.all.sync" => "__nvvm_vote_all_sync", + "vote.any" => "__nvvm_vote_any", + "vote.any.sync" => "__nvvm_vote_any_sync", + "vote.ballot" => "__nvvm_vote_ballot", + "vote.ballot.sync" => "__nvvm_vote_ballot_sync", + "vote.uni" => "__nvvm_vote_uni", + "vote.uni.sync" => "__nvvm_vote_uni_sync", + _ => unimplemented!("***** unsupported LLVM intrinsic {}", name), + } + } + nvvm(name) + } + "ppc" => { + #[allow(non_snake_case)] + fn ppc(name: &str) -> &str { + match name { + // ppc + "addex" => "__builtin_ppc_addex", + "addf128.round.to.odd" => "__builtin_addf128_round_to_odd", + "addg6s" => "__builtin_addg6s", + "addg6sd" => "__builtin_ppc_addg6s", + "altivec.crypto.vcipher" => "__builtin_altivec_crypto_vcipher", + "altivec.crypto.vcipherlast" => "__builtin_altivec_crypto_vcipherlast", + "altivec.crypto.vncipher" => "__builtin_altivec_crypto_vncipher", + "altivec.crypto.vncipherlast" => "__builtin_altivec_crypto_vncipherlast", + "altivec.crypto.vpermxor" => "__builtin_altivec_crypto_vpermxor", + "altivec.crypto.vpermxor.be" => "__builtin_altivec_crypto_vpermxor_be", + "altivec.crypto.vpmsumb" => "__builtin_altivec_crypto_vpmsumb", + "altivec.crypto.vpmsumd" => "__builtin_altivec_crypto_vpmsumd", + "altivec.crypto.vpmsumh" => "__builtin_altivec_crypto_vpmsumh", + "altivec.crypto.vpmsumw" => "__builtin_altivec_crypto_vpmsumw", + "altivec.crypto.vsbox" => "__builtin_altivec_crypto_vsbox", + "altivec.crypto.vshasigmad" => "__builtin_altivec_crypto_vshasigmad", + "altivec.crypto.vshasigmaw" => "__builtin_altivec_crypto_vshasigmaw", + "altivec.dss" => "__builtin_altivec_dss", + "altivec.dssall" => "__builtin_altivec_dssall", + "altivec.dst" => "__builtin_altivec_dst", + "altivec.dstst" => "__builtin_altivec_dstst", + "altivec.dststt" => "__builtin_altivec_dststt", + "altivec.dstt" => 
"__builtin_altivec_dstt", + "altivec.mfvscr" => "__builtin_altivec_mfvscr", + "altivec.mtvscr" => "__builtin_altivec_mtvscr", + "altivec.mtvsrbm" => "__builtin_altivec_mtvsrbm", + "altivec.mtvsrdm" => "__builtin_altivec_mtvsrdm", + "altivec.mtvsrhm" => "__builtin_altivec_mtvsrhm", + "altivec.mtvsrqm" => "__builtin_altivec_mtvsrqm", + "altivec.mtvsrwm" => "__builtin_altivec_mtvsrwm", + "altivec.vabsdub" => "__builtin_altivec_vabsdub", + "altivec.vabsduh" => "__builtin_altivec_vabsduh", + "altivec.vabsduw" => "__builtin_altivec_vabsduw", + "altivec.vaddcuq" => "__builtin_altivec_vaddcuq", + "altivec.vaddcuw" => "__builtin_altivec_vaddcuw", + "altivec.vaddecuq" => "__builtin_altivec_vaddecuq", + "altivec.vaddeuqm" => "__builtin_altivec_vaddeuqm", + "altivec.vaddsbs" => "__builtin_altivec_vaddsbs", + "altivec.vaddshs" => "__builtin_altivec_vaddshs", + "altivec.vaddsws" => "__builtin_altivec_vaddsws", + "altivec.vaddubs" => "__builtin_altivec_vaddubs", + "altivec.vadduhs" => "__builtin_altivec_vadduhs", + "altivec.vadduws" => "__builtin_altivec_vadduws", + "altivec.vavgsb" => "__builtin_altivec_vavgsb", + "altivec.vavgsh" => "__builtin_altivec_vavgsh", + "altivec.vavgsw" => "__builtin_altivec_vavgsw", + "altivec.vavgub" => "__builtin_altivec_vavgub", + "altivec.vavguh" => "__builtin_altivec_vavguh", + "altivec.vavguw" => "__builtin_altivec_vavguw", + "altivec.vbpermd" => "__builtin_altivec_vbpermd", + "altivec.vbpermq" => "__builtin_altivec_vbpermq", + "altivec.vcfsx" => "__builtin_altivec_vcfsx", + "altivec.vcfuged" => "__builtin_altivec_vcfuged", + "altivec.vcfux" => "__builtin_altivec_vcfux", + "altivec.vclrlb" => "__builtin_altivec_vclrlb", + "altivec.vclrrb" => "__builtin_altivec_vclrrb", + "altivec.vclzdm" => "__builtin_altivec_vclzdm", + "altivec.vclzlsbb" => "__builtin_altivec_vclzlsbb", + "altivec.vcmpbfp" => "__builtin_altivec_vcmpbfp", + "altivec.vcmpbfp.p" => "__builtin_altivec_vcmpbfp_p", + "altivec.vcmpeqfp" => "__builtin_altivec_vcmpeqfp", + "altivec.vcmpeqfp.p" => "__builtin_altivec_vcmpeqfp_p", + "altivec.vcmpequb" => "__builtin_altivec_vcmpequb", + "altivec.vcmpequb.p" => "__builtin_altivec_vcmpequb_p", + "altivec.vcmpequd" => "__builtin_altivec_vcmpequd", + "altivec.vcmpequd.p" => "__builtin_altivec_vcmpequd_p", + "altivec.vcmpequh" => "__builtin_altivec_vcmpequh", + "altivec.vcmpequh.p" => "__builtin_altivec_vcmpequh_p", + "altivec.vcmpequq" => "__builtin_altivec_vcmpequq", + "altivec.vcmpequq.p" => "__builtin_altivec_vcmpequq_p", + "altivec.vcmpequw" => "__builtin_altivec_vcmpequw", + "altivec.vcmpequw.p" => "__builtin_altivec_vcmpequw_p", + "altivec.vcmpgefp" => "__builtin_altivec_vcmpgefp", + "altivec.vcmpgefp.p" => "__builtin_altivec_vcmpgefp_p", + "altivec.vcmpgtfp" => "__builtin_altivec_vcmpgtfp", + "altivec.vcmpgtfp.p" => "__builtin_altivec_vcmpgtfp_p", + "altivec.vcmpgtsb" => "__builtin_altivec_vcmpgtsb", + "altivec.vcmpgtsb.p" => "__builtin_altivec_vcmpgtsb_p", + "altivec.vcmpgtsd" => "__builtin_altivec_vcmpgtsd", + "altivec.vcmpgtsd.p" => "__builtin_altivec_vcmpgtsd_p", + "altivec.vcmpgtsh" => "__builtin_altivec_vcmpgtsh", + "altivec.vcmpgtsh.p" => "__builtin_altivec_vcmpgtsh_p", + "altivec.vcmpgtsq" => "__builtin_altivec_vcmpgtsq", + "altivec.vcmpgtsq.p" => "__builtin_altivec_vcmpgtsq_p", + "altivec.vcmpgtsw" => "__builtin_altivec_vcmpgtsw", + "altivec.vcmpgtsw.p" => "__builtin_altivec_vcmpgtsw_p", + "altivec.vcmpgtub" => "__builtin_altivec_vcmpgtub", + "altivec.vcmpgtub.p" => "__builtin_altivec_vcmpgtub_p", + "altivec.vcmpgtud" => "__builtin_altivec_vcmpgtud", + 
"altivec.vcmpgtud.p" => "__builtin_altivec_vcmpgtud_p", + "altivec.vcmpgtuh" => "__builtin_altivec_vcmpgtuh", + "altivec.vcmpgtuh.p" => "__builtin_altivec_vcmpgtuh_p", + "altivec.vcmpgtuq" => "__builtin_altivec_vcmpgtuq", + "altivec.vcmpgtuq.p" => "__builtin_altivec_vcmpgtuq_p", + "altivec.vcmpgtuw" => "__builtin_altivec_vcmpgtuw", + "altivec.vcmpgtuw.p" => "__builtin_altivec_vcmpgtuw_p", + "altivec.vcmpneb" => "__builtin_altivec_vcmpneb", + "altivec.vcmpneb.p" => "__builtin_altivec_vcmpneb_p", + "altivec.vcmpneh" => "__builtin_altivec_vcmpneh", + "altivec.vcmpneh.p" => "__builtin_altivec_vcmpneh_p", + "altivec.vcmpnew" => "__builtin_altivec_vcmpnew", + "altivec.vcmpnew.p" => "__builtin_altivec_vcmpnew_p", + "altivec.vcmpnezb" => "__builtin_altivec_vcmpnezb", + "altivec.vcmpnezb.p" => "__builtin_altivec_vcmpnezb_p", + "altivec.vcmpnezh" => "__builtin_altivec_vcmpnezh", + "altivec.vcmpnezh.p" => "__builtin_altivec_vcmpnezh_p", + "altivec.vcmpnezw" => "__builtin_altivec_vcmpnezw", + "altivec.vcmpnezw.p" => "__builtin_altivec_vcmpnezw_p", + "altivec.vcntmbb" => "__builtin_altivec_vcntmbb", + "altivec.vcntmbd" => "__builtin_altivec_vcntmbd", + "altivec.vcntmbh" => "__builtin_altivec_vcntmbh", + "altivec.vcntmbw" => "__builtin_altivec_vcntmbw", + "altivec.vctsxs" => "__builtin_altivec_vctsxs", + "altivec.vctuxs" => "__builtin_altivec_vctuxs", + "altivec.vctzdm" => "__builtin_altivec_vctzdm", + "altivec.vctzlsbb" => "__builtin_altivec_vctzlsbb", + "altivec.vdivesd" => "__builtin_altivec_vdivesd", + "altivec.vdivesq" => "__builtin_altivec_vdivesq", + "altivec.vdivesw" => "__builtin_altivec_vdivesw", + "altivec.vdiveud" => "__builtin_altivec_vdiveud", + "altivec.vdiveuq" => "__builtin_altivec_vdiveuq", + "altivec.vdiveuw" => "__builtin_altivec_vdiveuw", + "altivec.vexpandbm" => "__builtin_altivec_vexpandbm", + "altivec.vexpanddm" => "__builtin_altivec_vexpanddm", + "altivec.vexpandhm" => "__builtin_altivec_vexpandhm", + "altivec.vexpandqm" => "__builtin_altivec_vexpandqm", + "altivec.vexpandwm" => "__builtin_altivec_vexpandwm", + "altivec.vexptefp" => "__builtin_altivec_vexptefp", + "altivec.vextddvlx" => "__builtin_altivec_vextddvlx", + "altivec.vextddvrx" => "__builtin_altivec_vextddvrx", + "altivec.vextdubvlx" => "__builtin_altivec_vextdubvlx", + "altivec.vextdubvrx" => "__builtin_altivec_vextdubvrx", + "altivec.vextduhvlx" => "__builtin_altivec_vextduhvlx", + "altivec.vextduhvrx" => "__builtin_altivec_vextduhvrx", + "altivec.vextduwvlx" => "__builtin_altivec_vextduwvlx", + "altivec.vextduwvrx" => "__builtin_altivec_vextduwvrx", + "altivec.vextractbm" => "__builtin_altivec_vextractbm", + "altivec.vextractdm" => "__builtin_altivec_vextractdm", + "altivec.vextracthm" => "__builtin_altivec_vextracthm", + "altivec.vextractqm" => "__builtin_altivec_vextractqm", + "altivec.vextractwm" => "__builtin_altivec_vextractwm", + "altivec.vextsb2d" => "__builtin_altivec_vextsb2d", + "altivec.vextsb2w" => "__builtin_altivec_vextsb2w", + "altivec.vextsd2q" => "__builtin_altivec_vextsd2q", + "altivec.vextsh2d" => "__builtin_altivec_vextsh2d", + "altivec.vextsh2w" => "__builtin_altivec_vextsh2w", + "altivec.vextsw2d" => "__builtin_altivec_vextsw2d", + "altivec.vgbbd" => "__builtin_altivec_vgbbd", + "altivec.vgnb" => "__builtin_altivec_vgnb", + "altivec.vinsblx" => "__builtin_altivec_vinsblx", + "altivec.vinsbrx" => "__builtin_altivec_vinsbrx", + "altivec.vinsbvlx" => "__builtin_altivec_vinsbvlx", + "altivec.vinsbvrx" => "__builtin_altivec_vinsbvrx", + "altivec.vinsdlx" => "__builtin_altivec_vinsdlx", + 
"altivec.vinsdrx" => "__builtin_altivec_vinsdrx", + "altivec.vinshlx" => "__builtin_altivec_vinshlx", + "altivec.vinshrx" => "__builtin_altivec_vinshrx", + "altivec.vinshvlx" => "__builtin_altivec_vinshvlx", + "altivec.vinshvrx" => "__builtin_altivec_vinshvrx", + "altivec.vinswlx" => "__builtin_altivec_vinswlx", + "altivec.vinswrx" => "__builtin_altivec_vinswrx", + "altivec.vinswvlx" => "__builtin_altivec_vinswvlx", + "altivec.vinswvrx" => "__builtin_altivec_vinswvrx", + "altivec.vlogefp" => "__builtin_altivec_vlogefp", + "altivec.vmaddfp" => "__builtin_altivec_vmaddfp", + "altivec.vmaxfp" => "__builtin_altivec_vmaxfp", + "altivec.vmaxsb" => "__builtin_altivec_vmaxsb", + "altivec.vmaxsd" => "__builtin_altivec_vmaxsd", + "altivec.vmaxsh" => "__builtin_altivec_vmaxsh", + "altivec.vmaxsw" => "__builtin_altivec_vmaxsw", + "altivec.vmaxub" => "__builtin_altivec_vmaxub", + "altivec.vmaxud" => "__builtin_altivec_vmaxud", + "altivec.vmaxuh" => "__builtin_altivec_vmaxuh", + "altivec.vmaxuw" => "__builtin_altivec_vmaxuw", + "altivec.vmhaddshs" => "__builtin_altivec_vmhaddshs", + "altivec.vmhraddshs" => "__builtin_altivec_vmhraddshs", + "altivec.vminfp" => "__builtin_altivec_vminfp", + "altivec.vminsb" => "__builtin_altivec_vminsb", + "altivec.vminsd" => "__builtin_altivec_vminsd", + "altivec.vminsh" => "__builtin_altivec_vminsh", + "altivec.vminsw" => "__builtin_altivec_vminsw", + "altivec.vminub" => "__builtin_altivec_vminub", + "altivec.vminud" => "__builtin_altivec_vminud", + "altivec.vminuh" => "__builtin_altivec_vminuh", + "altivec.vminuw" => "__builtin_altivec_vminuw", + "altivec.vmladduhm" => "__builtin_altivec_vmladduhm", + "altivec.vmsumcud" => "__builtin_altivec_vmsumcud", + "altivec.vmsummbm" => "__builtin_altivec_vmsummbm", + "altivec.vmsumshm" => "__builtin_altivec_vmsumshm", + "altivec.vmsumshs" => "__builtin_altivec_vmsumshs", + "altivec.vmsumubm" => "__builtin_altivec_vmsumubm", + "altivec.vmsumudm" => "__builtin_altivec_vmsumudm", + "altivec.vmsumuhm" => "__builtin_altivec_vmsumuhm", + "altivec.vmsumuhs" => "__builtin_altivec_vmsumuhs", + "altivec.vmulesb" => "__builtin_altivec_vmulesb", + "altivec.vmulesd" => "__builtin_altivec_vmulesd", + "altivec.vmulesh" => "__builtin_altivec_vmulesh", + "altivec.vmulesw" => "__builtin_altivec_vmulesw", + "altivec.vmuleub" => "__builtin_altivec_vmuleub", + "altivec.vmuleud" => "__builtin_altivec_vmuleud", + "altivec.vmuleuh" => "__builtin_altivec_vmuleuh", + "altivec.vmuleuw" => "__builtin_altivec_vmuleuw", + "altivec.vmulhsd" => "__builtin_altivec_vmulhsd", + "altivec.vmulhsw" => "__builtin_altivec_vmulhsw", + "altivec.vmulhud" => "__builtin_altivec_vmulhud", + "altivec.vmulhuw" => "__builtin_altivec_vmulhuw", + "altivec.vmulosb" => "__builtin_altivec_vmulosb", + "altivec.vmulosd" => "__builtin_altivec_vmulosd", + "altivec.vmulosh" => "__builtin_altivec_vmulosh", + "altivec.vmulosw" => "__builtin_altivec_vmulosw", + "altivec.vmuloub" => "__builtin_altivec_vmuloub", + "altivec.vmuloud" => "__builtin_altivec_vmuloud", + "altivec.vmulouh" => "__builtin_altivec_vmulouh", + "altivec.vmulouw" => "__builtin_altivec_vmulouw", + "altivec.vnmsubfp" => "__builtin_altivec_vnmsubfp", + "altivec.vpdepd" => "__builtin_altivec_vpdepd", + "altivec.vperm" => "__builtin_altivec_vperm_4si", + "altivec.vpextd" => "__builtin_altivec_vpextd", + "altivec.vpkpx" => "__builtin_altivec_vpkpx", + "altivec.vpksdss" => "__builtin_altivec_vpksdss", + "altivec.vpksdus" => "__builtin_altivec_vpksdus", + "altivec.vpkshss" => "__builtin_altivec_vpkshss", + "altivec.vpkshus" => 
"__builtin_altivec_vpkshus", + "altivec.vpkswss" => "__builtin_altivec_vpkswss", + "altivec.vpkswus" => "__builtin_altivec_vpkswus", + "altivec.vpkudus" => "__builtin_altivec_vpkudus", + "altivec.vpkuhus" => "__builtin_altivec_vpkuhus", + "altivec.vpkuwus" => "__builtin_altivec_vpkuwus", + "altivec.vprtybd" => "__builtin_altivec_vprtybd", + "altivec.vprtybq" => "__builtin_altivec_vprtybq", + "altivec.vprtybw" => "__builtin_altivec_vprtybw", + "altivec.vrefp" => "__builtin_altivec_vrefp", + "altivec.vrfim" => "__builtin_altivec_vrfim", + "altivec.vrfin" => "__builtin_altivec_vrfin", + "altivec.vrfip" => "__builtin_altivec_vrfip", + "altivec.vrfiz" => "__builtin_altivec_vrfiz", + "altivec.vrlb" => "__builtin_altivec_vrlb", + "altivec.vrld" => "__builtin_altivec_vrld", + "altivec.vrldmi" => "__builtin_altivec_vrldmi", + "altivec.vrldnm" => "__builtin_altivec_vrldnm", + "altivec.vrlh" => "__builtin_altivec_vrlh", + "altivec.vrlqmi" => "__builtin_altivec_vrlqmi", + "altivec.vrlqnm" => "__builtin_altivec_vrlqnm", + "altivec.vrlw" => "__builtin_altivec_vrlw", + "altivec.vrlwmi" => "__builtin_altivec_vrlwmi", + "altivec.vrlwnm" => "__builtin_altivec_vrlwnm", + "altivec.vrsqrtefp" => "__builtin_altivec_vrsqrtefp", + "altivec.vsel" => "__builtin_altivec_vsel_4si", + "altivec.vsl" => "__builtin_altivec_vsl", + "altivec.vslb" => "__builtin_altivec_vslb", + "altivec.vsldbi" => "__builtin_altivec_vsldbi", + "altivec.vslh" => "__builtin_altivec_vslh", + "altivec.vslo" => "__builtin_altivec_vslo", + "altivec.vslv" => "__builtin_altivec_vslv", + "altivec.vslw" => "__builtin_altivec_vslw", + "altivec.vsr" => "__builtin_altivec_vsr", + "altivec.vsrab" => "__builtin_altivec_vsrab", + "altivec.vsrah" => "__builtin_altivec_vsrah", + "altivec.vsraw" => "__builtin_altivec_vsraw", + "altivec.vsrb" => "__builtin_altivec_vsrb", + "altivec.vsrdbi" => "__builtin_altivec_vsrdbi", + "altivec.vsrh" => "__builtin_altivec_vsrh", + "altivec.vsro" => "__builtin_altivec_vsro", + "altivec.vsrv" => "__builtin_altivec_vsrv", + "altivec.vsrw" => "__builtin_altivec_vsrw", + "altivec.vstribl" => "__builtin_altivec_vstribl", + "altivec.vstribl.p" => "__builtin_altivec_vstribl_p", + "altivec.vstribr" => "__builtin_altivec_vstribr", + "altivec.vstribr.p" => "__builtin_altivec_vstribr_p", + "altivec.vstrihl" => "__builtin_altivec_vstrihl", + "altivec.vstrihl.p" => "__builtin_altivec_vstrihl_p", + "altivec.vstrihr" => "__builtin_altivec_vstrihr", + "altivec.vstrihr.p" => "__builtin_altivec_vstrihr_p", + "altivec.vsubcuq" => "__builtin_altivec_vsubcuq", + "altivec.vsubcuw" => "__builtin_altivec_vsubcuw", + "altivec.vsubecuq" => "__builtin_altivec_vsubecuq", + "altivec.vsubeuqm" => "__builtin_altivec_vsubeuqm", + "altivec.vsubsbs" => "__builtin_altivec_vsubsbs", + "altivec.vsubshs" => "__builtin_altivec_vsubshs", + "altivec.vsubsws" => "__builtin_altivec_vsubsws", + "altivec.vsububs" => "__builtin_altivec_vsububs", + "altivec.vsubuhs" => "__builtin_altivec_vsubuhs", + "altivec.vsubuws" => "__builtin_altivec_vsubuws", + "altivec.vsum2sws" => "__builtin_altivec_vsum2sws", + "altivec.vsum4sbs" => "__builtin_altivec_vsum4sbs", + "altivec.vsum4shs" => "__builtin_altivec_vsum4shs", + "altivec.vsum4ubs" => "__builtin_altivec_vsum4ubs", + "altivec.vsumsws" => "__builtin_altivec_vsumsws", + "altivec.vupkhpx" => "__builtin_altivec_vupkhpx", + "altivec.vupkhsb" => "__builtin_altivec_vupkhsb", + "altivec.vupkhsh" => "__builtin_altivec_vupkhsh", + "altivec.vupkhsw" => "__builtin_altivec_vupkhsw", + "altivec.vupklpx" => "__builtin_altivec_vupklpx", + 
"altivec.vupklsb" => "__builtin_altivec_vupklsb", + "altivec.vupklsh" => "__builtin_altivec_vupklsh", + "altivec.vupklsw" => "__builtin_altivec_vupklsw", + "bcdadd" => "__builtin_ppc_bcdadd", + "bcdadd.p" => "__builtin_ppc_bcdadd_p", + "bcdsub" => "__builtin_ppc_bcdsub", + "bcdsub.p" => "__builtin_ppc_bcdsub_p", + "bpermd" => "__builtin_bpermd", + "cbcdtd" => "__builtin_cbcdtd", + "cbcdtdd" => "__builtin_ppc_cbcdtd", + "cdtbcd" => "__builtin_cdtbcd", + "cdtbcdd" => "__builtin_ppc_cdtbcd", + "cfuged" => "__builtin_cfuged", + "cmpeqb" => "__builtin_ppc_cmpeqb", + "cmprb" => "__builtin_ppc_cmprb", + "cntlzdm" => "__builtin_cntlzdm", + "cnttzdm" => "__builtin_cnttzdm", + "compare.exp.eq" => "__builtin_ppc_compare_exp_eq", + "compare.exp.gt" => "__builtin_ppc_compare_exp_gt", + "compare.exp.lt" => "__builtin_ppc_compare_exp_lt", + "compare.exp.uo" => "__builtin_ppc_compare_exp_uo", + "darn" => "__builtin_darn", + "darn32" => "__builtin_darn_32", + "darnraw" => "__builtin_darn_raw", + "dcbf" => "__builtin_dcbf", + "dcbfl" => "__builtin_ppc_dcbfl", + "dcbflp" => "__builtin_ppc_dcbflp", + "dcbst" => "__builtin_ppc_dcbst", + "dcbt" => "__builtin_ppc_dcbt", + "dcbtst" => "__builtin_ppc_dcbtst", + "dcbtstt" => "__builtin_ppc_dcbtstt", + "dcbtt" => "__builtin_ppc_dcbtt", + "dcbz" => "__builtin_ppc_dcbz", + "divde" => "__builtin_divde", + "divdeu" => "__builtin_divdeu", + "divf128.round.to.odd" => "__builtin_divf128_round_to_odd", + "divwe" => "__builtin_divwe", + "divweu" => "__builtin_divweu", + "eieio" => "__builtin_ppc_eieio", + "extract.exp" => "__builtin_ppc_extract_exp", + "extract.sig" => "__builtin_ppc_extract_sig", + "fcfid" => "__builtin_ppc_fcfid", + "fcfud" => "__builtin_ppc_fcfud", + "fctid" => "__builtin_ppc_fctid", + "fctidz" => "__builtin_ppc_fctidz", + "fctiw" => "__builtin_ppc_fctiw", + "fctiwz" => "__builtin_ppc_fctiwz", + "fctudz" => "__builtin_ppc_fctudz", + "fctuwz" => "__builtin_ppc_fctuwz", + "fence" => "__builtin_ppc_fence", + "fmaf128.round.to.odd" => "__builtin_fmaf128_round_to_odd", + "fmsub" => "__builtin_ppc_fmsub", + "fmsubs" => "__builtin_ppc_fmsubs", + "fnabs" => "__builtin_ppc_fnabs", + "fnabss" => "__builtin_ppc_fnabss", + "fnmadd" => "__builtin_ppc_fnmadd", + "fnmadds" => "__builtin_ppc_fnmadds", + "fre" => "__builtin_ppc_fre", + "fres" => "__builtin_ppc_fres", + "frsqrte" => "__builtin_ppc_frsqrte", + "frsqrtes" => "__builtin_ppc_frsqrtes", + "fsel" => "__builtin_ppc_fsel", + "fsels" => "__builtin_ppc_fsels", + "get.texasr" => "__builtin_get_texasr", + "get.texasru" => "__builtin_get_texasru", + "get.tfhar" => "__builtin_get_tfhar", + "get.tfiar" => "__builtin_get_tfiar", + "icbt" => "__builtin_ppc_icbt", + "insert.exp" => "__builtin_ppc_insert_exp", + "iospace.eieio" => "__builtin_ppc_iospace_eieio", + "iospace.lwsync" => "__builtin_ppc_iospace_lwsync", + "iospace.sync" => "__builtin_ppc_iospace_sync", + "isync" => "__builtin_ppc_isync", + "load4r" => "__builtin_ppc_load4r", + "load8r" => "__builtin_ppc_load8r", + "lwsync" => "__builtin_ppc_lwsync", + "maddhd" => "__builtin_ppc_maddhd", + "maddhdu" => "__builtin_ppc_maddhdu", + "maddld" => "__builtin_ppc_maddld", + "mffsl" => "__builtin_ppc_mffsl", + "mfmsr" => "__builtin_ppc_mfmsr", + "mftbu" => "__builtin_ppc_mftbu", + "mtfsb0" => "__builtin_ppc_mtfsb0", + "mtfsb1" => "__builtin_ppc_mtfsb1", + "mtfsfi" => "__builtin_ppc_mtfsfi", + "mtmsr" => "__builtin_ppc_mtmsr", + "mulf128.round.to.odd" => "__builtin_mulf128_round_to_odd", + "mulhd" => "__builtin_ppc_mulhd", + "mulhdu" => "__builtin_ppc_mulhdu", + "mulhw" => 
"__builtin_ppc_mulhw", + "mulhwu" => "__builtin_ppc_mulhwu", + "pack.longdouble" => "__builtin_pack_longdouble", + "pdepd" => "__builtin_pdepd", + "pextd" => "__builtin_pextd", + "qpx.qvfabs" => "__builtin_qpx_qvfabs", + "qpx.qvfadd" => "__builtin_qpx_qvfadd", + "qpx.qvfadds" => "__builtin_qpx_qvfadds", + "qpx.qvfcfid" => "__builtin_qpx_qvfcfid", + "qpx.qvfcfids" => "__builtin_qpx_qvfcfids", + "qpx.qvfcfidu" => "__builtin_qpx_qvfcfidu", + "qpx.qvfcfidus" => "__builtin_qpx_qvfcfidus", + "qpx.qvfcmpeq" => "__builtin_qpx_qvfcmpeq", + "qpx.qvfcmpgt" => "__builtin_qpx_qvfcmpgt", + "qpx.qvfcmplt" => "__builtin_qpx_qvfcmplt", + "qpx.qvfcpsgn" => "__builtin_qpx_qvfcpsgn", + "qpx.qvfctid" => "__builtin_qpx_qvfctid", + "qpx.qvfctidu" => "__builtin_qpx_qvfctidu", + "qpx.qvfctiduz" => "__builtin_qpx_qvfctiduz", + "qpx.qvfctidz" => "__builtin_qpx_qvfctidz", + "qpx.qvfctiw" => "__builtin_qpx_qvfctiw", + "qpx.qvfctiwu" => "__builtin_qpx_qvfctiwu", + "qpx.qvfctiwuz" => "__builtin_qpx_qvfctiwuz", + "qpx.qvfctiwz" => "__builtin_qpx_qvfctiwz", + "qpx.qvflogical" => "__builtin_qpx_qvflogical", + "qpx.qvfmadd" => "__builtin_qpx_qvfmadd", + "qpx.qvfmadds" => "__builtin_qpx_qvfmadds", + "qpx.qvfmsub" => "__builtin_qpx_qvfmsub", + "qpx.qvfmsubs" => "__builtin_qpx_qvfmsubs", + "qpx.qvfmul" => "__builtin_qpx_qvfmul", + "qpx.qvfmuls" => "__builtin_qpx_qvfmuls", + "qpx.qvfnabs" => "__builtin_qpx_qvfnabs", + "qpx.qvfneg" => "__builtin_qpx_qvfneg", + "qpx.qvfnmadd" => "__builtin_qpx_qvfnmadd", + "qpx.qvfnmadds" => "__builtin_qpx_qvfnmadds", + "qpx.qvfnmsub" => "__builtin_qpx_qvfnmsub", + "qpx.qvfnmsubs" => "__builtin_qpx_qvfnmsubs", + "qpx.qvfperm" => "__builtin_qpx_qvfperm", + "qpx.qvfre" => "__builtin_qpx_qvfre", + "qpx.qvfres" => "__builtin_qpx_qvfres", + "qpx.qvfrim" => "__builtin_qpx_qvfrim", + "qpx.qvfrin" => "__builtin_qpx_qvfrin", + "qpx.qvfrip" => "__builtin_qpx_qvfrip", + "qpx.qvfriz" => "__builtin_qpx_qvfriz", + "qpx.qvfrsp" => "__builtin_qpx_qvfrsp", + "qpx.qvfrsqrte" => "__builtin_qpx_qvfrsqrte", + "qpx.qvfrsqrtes" => "__builtin_qpx_qvfrsqrtes", + "qpx.qvfsel" => "__builtin_qpx_qvfsel", + "qpx.qvfsub" => "__builtin_qpx_qvfsub", + "qpx.qvfsubs" => "__builtin_qpx_qvfsubs", + "qpx.qvftstnan" => "__builtin_qpx_qvftstnan", + "qpx.qvfxmadd" => "__builtin_qpx_qvfxmadd", + "qpx.qvfxmadds" => "__builtin_qpx_qvfxmadds", + "qpx.qvfxmul" => "__builtin_qpx_qvfxmul", + "qpx.qvfxmuls" => "__builtin_qpx_qvfxmuls", + "qpx.qvfxxcpnmadd" => "__builtin_qpx_qvfxxcpnmadd", + "qpx.qvfxxcpnmadds" => "__builtin_qpx_qvfxxcpnmadds", + "qpx.qvfxxmadd" => "__builtin_qpx_qvfxxmadd", + "qpx.qvfxxmadds" => "__builtin_qpx_qvfxxmadds", + "qpx.qvfxxnpmadd" => "__builtin_qpx_qvfxxnpmadd", + "qpx.qvfxxnpmadds" => "__builtin_qpx_qvfxxnpmadds", + "qpx.qvgpci" => "__builtin_qpx_qvgpci", + "qpx.qvlfcd" => "__builtin_qpx_qvlfcd", + "qpx.qvlfcda" => "__builtin_qpx_qvlfcda", + "qpx.qvlfcs" => "__builtin_qpx_qvlfcs", + "qpx.qvlfcsa" => "__builtin_qpx_qvlfcsa", + "qpx.qvlfd" => "__builtin_qpx_qvlfd", + "qpx.qvlfda" => "__builtin_qpx_qvlfda", + "qpx.qvlfiwa" => "__builtin_qpx_qvlfiwa", + "qpx.qvlfiwaa" => "__builtin_qpx_qvlfiwaa", + "qpx.qvlfiwz" => "__builtin_qpx_qvlfiwz", + "qpx.qvlfiwza" => "__builtin_qpx_qvlfiwza", + "qpx.qvlfs" => "__builtin_qpx_qvlfs", + "qpx.qvlfsa" => "__builtin_qpx_qvlfsa", + "qpx.qvlpcld" => "__builtin_qpx_qvlpcld", + "qpx.qvlpcls" => "__builtin_qpx_qvlpcls", + "qpx.qvlpcrd" => "__builtin_qpx_qvlpcrd", + "qpx.qvlpcrs" => "__builtin_qpx_qvlpcrs", + "qpx.qvstfcd" => "__builtin_qpx_qvstfcd", + "qpx.qvstfcda" => 
"__builtin_qpx_qvstfcda", + "qpx.qvstfcs" => "__builtin_qpx_qvstfcs", + "qpx.qvstfcsa" => "__builtin_qpx_qvstfcsa", + "qpx.qvstfd" => "__builtin_qpx_qvstfd", + "qpx.qvstfda" => "__builtin_qpx_qvstfda", + "qpx.qvstfiw" => "__builtin_qpx_qvstfiw", + "qpx.qvstfiwa" => "__builtin_qpx_qvstfiwa", + "qpx.qvstfs" => "__builtin_qpx_qvstfs", + "qpx.qvstfsa" => "__builtin_qpx_qvstfsa", + "readflm" => "__builtin_readflm", + "rlwimi" => "__builtin_ppc_rlwimi", + "rlwnm" => "__builtin_ppc_rlwnm", + "scalar.extract.expq" => "__builtin_vsx_scalar_extract_expq", + "scalar.insert.exp.qp" => "__builtin_vsx_scalar_insert_exp_qp", + "set.texasr" => "__builtin_set_texasr", + "set.texasru" => "__builtin_set_texasru", + "set.tfhar" => "__builtin_set_tfhar", + "set.tfiar" => "__builtin_set_tfiar", + "setb" => "__builtin_ppc_setb", + "setflm" => "__builtin_setflm", + "setrnd" => "__builtin_setrnd", + "sqrtf128.round.to.odd" => "__builtin_sqrtf128_round_to_odd", + "stbcx" => "__builtin_ppc_stbcx", + "stdcx" => "__builtin_ppc_stdcx", + "stfiw" => "__builtin_ppc_stfiw", + "store2r" => "__builtin_ppc_store2r", + "store4r" => "__builtin_ppc_store4r", + "store8r" => "__builtin_ppc_store8r", + "stwcx" => "__builtin_ppc_stwcx", + "subf128.round.to.odd" => "__builtin_subf128_round_to_odd", + "sync" => "__builtin_ppc_sync", + "tabort" => "__builtin_tabort", + "tabortdc" => "__builtin_tabortdc", + "tabortdci" => "__builtin_tabortdci", + "tabortwc" => "__builtin_tabortwc", + "tabortwci" => "__builtin_tabortwci", + "tbegin" => "__builtin_tbegin", + "tcheck" => "__builtin_tcheck", + "tdw" => "__builtin_ppc_tdw", + "tend" => "__builtin_tend", + "tendall" => "__builtin_tendall", + "trap" => "__builtin_ppc_trap", + "trapd" => "__builtin_ppc_trapd", + "trechkpt" => "__builtin_trechkpt", + "treclaim" => "__builtin_treclaim", + "tresume" => "__builtin_tresume", + "truncf128.round.to.odd" => "__builtin_truncf128_round_to_odd", + "tsr" => "__builtin_tsr", + "tsuspend" => "__builtin_tsuspend", + "ttest" => "__builtin_ttest", + "tw" => "__builtin_ppc_tw", + "unpack.longdouble" => "__builtin_unpack_longdouble", + "vsx.xsmaxdp" => "__builtin_vsx_xsmaxdp", + "vsx.xsmindp" => "__builtin_vsx_xsmindp", + "vsx.xvcmpeqdp" => "__builtin_vsx_xvcmpeqdp", + "vsx.xvcmpeqdp.p" => "__builtin_vsx_xvcmpeqdp_p", + "vsx.xvcmpeqsp" => "__builtin_vsx_xvcmpeqsp", + "vsx.xvcmpeqsp.p" => "__builtin_vsx_xvcmpeqsp_p", + "vsx.xvcmpgedp" => "__builtin_vsx_xvcmpgedp", + "vsx.xvcmpgedp.p" => "__builtin_vsx_xvcmpgedp_p", + "vsx.xvcmpgesp" => "__builtin_vsx_xvcmpgesp", + "vsx.xvcmpgesp.p" => "__builtin_vsx_xvcmpgesp_p", + "vsx.xvcmpgtdp" => "__builtin_vsx_xvcmpgtdp", + "vsx.xvcmpgtdp.p" => "__builtin_vsx_xvcmpgtdp_p", + "vsx.xvcmpgtsp" => "__builtin_vsx_xvcmpgtsp", + "vsx.xvcmpgtsp.p" => "__builtin_vsx_xvcmpgtsp_p", + "vsx.xvcvbf16spn" => "__builtin_vsx_xvcvbf16spn", + "vsx.xvcvdpsp" => "__builtin_vsx_xvcvdpsp", + "vsx.xvcvdpsxws" => "__builtin_vsx_xvcvdpsxws", + "vsx.xvcvdpuxws" => "__builtin_vsx_xvcvdpuxws", + "vsx.xvcvhpsp" => "__builtin_vsx_xvcvhpsp", + "vsx.xvcvspbf16" => "__builtin_vsx_xvcvspbf16", + "vsx.xvcvspdp" => "__builtin_vsx_xvcvspdp", + "vsx.xvcvsphp" => "__builtin_vsx_xvcvsphp", + "vsx.xvcvspsxds" => "__builtin_vsx_xvcvspsxds", + "vsx.xvcvspuxds" => "__builtin_vsx_xvcvspuxds", + "vsx.xvcvsxdsp" => "__builtin_vsx_xvcvsxdsp", + "vsx.xvcvsxwdp" => "__builtin_vsx_xvcvsxwdp", + "vsx.xvcvuxdsp" => "__builtin_vsx_xvcvuxdsp", + "vsx.xvcvuxwdp" => "__builtin_vsx_xvcvuxwdp", + "vsx.xvdivdp" => "__builtin_vsx_xvdivdp", + "vsx.xvdivsp" => "__builtin_vsx_xvdivsp", + 
"vsx.xviexpdp" => "__builtin_vsx_xviexpdp", + "vsx.xviexpsp" => "__builtin_vsx_xviexpsp", + "vsx.xvmaxdp" => "__builtin_vsx_xvmaxdp", + "vsx.xvmaxsp" => "__builtin_vsx_xvmaxsp", + "vsx.xvmindp" => "__builtin_vsx_xvmindp", + "vsx.xvminsp" => "__builtin_vsx_xvminsp", + "vsx.xvredp" => "__builtin_vsx_xvredp", + "vsx.xvresp" => "__builtin_vsx_xvresp", + "vsx.xvrsqrtedp" => "__builtin_vsx_xvrsqrtedp", + "vsx.xvrsqrtesp" => "__builtin_vsx_xvrsqrtesp", + "vsx.xvtdivdp" => "__builtin_vsx_xvtdivdp", + "vsx.xvtdivsp" => "__builtin_vsx_xvtdivsp", + "vsx.xvtlsbb" => "__builtin_vsx_xvtlsbb", + "vsx.xvtsqrtdp" => "__builtin_vsx_xvtsqrtdp", + "vsx.xvtsqrtsp" => "__builtin_vsx_xvtsqrtsp", + "vsx.xvtstdcdp" => "__builtin_vsx_xvtstdcdp", + "vsx.xvtstdcsp" => "__builtin_vsx_xvtstdcsp", + "vsx.xvxexpdp" => "__builtin_vsx_xvxexpdp", + "vsx.xvxexpsp" => "__builtin_vsx_xvxexpsp", + "vsx.xvxsigdp" => "__builtin_vsx_xvxsigdp", + "vsx.xvxsigsp" => "__builtin_vsx_xvxsigsp", + "vsx.xxblendvb" => "__builtin_vsx_xxblendvb", + "vsx.xxblendvd" => "__builtin_vsx_xxblendvd", + "vsx.xxblendvh" => "__builtin_vsx_xxblendvh", + "vsx.xxblendvw" => "__builtin_vsx_xxblendvw", + "vsx.xxeval" => "__builtin_vsx_xxeval", + "vsx.xxextractuw" => "__builtin_vsx_xxextractuw", + "vsx.xxgenpcvbm" => "__builtin_vsx_xxgenpcvbm", + "vsx.xxgenpcvdm" => "__builtin_vsx_xxgenpcvdm", + "vsx.xxgenpcvhm" => "__builtin_vsx_xxgenpcvhm", + "vsx.xxgenpcvwm" => "__builtin_vsx_xxgenpcvwm", + "vsx.xxinsertw" => "__builtin_vsx_xxinsertw", + "vsx.xxleqv" => "__builtin_vsx_xxleqv", + "vsx.xxpermx" => "__builtin_vsx_xxpermx", + _ => unimplemented!("***** unsupported LLVM intrinsic {}", name), + } + } + ppc(name) + } + "ptx" => { + #[allow(non_snake_case)] + fn ptx(name: &str) -> &str { + match name { + // ptx + "bar.sync" => "__builtin_ptx_bar_sync", + "read.clock" => "__builtin_ptx_read_clock", + "read.clock64" => "__builtin_ptx_read_clock64", + "read.gridid" => "__builtin_ptx_read_gridid", + "read.laneid" => "__builtin_ptx_read_laneid", + "read.lanemask.eq" => "__builtin_ptx_read_lanemask_eq", + "read.lanemask.ge" => "__builtin_ptx_read_lanemask_ge", + "read.lanemask.gt" => "__builtin_ptx_read_lanemask_gt", + "read.lanemask.le" => "__builtin_ptx_read_lanemask_le", + "read.lanemask.lt" => "__builtin_ptx_read_lanemask_lt", + "read.nsmid" => "__builtin_ptx_read_nsmid", + "read.nwarpid" => "__builtin_ptx_read_nwarpid", + "read.pm0" => "__builtin_ptx_read_pm0", + "read.pm1" => "__builtin_ptx_read_pm1", + "read.pm2" => "__builtin_ptx_read_pm2", + "read.pm3" => "__builtin_ptx_read_pm3", + "read.smid" => "__builtin_ptx_read_smid", + "read.warpid" => "__builtin_ptx_read_warpid", + _ => unimplemented!("***** unsupported LLVM intrinsic {}", name), + } + } + ptx(name) + } + "r600" => { + #[allow(non_snake_case)] + fn r600(name: &str) -> &str { + match name { + // r600 + "group.barrier" => "__builtin_r600_group_barrier", + "implicitarg.ptr" => "__builtin_r600_implicitarg_ptr", + "rat.store.typed" => "__builtin_r600_rat_store_typed", + "read.global.size.x" => "__builtin_r600_read_global_size_x", + "read.global.size.y" => "__builtin_r600_read_global_size_y", + "read.global.size.z" => "__builtin_r600_read_global_size_z", + "read.ngroups.x" => "__builtin_r600_read_ngroups_x", + "read.ngroups.y" => "__builtin_r600_read_ngroups_y", + "read.ngroups.z" => "__builtin_r600_read_ngroups_z", + "read.tgid.x" => "__builtin_r600_read_tgid_x", + "read.tgid.y" => "__builtin_r600_read_tgid_y", + "read.tgid.z" => "__builtin_r600_read_tgid_z", + "read.tidig.x" => 
"__builtin_r600_read_tidig_x", + "read.tidig.y" => "__builtin_r600_read_tidig_y", + "read.tidig.z" => "__builtin_r600_read_tidig_z", + _ => unimplemented!("***** unsupported LLVM intrinsic {}", name), + } + } + r600(name) + } + "riscv" => { + #[allow(non_snake_case)] + fn riscv(name: &str) -> &str { + match name { + // riscv + "aes32dsi" => "__builtin_riscv_aes32dsi", + "aes32dsmi" => "__builtin_riscv_aes32dsmi", + "aes32esi" => "__builtin_riscv_aes32esi", + "aes32esmi" => "__builtin_riscv_aes32esmi", + "aes64ds" => "__builtin_riscv_aes64ds", + "aes64dsm" => "__builtin_riscv_aes64dsm", + "aes64es" => "__builtin_riscv_aes64es", + "aes64esm" => "__builtin_riscv_aes64esm", + "aes64im" => "__builtin_riscv_aes64im", + "aes64ks1i" => "__builtin_riscv_aes64ks1i", + "aes64ks2" => "__builtin_riscv_aes64ks2", + "sha512sig0" => "__builtin_riscv_sha512sig0", + "sha512sig0h" => "__builtin_riscv_sha512sig0h", + "sha512sig0l" => "__builtin_riscv_sha512sig0l", + "sha512sig1" => "__builtin_riscv_sha512sig1", + "sha512sig1h" => "__builtin_riscv_sha512sig1h", + "sha512sig1l" => "__builtin_riscv_sha512sig1l", + "sha512sum0" => "__builtin_riscv_sha512sum0", + "sha512sum0r" => "__builtin_riscv_sha512sum0r", + "sha512sum1" => "__builtin_riscv_sha512sum1", + "sha512sum1r" => "__builtin_riscv_sha512sum1r", + _ => unimplemented!("***** unsupported LLVM intrinsic {}", name), + } + } + riscv(name) + } + "s390" => { + #[allow(non_snake_case)] + fn s390(name: &str) -> &str { + match name { + // s390 + "bdepg" => "__builtin_s390_bdepg", + "bextg" => "__builtin_s390_bextg", + "efpc" => "__builtin_s390_efpc", + "etnd" => "__builtin_tx_nesting_depth", + "lcbb" => "__builtin_s390_lcbb", + "ppa.txassist" => "__builtin_tx_assist", + "sfpc" => "__builtin_s390_sfpc", + "tend" => "__builtin_tend", + "vaccb" => "__builtin_s390_vaccb", + "vacccq" => "__builtin_s390_vacccq", + "vaccf" => "__builtin_s390_vaccf", + "vaccg" => "__builtin_s390_vaccg", + "vacch" => "__builtin_s390_vacch", + "vaccq" => "__builtin_s390_vaccq", + "vacq" => "__builtin_s390_vacq", + "vaq" => "__builtin_s390_vaq", + "vavgb" => "__builtin_s390_vavgb", + "vavgf" => "__builtin_s390_vavgf", + "vavgg" => "__builtin_s390_vavgg", + "vavgh" => "__builtin_s390_vavgh", + "vavglb" => "__builtin_s390_vavglb", + "vavglf" => "__builtin_s390_vavglf", + "vavglg" => "__builtin_s390_vavglg", + "vavglh" => "__builtin_s390_vavglh", + "vavglq" => "__builtin_s390_vavglq", + "vavgq" => "__builtin_s390_vavgq", + "vbperm" => "__builtin_s390_vbperm", + "vcfn" => "__builtin_s390_vcfn", + "vcksm" => "__builtin_s390_vcksm", + "vclfnhs" => "__builtin_s390_vclfnhs", + "vclfnls" => "__builtin_s390_vclfnls", + "vcnf" => "__builtin_s390_vcnf", + "vcrnfs" => "__builtin_s390_vcrnfs", + "verimb" => "__builtin_s390_verimb", + "verimf" => "__builtin_s390_verimf", + "verimg" => "__builtin_s390_verimg", + "verimh" => "__builtin_s390_verimh", + "veval" => "__builtin_s390_veval", + "vfaeb" => "__builtin_s390_vfaeb", + "vfaef" => "__builtin_s390_vfaef", + "vfaeh" => "__builtin_s390_vfaeh", + "vfaezb" => "__builtin_s390_vfaezb", + "vfaezf" => "__builtin_s390_vfaezf", + "vfaezh" => "__builtin_s390_vfaezh", + "vfeeb" => "__builtin_s390_vfeeb", + "vfeef" => "__builtin_s390_vfeef", + "vfeeh" => "__builtin_s390_vfeeh", + "vfeezb" => "__builtin_s390_vfeezb", + "vfeezf" => "__builtin_s390_vfeezf", + "vfeezh" => "__builtin_s390_vfeezh", + "vfeneb" => "__builtin_s390_vfeneb", + "vfenef" => "__builtin_s390_vfenef", + "vfeneh" => "__builtin_s390_vfeneh", + "vfenezb" => "__builtin_s390_vfenezb", + "vfenezf" => 
"__builtin_s390_vfenezf", + "vfenezh" => "__builtin_s390_vfenezh", + "vgemb" => "__builtin_s390_vgemb", + "vgemf" => "__builtin_s390_vgemf", + "vgemg" => "__builtin_s390_vgemg", + "vgemh" => "__builtin_s390_vgemh", + "vgemq" => "__builtin_s390_vgemq", + "vgfmab" => "__builtin_s390_vgfmab", + "vgfmaf" => "__builtin_s390_vgfmaf", + "vgfmag" => "__builtin_s390_vgfmag", + "vgfmah" => "__builtin_s390_vgfmah", + "vgfmb" => "__builtin_s390_vgfmb", + "vgfmf" => "__builtin_s390_vgfmf", + "vgfmg" => "__builtin_s390_vgfmg", + "vgfmh" => "__builtin_s390_vgfmh", + "vistrb" => "__builtin_s390_vistrb", + "vistrf" => "__builtin_s390_vistrf", + "vistrh" => "__builtin_s390_vistrh", + "vlbb" => "__builtin_s390_vlbb", + "vll" => "__builtin_s390_vll", + "vlrl" => "__builtin_s390_vlrlr", + "vmaeb" => "__builtin_s390_vmaeb", + "vmaef" => "__builtin_s390_vmaef", + "vmaeg" => "__builtin_s390_vmaeg", + "vmaeh" => "__builtin_s390_vmaeh", + "vmahb" => "__builtin_s390_vmahb", + "vmahf" => "__builtin_s390_vmahf", + "vmahg" => "__builtin_s390_vmahg", + "vmahh" => "__builtin_s390_vmahh", + "vmahq" => "__builtin_s390_vmahq", + "vmaleb" => "__builtin_s390_vmaleb", + "vmalef" => "__builtin_s390_vmalef", + "vmaleg" => "__builtin_s390_vmaleg", + "vmaleh" => "__builtin_s390_vmaleh", + "vmalhb" => "__builtin_s390_vmalhb", + "vmalhf" => "__builtin_s390_vmalhf", + "vmalhg" => "__builtin_s390_vmalhg", + "vmalhh" => "__builtin_s390_vmalhh", + "vmalhq" => "__builtin_s390_vmalhq", + "vmalob" => "__builtin_s390_vmalob", + "vmalof" => "__builtin_s390_vmalof", + "vmalog" => "__builtin_s390_vmalog", + "vmaloh" => "__builtin_s390_vmaloh", + "vmaob" => "__builtin_s390_vmaob", + "vmaof" => "__builtin_s390_vmaof", + "vmaog" => "__builtin_s390_vmaog", + "vmaoh" => "__builtin_s390_vmaoh", + "vmeb" => "__builtin_s390_vmeb", + "vmef" => "__builtin_s390_vmef", + "vmeg" => "__builtin_s390_vmeg", + "vmeh" => "__builtin_s390_vmeh", + "vmhb" => "__builtin_s390_vmhb", + "vmhf" => "__builtin_s390_vmhf", + "vmhg" => "__builtin_s390_vmhg", + "vmhh" => "__builtin_s390_vmhh", + "vmhq" => "__builtin_s390_vmhq", + "vmleb" => "__builtin_s390_vmleb", + "vmlef" => "__builtin_s390_vmlef", + "vmleg" => "__builtin_s390_vmleg", + "vmleh" => "__builtin_s390_vmleh", + "vmlhb" => "__builtin_s390_vmlhb", + "vmlhf" => "__builtin_s390_vmlhf", + "vmlhg" => "__builtin_s390_vmlhg", + "vmlhh" => "__builtin_s390_vmlhh", + "vmlhq" => "__builtin_s390_vmlhq", + "vmlob" => "__builtin_s390_vmlob", + "vmlof" => "__builtin_s390_vmlof", + "vmlog" => "__builtin_s390_vmlog", + "vmloh" => "__builtin_s390_vmloh", + "vmob" => "__builtin_s390_vmob", + "vmof" => "__builtin_s390_vmof", + "vmog" => "__builtin_s390_vmog", + "vmoh" => "__builtin_s390_vmoh", + "vmslg" => "__builtin_s390_vmslg", + "vpdi" => "__builtin_s390_vpdi", + "vperm" => "__builtin_s390_vperm", + "vpklsf" => "__builtin_s390_vpklsf", + "vpklsg" => "__builtin_s390_vpklsg", + "vpklsh" => "__builtin_s390_vpklsh", + "vpksf" => "__builtin_s390_vpksf", + "vpksg" => "__builtin_s390_vpksg", + "vpksh" => "__builtin_s390_vpksh", + "vsbcbiq" => "__builtin_s390_vsbcbiq", + "vsbiq" => "__builtin_s390_vsbiq", + "vscbib" => "__builtin_s390_vscbib", + "vscbif" => "__builtin_s390_vscbif", + "vscbig" => "__builtin_s390_vscbig", + "vscbih" => "__builtin_s390_vscbih", + "vscbiq" => "__builtin_s390_vscbiq", + "vsl" => "__builtin_s390_vsl", + "vslb" => "__builtin_s390_vslb", + "vsld" => "__builtin_s390_vsld", + "vsldb" => "__builtin_s390_vsldb", + "vsq" => "__builtin_s390_vsq", + "vsra" => "__builtin_s390_vsra", + "vsrab" => 
"__builtin_s390_vsrab", + "vsrd" => "__builtin_s390_vsrd", + "vsrl" => "__builtin_s390_vsrl", + "vsrlb" => "__builtin_s390_vsrlb", + "vstl" => "__builtin_s390_vstl", + "vstrcb" => "__builtin_s390_vstrcb", + "vstrcf" => "__builtin_s390_vstrcf", + "vstrch" => "__builtin_s390_vstrch", + "vstrczb" => "__builtin_s390_vstrczb", + "vstrczf" => "__builtin_s390_vstrczf", + "vstrczh" => "__builtin_s390_vstrczh", + "vstrl" => "__builtin_s390_vstrlr", + "vsumb" => "__builtin_s390_vsumb", + "vsumgf" => "__builtin_s390_vsumgf", + "vsumgh" => "__builtin_s390_vsumgh", + "vsumh" => "__builtin_s390_vsumh", + "vsumqf" => "__builtin_s390_vsumqf", + "vsumqg" => "__builtin_s390_vsumqg", + "vtm" => "__builtin_s390_vtm", + "vuphb" => "__builtin_s390_vuphb", + "vuphf" => "__builtin_s390_vuphf", + "vuphg" => "__builtin_s390_vuphg", + "vuphh" => "__builtin_s390_vuphh", + "vuplb" => "__builtin_s390_vuplb", + "vuplf" => "__builtin_s390_vuplf", + "vuplg" => "__builtin_s390_vuplg", + "vuplhb" => "__builtin_s390_vuplhb", + "vuplhf" => "__builtin_s390_vuplhf", + "vuplhg" => "__builtin_s390_vuplhg", + "vuplhh" => "__builtin_s390_vuplhh", + "vuplhw" => "__builtin_s390_vuplhw", + "vupllb" => "__builtin_s390_vupllb", + "vupllf" => "__builtin_s390_vupllf", + "vupllg" => "__builtin_s390_vupllg", + "vupllh" => "__builtin_s390_vupllh", + _ => unimplemented!("***** unsupported LLVM intrinsic {}", name), + } + } + s390(name) + } + "ve" => { + #[allow(non_snake_case)] + fn ve(name: &str) -> &str { + match name { + // ve + "vl.andm.MMM" => "__builtin_ve_vl_andm_MMM", + "vl.andm.mmm" => "__builtin_ve_vl_andm_mmm", + "vl.eqvm.MMM" => "__builtin_ve_vl_eqvm_MMM", + "vl.eqvm.mmm" => "__builtin_ve_vl_eqvm_mmm", + "vl.extract.vm512l" => "__builtin_ve_vl_extract_vm512l", + "vl.extract.vm512u" => "__builtin_ve_vl_extract_vm512u", + "vl.fencec.s" => "__builtin_ve_vl_fencec_s", + "vl.fencei" => "__builtin_ve_vl_fencei", + "vl.fencem.s" => "__builtin_ve_vl_fencem_s", + "vl.fidcr.sss" => "__builtin_ve_vl_fidcr_sss", + "vl.insert.vm512l" => "__builtin_ve_vl_insert_vm512l", + "vl.insert.vm512u" => "__builtin_ve_vl_insert_vm512u", + "vl.lcr.sss" => "__builtin_ve_vl_lcr_sss", + "vl.lsv.vvss" => "__builtin_ve_vl_lsv_vvss", + "vl.lvm.MMss" => "__builtin_ve_vl_lvm_MMss", + "vl.lvm.mmss" => "__builtin_ve_vl_lvm_mmss", + "vl.lvsd.svs" => "__builtin_ve_vl_lvsd_svs", + "vl.lvsl.svs" => "__builtin_ve_vl_lvsl_svs", + "vl.lvss.svs" => "__builtin_ve_vl_lvss_svs", + "vl.lzvm.sml" => "__builtin_ve_vl_lzvm_sml", + "vl.negm.MM" => "__builtin_ve_vl_negm_MM", + "vl.negm.mm" => "__builtin_ve_vl_negm_mm", + "vl.nndm.MMM" => "__builtin_ve_vl_nndm_MMM", + "vl.nndm.mmm" => "__builtin_ve_vl_nndm_mmm", + "vl.orm.MMM" => "__builtin_ve_vl_orm_MMM", + "vl.orm.mmm" => "__builtin_ve_vl_orm_mmm", + "vl.pack.f32a" => "__builtin_ve_vl_pack_f32a", + "vl.pack.f32p" => "__builtin_ve_vl_pack_f32p", + "vl.pcvm.sml" => "__builtin_ve_vl_pcvm_sml", + "vl.pfchv.ssl" => "__builtin_ve_vl_pfchv_ssl", + "vl.pfchvnc.ssl" => "__builtin_ve_vl_pfchvnc_ssl", + "vl.pvadds.vsvMvl" => "__builtin_ve_vl_pvadds_vsvMvl", + "vl.pvadds.vsvl" => "__builtin_ve_vl_pvadds_vsvl", + "vl.pvadds.vsvvl" => "__builtin_ve_vl_pvadds_vsvvl", + "vl.pvadds.vvvMvl" => "__builtin_ve_vl_pvadds_vvvMvl", + "vl.pvadds.vvvl" => "__builtin_ve_vl_pvadds_vvvl", + "vl.pvadds.vvvvl" => "__builtin_ve_vl_pvadds_vvvvl", + "vl.pvaddu.vsvMvl" => "__builtin_ve_vl_pvaddu_vsvMvl", + "vl.pvaddu.vsvl" => "__builtin_ve_vl_pvaddu_vsvl", + "vl.pvaddu.vsvvl" => "__builtin_ve_vl_pvaddu_vsvvl", + "vl.pvaddu.vvvMvl" => "__builtin_ve_vl_pvaddu_vvvMvl", 
+ "vl.pvaddu.vvvl" => "__builtin_ve_vl_pvaddu_vvvl", + "vl.pvaddu.vvvvl" => "__builtin_ve_vl_pvaddu_vvvvl", + "vl.pvand.vsvMvl" => "__builtin_ve_vl_pvand_vsvMvl", + "vl.pvand.vsvl" => "__builtin_ve_vl_pvand_vsvl", + "vl.pvand.vsvvl" => "__builtin_ve_vl_pvand_vsvvl", + "vl.pvand.vvvMvl" => "__builtin_ve_vl_pvand_vvvMvl", + "vl.pvand.vvvl" => "__builtin_ve_vl_pvand_vvvl", + "vl.pvand.vvvvl" => "__builtin_ve_vl_pvand_vvvvl", + "vl.pvbrd.vsMvl" => "__builtin_ve_vl_pvbrd_vsMvl", + "vl.pvbrd.vsl" => "__builtin_ve_vl_pvbrd_vsl", + "vl.pvbrd.vsvl" => "__builtin_ve_vl_pvbrd_vsvl", + "vl.pvbrv.vvMvl" => "__builtin_ve_vl_pvbrv_vvMvl", + "vl.pvbrv.vvl" => "__builtin_ve_vl_pvbrv_vvl", + "vl.pvbrv.vvvl" => "__builtin_ve_vl_pvbrv_vvvl", + "vl.pvbrvlo.vvl" => "__builtin_ve_vl_pvbrvlo_vvl", + "vl.pvbrvlo.vvmvl" => "__builtin_ve_vl_pvbrvlo_vvmvl", + "vl.pvbrvlo.vvvl" => "__builtin_ve_vl_pvbrvlo_vvvl", + "vl.pvbrvup.vvl" => "__builtin_ve_vl_pvbrvup_vvl", + "vl.pvbrvup.vvmvl" => "__builtin_ve_vl_pvbrvup_vvmvl", + "vl.pvbrvup.vvvl" => "__builtin_ve_vl_pvbrvup_vvvl", + "vl.pvcmps.vsvMvl" => "__builtin_ve_vl_pvcmps_vsvMvl", + "vl.pvcmps.vsvl" => "__builtin_ve_vl_pvcmps_vsvl", + "vl.pvcmps.vsvvl" => "__builtin_ve_vl_pvcmps_vsvvl", + "vl.pvcmps.vvvMvl" => "__builtin_ve_vl_pvcmps_vvvMvl", + "vl.pvcmps.vvvl" => "__builtin_ve_vl_pvcmps_vvvl", + "vl.pvcmps.vvvvl" => "__builtin_ve_vl_pvcmps_vvvvl", + "vl.pvcmpu.vsvMvl" => "__builtin_ve_vl_pvcmpu_vsvMvl", + "vl.pvcmpu.vsvl" => "__builtin_ve_vl_pvcmpu_vsvl", + "vl.pvcmpu.vsvvl" => "__builtin_ve_vl_pvcmpu_vsvvl", + "vl.pvcmpu.vvvMvl" => "__builtin_ve_vl_pvcmpu_vvvMvl", + "vl.pvcmpu.vvvl" => "__builtin_ve_vl_pvcmpu_vvvl", + "vl.pvcmpu.vvvvl" => "__builtin_ve_vl_pvcmpu_vvvvl", + "vl.pvcvtsw.vvl" => "__builtin_ve_vl_pvcvtsw_vvl", + "vl.pvcvtsw.vvvl" => "__builtin_ve_vl_pvcvtsw_vvvl", + "vl.pvcvtws.vvMvl" => "__builtin_ve_vl_pvcvtws_vvMvl", + "vl.pvcvtws.vvl" => "__builtin_ve_vl_pvcvtws_vvl", + "vl.pvcvtws.vvvl" => "__builtin_ve_vl_pvcvtws_vvvl", + "vl.pvcvtwsrz.vvMvl" => "__builtin_ve_vl_pvcvtwsrz_vvMvl", + "vl.pvcvtwsrz.vvl" => "__builtin_ve_vl_pvcvtwsrz_vvl", + "vl.pvcvtwsrz.vvvl" => "__builtin_ve_vl_pvcvtwsrz_vvvl", + "vl.pveqv.vsvMvl" => "__builtin_ve_vl_pveqv_vsvMvl", + "vl.pveqv.vsvl" => "__builtin_ve_vl_pveqv_vsvl", + "vl.pveqv.vsvvl" => "__builtin_ve_vl_pveqv_vsvvl", + "vl.pveqv.vvvMvl" => "__builtin_ve_vl_pveqv_vvvMvl", + "vl.pveqv.vvvl" => "__builtin_ve_vl_pveqv_vvvl", + "vl.pveqv.vvvvl" => "__builtin_ve_vl_pveqv_vvvvl", + "vl.pvfadd.vsvMvl" => "__builtin_ve_vl_pvfadd_vsvMvl", + "vl.pvfadd.vsvl" => "__builtin_ve_vl_pvfadd_vsvl", + "vl.pvfadd.vsvvl" => "__builtin_ve_vl_pvfadd_vsvvl", + "vl.pvfadd.vvvMvl" => "__builtin_ve_vl_pvfadd_vvvMvl", + "vl.pvfadd.vvvl" => "__builtin_ve_vl_pvfadd_vvvl", + "vl.pvfadd.vvvvl" => "__builtin_ve_vl_pvfadd_vvvvl", + "vl.pvfcmp.vsvMvl" => "__builtin_ve_vl_pvfcmp_vsvMvl", + "vl.pvfcmp.vsvl" => "__builtin_ve_vl_pvfcmp_vsvl", + "vl.pvfcmp.vsvvl" => "__builtin_ve_vl_pvfcmp_vsvvl", + "vl.pvfcmp.vvvMvl" => "__builtin_ve_vl_pvfcmp_vvvMvl", + "vl.pvfcmp.vvvl" => "__builtin_ve_vl_pvfcmp_vvvl", + "vl.pvfcmp.vvvvl" => "__builtin_ve_vl_pvfcmp_vvvvl", + "vl.pvfmad.vsvvMvl" => "__builtin_ve_vl_pvfmad_vsvvMvl", + "vl.pvfmad.vsvvl" => "__builtin_ve_vl_pvfmad_vsvvl", + "vl.pvfmad.vsvvvl" => "__builtin_ve_vl_pvfmad_vsvvvl", + "vl.pvfmad.vvsvMvl" => "__builtin_ve_vl_pvfmad_vvsvMvl", + "vl.pvfmad.vvsvl" => "__builtin_ve_vl_pvfmad_vvsvl", + "vl.pvfmad.vvsvvl" => "__builtin_ve_vl_pvfmad_vvsvvl", + "vl.pvfmad.vvvvMvl" => "__builtin_ve_vl_pvfmad_vvvvMvl", + 
"vl.pvfmad.vvvvl" => "__builtin_ve_vl_pvfmad_vvvvl", + "vl.pvfmad.vvvvvl" => "__builtin_ve_vl_pvfmad_vvvvvl", + "vl.pvfmax.vsvMvl" => "__builtin_ve_vl_pvfmax_vsvMvl", + "vl.pvfmax.vsvl" => "__builtin_ve_vl_pvfmax_vsvl", + "vl.pvfmax.vsvvl" => "__builtin_ve_vl_pvfmax_vsvvl", + "vl.pvfmax.vvvMvl" => "__builtin_ve_vl_pvfmax_vvvMvl", + "vl.pvfmax.vvvl" => "__builtin_ve_vl_pvfmax_vvvl", + "vl.pvfmax.vvvvl" => "__builtin_ve_vl_pvfmax_vvvvl", + "vl.pvfmin.vsvMvl" => "__builtin_ve_vl_pvfmin_vsvMvl", + "vl.pvfmin.vsvl" => "__builtin_ve_vl_pvfmin_vsvl", + "vl.pvfmin.vsvvl" => "__builtin_ve_vl_pvfmin_vsvvl", + "vl.pvfmin.vvvMvl" => "__builtin_ve_vl_pvfmin_vvvMvl", + "vl.pvfmin.vvvl" => "__builtin_ve_vl_pvfmin_vvvl", + "vl.pvfmin.vvvvl" => "__builtin_ve_vl_pvfmin_vvvvl", + "vl.pvfmkaf.Ml" => "__builtin_ve_vl_pvfmkaf_Ml", + "vl.pvfmkat.Ml" => "__builtin_ve_vl_pvfmkat_Ml", + "vl.pvfmkseq.MvMl" => "__builtin_ve_vl_pvfmkseq_MvMl", + "vl.pvfmkseq.Mvl" => "__builtin_ve_vl_pvfmkseq_Mvl", + "vl.pvfmkseqnan.MvMl" => "__builtin_ve_vl_pvfmkseqnan_MvMl", + "vl.pvfmkseqnan.Mvl" => "__builtin_ve_vl_pvfmkseqnan_Mvl", + "vl.pvfmksge.MvMl" => "__builtin_ve_vl_pvfmksge_MvMl", + "vl.pvfmksge.Mvl" => "__builtin_ve_vl_pvfmksge_Mvl", + "vl.pvfmksgenan.MvMl" => "__builtin_ve_vl_pvfmksgenan_MvMl", + "vl.pvfmksgenan.Mvl" => "__builtin_ve_vl_pvfmksgenan_Mvl", + "vl.pvfmksgt.MvMl" => "__builtin_ve_vl_pvfmksgt_MvMl", + "vl.pvfmksgt.Mvl" => "__builtin_ve_vl_pvfmksgt_Mvl", + "vl.pvfmksgtnan.MvMl" => "__builtin_ve_vl_pvfmksgtnan_MvMl", + "vl.pvfmksgtnan.Mvl" => "__builtin_ve_vl_pvfmksgtnan_Mvl", + "vl.pvfmksle.MvMl" => "__builtin_ve_vl_pvfmksle_MvMl", + "vl.pvfmksle.Mvl" => "__builtin_ve_vl_pvfmksle_Mvl", + "vl.pvfmkslenan.MvMl" => "__builtin_ve_vl_pvfmkslenan_MvMl", + "vl.pvfmkslenan.Mvl" => "__builtin_ve_vl_pvfmkslenan_Mvl", + "vl.pvfmksloeq.mvl" => "__builtin_ve_vl_pvfmksloeq_mvl", + "vl.pvfmksloeq.mvml" => "__builtin_ve_vl_pvfmksloeq_mvml", + "vl.pvfmksloeqnan.mvl" => "__builtin_ve_vl_pvfmksloeqnan_mvl", + "vl.pvfmksloeqnan.mvml" => "__builtin_ve_vl_pvfmksloeqnan_mvml", + "vl.pvfmksloge.mvl" => "__builtin_ve_vl_pvfmksloge_mvl", + "vl.pvfmksloge.mvml" => "__builtin_ve_vl_pvfmksloge_mvml", + "vl.pvfmkslogenan.mvl" => "__builtin_ve_vl_pvfmkslogenan_mvl", + "vl.pvfmkslogenan.mvml" => "__builtin_ve_vl_pvfmkslogenan_mvml", + "vl.pvfmkslogt.mvl" => "__builtin_ve_vl_pvfmkslogt_mvl", + "vl.pvfmkslogt.mvml" => "__builtin_ve_vl_pvfmkslogt_mvml", + "vl.pvfmkslogtnan.mvl" => "__builtin_ve_vl_pvfmkslogtnan_mvl", + "vl.pvfmkslogtnan.mvml" => "__builtin_ve_vl_pvfmkslogtnan_mvml", + "vl.pvfmkslole.mvl" => "__builtin_ve_vl_pvfmkslole_mvl", + "vl.pvfmkslole.mvml" => "__builtin_ve_vl_pvfmkslole_mvml", + "vl.pvfmkslolenan.mvl" => "__builtin_ve_vl_pvfmkslolenan_mvl", + "vl.pvfmkslolenan.mvml" => "__builtin_ve_vl_pvfmkslolenan_mvml", + "vl.pvfmkslolt.mvl" => "__builtin_ve_vl_pvfmkslolt_mvl", + "vl.pvfmkslolt.mvml" => "__builtin_ve_vl_pvfmkslolt_mvml", + "vl.pvfmksloltnan.mvl" => "__builtin_ve_vl_pvfmksloltnan_mvl", + "vl.pvfmksloltnan.mvml" => "__builtin_ve_vl_pvfmksloltnan_mvml", + "vl.pvfmkslonan.mvl" => "__builtin_ve_vl_pvfmkslonan_mvl", + "vl.pvfmkslonan.mvml" => "__builtin_ve_vl_pvfmkslonan_mvml", + "vl.pvfmkslone.mvl" => "__builtin_ve_vl_pvfmkslone_mvl", + "vl.pvfmkslone.mvml" => "__builtin_ve_vl_pvfmkslone_mvml", + "vl.pvfmkslonenan.mvl" => "__builtin_ve_vl_pvfmkslonenan_mvl", + "vl.pvfmkslonenan.mvml" => "__builtin_ve_vl_pvfmkslonenan_mvml", + "vl.pvfmkslonum.mvl" => "__builtin_ve_vl_pvfmkslonum_mvl", + "vl.pvfmkslonum.mvml" => 
"__builtin_ve_vl_pvfmkslonum_mvml", + "vl.pvfmkslt.MvMl" => "__builtin_ve_vl_pvfmkslt_MvMl", + "vl.pvfmkslt.Mvl" => "__builtin_ve_vl_pvfmkslt_Mvl", + "vl.pvfmksltnan.MvMl" => "__builtin_ve_vl_pvfmksltnan_MvMl", + "vl.pvfmksltnan.Mvl" => "__builtin_ve_vl_pvfmksltnan_Mvl", + "vl.pvfmksnan.MvMl" => "__builtin_ve_vl_pvfmksnan_MvMl", + "vl.pvfmksnan.Mvl" => "__builtin_ve_vl_pvfmksnan_Mvl", + "vl.pvfmksne.MvMl" => "__builtin_ve_vl_pvfmksne_MvMl", + "vl.pvfmksne.Mvl" => "__builtin_ve_vl_pvfmksne_Mvl", + "vl.pvfmksnenan.MvMl" => "__builtin_ve_vl_pvfmksnenan_MvMl", + "vl.pvfmksnenan.Mvl" => "__builtin_ve_vl_pvfmksnenan_Mvl", + "vl.pvfmksnum.MvMl" => "__builtin_ve_vl_pvfmksnum_MvMl", + "vl.pvfmksnum.Mvl" => "__builtin_ve_vl_pvfmksnum_Mvl", + "vl.pvfmksupeq.mvl" => "__builtin_ve_vl_pvfmksupeq_mvl", + "vl.pvfmksupeq.mvml" => "__builtin_ve_vl_pvfmksupeq_mvml", + "vl.pvfmksupeqnan.mvl" => "__builtin_ve_vl_pvfmksupeqnan_mvl", + "vl.pvfmksupeqnan.mvml" => "__builtin_ve_vl_pvfmksupeqnan_mvml", + "vl.pvfmksupge.mvl" => "__builtin_ve_vl_pvfmksupge_mvl", + "vl.pvfmksupge.mvml" => "__builtin_ve_vl_pvfmksupge_mvml", + "vl.pvfmksupgenan.mvl" => "__builtin_ve_vl_pvfmksupgenan_mvl", + "vl.pvfmksupgenan.mvml" => "__builtin_ve_vl_pvfmksupgenan_mvml", + "vl.pvfmksupgt.mvl" => "__builtin_ve_vl_pvfmksupgt_mvl", + "vl.pvfmksupgt.mvml" => "__builtin_ve_vl_pvfmksupgt_mvml", + "vl.pvfmksupgtnan.mvl" => "__builtin_ve_vl_pvfmksupgtnan_mvl", + "vl.pvfmksupgtnan.mvml" => "__builtin_ve_vl_pvfmksupgtnan_mvml", + "vl.pvfmksuple.mvl" => "__builtin_ve_vl_pvfmksuple_mvl", + "vl.pvfmksuple.mvml" => "__builtin_ve_vl_pvfmksuple_mvml", + "vl.pvfmksuplenan.mvl" => "__builtin_ve_vl_pvfmksuplenan_mvl", + "vl.pvfmksuplenan.mvml" => "__builtin_ve_vl_pvfmksuplenan_mvml", + "vl.pvfmksuplt.mvl" => "__builtin_ve_vl_pvfmksuplt_mvl", + "vl.pvfmksuplt.mvml" => "__builtin_ve_vl_pvfmksuplt_mvml", + "vl.pvfmksupltnan.mvl" => "__builtin_ve_vl_pvfmksupltnan_mvl", + "vl.pvfmksupltnan.mvml" => "__builtin_ve_vl_pvfmksupltnan_mvml", + "vl.pvfmksupnan.mvl" => "__builtin_ve_vl_pvfmksupnan_mvl", + "vl.pvfmksupnan.mvml" => "__builtin_ve_vl_pvfmksupnan_mvml", + "vl.pvfmksupne.mvl" => "__builtin_ve_vl_pvfmksupne_mvl", + "vl.pvfmksupne.mvml" => "__builtin_ve_vl_pvfmksupne_mvml", + "vl.pvfmksupnenan.mvl" => "__builtin_ve_vl_pvfmksupnenan_mvl", + "vl.pvfmksupnenan.mvml" => "__builtin_ve_vl_pvfmksupnenan_mvml", + "vl.pvfmksupnum.mvl" => "__builtin_ve_vl_pvfmksupnum_mvl", + "vl.pvfmksupnum.mvml" => "__builtin_ve_vl_pvfmksupnum_mvml", + "vl.pvfmkweq.MvMl" => "__builtin_ve_vl_pvfmkweq_MvMl", + "vl.pvfmkweq.Mvl" => "__builtin_ve_vl_pvfmkweq_Mvl", + "vl.pvfmkweqnan.MvMl" => "__builtin_ve_vl_pvfmkweqnan_MvMl", + "vl.pvfmkweqnan.Mvl" => "__builtin_ve_vl_pvfmkweqnan_Mvl", + "vl.pvfmkwge.MvMl" => "__builtin_ve_vl_pvfmkwge_MvMl", + "vl.pvfmkwge.Mvl" => "__builtin_ve_vl_pvfmkwge_Mvl", + "vl.pvfmkwgenan.MvMl" => "__builtin_ve_vl_pvfmkwgenan_MvMl", + "vl.pvfmkwgenan.Mvl" => "__builtin_ve_vl_pvfmkwgenan_Mvl", + "vl.pvfmkwgt.MvMl" => "__builtin_ve_vl_pvfmkwgt_MvMl", + "vl.pvfmkwgt.Mvl" => "__builtin_ve_vl_pvfmkwgt_Mvl", + "vl.pvfmkwgtnan.MvMl" => "__builtin_ve_vl_pvfmkwgtnan_MvMl", + "vl.pvfmkwgtnan.Mvl" => "__builtin_ve_vl_pvfmkwgtnan_Mvl", + "vl.pvfmkwle.MvMl" => "__builtin_ve_vl_pvfmkwle_MvMl", + "vl.pvfmkwle.Mvl" => "__builtin_ve_vl_pvfmkwle_Mvl", + "vl.pvfmkwlenan.MvMl" => "__builtin_ve_vl_pvfmkwlenan_MvMl", + "vl.pvfmkwlenan.Mvl" => "__builtin_ve_vl_pvfmkwlenan_Mvl", + "vl.pvfmkwloeq.mvl" => "__builtin_ve_vl_pvfmkwloeq_mvl", + "vl.pvfmkwloeq.mvml" => 
"__builtin_ve_vl_pvfmkwloeq_mvml", + "vl.pvfmkwloeqnan.mvl" => "__builtin_ve_vl_pvfmkwloeqnan_mvl", + "vl.pvfmkwloeqnan.mvml" => "__builtin_ve_vl_pvfmkwloeqnan_mvml", + "vl.pvfmkwloge.mvl" => "__builtin_ve_vl_pvfmkwloge_mvl", + "vl.pvfmkwloge.mvml" => "__builtin_ve_vl_pvfmkwloge_mvml", + "vl.pvfmkwlogenan.mvl" => "__builtin_ve_vl_pvfmkwlogenan_mvl", + "vl.pvfmkwlogenan.mvml" => "__builtin_ve_vl_pvfmkwlogenan_mvml", + "vl.pvfmkwlogt.mvl" => "__builtin_ve_vl_pvfmkwlogt_mvl", + "vl.pvfmkwlogt.mvml" => "__builtin_ve_vl_pvfmkwlogt_mvml", + "vl.pvfmkwlogtnan.mvl" => "__builtin_ve_vl_pvfmkwlogtnan_mvl", + "vl.pvfmkwlogtnan.mvml" => "__builtin_ve_vl_pvfmkwlogtnan_mvml", + "vl.pvfmkwlole.mvl" => "__builtin_ve_vl_pvfmkwlole_mvl", + "vl.pvfmkwlole.mvml" => "__builtin_ve_vl_pvfmkwlole_mvml", + "vl.pvfmkwlolenan.mvl" => "__builtin_ve_vl_pvfmkwlolenan_mvl", + "vl.pvfmkwlolenan.mvml" => "__builtin_ve_vl_pvfmkwlolenan_mvml", + "vl.pvfmkwlolt.mvl" => "__builtin_ve_vl_pvfmkwlolt_mvl", + "vl.pvfmkwlolt.mvml" => "__builtin_ve_vl_pvfmkwlolt_mvml", + "vl.pvfmkwloltnan.mvl" => "__builtin_ve_vl_pvfmkwloltnan_mvl", + "vl.pvfmkwloltnan.mvml" => "__builtin_ve_vl_pvfmkwloltnan_mvml", + "vl.pvfmkwlonan.mvl" => "__builtin_ve_vl_pvfmkwlonan_mvl", + "vl.pvfmkwlonan.mvml" => "__builtin_ve_vl_pvfmkwlonan_mvml", + "vl.pvfmkwlone.mvl" => "__builtin_ve_vl_pvfmkwlone_mvl", + "vl.pvfmkwlone.mvml" => "__builtin_ve_vl_pvfmkwlone_mvml", + "vl.pvfmkwlonenan.mvl" => "__builtin_ve_vl_pvfmkwlonenan_mvl", + "vl.pvfmkwlonenan.mvml" => "__builtin_ve_vl_pvfmkwlonenan_mvml", + "vl.pvfmkwlonum.mvl" => "__builtin_ve_vl_pvfmkwlonum_mvl", + "vl.pvfmkwlonum.mvml" => "__builtin_ve_vl_pvfmkwlonum_mvml", + "vl.pvfmkwlt.MvMl" => "__builtin_ve_vl_pvfmkwlt_MvMl", + "vl.pvfmkwlt.Mvl" => "__builtin_ve_vl_pvfmkwlt_Mvl", + "vl.pvfmkwltnan.MvMl" => "__builtin_ve_vl_pvfmkwltnan_MvMl", + "vl.pvfmkwltnan.Mvl" => "__builtin_ve_vl_pvfmkwltnan_Mvl", + "vl.pvfmkwnan.MvMl" => "__builtin_ve_vl_pvfmkwnan_MvMl", + "vl.pvfmkwnan.Mvl" => "__builtin_ve_vl_pvfmkwnan_Mvl", + "vl.pvfmkwne.MvMl" => "__builtin_ve_vl_pvfmkwne_MvMl", + "vl.pvfmkwne.Mvl" => "__builtin_ve_vl_pvfmkwne_Mvl", + "vl.pvfmkwnenan.MvMl" => "__builtin_ve_vl_pvfmkwnenan_MvMl", + "vl.pvfmkwnenan.Mvl" => "__builtin_ve_vl_pvfmkwnenan_Mvl", + "vl.pvfmkwnum.MvMl" => "__builtin_ve_vl_pvfmkwnum_MvMl", + "vl.pvfmkwnum.Mvl" => "__builtin_ve_vl_pvfmkwnum_Mvl", + "vl.pvfmkwupeq.mvl" => "__builtin_ve_vl_pvfmkwupeq_mvl", + "vl.pvfmkwupeq.mvml" => "__builtin_ve_vl_pvfmkwupeq_mvml", + "vl.pvfmkwupeqnan.mvl" => "__builtin_ve_vl_pvfmkwupeqnan_mvl", + "vl.pvfmkwupeqnan.mvml" => "__builtin_ve_vl_pvfmkwupeqnan_mvml", + "vl.pvfmkwupge.mvl" => "__builtin_ve_vl_pvfmkwupge_mvl", + "vl.pvfmkwupge.mvml" => "__builtin_ve_vl_pvfmkwupge_mvml", + "vl.pvfmkwupgenan.mvl" => "__builtin_ve_vl_pvfmkwupgenan_mvl", + "vl.pvfmkwupgenan.mvml" => "__builtin_ve_vl_pvfmkwupgenan_mvml", + "vl.pvfmkwupgt.mvl" => "__builtin_ve_vl_pvfmkwupgt_mvl", + "vl.pvfmkwupgt.mvml" => "__builtin_ve_vl_pvfmkwupgt_mvml", + "vl.pvfmkwupgtnan.mvl" => "__builtin_ve_vl_pvfmkwupgtnan_mvl", + "vl.pvfmkwupgtnan.mvml" => "__builtin_ve_vl_pvfmkwupgtnan_mvml", + "vl.pvfmkwuple.mvl" => "__builtin_ve_vl_pvfmkwuple_mvl", + "vl.pvfmkwuple.mvml" => "__builtin_ve_vl_pvfmkwuple_mvml", + "vl.pvfmkwuplenan.mvl" => "__builtin_ve_vl_pvfmkwuplenan_mvl", + "vl.pvfmkwuplenan.mvml" => "__builtin_ve_vl_pvfmkwuplenan_mvml", + "vl.pvfmkwuplt.mvl" => "__builtin_ve_vl_pvfmkwuplt_mvl", + "vl.pvfmkwuplt.mvml" => "__builtin_ve_vl_pvfmkwuplt_mvml", + "vl.pvfmkwupltnan.mvl" => 
"__builtin_ve_vl_pvfmkwupltnan_mvl", + "vl.pvfmkwupltnan.mvml" => "__builtin_ve_vl_pvfmkwupltnan_mvml", + "vl.pvfmkwupnan.mvl" => "__builtin_ve_vl_pvfmkwupnan_mvl", + "vl.pvfmkwupnan.mvml" => "__builtin_ve_vl_pvfmkwupnan_mvml", + "vl.pvfmkwupne.mvl" => "__builtin_ve_vl_pvfmkwupne_mvl", + "vl.pvfmkwupne.mvml" => "__builtin_ve_vl_pvfmkwupne_mvml", + "vl.pvfmkwupnenan.mvl" => "__builtin_ve_vl_pvfmkwupnenan_mvl", + "vl.pvfmkwupnenan.mvml" => "__builtin_ve_vl_pvfmkwupnenan_mvml", + "vl.pvfmkwupnum.mvl" => "__builtin_ve_vl_pvfmkwupnum_mvl", + "vl.pvfmkwupnum.mvml" => "__builtin_ve_vl_pvfmkwupnum_mvml", + "vl.pvfmsb.vsvvMvl" => "__builtin_ve_vl_pvfmsb_vsvvMvl", + "vl.pvfmsb.vsvvl" => "__builtin_ve_vl_pvfmsb_vsvvl", + "vl.pvfmsb.vsvvvl" => "__builtin_ve_vl_pvfmsb_vsvvvl", + "vl.pvfmsb.vvsvMvl" => "__builtin_ve_vl_pvfmsb_vvsvMvl", + "vl.pvfmsb.vvsvl" => "__builtin_ve_vl_pvfmsb_vvsvl", + "vl.pvfmsb.vvsvvl" => "__builtin_ve_vl_pvfmsb_vvsvvl", + "vl.pvfmsb.vvvvMvl" => "__builtin_ve_vl_pvfmsb_vvvvMvl", + "vl.pvfmsb.vvvvl" => "__builtin_ve_vl_pvfmsb_vvvvl", + "vl.pvfmsb.vvvvvl" => "__builtin_ve_vl_pvfmsb_vvvvvl", + "vl.pvfmul.vsvMvl" => "__builtin_ve_vl_pvfmul_vsvMvl", + "vl.pvfmul.vsvl" => "__builtin_ve_vl_pvfmul_vsvl", + "vl.pvfmul.vsvvl" => "__builtin_ve_vl_pvfmul_vsvvl", + "vl.pvfmul.vvvMvl" => "__builtin_ve_vl_pvfmul_vvvMvl", + "vl.pvfmul.vvvl" => "__builtin_ve_vl_pvfmul_vvvl", + "vl.pvfmul.vvvvl" => "__builtin_ve_vl_pvfmul_vvvvl", + "vl.pvfnmad.vsvvMvl" => "__builtin_ve_vl_pvfnmad_vsvvMvl", + "vl.pvfnmad.vsvvl" => "__builtin_ve_vl_pvfnmad_vsvvl", + "vl.pvfnmad.vsvvvl" => "__builtin_ve_vl_pvfnmad_vsvvvl", + "vl.pvfnmad.vvsvMvl" => "__builtin_ve_vl_pvfnmad_vvsvMvl", + "vl.pvfnmad.vvsvl" => "__builtin_ve_vl_pvfnmad_vvsvl", + "vl.pvfnmad.vvsvvl" => "__builtin_ve_vl_pvfnmad_vvsvvl", + "vl.pvfnmad.vvvvMvl" => "__builtin_ve_vl_pvfnmad_vvvvMvl", + "vl.pvfnmad.vvvvl" => "__builtin_ve_vl_pvfnmad_vvvvl", + "vl.pvfnmad.vvvvvl" => "__builtin_ve_vl_pvfnmad_vvvvvl", + "vl.pvfnmsb.vsvvMvl" => "__builtin_ve_vl_pvfnmsb_vsvvMvl", + "vl.pvfnmsb.vsvvl" => "__builtin_ve_vl_pvfnmsb_vsvvl", + "vl.pvfnmsb.vsvvvl" => "__builtin_ve_vl_pvfnmsb_vsvvvl", + "vl.pvfnmsb.vvsvMvl" => "__builtin_ve_vl_pvfnmsb_vvsvMvl", + "vl.pvfnmsb.vvsvl" => "__builtin_ve_vl_pvfnmsb_vvsvl", + "vl.pvfnmsb.vvsvvl" => "__builtin_ve_vl_pvfnmsb_vvsvvl", + "vl.pvfnmsb.vvvvMvl" => "__builtin_ve_vl_pvfnmsb_vvvvMvl", + "vl.pvfnmsb.vvvvl" => "__builtin_ve_vl_pvfnmsb_vvvvl", + "vl.pvfnmsb.vvvvvl" => "__builtin_ve_vl_pvfnmsb_vvvvvl", + "vl.pvfsub.vsvMvl" => "__builtin_ve_vl_pvfsub_vsvMvl", + "vl.pvfsub.vsvl" => "__builtin_ve_vl_pvfsub_vsvl", + "vl.pvfsub.vsvvl" => "__builtin_ve_vl_pvfsub_vsvvl", + "vl.pvfsub.vvvMvl" => "__builtin_ve_vl_pvfsub_vvvMvl", + "vl.pvfsub.vvvl" => "__builtin_ve_vl_pvfsub_vvvl", + "vl.pvfsub.vvvvl" => "__builtin_ve_vl_pvfsub_vvvvl", + "vl.pvldz.vvMvl" => "__builtin_ve_vl_pvldz_vvMvl", + "vl.pvldz.vvl" => "__builtin_ve_vl_pvldz_vvl", + "vl.pvldz.vvvl" => "__builtin_ve_vl_pvldz_vvvl", + "vl.pvldzlo.vvl" => "__builtin_ve_vl_pvldzlo_vvl", + "vl.pvldzlo.vvmvl" => "__builtin_ve_vl_pvldzlo_vvmvl", + "vl.pvldzlo.vvvl" => "__builtin_ve_vl_pvldzlo_vvvl", + "vl.pvldzup.vvl" => "__builtin_ve_vl_pvldzup_vvl", + "vl.pvldzup.vvmvl" => "__builtin_ve_vl_pvldzup_vvmvl", + "vl.pvldzup.vvvl" => "__builtin_ve_vl_pvldzup_vvvl", + "vl.pvmaxs.vsvMvl" => "__builtin_ve_vl_pvmaxs_vsvMvl", + "vl.pvmaxs.vsvl" => "__builtin_ve_vl_pvmaxs_vsvl", + "vl.pvmaxs.vsvvl" => "__builtin_ve_vl_pvmaxs_vsvvl", + "vl.pvmaxs.vvvMvl" => "__builtin_ve_vl_pvmaxs_vvvMvl", + 
"vl.pvmaxs.vvvl" => "__builtin_ve_vl_pvmaxs_vvvl", + "vl.pvmaxs.vvvvl" => "__builtin_ve_vl_pvmaxs_vvvvl", + "vl.pvmins.vsvMvl" => "__builtin_ve_vl_pvmins_vsvMvl", + "vl.pvmins.vsvl" => "__builtin_ve_vl_pvmins_vsvl", + "vl.pvmins.vsvvl" => "__builtin_ve_vl_pvmins_vsvvl", + "vl.pvmins.vvvMvl" => "__builtin_ve_vl_pvmins_vvvMvl", + "vl.pvmins.vvvl" => "__builtin_ve_vl_pvmins_vvvl", + "vl.pvmins.vvvvl" => "__builtin_ve_vl_pvmins_vvvvl", + "vl.pvor.vsvMvl" => "__builtin_ve_vl_pvor_vsvMvl", + "vl.pvor.vsvl" => "__builtin_ve_vl_pvor_vsvl", + "vl.pvor.vsvvl" => "__builtin_ve_vl_pvor_vsvvl", + "vl.pvor.vvvMvl" => "__builtin_ve_vl_pvor_vvvMvl", + "vl.pvor.vvvl" => "__builtin_ve_vl_pvor_vvvl", + "vl.pvor.vvvvl" => "__builtin_ve_vl_pvor_vvvvl", + "vl.pvpcnt.vvMvl" => "__builtin_ve_vl_pvpcnt_vvMvl", + "vl.pvpcnt.vvl" => "__builtin_ve_vl_pvpcnt_vvl", + "vl.pvpcnt.vvvl" => "__builtin_ve_vl_pvpcnt_vvvl", + "vl.pvpcntlo.vvl" => "__builtin_ve_vl_pvpcntlo_vvl", + "vl.pvpcntlo.vvmvl" => "__builtin_ve_vl_pvpcntlo_vvmvl", + "vl.pvpcntlo.vvvl" => "__builtin_ve_vl_pvpcntlo_vvvl", + "vl.pvpcntup.vvl" => "__builtin_ve_vl_pvpcntup_vvl", + "vl.pvpcntup.vvmvl" => "__builtin_ve_vl_pvpcntup_vvmvl", + "vl.pvpcntup.vvvl" => "__builtin_ve_vl_pvpcntup_vvvl", + "vl.pvrcp.vvl" => "__builtin_ve_vl_pvrcp_vvl", + "vl.pvrcp.vvvl" => "__builtin_ve_vl_pvrcp_vvvl", + "vl.pvrsqrt.vvl" => "__builtin_ve_vl_pvrsqrt_vvl", + "vl.pvrsqrt.vvvl" => "__builtin_ve_vl_pvrsqrt_vvvl", + "vl.pvrsqrtnex.vvl" => "__builtin_ve_vl_pvrsqrtnex_vvl", + "vl.pvrsqrtnex.vvvl" => "__builtin_ve_vl_pvrsqrtnex_vvvl", + "vl.pvseq.vl" => "__builtin_ve_vl_pvseq_vl", + "vl.pvseq.vvl" => "__builtin_ve_vl_pvseq_vvl", + "vl.pvseqlo.vl" => "__builtin_ve_vl_pvseqlo_vl", + "vl.pvseqlo.vvl" => "__builtin_ve_vl_pvseqlo_vvl", + "vl.pvsequp.vl" => "__builtin_ve_vl_pvsequp_vl", + "vl.pvsequp.vvl" => "__builtin_ve_vl_pvsequp_vvl", + "vl.pvsla.vvsMvl" => "__builtin_ve_vl_pvsla_vvsMvl", + "vl.pvsla.vvsl" => "__builtin_ve_vl_pvsla_vvsl", + "vl.pvsla.vvsvl" => "__builtin_ve_vl_pvsla_vvsvl", + "vl.pvsla.vvvMvl" => "__builtin_ve_vl_pvsla_vvvMvl", + "vl.pvsla.vvvl" => "__builtin_ve_vl_pvsla_vvvl", + "vl.pvsla.vvvvl" => "__builtin_ve_vl_pvsla_vvvvl", + "vl.pvsll.vvsMvl" => "__builtin_ve_vl_pvsll_vvsMvl", + "vl.pvsll.vvsl" => "__builtin_ve_vl_pvsll_vvsl", + "vl.pvsll.vvsvl" => "__builtin_ve_vl_pvsll_vvsvl", + "vl.pvsll.vvvMvl" => "__builtin_ve_vl_pvsll_vvvMvl", + "vl.pvsll.vvvl" => "__builtin_ve_vl_pvsll_vvvl", + "vl.pvsll.vvvvl" => "__builtin_ve_vl_pvsll_vvvvl", + "vl.pvsra.vvsMvl" => "__builtin_ve_vl_pvsra_vvsMvl", + "vl.pvsra.vvsl" => "__builtin_ve_vl_pvsra_vvsl", + "vl.pvsra.vvsvl" => "__builtin_ve_vl_pvsra_vvsvl", + "vl.pvsra.vvvMvl" => "__builtin_ve_vl_pvsra_vvvMvl", + "vl.pvsra.vvvl" => "__builtin_ve_vl_pvsra_vvvl", + "vl.pvsra.vvvvl" => "__builtin_ve_vl_pvsra_vvvvl", + "vl.pvsrl.vvsMvl" => "__builtin_ve_vl_pvsrl_vvsMvl", + "vl.pvsrl.vvsl" => "__builtin_ve_vl_pvsrl_vvsl", + "vl.pvsrl.vvsvl" => "__builtin_ve_vl_pvsrl_vvsvl", + "vl.pvsrl.vvvMvl" => "__builtin_ve_vl_pvsrl_vvvMvl", + "vl.pvsrl.vvvl" => "__builtin_ve_vl_pvsrl_vvvl", + "vl.pvsrl.vvvvl" => "__builtin_ve_vl_pvsrl_vvvvl", + "vl.pvsubs.vsvMvl" => "__builtin_ve_vl_pvsubs_vsvMvl", + "vl.pvsubs.vsvl" => "__builtin_ve_vl_pvsubs_vsvl", + "vl.pvsubs.vsvvl" => "__builtin_ve_vl_pvsubs_vsvvl", + "vl.pvsubs.vvvMvl" => "__builtin_ve_vl_pvsubs_vvvMvl", + "vl.pvsubs.vvvl" => "__builtin_ve_vl_pvsubs_vvvl", + "vl.pvsubs.vvvvl" => "__builtin_ve_vl_pvsubs_vvvvl", + "vl.pvsubu.vsvMvl" => "__builtin_ve_vl_pvsubu_vsvMvl", + "vl.pvsubu.vsvl" 
=> "__builtin_ve_vl_pvsubu_vsvl", + "vl.pvsubu.vsvvl" => "__builtin_ve_vl_pvsubu_vsvvl", + "vl.pvsubu.vvvMvl" => "__builtin_ve_vl_pvsubu_vvvMvl", + "vl.pvsubu.vvvl" => "__builtin_ve_vl_pvsubu_vvvl", + "vl.pvsubu.vvvvl" => "__builtin_ve_vl_pvsubu_vvvvl", + "vl.pvxor.vsvMvl" => "__builtin_ve_vl_pvxor_vsvMvl", + "vl.pvxor.vsvl" => "__builtin_ve_vl_pvxor_vsvl", + "vl.pvxor.vsvvl" => "__builtin_ve_vl_pvxor_vsvvl", + "vl.pvxor.vvvMvl" => "__builtin_ve_vl_pvxor_vvvMvl", + "vl.pvxor.vvvl" => "__builtin_ve_vl_pvxor_vvvl", + "vl.pvxor.vvvvl" => "__builtin_ve_vl_pvxor_vvvvl", + "vl.scr.sss" => "__builtin_ve_vl_scr_sss", + "vl.svm.sMs" => "__builtin_ve_vl_svm_sMs", + "vl.svm.sms" => "__builtin_ve_vl_svm_sms", + "vl.svob" => "__builtin_ve_vl_svob", + "vl.tovm.sml" => "__builtin_ve_vl_tovm_sml", + "vl.tscr.ssss" => "__builtin_ve_vl_tscr_ssss", + "vl.vaddsl.vsvl" => "__builtin_ve_vl_vaddsl_vsvl", + "vl.vaddsl.vsvmvl" => "__builtin_ve_vl_vaddsl_vsvmvl", + "vl.vaddsl.vsvvl" => "__builtin_ve_vl_vaddsl_vsvvl", + "vl.vaddsl.vvvl" => "__builtin_ve_vl_vaddsl_vvvl", + "vl.vaddsl.vvvmvl" => "__builtin_ve_vl_vaddsl_vvvmvl", + "vl.vaddsl.vvvvl" => "__builtin_ve_vl_vaddsl_vvvvl", + "vl.vaddswsx.vsvl" => "__builtin_ve_vl_vaddswsx_vsvl", + "vl.vaddswsx.vsvmvl" => "__builtin_ve_vl_vaddswsx_vsvmvl", + "vl.vaddswsx.vsvvl" => "__builtin_ve_vl_vaddswsx_vsvvl", + "vl.vaddswsx.vvvl" => "__builtin_ve_vl_vaddswsx_vvvl", + "vl.vaddswsx.vvvmvl" => "__builtin_ve_vl_vaddswsx_vvvmvl", + "vl.vaddswsx.vvvvl" => "__builtin_ve_vl_vaddswsx_vvvvl", + "vl.vaddswzx.vsvl" => "__builtin_ve_vl_vaddswzx_vsvl", + "vl.vaddswzx.vsvmvl" => "__builtin_ve_vl_vaddswzx_vsvmvl", + "vl.vaddswzx.vsvvl" => "__builtin_ve_vl_vaddswzx_vsvvl", + "vl.vaddswzx.vvvl" => "__builtin_ve_vl_vaddswzx_vvvl", + "vl.vaddswzx.vvvmvl" => "__builtin_ve_vl_vaddswzx_vvvmvl", + "vl.vaddswzx.vvvvl" => "__builtin_ve_vl_vaddswzx_vvvvl", + "vl.vaddul.vsvl" => "__builtin_ve_vl_vaddul_vsvl", + "vl.vaddul.vsvmvl" => "__builtin_ve_vl_vaddul_vsvmvl", + "vl.vaddul.vsvvl" => "__builtin_ve_vl_vaddul_vsvvl", + "vl.vaddul.vvvl" => "__builtin_ve_vl_vaddul_vvvl", + "vl.vaddul.vvvmvl" => "__builtin_ve_vl_vaddul_vvvmvl", + "vl.vaddul.vvvvl" => "__builtin_ve_vl_vaddul_vvvvl", + "vl.vadduw.vsvl" => "__builtin_ve_vl_vadduw_vsvl", + "vl.vadduw.vsvmvl" => "__builtin_ve_vl_vadduw_vsvmvl", + "vl.vadduw.vsvvl" => "__builtin_ve_vl_vadduw_vsvvl", + "vl.vadduw.vvvl" => "__builtin_ve_vl_vadduw_vvvl", + "vl.vadduw.vvvmvl" => "__builtin_ve_vl_vadduw_vvvmvl", + "vl.vadduw.vvvvl" => "__builtin_ve_vl_vadduw_vvvvl", + "vl.vand.vsvl" => "__builtin_ve_vl_vand_vsvl", + "vl.vand.vsvmvl" => "__builtin_ve_vl_vand_vsvmvl", + "vl.vand.vsvvl" => "__builtin_ve_vl_vand_vsvvl", + "vl.vand.vvvl" => "__builtin_ve_vl_vand_vvvl", + "vl.vand.vvvmvl" => "__builtin_ve_vl_vand_vvvmvl", + "vl.vand.vvvvl" => "__builtin_ve_vl_vand_vvvvl", + "vl.vbrdd.vsl" => "__builtin_ve_vl_vbrdd_vsl", + "vl.vbrdd.vsmvl" => "__builtin_ve_vl_vbrdd_vsmvl", + "vl.vbrdd.vsvl" => "__builtin_ve_vl_vbrdd_vsvl", + "vl.vbrdl.vsl" => "__builtin_ve_vl_vbrdl_vsl", + "vl.vbrdl.vsmvl" => "__builtin_ve_vl_vbrdl_vsmvl", + "vl.vbrdl.vsvl" => "__builtin_ve_vl_vbrdl_vsvl", + "vl.vbrds.vsl" => "__builtin_ve_vl_vbrds_vsl", + "vl.vbrds.vsmvl" => "__builtin_ve_vl_vbrds_vsmvl", + "vl.vbrds.vsvl" => "__builtin_ve_vl_vbrds_vsvl", + "vl.vbrdw.vsl" => "__builtin_ve_vl_vbrdw_vsl", + "vl.vbrdw.vsmvl" => "__builtin_ve_vl_vbrdw_vsmvl", + "vl.vbrdw.vsvl" => "__builtin_ve_vl_vbrdw_vsvl", + "vl.vbrv.vvl" => "__builtin_ve_vl_vbrv_vvl", + "vl.vbrv.vvmvl" => 
"__builtin_ve_vl_vbrv_vvmvl", + "vl.vbrv.vvvl" => "__builtin_ve_vl_vbrv_vvvl", + "vl.vcmpsl.vsvl" => "__builtin_ve_vl_vcmpsl_vsvl", + "vl.vcmpsl.vsvmvl" => "__builtin_ve_vl_vcmpsl_vsvmvl", + "vl.vcmpsl.vsvvl" => "__builtin_ve_vl_vcmpsl_vsvvl", + "vl.vcmpsl.vvvl" => "__builtin_ve_vl_vcmpsl_vvvl", + "vl.vcmpsl.vvvmvl" => "__builtin_ve_vl_vcmpsl_vvvmvl", + "vl.vcmpsl.vvvvl" => "__builtin_ve_vl_vcmpsl_vvvvl", + "vl.vcmpswsx.vsvl" => "__builtin_ve_vl_vcmpswsx_vsvl", + "vl.vcmpswsx.vsvmvl" => "__builtin_ve_vl_vcmpswsx_vsvmvl", + "vl.vcmpswsx.vsvvl" => "__builtin_ve_vl_vcmpswsx_vsvvl", + "vl.vcmpswsx.vvvl" => "__builtin_ve_vl_vcmpswsx_vvvl", + "vl.vcmpswsx.vvvmvl" => "__builtin_ve_vl_vcmpswsx_vvvmvl", + "vl.vcmpswsx.vvvvl" => "__builtin_ve_vl_vcmpswsx_vvvvl", + "vl.vcmpswzx.vsvl" => "__builtin_ve_vl_vcmpswzx_vsvl", + "vl.vcmpswzx.vsvmvl" => "__builtin_ve_vl_vcmpswzx_vsvmvl", + "vl.vcmpswzx.vsvvl" => "__builtin_ve_vl_vcmpswzx_vsvvl", + "vl.vcmpswzx.vvvl" => "__builtin_ve_vl_vcmpswzx_vvvl", + "vl.vcmpswzx.vvvmvl" => "__builtin_ve_vl_vcmpswzx_vvvmvl", + "vl.vcmpswzx.vvvvl" => "__builtin_ve_vl_vcmpswzx_vvvvl", + "vl.vcmpul.vsvl" => "__builtin_ve_vl_vcmpul_vsvl", + "vl.vcmpul.vsvmvl" => "__builtin_ve_vl_vcmpul_vsvmvl", + "vl.vcmpul.vsvvl" => "__builtin_ve_vl_vcmpul_vsvvl", + "vl.vcmpul.vvvl" => "__builtin_ve_vl_vcmpul_vvvl", + "vl.vcmpul.vvvmvl" => "__builtin_ve_vl_vcmpul_vvvmvl", + "vl.vcmpul.vvvvl" => "__builtin_ve_vl_vcmpul_vvvvl", + "vl.vcmpuw.vsvl" => "__builtin_ve_vl_vcmpuw_vsvl", + "vl.vcmpuw.vsvmvl" => "__builtin_ve_vl_vcmpuw_vsvmvl", + "vl.vcmpuw.vsvvl" => "__builtin_ve_vl_vcmpuw_vsvvl", + "vl.vcmpuw.vvvl" => "__builtin_ve_vl_vcmpuw_vvvl", + "vl.vcmpuw.vvvmvl" => "__builtin_ve_vl_vcmpuw_vvvmvl", + "vl.vcmpuw.vvvvl" => "__builtin_ve_vl_vcmpuw_vvvvl", + "vl.vcp.vvmvl" => "__builtin_ve_vl_vcp_vvmvl", + "vl.vcvtdl.vvl" => "__builtin_ve_vl_vcvtdl_vvl", + "vl.vcvtdl.vvvl" => "__builtin_ve_vl_vcvtdl_vvvl", + "vl.vcvtds.vvl" => "__builtin_ve_vl_vcvtds_vvl", + "vl.vcvtds.vvvl" => "__builtin_ve_vl_vcvtds_vvvl", + "vl.vcvtdw.vvl" => "__builtin_ve_vl_vcvtdw_vvl", + "vl.vcvtdw.vvvl" => "__builtin_ve_vl_vcvtdw_vvvl", + "vl.vcvtld.vvl" => "__builtin_ve_vl_vcvtld_vvl", + "vl.vcvtld.vvmvl" => "__builtin_ve_vl_vcvtld_vvmvl", + "vl.vcvtld.vvvl" => "__builtin_ve_vl_vcvtld_vvvl", + "vl.vcvtldrz.vvl" => "__builtin_ve_vl_vcvtldrz_vvl", + "vl.vcvtldrz.vvmvl" => "__builtin_ve_vl_vcvtldrz_vvmvl", + "vl.vcvtldrz.vvvl" => "__builtin_ve_vl_vcvtldrz_vvvl", + "vl.vcvtsd.vvl" => "__builtin_ve_vl_vcvtsd_vvl", + "vl.vcvtsd.vvvl" => "__builtin_ve_vl_vcvtsd_vvvl", + "vl.vcvtsw.vvl" => "__builtin_ve_vl_vcvtsw_vvl", + "vl.vcvtsw.vvvl" => "__builtin_ve_vl_vcvtsw_vvvl", + "vl.vcvtwdsx.vvl" => "__builtin_ve_vl_vcvtwdsx_vvl", + "vl.vcvtwdsx.vvmvl" => "__builtin_ve_vl_vcvtwdsx_vvmvl", + "vl.vcvtwdsx.vvvl" => "__builtin_ve_vl_vcvtwdsx_vvvl", + "vl.vcvtwdsxrz.vvl" => "__builtin_ve_vl_vcvtwdsxrz_vvl", + "vl.vcvtwdsxrz.vvmvl" => "__builtin_ve_vl_vcvtwdsxrz_vvmvl", + "vl.vcvtwdsxrz.vvvl" => "__builtin_ve_vl_vcvtwdsxrz_vvvl", + "vl.vcvtwdzx.vvl" => "__builtin_ve_vl_vcvtwdzx_vvl", + "vl.vcvtwdzx.vvmvl" => "__builtin_ve_vl_vcvtwdzx_vvmvl", + "vl.vcvtwdzx.vvvl" => "__builtin_ve_vl_vcvtwdzx_vvvl", + "vl.vcvtwdzxrz.vvl" => "__builtin_ve_vl_vcvtwdzxrz_vvl", + "vl.vcvtwdzxrz.vvmvl" => "__builtin_ve_vl_vcvtwdzxrz_vvmvl", + "vl.vcvtwdzxrz.vvvl" => "__builtin_ve_vl_vcvtwdzxrz_vvvl", + "vl.vcvtwssx.vvl" => "__builtin_ve_vl_vcvtwssx_vvl", + "vl.vcvtwssx.vvmvl" => "__builtin_ve_vl_vcvtwssx_vvmvl", + "vl.vcvtwssx.vvvl" => "__builtin_ve_vl_vcvtwssx_vvvl", 
+ "vl.vcvtwssxrz.vvl" => "__builtin_ve_vl_vcvtwssxrz_vvl", + "vl.vcvtwssxrz.vvmvl" => "__builtin_ve_vl_vcvtwssxrz_vvmvl", + "vl.vcvtwssxrz.vvvl" => "__builtin_ve_vl_vcvtwssxrz_vvvl", + "vl.vcvtwszx.vvl" => "__builtin_ve_vl_vcvtwszx_vvl", + "vl.vcvtwszx.vvmvl" => "__builtin_ve_vl_vcvtwszx_vvmvl", + "vl.vcvtwszx.vvvl" => "__builtin_ve_vl_vcvtwszx_vvvl", + "vl.vcvtwszxrz.vvl" => "__builtin_ve_vl_vcvtwszxrz_vvl", + "vl.vcvtwszxrz.vvmvl" => "__builtin_ve_vl_vcvtwszxrz_vvmvl", + "vl.vcvtwszxrz.vvvl" => "__builtin_ve_vl_vcvtwszxrz_vvvl", + "vl.vdivsl.vsvl" => "__builtin_ve_vl_vdivsl_vsvl", + "vl.vdivsl.vsvmvl" => "__builtin_ve_vl_vdivsl_vsvmvl", + "vl.vdivsl.vsvvl" => "__builtin_ve_vl_vdivsl_vsvvl", + "vl.vdivsl.vvsl" => "__builtin_ve_vl_vdivsl_vvsl", + "vl.vdivsl.vvsmvl" => "__builtin_ve_vl_vdivsl_vvsmvl", + "vl.vdivsl.vvsvl" => "__builtin_ve_vl_vdivsl_vvsvl", + "vl.vdivsl.vvvl" => "__builtin_ve_vl_vdivsl_vvvl", + "vl.vdivsl.vvvmvl" => "__builtin_ve_vl_vdivsl_vvvmvl", + "vl.vdivsl.vvvvl" => "__builtin_ve_vl_vdivsl_vvvvl", + "vl.vdivswsx.vsvl" => "__builtin_ve_vl_vdivswsx_vsvl", + "vl.vdivswsx.vsvmvl" => "__builtin_ve_vl_vdivswsx_vsvmvl", + "vl.vdivswsx.vsvvl" => "__builtin_ve_vl_vdivswsx_vsvvl", + "vl.vdivswsx.vvsl" => "__builtin_ve_vl_vdivswsx_vvsl", + "vl.vdivswsx.vvsmvl" => "__builtin_ve_vl_vdivswsx_vvsmvl", + "vl.vdivswsx.vvsvl" => "__builtin_ve_vl_vdivswsx_vvsvl", + "vl.vdivswsx.vvvl" => "__builtin_ve_vl_vdivswsx_vvvl", + "vl.vdivswsx.vvvmvl" => "__builtin_ve_vl_vdivswsx_vvvmvl", + "vl.vdivswsx.vvvvl" => "__builtin_ve_vl_vdivswsx_vvvvl", + "vl.vdivswzx.vsvl" => "__builtin_ve_vl_vdivswzx_vsvl", + "vl.vdivswzx.vsvmvl" => "__builtin_ve_vl_vdivswzx_vsvmvl", + "vl.vdivswzx.vsvvl" => "__builtin_ve_vl_vdivswzx_vsvvl", + "vl.vdivswzx.vvsl" => "__builtin_ve_vl_vdivswzx_vvsl", + "vl.vdivswzx.vvsmvl" => "__builtin_ve_vl_vdivswzx_vvsmvl", + "vl.vdivswzx.vvsvl" => "__builtin_ve_vl_vdivswzx_vvsvl", + "vl.vdivswzx.vvvl" => "__builtin_ve_vl_vdivswzx_vvvl", + "vl.vdivswzx.vvvmvl" => "__builtin_ve_vl_vdivswzx_vvvmvl", + "vl.vdivswzx.vvvvl" => "__builtin_ve_vl_vdivswzx_vvvvl", + "vl.vdivul.vsvl" => "__builtin_ve_vl_vdivul_vsvl", + "vl.vdivul.vsvmvl" => "__builtin_ve_vl_vdivul_vsvmvl", + "vl.vdivul.vsvvl" => "__builtin_ve_vl_vdivul_vsvvl", + "vl.vdivul.vvsl" => "__builtin_ve_vl_vdivul_vvsl", + "vl.vdivul.vvsmvl" => "__builtin_ve_vl_vdivul_vvsmvl", + "vl.vdivul.vvsvl" => "__builtin_ve_vl_vdivul_vvsvl", + "vl.vdivul.vvvl" => "__builtin_ve_vl_vdivul_vvvl", + "vl.vdivul.vvvmvl" => "__builtin_ve_vl_vdivul_vvvmvl", + "vl.vdivul.vvvvl" => "__builtin_ve_vl_vdivul_vvvvl", + "vl.vdivuw.vsvl" => "__builtin_ve_vl_vdivuw_vsvl", + "vl.vdivuw.vsvmvl" => "__builtin_ve_vl_vdivuw_vsvmvl", + "vl.vdivuw.vsvvl" => "__builtin_ve_vl_vdivuw_vsvvl", + "vl.vdivuw.vvsl" => "__builtin_ve_vl_vdivuw_vvsl", + "vl.vdivuw.vvsmvl" => "__builtin_ve_vl_vdivuw_vvsmvl", + "vl.vdivuw.vvsvl" => "__builtin_ve_vl_vdivuw_vvsvl", + "vl.vdivuw.vvvl" => "__builtin_ve_vl_vdivuw_vvvl", + "vl.vdivuw.vvvmvl" => "__builtin_ve_vl_vdivuw_vvvmvl", + "vl.vdivuw.vvvvl" => "__builtin_ve_vl_vdivuw_vvvvl", + "vl.veqv.vsvl" => "__builtin_ve_vl_veqv_vsvl", + "vl.veqv.vsvmvl" => "__builtin_ve_vl_veqv_vsvmvl", + "vl.veqv.vsvvl" => "__builtin_ve_vl_veqv_vsvvl", + "vl.veqv.vvvl" => "__builtin_ve_vl_veqv_vvvl", + "vl.veqv.vvvmvl" => "__builtin_ve_vl_veqv_vvvmvl", + "vl.veqv.vvvvl" => "__builtin_ve_vl_veqv_vvvvl", + "vl.vex.vvmvl" => "__builtin_ve_vl_vex_vvmvl", + "vl.vfaddd.vsvl" => "__builtin_ve_vl_vfaddd_vsvl", + "vl.vfaddd.vsvmvl" => "__builtin_ve_vl_vfaddd_vsvmvl", + 
"vl.vfaddd.vsvvl" => "__builtin_ve_vl_vfaddd_vsvvl", + "vl.vfaddd.vvvl" => "__builtin_ve_vl_vfaddd_vvvl", + "vl.vfaddd.vvvmvl" => "__builtin_ve_vl_vfaddd_vvvmvl", + "vl.vfaddd.vvvvl" => "__builtin_ve_vl_vfaddd_vvvvl", + "vl.vfadds.vsvl" => "__builtin_ve_vl_vfadds_vsvl", + "vl.vfadds.vsvmvl" => "__builtin_ve_vl_vfadds_vsvmvl", + "vl.vfadds.vsvvl" => "__builtin_ve_vl_vfadds_vsvvl", + "vl.vfadds.vvvl" => "__builtin_ve_vl_vfadds_vvvl", + "vl.vfadds.vvvmvl" => "__builtin_ve_vl_vfadds_vvvmvl", + "vl.vfadds.vvvvl" => "__builtin_ve_vl_vfadds_vvvvl", + "vl.vfcmpd.vsvl" => "__builtin_ve_vl_vfcmpd_vsvl", + "vl.vfcmpd.vsvmvl" => "__builtin_ve_vl_vfcmpd_vsvmvl", + "vl.vfcmpd.vsvvl" => "__builtin_ve_vl_vfcmpd_vsvvl", + "vl.vfcmpd.vvvl" => "__builtin_ve_vl_vfcmpd_vvvl", + "vl.vfcmpd.vvvmvl" => "__builtin_ve_vl_vfcmpd_vvvmvl", + "vl.vfcmpd.vvvvl" => "__builtin_ve_vl_vfcmpd_vvvvl", + "vl.vfcmps.vsvl" => "__builtin_ve_vl_vfcmps_vsvl", + "vl.vfcmps.vsvmvl" => "__builtin_ve_vl_vfcmps_vsvmvl", + "vl.vfcmps.vsvvl" => "__builtin_ve_vl_vfcmps_vsvvl", + "vl.vfcmps.vvvl" => "__builtin_ve_vl_vfcmps_vvvl", + "vl.vfcmps.vvvmvl" => "__builtin_ve_vl_vfcmps_vvvmvl", + "vl.vfcmps.vvvvl" => "__builtin_ve_vl_vfcmps_vvvvl", + "vl.vfdivd.vsvl" => "__builtin_ve_vl_vfdivd_vsvl", + "vl.vfdivd.vsvmvl" => "__builtin_ve_vl_vfdivd_vsvmvl", + "vl.vfdivd.vsvvl" => "__builtin_ve_vl_vfdivd_vsvvl", + "vl.vfdivd.vvvl" => "__builtin_ve_vl_vfdivd_vvvl", + "vl.vfdivd.vvvmvl" => "__builtin_ve_vl_vfdivd_vvvmvl", + "vl.vfdivd.vvvvl" => "__builtin_ve_vl_vfdivd_vvvvl", + "vl.vfdivs.vsvl" => "__builtin_ve_vl_vfdivs_vsvl", + "vl.vfdivs.vsvmvl" => "__builtin_ve_vl_vfdivs_vsvmvl", + "vl.vfdivs.vsvvl" => "__builtin_ve_vl_vfdivs_vsvvl", + "vl.vfdivs.vvvl" => "__builtin_ve_vl_vfdivs_vvvl", + "vl.vfdivs.vvvmvl" => "__builtin_ve_vl_vfdivs_vvvmvl", + "vl.vfdivs.vvvvl" => "__builtin_ve_vl_vfdivs_vvvvl", + "vl.vfmadd.vsvvl" => "__builtin_ve_vl_vfmadd_vsvvl", + "vl.vfmadd.vsvvmvl" => "__builtin_ve_vl_vfmadd_vsvvmvl", + "vl.vfmadd.vsvvvl" => "__builtin_ve_vl_vfmadd_vsvvvl", + "vl.vfmadd.vvsvl" => "__builtin_ve_vl_vfmadd_vvsvl", + "vl.vfmadd.vvsvmvl" => "__builtin_ve_vl_vfmadd_vvsvmvl", + "vl.vfmadd.vvsvvl" => "__builtin_ve_vl_vfmadd_vvsvvl", + "vl.vfmadd.vvvvl" => "__builtin_ve_vl_vfmadd_vvvvl", + "vl.vfmadd.vvvvmvl" => "__builtin_ve_vl_vfmadd_vvvvmvl", + "vl.vfmadd.vvvvvl" => "__builtin_ve_vl_vfmadd_vvvvvl", + "vl.vfmads.vsvvl" => "__builtin_ve_vl_vfmads_vsvvl", + "vl.vfmads.vsvvmvl" => "__builtin_ve_vl_vfmads_vsvvmvl", + "vl.vfmads.vsvvvl" => "__builtin_ve_vl_vfmads_vsvvvl", + "vl.vfmads.vvsvl" => "__builtin_ve_vl_vfmads_vvsvl", + "vl.vfmads.vvsvmvl" => "__builtin_ve_vl_vfmads_vvsvmvl", + "vl.vfmads.vvsvvl" => "__builtin_ve_vl_vfmads_vvsvvl", + "vl.vfmads.vvvvl" => "__builtin_ve_vl_vfmads_vvvvl", + "vl.vfmads.vvvvmvl" => "__builtin_ve_vl_vfmads_vvvvmvl", + "vl.vfmads.vvvvvl" => "__builtin_ve_vl_vfmads_vvvvvl", + "vl.vfmaxd.vsvl" => "__builtin_ve_vl_vfmaxd_vsvl", + "vl.vfmaxd.vsvmvl" => "__builtin_ve_vl_vfmaxd_vsvmvl", + "vl.vfmaxd.vsvvl" => "__builtin_ve_vl_vfmaxd_vsvvl", + "vl.vfmaxd.vvvl" => "__builtin_ve_vl_vfmaxd_vvvl", + "vl.vfmaxd.vvvmvl" => "__builtin_ve_vl_vfmaxd_vvvmvl", + "vl.vfmaxd.vvvvl" => "__builtin_ve_vl_vfmaxd_vvvvl", + "vl.vfmaxs.vsvl" => "__builtin_ve_vl_vfmaxs_vsvl", + "vl.vfmaxs.vsvmvl" => "__builtin_ve_vl_vfmaxs_vsvmvl", + "vl.vfmaxs.vsvvl" => "__builtin_ve_vl_vfmaxs_vsvvl", + "vl.vfmaxs.vvvl" => "__builtin_ve_vl_vfmaxs_vvvl", + "vl.vfmaxs.vvvmvl" => "__builtin_ve_vl_vfmaxs_vvvmvl", + "vl.vfmaxs.vvvvl" => "__builtin_ve_vl_vfmaxs_vvvvl", 
+ "vl.vfmind.vsvl" => "__builtin_ve_vl_vfmind_vsvl", + "vl.vfmind.vsvmvl" => "__builtin_ve_vl_vfmind_vsvmvl", + "vl.vfmind.vsvvl" => "__builtin_ve_vl_vfmind_vsvvl", + "vl.vfmind.vvvl" => "__builtin_ve_vl_vfmind_vvvl", + "vl.vfmind.vvvmvl" => "__builtin_ve_vl_vfmind_vvvmvl", + "vl.vfmind.vvvvl" => "__builtin_ve_vl_vfmind_vvvvl", + "vl.vfmins.vsvl" => "__builtin_ve_vl_vfmins_vsvl", + "vl.vfmins.vsvmvl" => "__builtin_ve_vl_vfmins_vsvmvl", + "vl.vfmins.vsvvl" => "__builtin_ve_vl_vfmins_vsvvl", + "vl.vfmins.vvvl" => "__builtin_ve_vl_vfmins_vvvl", + "vl.vfmins.vvvmvl" => "__builtin_ve_vl_vfmins_vvvmvl", + "vl.vfmins.vvvvl" => "__builtin_ve_vl_vfmins_vvvvl", + "vl.vfmkdeq.mvl" => "__builtin_ve_vl_vfmkdeq_mvl", + "vl.vfmkdeq.mvml" => "__builtin_ve_vl_vfmkdeq_mvml", + "vl.vfmkdeqnan.mvl" => "__builtin_ve_vl_vfmkdeqnan_mvl", + "vl.vfmkdeqnan.mvml" => "__builtin_ve_vl_vfmkdeqnan_mvml", + "vl.vfmkdge.mvl" => "__builtin_ve_vl_vfmkdge_mvl", + "vl.vfmkdge.mvml" => "__builtin_ve_vl_vfmkdge_mvml", + "vl.vfmkdgenan.mvl" => "__builtin_ve_vl_vfmkdgenan_mvl", + "vl.vfmkdgenan.mvml" => "__builtin_ve_vl_vfmkdgenan_mvml", + "vl.vfmkdgt.mvl" => "__builtin_ve_vl_vfmkdgt_mvl", + "vl.vfmkdgt.mvml" => "__builtin_ve_vl_vfmkdgt_mvml", + "vl.vfmkdgtnan.mvl" => "__builtin_ve_vl_vfmkdgtnan_mvl", + "vl.vfmkdgtnan.mvml" => "__builtin_ve_vl_vfmkdgtnan_mvml", + "vl.vfmkdle.mvl" => "__builtin_ve_vl_vfmkdle_mvl", + "vl.vfmkdle.mvml" => "__builtin_ve_vl_vfmkdle_mvml", + "vl.vfmkdlenan.mvl" => "__builtin_ve_vl_vfmkdlenan_mvl", + "vl.vfmkdlenan.mvml" => "__builtin_ve_vl_vfmkdlenan_mvml", + "vl.vfmkdlt.mvl" => "__builtin_ve_vl_vfmkdlt_mvl", + "vl.vfmkdlt.mvml" => "__builtin_ve_vl_vfmkdlt_mvml", + "vl.vfmkdltnan.mvl" => "__builtin_ve_vl_vfmkdltnan_mvl", + "vl.vfmkdltnan.mvml" => "__builtin_ve_vl_vfmkdltnan_mvml", + "vl.vfmkdnan.mvl" => "__builtin_ve_vl_vfmkdnan_mvl", + "vl.vfmkdnan.mvml" => "__builtin_ve_vl_vfmkdnan_mvml", + "vl.vfmkdne.mvl" => "__builtin_ve_vl_vfmkdne_mvl", + "vl.vfmkdne.mvml" => "__builtin_ve_vl_vfmkdne_mvml", + "vl.vfmkdnenan.mvl" => "__builtin_ve_vl_vfmkdnenan_mvl", + "vl.vfmkdnenan.mvml" => "__builtin_ve_vl_vfmkdnenan_mvml", + "vl.vfmkdnum.mvl" => "__builtin_ve_vl_vfmkdnum_mvl", + "vl.vfmkdnum.mvml" => "__builtin_ve_vl_vfmkdnum_mvml", + "vl.vfmklaf.ml" => "__builtin_ve_vl_vfmklaf_ml", + "vl.vfmklat.ml" => "__builtin_ve_vl_vfmklat_ml", + "vl.vfmkleq.mvl" => "__builtin_ve_vl_vfmkleq_mvl", + "vl.vfmkleq.mvml" => "__builtin_ve_vl_vfmkleq_mvml", + "vl.vfmkleqnan.mvl" => "__builtin_ve_vl_vfmkleqnan_mvl", + "vl.vfmkleqnan.mvml" => "__builtin_ve_vl_vfmkleqnan_mvml", + "vl.vfmklge.mvl" => "__builtin_ve_vl_vfmklge_mvl", + "vl.vfmklge.mvml" => "__builtin_ve_vl_vfmklge_mvml", + "vl.vfmklgenan.mvl" => "__builtin_ve_vl_vfmklgenan_mvl", + "vl.vfmklgenan.mvml" => "__builtin_ve_vl_vfmklgenan_mvml", + "vl.vfmklgt.mvl" => "__builtin_ve_vl_vfmklgt_mvl", + "vl.vfmklgt.mvml" => "__builtin_ve_vl_vfmklgt_mvml", + "vl.vfmklgtnan.mvl" => "__builtin_ve_vl_vfmklgtnan_mvl", + "vl.vfmklgtnan.mvml" => "__builtin_ve_vl_vfmklgtnan_mvml", + "vl.vfmklle.mvl" => "__builtin_ve_vl_vfmklle_mvl", + "vl.vfmklle.mvml" => "__builtin_ve_vl_vfmklle_mvml", + "vl.vfmkllenan.mvl" => "__builtin_ve_vl_vfmkllenan_mvl", + "vl.vfmkllenan.mvml" => "__builtin_ve_vl_vfmkllenan_mvml", + "vl.vfmkllt.mvl" => "__builtin_ve_vl_vfmkllt_mvl", + "vl.vfmkllt.mvml" => "__builtin_ve_vl_vfmkllt_mvml", + "vl.vfmklltnan.mvl" => "__builtin_ve_vl_vfmklltnan_mvl", + "vl.vfmklltnan.mvml" => "__builtin_ve_vl_vfmklltnan_mvml", + "vl.vfmklnan.mvl" => "__builtin_ve_vl_vfmklnan_mvl", + 
"vl.vfmklnan.mvml" => "__builtin_ve_vl_vfmklnan_mvml", + "vl.vfmklne.mvl" => "__builtin_ve_vl_vfmklne_mvl", + "vl.vfmklne.mvml" => "__builtin_ve_vl_vfmklne_mvml", + "vl.vfmklnenan.mvl" => "__builtin_ve_vl_vfmklnenan_mvl", + "vl.vfmklnenan.mvml" => "__builtin_ve_vl_vfmklnenan_mvml", + "vl.vfmklnum.mvl" => "__builtin_ve_vl_vfmklnum_mvl", + "vl.vfmklnum.mvml" => "__builtin_ve_vl_vfmklnum_mvml", + "vl.vfmkseq.mvl" => "__builtin_ve_vl_vfmkseq_mvl", + "vl.vfmkseq.mvml" => "__builtin_ve_vl_vfmkseq_mvml", + "vl.vfmkseqnan.mvl" => "__builtin_ve_vl_vfmkseqnan_mvl", + "vl.vfmkseqnan.mvml" => "__builtin_ve_vl_vfmkseqnan_mvml", + "vl.vfmksge.mvl" => "__builtin_ve_vl_vfmksge_mvl", + "vl.vfmksge.mvml" => "__builtin_ve_vl_vfmksge_mvml", + "vl.vfmksgenan.mvl" => "__builtin_ve_vl_vfmksgenan_mvl", + "vl.vfmksgenan.mvml" => "__builtin_ve_vl_vfmksgenan_mvml", + "vl.vfmksgt.mvl" => "__builtin_ve_vl_vfmksgt_mvl", + "vl.vfmksgt.mvml" => "__builtin_ve_vl_vfmksgt_mvml", + "vl.vfmksgtnan.mvl" => "__builtin_ve_vl_vfmksgtnan_mvl", + "vl.vfmksgtnan.mvml" => "__builtin_ve_vl_vfmksgtnan_mvml", + "vl.vfmksle.mvl" => "__builtin_ve_vl_vfmksle_mvl", + "vl.vfmksle.mvml" => "__builtin_ve_vl_vfmksle_mvml", + "vl.vfmkslenan.mvl" => "__builtin_ve_vl_vfmkslenan_mvl", + "vl.vfmkslenan.mvml" => "__builtin_ve_vl_vfmkslenan_mvml", + "vl.vfmkslt.mvl" => "__builtin_ve_vl_vfmkslt_mvl", + "vl.vfmkslt.mvml" => "__builtin_ve_vl_vfmkslt_mvml", + "vl.vfmksltnan.mvl" => "__builtin_ve_vl_vfmksltnan_mvl", + "vl.vfmksltnan.mvml" => "__builtin_ve_vl_vfmksltnan_mvml", + "vl.vfmksnan.mvl" => "__builtin_ve_vl_vfmksnan_mvl", + "vl.vfmksnan.mvml" => "__builtin_ve_vl_vfmksnan_mvml", + "vl.vfmksne.mvl" => "__builtin_ve_vl_vfmksne_mvl", + "vl.vfmksne.mvml" => "__builtin_ve_vl_vfmksne_mvml", + "vl.vfmksnenan.mvl" => "__builtin_ve_vl_vfmksnenan_mvl", + "vl.vfmksnenan.mvml" => "__builtin_ve_vl_vfmksnenan_mvml", + "vl.vfmksnum.mvl" => "__builtin_ve_vl_vfmksnum_mvl", + "vl.vfmksnum.mvml" => "__builtin_ve_vl_vfmksnum_mvml", + "vl.vfmkweq.mvl" => "__builtin_ve_vl_vfmkweq_mvl", + "vl.vfmkweq.mvml" => "__builtin_ve_vl_vfmkweq_mvml", + "vl.vfmkweqnan.mvl" => "__builtin_ve_vl_vfmkweqnan_mvl", + "vl.vfmkweqnan.mvml" => "__builtin_ve_vl_vfmkweqnan_mvml", + "vl.vfmkwge.mvl" => "__builtin_ve_vl_vfmkwge_mvl", + "vl.vfmkwge.mvml" => "__builtin_ve_vl_vfmkwge_mvml", + "vl.vfmkwgenan.mvl" => "__builtin_ve_vl_vfmkwgenan_mvl", + "vl.vfmkwgenan.mvml" => "__builtin_ve_vl_vfmkwgenan_mvml", + "vl.vfmkwgt.mvl" => "__builtin_ve_vl_vfmkwgt_mvl", + "vl.vfmkwgt.mvml" => "__builtin_ve_vl_vfmkwgt_mvml", + "vl.vfmkwgtnan.mvl" => "__builtin_ve_vl_vfmkwgtnan_mvl", + "vl.vfmkwgtnan.mvml" => "__builtin_ve_vl_vfmkwgtnan_mvml", + "vl.vfmkwle.mvl" => "__builtin_ve_vl_vfmkwle_mvl", + "vl.vfmkwle.mvml" => "__builtin_ve_vl_vfmkwle_mvml", + "vl.vfmkwlenan.mvl" => "__builtin_ve_vl_vfmkwlenan_mvl", + "vl.vfmkwlenan.mvml" => "__builtin_ve_vl_vfmkwlenan_mvml", + "vl.vfmkwlt.mvl" => "__builtin_ve_vl_vfmkwlt_mvl", + "vl.vfmkwlt.mvml" => "__builtin_ve_vl_vfmkwlt_mvml", + "vl.vfmkwltnan.mvl" => "__builtin_ve_vl_vfmkwltnan_mvl", + "vl.vfmkwltnan.mvml" => "__builtin_ve_vl_vfmkwltnan_mvml", + "vl.vfmkwnan.mvl" => "__builtin_ve_vl_vfmkwnan_mvl", + "vl.vfmkwnan.mvml" => "__builtin_ve_vl_vfmkwnan_mvml", + "vl.vfmkwne.mvl" => "__builtin_ve_vl_vfmkwne_mvl", + "vl.vfmkwne.mvml" => "__builtin_ve_vl_vfmkwne_mvml", + "vl.vfmkwnenan.mvl" => "__builtin_ve_vl_vfmkwnenan_mvl", + "vl.vfmkwnenan.mvml" => "__builtin_ve_vl_vfmkwnenan_mvml", + "vl.vfmkwnum.mvl" => "__builtin_ve_vl_vfmkwnum_mvl", + "vl.vfmkwnum.mvml" => 
"__builtin_ve_vl_vfmkwnum_mvml", + "vl.vfmsbd.vsvvl" => "__builtin_ve_vl_vfmsbd_vsvvl", + "vl.vfmsbd.vsvvmvl" => "__builtin_ve_vl_vfmsbd_vsvvmvl", + "vl.vfmsbd.vsvvvl" => "__builtin_ve_vl_vfmsbd_vsvvvl", + "vl.vfmsbd.vvsvl" => "__builtin_ve_vl_vfmsbd_vvsvl", + "vl.vfmsbd.vvsvmvl" => "__builtin_ve_vl_vfmsbd_vvsvmvl", + "vl.vfmsbd.vvsvvl" => "__builtin_ve_vl_vfmsbd_vvsvvl", + "vl.vfmsbd.vvvvl" => "__builtin_ve_vl_vfmsbd_vvvvl", + "vl.vfmsbd.vvvvmvl" => "__builtin_ve_vl_vfmsbd_vvvvmvl", + "vl.vfmsbd.vvvvvl" => "__builtin_ve_vl_vfmsbd_vvvvvl", + "vl.vfmsbs.vsvvl" => "__builtin_ve_vl_vfmsbs_vsvvl", + "vl.vfmsbs.vsvvmvl" => "__builtin_ve_vl_vfmsbs_vsvvmvl", + "vl.vfmsbs.vsvvvl" => "__builtin_ve_vl_vfmsbs_vsvvvl", + "vl.vfmsbs.vvsvl" => "__builtin_ve_vl_vfmsbs_vvsvl", + "vl.vfmsbs.vvsvmvl" => "__builtin_ve_vl_vfmsbs_vvsvmvl", + "vl.vfmsbs.vvsvvl" => "__builtin_ve_vl_vfmsbs_vvsvvl", + "vl.vfmsbs.vvvvl" => "__builtin_ve_vl_vfmsbs_vvvvl", + "vl.vfmsbs.vvvvmvl" => "__builtin_ve_vl_vfmsbs_vvvvmvl", + "vl.vfmsbs.vvvvvl" => "__builtin_ve_vl_vfmsbs_vvvvvl", + "vl.vfmuld.vsvl" => "__builtin_ve_vl_vfmuld_vsvl", + "vl.vfmuld.vsvmvl" => "__builtin_ve_vl_vfmuld_vsvmvl", + "vl.vfmuld.vsvvl" => "__builtin_ve_vl_vfmuld_vsvvl", + "vl.vfmuld.vvvl" => "__builtin_ve_vl_vfmuld_vvvl", + "vl.vfmuld.vvvmvl" => "__builtin_ve_vl_vfmuld_vvvmvl", + "vl.vfmuld.vvvvl" => "__builtin_ve_vl_vfmuld_vvvvl", + "vl.vfmuls.vsvl" => "__builtin_ve_vl_vfmuls_vsvl", + "vl.vfmuls.vsvmvl" => "__builtin_ve_vl_vfmuls_vsvmvl", + "vl.vfmuls.vsvvl" => "__builtin_ve_vl_vfmuls_vsvvl", + "vl.vfmuls.vvvl" => "__builtin_ve_vl_vfmuls_vvvl", + "vl.vfmuls.vvvmvl" => "__builtin_ve_vl_vfmuls_vvvmvl", + "vl.vfmuls.vvvvl" => "__builtin_ve_vl_vfmuls_vvvvl", + "vl.vfnmadd.vsvvl" => "__builtin_ve_vl_vfnmadd_vsvvl", + "vl.vfnmadd.vsvvmvl" => "__builtin_ve_vl_vfnmadd_vsvvmvl", + "vl.vfnmadd.vsvvvl" => "__builtin_ve_vl_vfnmadd_vsvvvl", + "vl.vfnmadd.vvsvl" => "__builtin_ve_vl_vfnmadd_vvsvl", + "vl.vfnmadd.vvsvmvl" => "__builtin_ve_vl_vfnmadd_vvsvmvl", + "vl.vfnmadd.vvsvvl" => "__builtin_ve_vl_vfnmadd_vvsvvl", + "vl.vfnmadd.vvvvl" => "__builtin_ve_vl_vfnmadd_vvvvl", + "vl.vfnmadd.vvvvmvl" => "__builtin_ve_vl_vfnmadd_vvvvmvl", + "vl.vfnmadd.vvvvvl" => "__builtin_ve_vl_vfnmadd_vvvvvl", + "vl.vfnmads.vsvvl" => "__builtin_ve_vl_vfnmads_vsvvl", + "vl.vfnmads.vsvvmvl" => "__builtin_ve_vl_vfnmads_vsvvmvl", + "vl.vfnmads.vsvvvl" => "__builtin_ve_vl_vfnmads_vsvvvl", + "vl.vfnmads.vvsvl" => "__builtin_ve_vl_vfnmads_vvsvl", + "vl.vfnmads.vvsvmvl" => "__builtin_ve_vl_vfnmads_vvsvmvl", + "vl.vfnmads.vvsvvl" => "__builtin_ve_vl_vfnmads_vvsvvl", + "vl.vfnmads.vvvvl" => "__builtin_ve_vl_vfnmads_vvvvl", + "vl.vfnmads.vvvvmvl" => "__builtin_ve_vl_vfnmads_vvvvmvl", + "vl.vfnmads.vvvvvl" => "__builtin_ve_vl_vfnmads_vvvvvl", + "vl.vfnmsbd.vsvvl" => "__builtin_ve_vl_vfnmsbd_vsvvl", + "vl.vfnmsbd.vsvvmvl" => "__builtin_ve_vl_vfnmsbd_vsvvmvl", + "vl.vfnmsbd.vsvvvl" => "__builtin_ve_vl_vfnmsbd_vsvvvl", + "vl.vfnmsbd.vvsvl" => "__builtin_ve_vl_vfnmsbd_vvsvl", + "vl.vfnmsbd.vvsvmvl" => "__builtin_ve_vl_vfnmsbd_vvsvmvl", + "vl.vfnmsbd.vvsvvl" => "__builtin_ve_vl_vfnmsbd_vvsvvl", + "vl.vfnmsbd.vvvvl" => "__builtin_ve_vl_vfnmsbd_vvvvl", + "vl.vfnmsbd.vvvvmvl" => "__builtin_ve_vl_vfnmsbd_vvvvmvl", + "vl.vfnmsbd.vvvvvl" => "__builtin_ve_vl_vfnmsbd_vvvvvl", + "vl.vfnmsbs.vsvvl" => "__builtin_ve_vl_vfnmsbs_vsvvl", + "vl.vfnmsbs.vsvvmvl" => "__builtin_ve_vl_vfnmsbs_vsvvmvl", + "vl.vfnmsbs.vsvvvl" => "__builtin_ve_vl_vfnmsbs_vsvvvl", + "vl.vfnmsbs.vvsvl" => "__builtin_ve_vl_vfnmsbs_vvsvl", + 
"vl.vfnmsbs.vvsvmvl" => "__builtin_ve_vl_vfnmsbs_vvsvmvl", + "vl.vfnmsbs.vvsvvl" => "__builtin_ve_vl_vfnmsbs_vvsvvl", + "vl.vfnmsbs.vvvvl" => "__builtin_ve_vl_vfnmsbs_vvvvl", + "vl.vfnmsbs.vvvvmvl" => "__builtin_ve_vl_vfnmsbs_vvvvmvl", + "vl.vfnmsbs.vvvvvl" => "__builtin_ve_vl_vfnmsbs_vvvvvl", + "vl.vfrmaxdfst.vvl" => "__builtin_ve_vl_vfrmaxdfst_vvl", + "vl.vfrmaxdfst.vvvl" => "__builtin_ve_vl_vfrmaxdfst_vvvl", + "vl.vfrmaxdlst.vvl" => "__builtin_ve_vl_vfrmaxdlst_vvl", + "vl.vfrmaxdlst.vvvl" => "__builtin_ve_vl_vfrmaxdlst_vvvl", + "vl.vfrmaxsfst.vvl" => "__builtin_ve_vl_vfrmaxsfst_vvl", + "vl.vfrmaxsfst.vvvl" => "__builtin_ve_vl_vfrmaxsfst_vvvl", + "vl.vfrmaxslst.vvl" => "__builtin_ve_vl_vfrmaxslst_vvl", + "vl.vfrmaxslst.vvvl" => "__builtin_ve_vl_vfrmaxslst_vvvl", + "vl.vfrmindfst.vvl" => "__builtin_ve_vl_vfrmindfst_vvl", + "vl.vfrmindfst.vvvl" => "__builtin_ve_vl_vfrmindfst_vvvl", + "vl.vfrmindlst.vvl" => "__builtin_ve_vl_vfrmindlst_vvl", + "vl.vfrmindlst.vvvl" => "__builtin_ve_vl_vfrmindlst_vvvl", + "vl.vfrminsfst.vvl" => "__builtin_ve_vl_vfrminsfst_vvl", + "vl.vfrminsfst.vvvl" => "__builtin_ve_vl_vfrminsfst_vvvl", + "vl.vfrminslst.vvl" => "__builtin_ve_vl_vfrminslst_vvl", + "vl.vfrminslst.vvvl" => "__builtin_ve_vl_vfrminslst_vvvl", + "vl.vfsqrtd.vvl" => "__builtin_ve_vl_vfsqrtd_vvl", + "vl.vfsqrtd.vvvl" => "__builtin_ve_vl_vfsqrtd_vvvl", + "vl.vfsqrts.vvl" => "__builtin_ve_vl_vfsqrts_vvl", + "vl.vfsqrts.vvvl" => "__builtin_ve_vl_vfsqrts_vvvl", + "vl.vfsubd.vsvl" => "__builtin_ve_vl_vfsubd_vsvl", + "vl.vfsubd.vsvmvl" => "__builtin_ve_vl_vfsubd_vsvmvl", + "vl.vfsubd.vsvvl" => "__builtin_ve_vl_vfsubd_vsvvl", + "vl.vfsubd.vvvl" => "__builtin_ve_vl_vfsubd_vvvl", + "vl.vfsubd.vvvmvl" => "__builtin_ve_vl_vfsubd_vvvmvl", + "vl.vfsubd.vvvvl" => "__builtin_ve_vl_vfsubd_vvvvl", + "vl.vfsubs.vsvl" => "__builtin_ve_vl_vfsubs_vsvl", + "vl.vfsubs.vsvmvl" => "__builtin_ve_vl_vfsubs_vsvmvl", + "vl.vfsubs.vsvvl" => "__builtin_ve_vl_vfsubs_vsvvl", + "vl.vfsubs.vvvl" => "__builtin_ve_vl_vfsubs_vvvl", + "vl.vfsubs.vvvmvl" => "__builtin_ve_vl_vfsubs_vvvmvl", + "vl.vfsubs.vvvvl" => "__builtin_ve_vl_vfsubs_vvvvl", + "vl.vfsumd.vvl" => "__builtin_ve_vl_vfsumd_vvl", + "vl.vfsumd.vvml" => "__builtin_ve_vl_vfsumd_vvml", + "vl.vfsums.vvl" => "__builtin_ve_vl_vfsums_vvl", + "vl.vfsums.vvml" => "__builtin_ve_vl_vfsums_vvml", + "vl.vgt.vvssl" => "__builtin_ve_vl_vgt_vvssl", + "vl.vgt.vvssml" => "__builtin_ve_vl_vgt_vvssml", + "vl.vgt.vvssmvl" => "__builtin_ve_vl_vgt_vvssmvl", + "vl.vgt.vvssvl" => "__builtin_ve_vl_vgt_vvssvl", + "vl.vgtlsx.vvssl" => "__builtin_ve_vl_vgtlsx_vvssl", + "vl.vgtlsx.vvssml" => "__builtin_ve_vl_vgtlsx_vvssml", + "vl.vgtlsx.vvssmvl" => "__builtin_ve_vl_vgtlsx_vvssmvl", + "vl.vgtlsx.vvssvl" => "__builtin_ve_vl_vgtlsx_vvssvl", + "vl.vgtlsxnc.vvssl" => "__builtin_ve_vl_vgtlsxnc_vvssl", + "vl.vgtlsxnc.vvssml" => "__builtin_ve_vl_vgtlsxnc_vvssml", + "vl.vgtlsxnc.vvssmvl" => "__builtin_ve_vl_vgtlsxnc_vvssmvl", + "vl.vgtlsxnc.vvssvl" => "__builtin_ve_vl_vgtlsxnc_vvssvl", + "vl.vgtlzx.vvssl" => "__builtin_ve_vl_vgtlzx_vvssl", + "vl.vgtlzx.vvssml" => "__builtin_ve_vl_vgtlzx_vvssml", + "vl.vgtlzx.vvssmvl" => "__builtin_ve_vl_vgtlzx_vvssmvl", + "vl.vgtlzx.vvssvl" => "__builtin_ve_vl_vgtlzx_vvssvl", + "vl.vgtlzxnc.vvssl" => "__builtin_ve_vl_vgtlzxnc_vvssl", + "vl.vgtlzxnc.vvssml" => "__builtin_ve_vl_vgtlzxnc_vvssml", + "vl.vgtlzxnc.vvssmvl" => "__builtin_ve_vl_vgtlzxnc_vvssmvl", + "vl.vgtlzxnc.vvssvl" => "__builtin_ve_vl_vgtlzxnc_vvssvl", + "vl.vgtnc.vvssl" => "__builtin_ve_vl_vgtnc_vvssl", + 
"vl.vgtnc.vvssml" => "__builtin_ve_vl_vgtnc_vvssml", + "vl.vgtnc.vvssmvl" => "__builtin_ve_vl_vgtnc_vvssmvl", + "vl.vgtnc.vvssvl" => "__builtin_ve_vl_vgtnc_vvssvl", + "vl.vgtu.vvssl" => "__builtin_ve_vl_vgtu_vvssl", + "vl.vgtu.vvssml" => "__builtin_ve_vl_vgtu_vvssml", + "vl.vgtu.vvssmvl" => "__builtin_ve_vl_vgtu_vvssmvl", + "vl.vgtu.vvssvl" => "__builtin_ve_vl_vgtu_vvssvl", + "vl.vgtunc.vvssl" => "__builtin_ve_vl_vgtunc_vvssl", + "vl.vgtunc.vvssml" => "__builtin_ve_vl_vgtunc_vvssml", + "vl.vgtunc.vvssmvl" => "__builtin_ve_vl_vgtunc_vvssmvl", + "vl.vgtunc.vvssvl" => "__builtin_ve_vl_vgtunc_vvssvl", + "vl.vld.vssl" => "__builtin_ve_vl_vld_vssl", + "vl.vld.vssvl" => "__builtin_ve_vl_vld_vssvl", + "vl.vld2d.vssl" => "__builtin_ve_vl_vld2d_vssl", + "vl.vld2d.vssvl" => "__builtin_ve_vl_vld2d_vssvl", + "vl.vld2dnc.vssl" => "__builtin_ve_vl_vld2dnc_vssl", + "vl.vld2dnc.vssvl" => "__builtin_ve_vl_vld2dnc_vssvl", + "vl.vldl2dsx.vssl" => "__builtin_ve_vl_vldl2dsx_vssl", + "vl.vldl2dsx.vssvl" => "__builtin_ve_vl_vldl2dsx_vssvl", + "vl.vldl2dsxnc.vssl" => "__builtin_ve_vl_vldl2dsxnc_vssl", + "vl.vldl2dsxnc.vssvl" => "__builtin_ve_vl_vldl2dsxnc_vssvl", + "vl.vldl2dzx.vssl" => "__builtin_ve_vl_vldl2dzx_vssl", + "vl.vldl2dzx.vssvl" => "__builtin_ve_vl_vldl2dzx_vssvl", + "vl.vldl2dzxnc.vssl" => "__builtin_ve_vl_vldl2dzxnc_vssl", + "vl.vldl2dzxnc.vssvl" => "__builtin_ve_vl_vldl2dzxnc_vssvl", + "vl.vldlsx.vssl" => "__builtin_ve_vl_vldlsx_vssl", + "vl.vldlsx.vssvl" => "__builtin_ve_vl_vldlsx_vssvl", + "vl.vldlsxnc.vssl" => "__builtin_ve_vl_vldlsxnc_vssl", + "vl.vldlsxnc.vssvl" => "__builtin_ve_vl_vldlsxnc_vssvl", + "vl.vldlzx.vssl" => "__builtin_ve_vl_vldlzx_vssl", + "vl.vldlzx.vssvl" => "__builtin_ve_vl_vldlzx_vssvl", + "vl.vldlzxnc.vssl" => "__builtin_ve_vl_vldlzxnc_vssl", + "vl.vldlzxnc.vssvl" => "__builtin_ve_vl_vldlzxnc_vssvl", + "vl.vldnc.vssl" => "__builtin_ve_vl_vldnc_vssl", + "vl.vldnc.vssvl" => "__builtin_ve_vl_vldnc_vssvl", + "vl.vldu.vssl" => "__builtin_ve_vl_vldu_vssl", + "vl.vldu.vssvl" => "__builtin_ve_vl_vldu_vssvl", + "vl.vldu2d.vssl" => "__builtin_ve_vl_vldu2d_vssl", + "vl.vldu2d.vssvl" => "__builtin_ve_vl_vldu2d_vssvl", + "vl.vldu2dnc.vssl" => "__builtin_ve_vl_vldu2dnc_vssl", + "vl.vldu2dnc.vssvl" => "__builtin_ve_vl_vldu2dnc_vssvl", + "vl.vldunc.vssl" => "__builtin_ve_vl_vldunc_vssl", + "vl.vldunc.vssvl" => "__builtin_ve_vl_vldunc_vssvl", + "vl.vldz.vvl" => "__builtin_ve_vl_vldz_vvl", + "vl.vldz.vvmvl" => "__builtin_ve_vl_vldz_vvmvl", + "vl.vldz.vvvl" => "__builtin_ve_vl_vldz_vvvl", + "vl.vmaxsl.vsvl" => "__builtin_ve_vl_vmaxsl_vsvl", + "vl.vmaxsl.vsvmvl" => "__builtin_ve_vl_vmaxsl_vsvmvl", + "vl.vmaxsl.vsvvl" => "__builtin_ve_vl_vmaxsl_vsvvl", + "vl.vmaxsl.vvvl" => "__builtin_ve_vl_vmaxsl_vvvl", + "vl.vmaxsl.vvvmvl" => "__builtin_ve_vl_vmaxsl_vvvmvl", + "vl.vmaxsl.vvvvl" => "__builtin_ve_vl_vmaxsl_vvvvl", + "vl.vmaxswsx.vsvl" => "__builtin_ve_vl_vmaxswsx_vsvl", + "vl.vmaxswsx.vsvmvl" => "__builtin_ve_vl_vmaxswsx_vsvmvl", + "vl.vmaxswsx.vsvvl" => "__builtin_ve_vl_vmaxswsx_vsvvl", + "vl.vmaxswsx.vvvl" => "__builtin_ve_vl_vmaxswsx_vvvl", + "vl.vmaxswsx.vvvmvl" => "__builtin_ve_vl_vmaxswsx_vvvmvl", + "vl.vmaxswsx.vvvvl" => "__builtin_ve_vl_vmaxswsx_vvvvl", + "vl.vmaxswzx.vsvl" => "__builtin_ve_vl_vmaxswzx_vsvl", + "vl.vmaxswzx.vsvmvl" => "__builtin_ve_vl_vmaxswzx_vsvmvl", + "vl.vmaxswzx.vsvvl" => "__builtin_ve_vl_vmaxswzx_vsvvl", + "vl.vmaxswzx.vvvl" => "__builtin_ve_vl_vmaxswzx_vvvl", + "vl.vmaxswzx.vvvmvl" => "__builtin_ve_vl_vmaxswzx_vvvmvl", + "vl.vmaxswzx.vvvvl" => 
"__builtin_ve_vl_vmaxswzx_vvvvl", + "vl.vminsl.vsvl" => "__builtin_ve_vl_vminsl_vsvl", + "vl.vminsl.vsvmvl" => "__builtin_ve_vl_vminsl_vsvmvl", + "vl.vminsl.vsvvl" => "__builtin_ve_vl_vminsl_vsvvl", + "vl.vminsl.vvvl" => "__builtin_ve_vl_vminsl_vvvl", + "vl.vminsl.vvvmvl" => "__builtin_ve_vl_vminsl_vvvmvl", + "vl.vminsl.vvvvl" => "__builtin_ve_vl_vminsl_vvvvl", + "vl.vminswsx.vsvl" => "__builtin_ve_vl_vminswsx_vsvl", + "vl.vminswsx.vsvmvl" => "__builtin_ve_vl_vminswsx_vsvmvl", + "vl.vminswsx.vsvvl" => "__builtin_ve_vl_vminswsx_vsvvl", + "vl.vminswsx.vvvl" => "__builtin_ve_vl_vminswsx_vvvl", + "vl.vminswsx.vvvmvl" => "__builtin_ve_vl_vminswsx_vvvmvl", + "vl.vminswsx.vvvvl" => "__builtin_ve_vl_vminswsx_vvvvl", + "vl.vminswzx.vsvl" => "__builtin_ve_vl_vminswzx_vsvl", + "vl.vminswzx.vsvmvl" => "__builtin_ve_vl_vminswzx_vsvmvl", + "vl.vminswzx.vsvvl" => "__builtin_ve_vl_vminswzx_vsvvl", + "vl.vminswzx.vvvl" => "__builtin_ve_vl_vminswzx_vvvl", + "vl.vminswzx.vvvmvl" => "__builtin_ve_vl_vminswzx_vvvmvl", + "vl.vminswzx.vvvvl" => "__builtin_ve_vl_vminswzx_vvvvl", + "vl.vmrg.vsvml" => "__builtin_ve_vl_vmrg_vsvml", + "vl.vmrg.vsvmvl" => "__builtin_ve_vl_vmrg_vsvmvl", + "vl.vmrg.vvvml" => "__builtin_ve_vl_vmrg_vvvml", + "vl.vmrg.vvvmvl" => "__builtin_ve_vl_vmrg_vvvmvl", + "vl.vmrgw.vsvMl" => "__builtin_ve_vl_vmrgw_vsvMl", + "vl.vmrgw.vsvMvl" => "__builtin_ve_vl_vmrgw_vsvMvl", + "vl.vmrgw.vvvMl" => "__builtin_ve_vl_vmrgw_vvvMl", + "vl.vmrgw.vvvMvl" => "__builtin_ve_vl_vmrgw_vvvMvl", + "vl.vmulsl.vsvl" => "__builtin_ve_vl_vmulsl_vsvl", + "vl.vmulsl.vsvmvl" => "__builtin_ve_vl_vmulsl_vsvmvl", + "vl.vmulsl.vsvvl" => "__builtin_ve_vl_vmulsl_vsvvl", + "vl.vmulsl.vvvl" => "__builtin_ve_vl_vmulsl_vvvl", + "vl.vmulsl.vvvmvl" => "__builtin_ve_vl_vmulsl_vvvmvl", + "vl.vmulsl.vvvvl" => "__builtin_ve_vl_vmulsl_vvvvl", + "vl.vmulslw.vsvl" => "__builtin_ve_vl_vmulslw_vsvl", + "vl.vmulslw.vsvvl" => "__builtin_ve_vl_vmulslw_vsvvl", + "vl.vmulslw.vvvl" => "__builtin_ve_vl_vmulslw_vvvl", + "vl.vmulslw.vvvvl" => "__builtin_ve_vl_vmulslw_vvvvl", + "vl.vmulswsx.vsvl" => "__builtin_ve_vl_vmulswsx_vsvl", + "vl.vmulswsx.vsvmvl" => "__builtin_ve_vl_vmulswsx_vsvmvl", + "vl.vmulswsx.vsvvl" => "__builtin_ve_vl_vmulswsx_vsvvl", + "vl.vmulswsx.vvvl" => "__builtin_ve_vl_vmulswsx_vvvl", + "vl.vmulswsx.vvvmvl" => "__builtin_ve_vl_vmulswsx_vvvmvl", + "vl.vmulswsx.vvvvl" => "__builtin_ve_vl_vmulswsx_vvvvl", + "vl.vmulswzx.vsvl" => "__builtin_ve_vl_vmulswzx_vsvl", + "vl.vmulswzx.vsvmvl" => "__builtin_ve_vl_vmulswzx_vsvmvl", + "vl.vmulswzx.vsvvl" => "__builtin_ve_vl_vmulswzx_vsvvl", + "vl.vmulswzx.vvvl" => "__builtin_ve_vl_vmulswzx_vvvl", + "vl.vmulswzx.vvvmvl" => "__builtin_ve_vl_vmulswzx_vvvmvl", + "vl.vmulswzx.vvvvl" => "__builtin_ve_vl_vmulswzx_vvvvl", + "vl.vmulul.vsvl" => "__builtin_ve_vl_vmulul_vsvl", + "vl.vmulul.vsvmvl" => "__builtin_ve_vl_vmulul_vsvmvl", + "vl.vmulul.vsvvl" => "__builtin_ve_vl_vmulul_vsvvl", + "vl.vmulul.vvvl" => "__builtin_ve_vl_vmulul_vvvl", + "vl.vmulul.vvvmvl" => "__builtin_ve_vl_vmulul_vvvmvl", + "vl.vmulul.vvvvl" => "__builtin_ve_vl_vmulul_vvvvl", + "vl.vmuluw.vsvl" => "__builtin_ve_vl_vmuluw_vsvl", + "vl.vmuluw.vsvmvl" => "__builtin_ve_vl_vmuluw_vsvmvl", + "vl.vmuluw.vsvvl" => "__builtin_ve_vl_vmuluw_vsvvl", + "vl.vmuluw.vvvl" => "__builtin_ve_vl_vmuluw_vvvl", + "vl.vmuluw.vvvmvl" => "__builtin_ve_vl_vmuluw_vvvmvl", + "vl.vmuluw.vvvvl" => "__builtin_ve_vl_vmuluw_vvvvl", + "vl.vmv.vsvl" => "__builtin_ve_vl_vmv_vsvl", + "vl.vmv.vsvmvl" => "__builtin_ve_vl_vmv_vsvmvl", + "vl.vmv.vsvvl" => 
"__builtin_ve_vl_vmv_vsvvl", + "vl.vor.vsvl" => "__builtin_ve_vl_vor_vsvl", + "vl.vor.vsvmvl" => "__builtin_ve_vl_vor_vsvmvl", + "vl.vor.vsvvl" => "__builtin_ve_vl_vor_vsvvl", + "vl.vor.vvvl" => "__builtin_ve_vl_vor_vvvl", + "vl.vor.vvvmvl" => "__builtin_ve_vl_vor_vvvmvl", + "vl.vor.vvvvl" => "__builtin_ve_vl_vor_vvvvl", + "vl.vpcnt.vvl" => "__builtin_ve_vl_vpcnt_vvl", + "vl.vpcnt.vvmvl" => "__builtin_ve_vl_vpcnt_vvmvl", + "vl.vpcnt.vvvl" => "__builtin_ve_vl_vpcnt_vvvl", + "vl.vrand.vvl" => "__builtin_ve_vl_vrand_vvl", + "vl.vrand.vvml" => "__builtin_ve_vl_vrand_vvml", + "vl.vrcpd.vvl" => "__builtin_ve_vl_vrcpd_vvl", + "vl.vrcpd.vvvl" => "__builtin_ve_vl_vrcpd_vvvl", + "vl.vrcps.vvl" => "__builtin_ve_vl_vrcps_vvl", + "vl.vrcps.vvvl" => "__builtin_ve_vl_vrcps_vvvl", + "vl.vrmaxslfst.vvl" => "__builtin_ve_vl_vrmaxslfst_vvl", + "vl.vrmaxslfst.vvvl" => "__builtin_ve_vl_vrmaxslfst_vvvl", + "vl.vrmaxsllst.vvl" => "__builtin_ve_vl_vrmaxsllst_vvl", + "vl.vrmaxsllst.vvvl" => "__builtin_ve_vl_vrmaxsllst_vvvl", + "vl.vrmaxswfstsx.vvl" => "__builtin_ve_vl_vrmaxswfstsx_vvl", + "vl.vrmaxswfstsx.vvvl" => "__builtin_ve_vl_vrmaxswfstsx_vvvl", + "vl.vrmaxswfstzx.vvl" => "__builtin_ve_vl_vrmaxswfstzx_vvl", + "vl.vrmaxswfstzx.vvvl" => "__builtin_ve_vl_vrmaxswfstzx_vvvl", + "vl.vrmaxswlstsx.vvl" => "__builtin_ve_vl_vrmaxswlstsx_vvl", + "vl.vrmaxswlstsx.vvvl" => "__builtin_ve_vl_vrmaxswlstsx_vvvl", + "vl.vrmaxswlstzx.vvl" => "__builtin_ve_vl_vrmaxswlstzx_vvl", + "vl.vrmaxswlstzx.vvvl" => "__builtin_ve_vl_vrmaxswlstzx_vvvl", + "vl.vrminslfst.vvl" => "__builtin_ve_vl_vrminslfst_vvl", + "vl.vrminslfst.vvvl" => "__builtin_ve_vl_vrminslfst_vvvl", + "vl.vrminsllst.vvl" => "__builtin_ve_vl_vrminsllst_vvl", + "vl.vrminsllst.vvvl" => "__builtin_ve_vl_vrminsllst_vvvl", + "vl.vrminswfstsx.vvl" => "__builtin_ve_vl_vrminswfstsx_vvl", + "vl.vrminswfstsx.vvvl" => "__builtin_ve_vl_vrminswfstsx_vvvl", + "vl.vrminswfstzx.vvl" => "__builtin_ve_vl_vrminswfstzx_vvl", + "vl.vrminswfstzx.vvvl" => "__builtin_ve_vl_vrminswfstzx_vvvl", + "vl.vrminswlstsx.vvl" => "__builtin_ve_vl_vrminswlstsx_vvl", + "vl.vrminswlstsx.vvvl" => "__builtin_ve_vl_vrminswlstsx_vvvl", + "vl.vrminswlstzx.vvl" => "__builtin_ve_vl_vrminswlstzx_vvl", + "vl.vrminswlstzx.vvvl" => "__builtin_ve_vl_vrminswlstzx_vvvl", + "vl.vror.vvl" => "__builtin_ve_vl_vror_vvl", + "vl.vror.vvml" => "__builtin_ve_vl_vror_vvml", + "vl.vrsqrtd.vvl" => "__builtin_ve_vl_vrsqrtd_vvl", + "vl.vrsqrtd.vvvl" => "__builtin_ve_vl_vrsqrtd_vvvl", + "vl.vrsqrtdnex.vvl" => "__builtin_ve_vl_vrsqrtdnex_vvl", + "vl.vrsqrtdnex.vvvl" => "__builtin_ve_vl_vrsqrtdnex_vvvl", + "vl.vrsqrts.vvl" => "__builtin_ve_vl_vrsqrts_vvl", + "vl.vrsqrts.vvvl" => "__builtin_ve_vl_vrsqrts_vvvl", + "vl.vrsqrtsnex.vvl" => "__builtin_ve_vl_vrsqrtsnex_vvl", + "vl.vrsqrtsnex.vvvl" => "__builtin_ve_vl_vrsqrtsnex_vvvl", + "vl.vrxor.vvl" => "__builtin_ve_vl_vrxor_vvl", + "vl.vrxor.vvml" => "__builtin_ve_vl_vrxor_vvml", + "vl.vsc.vvssl" => "__builtin_ve_vl_vsc_vvssl", + "vl.vsc.vvssml" => "__builtin_ve_vl_vsc_vvssml", + "vl.vscl.vvssl" => "__builtin_ve_vl_vscl_vvssl", + "vl.vscl.vvssml" => "__builtin_ve_vl_vscl_vvssml", + "vl.vsclnc.vvssl" => "__builtin_ve_vl_vsclnc_vvssl", + "vl.vsclnc.vvssml" => "__builtin_ve_vl_vsclnc_vvssml", + "vl.vsclncot.vvssl" => "__builtin_ve_vl_vsclncot_vvssl", + "vl.vsclncot.vvssml" => "__builtin_ve_vl_vsclncot_vvssml", + "vl.vsclot.vvssl" => "__builtin_ve_vl_vsclot_vvssl", + "vl.vsclot.vvssml" => "__builtin_ve_vl_vsclot_vvssml", + "vl.vscnc.vvssl" => "__builtin_ve_vl_vscnc_vvssl", + "vl.vscnc.vvssml" 
=> "__builtin_ve_vl_vscnc_vvssml", + "vl.vscncot.vvssl" => "__builtin_ve_vl_vscncot_vvssl", + "vl.vscncot.vvssml" => "__builtin_ve_vl_vscncot_vvssml", + "vl.vscot.vvssl" => "__builtin_ve_vl_vscot_vvssl", + "vl.vscot.vvssml" => "__builtin_ve_vl_vscot_vvssml", + "vl.vscu.vvssl" => "__builtin_ve_vl_vscu_vvssl", + "vl.vscu.vvssml" => "__builtin_ve_vl_vscu_vvssml", + "vl.vscunc.vvssl" => "__builtin_ve_vl_vscunc_vvssl", + "vl.vscunc.vvssml" => "__builtin_ve_vl_vscunc_vvssml", + "vl.vscuncot.vvssl" => "__builtin_ve_vl_vscuncot_vvssl", + "vl.vscuncot.vvssml" => "__builtin_ve_vl_vscuncot_vvssml", + "vl.vscuot.vvssl" => "__builtin_ve_vl_vscuot_vvssl", + "vl.vscuot.vvssml" => "__builtin_ve_vl_vscuot_vvssml", + "vl.vseq.vl" => "__builtin_ve_vl_vseq_vl", + "vl.vseq.vvl" => "__builtin_ve_vl_vseq_vvl", + "vl.vsfa.vvssl" => "__builtin_ve_vl_vsfa_vvssl", + "vl.vsfa.vvssmvl" => "__builtin_ve_vl_vsfa_vvssmvl", + "vl.vsfa.vvssvl" => "__builtin_ve_vl_vsfa_vvssvl", + "vl.vshf.vvvsl" => "__builtin_ve_vl_vshf_vvvsl", + "vl.vshf.vvvsvl" => "__builtin_ve_vl_vshf_vvvsvl", + "vl.vslal.vvsl" => "__builtin_ve_vl_vslal_vvsl", + "vl.vslal.vvsmvl" => "__builtin_ve_vl_vslal_vvsmvl", + "vl.vslal.vvsvl" => "__builtin_ve_vl_vslal_vvsvl", + "vl.vslal.vvvl" => "__builtin_ve_vl_vslal_vvvl", + "vl.vslal.vvvmvl" => "__builtin_ve_vl_vslal_vvvmvl", + "vl.vslal.vvvvl" => "__builtin_ve_vl_vslal_vvvvl", + "vl.vslawsx.vvsl" => "__builtin_ve_vl_vslawsx_vvsl", + "vl.vslawsx.vvsmvl" => "__builtin_ve_vl_vslawsx_vvsmvl", + "vl.vslawsx.vvsvl" => "__builtin_ve_vl_vslawsx_vvsvl", + "vl.vslawsx.vvvl" => "__builtin_ve_vl_vslawsx_vvvl", + "vl.vslawsx.vvvmvl" => "__builtin_ve_vl_vslawsx_vvvmvl", + "vl.vslawsx.vvvvl" => "__builtin_ve_vl_vslawsx_vvvvl", + "vl.vslawzx.vvsl" => "__builtin_ve_vl_vslawzx_vvsl", + "vl.vslawzx.vvsmvl" => "__builtin_ve_vl_vslawzx_vvsmvl", + "vl.vslawzx.vvsvl" => "__builtin_ve_vl_vslawzx_vvsvl", + "vl.vslawzx.vvvl" => "__builtin_ve_vl_vslawzx_vvvl", + "vl.vslawzx.vvvmvl" => "__builtin_ve_vl_vslawzx_vvvmvl", + "vl.vslawzx.vvvvl" => "__builtin_ve_vl_vslawzx_vvvvl", + "vl.vsll.vvsl" => "__builtin_ve_vl_vsll_vvsl", + "vl.vsll.vvsmvl" => "__builtin_ve_vl_vsll_vvsmvl", + "vl.vsll.vvsvl" => "__builtin_ve_vl_vsll_vvsvl", + "vl.vsll.vvvl" => "__builtin_ve_vl_vsll_vvvl", + "vl.vsll.vvvmvl" => "__builtin_ve_vl_vsll_vvvmvl", + "vl.vsll.vvvvl" => "__builtin_ve_vl_vsll_vvvvl", + "vl.vsral.vvsl" => "__builtin_ve_vl_vsral_vvsl", + "vl.vsral.vvsmvl" => "__builtin_ve_vl_vsral_vvsmvl", + "vl.vsral.vvsvl" => "__builtin_ve_vl_vsral_vvsvl", + "vl.vsral.vvvl" => "__builtin_ve_vl_vsral_vvvl", + "vl.vsral.vvvmvl" => "__builtin_ve_vl_vsral_vvvmvl", + "vl.vsral.vvvvl" => "__builtin_ve_vl_vsral_vvvvl", + "vl.vsrawsx.vvsl" => "__builtin_ve_vl_vsrawsx_vvsl", + "vl.vsrawsx.vvsmvl" => "__builtin_ve_vl_vsrawsx_vvsmvl", + "vl.vsrawsx.vvsvl" => "__builtin_ve_vl_vsrawsx_vvsvl", + "vl.vsrawsx.vvvl" => "__builtin_ve_vl_vsrawsx_vvvl", + "vl.vsrawsx.vvvmvl" => "__builtin_ve_vl_vsrawsx_vvvmvl", + "vl.vsrawsx.vvvvl" => "__builtin_ve_vl_vsrawsx_vvvvl", + "vl.vsrawzx.vvsl" => "__builtin_ve_vl_vsrawzx_vvsl", + "vl.vsrawzx.vvsmvl" => "__builtin_ve_vl_vsrawzx_vvsmvl", + "vl.vsrawzx.vvsvl" => "__builtin_ve_vl_vsrawzx_vvsvl", + "vl.vsrawzx.vvvl" => "__builtin_ve_vl_vsrawzx_vvvl", + "vl.vsrawzx.vvvmvl" => "__builtin_ve_vl_vsrawzx_vvvmvl", + "vl.vsrawzx.vvvvl" => "__builtin_ve_vl_vsrawzx_vvvvl", + "vl.vsrl.vvsl" => "__builtin_ve_vl_vsrl_vvsl", + "vl.vsrl.vvsmvl" => "__builtin_ve_vl_vsrl_vvsmvl", + "vl.vsrl.vvsvl" => "__builtin_ve_vl_vsrl_vvsvl", + "vl.vsrl.vvvl" => 
"__builtin_ve_vl_vsrl_vvvl", + "vl.vsrl.vvvmvl" => "__builtin_ve_vl_vsrl_vvvmvl", + "vl.vsrl.vvvvl" => "__builtin_ve_vl_vsrl_vvvvl", + "vl.vst.vssl" => "__builtin_ve_vl_vst_vssl", + "vl.vst.vssml" => "__builtin_ve_vl_vst_vssml", + "vl.vst2d.vssl" => "__builtin_ve_vl_vst2d_vssl", + "vl.vst2d.vssml" => "__builtin_ve_vl_vst2d_vssml", + "vl.vst2dnc.vssl" => "__builtin_ve_vl_vst2dnc_vssl", + "vl.vst2dnc.vssml" => "__builtin_ve_vl_vst2dnc_vssml", + "vl.vst2dncot.vssl" => "__builtin_ve_vl_vst2dncot_vssl", + "vl.vst2dncot.vssml" => "__builtin_ve_vl_vst2dncot_vssml", + "vl.vst2dot.vssl" => "__builtin_ve_vl_vst2dot_vssl", + "vl.vst2dot.vssml" => "__builtin_ve_vl_vst2dot_vssml", + "vl.vstl.vssl" => "__builtin_ve_vl_vstl_vssl", + "vl.vstl.vssml" => "__builtin_ve_vl_vstl_vssml", + "vl.vstl2d.vssl" => "__builtin_ve_vl_vstl2d_vssl", + "vl.vstl2d.vssml" => "__builtin_ve_vl_vstl2d_vssml", + "vl.vstl2dnc.vssl" => "__builtin_ve_vl_vstl2dnc_vssl", + "vl.vstl2dnc.vssml" => "__builtin_ve_vl_vstl2dnc_vssml", + "vl.vstl2dncot.vssl" => "__builtin_ve_vl_vstl2dncot_vssl", + "vl.vstl2dncot.vssml" => "__builtin_ve_vl_vstl2dncot_vssml", + "vl.vstl2dot.vssl" => "__builtin_ve_vl_vstl2dot_vssl", + "vl.vstl2dot.vssml" => "__builtin_ve_vl_vstl2dot_vssml", + "vl.vstlnc.vssl" => "__builtin_ve_vl_vstlnc_vssl", + "vl.vstlnc.vssml" => "__builtin_ve_vl_vstlnc_vssml", + "vl.vstlncot.vssl" => "__builtin_ve_vl_vstlncot_vssl", + "vl.vstlncot.vssml" => "__builtin_ve_vl_vstlncot_vssml", + "vl.vstlot.vssl" => "__builtin_ve_vl_vstlot_vssl", + "vl.vstlot.vssml" => "__builtin_ve_vl_vstlot_vssml", + "vl.vstnc.vssl" => "__builtin_ve_vl_vstnc_vssl", + "vl.vstnc.vssml" => "__builtin_ve_vl_vstnc_vssml", + "vl.vstncot.vssl" => "__builtin_ve_vl_vstncot_vssl", + "vl.vstncot.vssml" => "__builtin_ve_vl_vstncot_vssml", + "vl.vstot.vssl" => "__builtin_ve_vl_vstot_vssl", + "vl.vstot.vssml" => "__builtin_ve_vl_vstot_vssml", + "vl.vstu.vssl" => "__builtin_ve_vl_vstu_vssl", + "vl.vstu.vssml" => "__builtin_ve_vl_vstu_vssml", + "vl.vstu2d.vssl" => "__builtin_ve_vl_vstu2d_vssl", + "vl.vstu2d.vssml" => "__builtin_ve_vl_vstu2d_vssml", + "vl.vstu2dnc.vssl" => "__builtin_ve_vl_vstu2dnc_vssl", + "vl.vstu2dnc.vssml" => "__builtin_ve_vl_vstu2dnc_vssml", + "vl.vstu2dncot.vssl" => "__builtin_ve_vl_vstu2dncot_vssl", + "vl.vstu2dncot.vssml" => "__builtin_ve_vl_vstu2dncot_vssml", + "vl.vstu2dot.vssl" => "__builtin_ve_vl_vstu2dot_vssl", + "vl.vstu2dot.vssml" => "__builtin_ve_vl_vstu2dot_vssml", + "vl.vstunc.vssl" => "__builtin_ve_vl_vstunc_vssl", + "vl.vstunc.vssml" => "__builtin_ve_vl_vstunc_vssml", + "vl.vstuncot.vssl" => "__builtin_ve_vl_vstuncot_vssl", + "vl.vstuncot.vssml" => "__builtin_ve_vl_vstuncot_vssml", + "vl.vstuot.vssl" => "__builtin_ve_vl_vstuot_vssl", + "vl.vstuot.vssml" => "__builtin_ve_vl_vstuot_vssml", + "vl.vsubsl.vsvl" => "__builtin_ve_vl_vsubsl_vsvl", + "vl.vsubsl.vsvmvl" => "__builtin_ve_vl_vsubsl_vsvmvl", + "vl.vsubsl.vsvvl" => "__builtin_ve_vl_vsubsl_vsvvl", + "vl.vsubsl.vvvl" => "__builtin_ve_vl_vsubsl_vvvl", + "vl.vsubsl.vvvmvl" => "__builtin_ve_vl_vsubsl_vvvmvl", + "vl.vsubsl.vvvvl" => "__builtin_ve_vl_vsubsl_vvvvl", + "vl.vsubswsx.vsvl" => "__builtin_ve_vl_vsubswsx_vsvl", + "vl.vsubswsx.vsvmvl" => "__builtin_ve_vl_vsubswsx_vsvmvl", + "vl.vsubswsx.vsvvl" => "__builtin_ve_vl_vsubswsx_vsvvl", + "vl.vsubswsx.vvvl" => "__builtin_ve_vl_vsubswsx_vvvl", + "vl.vsubswsx.vvvmvl" => "__builtin_ve_vl_vsubswsx_vvvmvl", + "vl.vsubswsx.vvvvl" => "__builtin_ve_vl_vsubswsx_vvvvl", + "vl.vsubswzx.vsvl" => "__builtin_ve_vl_vsubswzx_vsvl", + "vl.vsubswzx.vsvmvl" 
=> "__builtin_ve_vl_vsubswzx_vsvmvl", + "vl.vsubswzx.vsvvl" => "__builtin_ve_vl_vsubswzx_vsvvl", + "vl.vsubswzx.vvvl" => "__builtin_ve_vl_vsubswzx_vvvl", + "vl.vsubswzx.vvvmvl" => "__builtin_ve_vl_vsubswzx_vvvmvl", + "vl.vsubswzx.vvvvl" => "__builtin_ve_vl_vsubswzx_vvvvl", + "vl.vsubul.vsvl" => "__builtin_ve_vl_vsubul_vsvl", + "vl.vsubul.vsvmvl" => "__builtin_ve_vl_vsubul_vsvmvl", + "vl.vsubul.vsvvl" => "__builtin_ve_vl_vsubul_vsvvl", + "vl.vsubul.vvvl" => "__builtin_ve_vl_vsubul_vvvl", + "vl.vsubul.vvvmvl" => "__builtin_ve_vl_vsubul_vvvmvl", + "vl.vsubul.vvvvl" => "__builtin_ve_vl_vsubul_vvvvl", + "vl.vsubuw.vsvl" => "__builtin_ve_vl_vsubuw_vsvl", + "vl.vsubuw.vsvmvl" => "__builtin_ve_vl_vsubuw_vsvmvl", + "vl.vsubuw.vsvvl" => "__builtin_ve_vl_vsubuw_vsvvl", + "vl.vsubuw.vvvl" => "__builtin_ve_vl_vsubuw_vvvl", + "vl.vsubuw.vvvmvl" => "__builtin_ve_vl_vsubuw_vvvmvl", + "vl.vsubuw.vvvvl" => "__builtin_ve_vl_vsubuw_vvvvl", + "vl.vsuml.vvl" => "__builtin_ve_vl_vsuml_vvl", + "vl.vsuml.vvml" => "__builtin_ve_vl_vsuml_vvml", + "vl.vsumwsx.vvl" => "__builtin_ve_vl_vsumwsx_vvl", + "vl.vsumwsx.vvml" => "__builtin_ve_vl_vsumwsx_vvml", + "vl.vsumwzx.vvl" => "__builtin_ve_vl_vsumwzx_vvl", + "vl.vsumwzx.vvml" => "__builtin_ve_vl_vsumwzx_vvml", + "vl.vxor.vsvl" => "__builtin_ve_vl_vxor_vsvl", + "vl.vxor.vsvmvl" => "__builtin_ve_vl_vxor_vsvmvl", + "vl.vxor.vsvvl" => "__builtin_ve_vl_vxor_vsvvl", + "vl.vxor.vvvl" => "__builtin_ve_vl_vxor_vvvl", + "vl.vxor.vvvmvl" => "__builtin_ve_vl_vxor_vvvmvl", + "vl.vxor.vvvvl" => "__builtin_ve_vl_vxor_vvvvl", + "vl.xorm.MMM" => "__builtin_ve_vl_xorm_MMM", + "vl.xorm.mmm" => "__builtin_ve_vl_xorm_mmm", + _ => unimplemented!("***** unsupported LLVM intrinsic {}", name), + } + } + ve(name) + } + "x86" => { + #[allow(non_snake_case)] + fn x86(name: &str) -> &str { + match name { + // x86 + "aadd32" => "__builtin_ia32_aadd32", + "aadd64" => "__builtin_ia32_aadd64", + "aand32" => "__builtin_ia32_aand32", + "aand64" => "__builtin_ia32_aand64", + "addcarry.u32" => "__builtin_ia32_addcarry_u32", + "addcarry.u64" => "__builtin_ia32_addcarry_u64", + "addcarryx.u32" => "__builtin_ia32_addcarryx_u32", + "addcarryx.u64" => "__builtin_ia32_addcarryx_u64", + "aesni.aesdec" => "__builtin_ia32_aesdec128", + "aesni.aesdec.256" => "__builtin_ia32_aesdec256", + "aesni.aesdec.512" => "__builtin_ia32_aesdec512", + "aesni.aesdeclast" => "__builtin_ia32_aesdeclast128", + "aesni.aesdeclast.256" => "__builtin_ia32_aesdeclast256", + "aesni.aesdeclast.512" => "__builtin_ia32_aesdeclast512", + "aesni.aesenc" => "__builtin_ia32_aesenc128", + "aesni.aesenc.256" => "__builtin_ia32_aesenc256", + "aesni.aesenc.512" => "__builtin_ia32_aesenc512", + "aesni.aesenclast" => "__builtin_ia32_aesenclast128", + "aesni.aesenclast.256" => "__builtin_ia32_aesenclast256", + "aesni.aesenclast.512" => "__builtin_ia32_aesenclast512", + "aesni.aesimc" => "__builtin_ia32_aesimc128", + "aesni.aeskeygenassist" => "__builtin_ia32_aeskeygenassist128", + "aor32" => "__builtin_ia32_aor32", + "aor64" => "__builtin_ia32_aor64", + "avx.addsub.pd.256" => "__builtin_ia32_addsubpd256", + "avx.addsub.ps.256" => "__builtin_ia32_addsubps256", + "avx.blend.pd.256" => "__builtin_ia32_blendpd256", + "avx.blend.ps.256" => "__builtin_ia32_blendps256", + "avx.blendv.pd.256" => "__builtin_ia32_blendvpd256", + "avx.blendv.ps.256" => "__builtin_ia32_blendvps256", + "avx.cmp.pd.256" => "__builtin_ia32_cmppd256", + "avx.cmp.ps.256" => "__builtin_ia32_cmpps256", + "avx.cvt.pd2.ps.256" => "__builtin_ia32_cvtpd2ps256", + "avx.cvt.pd2dq.256" => 
"__builtin_ia32_cvtpd2dq256", + "avx.cvt.ps2.pd.256" => "__builtin_ia32_cvtps2pd256", + "avx.cvt.ps2dq.256" => "__builtin_ia32_cvtps2dq256", + "avx.cvtdq2.pd.256" => "__builtin_ia32_cvtdq2pd256", + "avx.cvtdq2.ps.256" => "__builtin_ia32_cvtdq2ps256", + "avx.cvtt.pd2dq.256" => "__builtin_ia32_cvttpd2dq256", + "avx.cvtt.ps2dq.256" => "__builtin_ia32_cvttps2dq256", + "avx.dp.ps.256" => "__builtin_ia32_dpps256", + "avx.hadd.pd.256" => "__builtin_ia32_haddpd256", + "avx.hadd.ps.256" => "__builtin_ia32_haddps256", + "avx.hsub.pd.256" => "__builtin_ia32_hsubpd256", + "avx.hsub.ps.256" => "__builtin_ia32_hsubps256", + "avx.ldu.dq.256" => "__builtin_ia32_lddqu256", + "avx.maskload.pd" => "__builtin_ia32_maskloadpd", + "avx.maskload.pd.256" => "__builtin_ia32_maskloadpd256", + "avx.maskload.ps" => "__builtin_ia32_maskloadps", + "avx.maskload.ps.256" => "__builtin_ia32_maskloadps256", + "avx.maskstore.pd" => "__builtin_ia32_maskstorepd", + "avx.maskstore.pd.256" => "__builtin_ia32_maskstorepd256", + "avx.maskstore.ps" => "__builtin_ia32_maskstoreps", + "avx.maskstore.ps.256" => "__builtin_ia32_maskstoreps256", + "avx.max.pd.256" => "__builtin_ia32_maxpd256", + "avx.max.ps.256" => "__builtin_ia32_maxps256", + "avx.min.pd.256" => "__builtin_ia32_minpd256", + "avx.min.ps.256" => "__builtin_ia32_minps256", + "avx.movmsk.pd.256" => "__builtin_ia32_movmskpd256", + "avx.movmsk.ps.256" => "__builtin_ia32_movmskps256", + "avx.ptestc.256" => "__builtin_ia32_ptestc256", + "avx.ptestnzc.256" => "__builtin_ia32_ptestnzc256", + "avx.ptestz.256" => "__builtin_ia32_ptestz256", + "avx.rcp.ps.256" => "__builtin_ia32_rcpps256", + "avx.round.pd.256" => "__builtin_ia32_roundpd256", + "avx.round.ps.256" => "__builtin_ia32_roundps256", + "avx.rsqrt.ps.256" => "__builtin_ia32_rsqrtps256", + "avx.sqrt.pd.256" => "__builtin_ia32_sqrtpd256", + "avx.sqrt.ps.256" => "__builtin_ia32_sqrtps256", + "avx.storeu.dq.256" => "__builtin_ia32_storedqu256", + "avx.storeu.pd.256" => "__builtin_ia32_storeupd256", + "avx.storeu.ps.256" => "__builtin_ia32_storeups256", + "avx.vbroadcastf128.pd.256" => "__builtin_ia32_vbroadcastf128_pd256", + "avx.vbroadcastf128.ps.256" => "__builtin_ia32_vbroadcastf128_ps256", + "avx.vextractf128.pd.256" => "__builtin_ia32_vextractf128_pd256", + "avx.vextractf128.ps.256" => "__builtin_ia32_vextractf128_ps256", + "avx.vextractf128.si.256" => "__builtin_ia32_vextractf128_si256", + "avx.vinsertf128.pd.256" => "__builtin_ia32_vinsertf128_pd256", + "avx.vinsertf128.ps.256" => "__builtin_ia32_vinsertf128_ps256", + "avx.vinsertf128.si.256" => "__builtin_ia32_vinsertf128_si256", + "avx.vperm2f128.pd.256" => "__builtin_ia32_vperm2f128_pd256", + "avx.vperm2f128.ps.256" => "__builtin_ia32_vperm2f128_ps256", + "avx.vperm2f128.si.256" => "__builtin_ia32_vperm2f128_si256", + "avx.vpermilvar.pd" => "__builtin_ia32_vpermilvarpd", + "avx.vpermilvar.pd.256" => "__builtin_ia32_vpermilvarpd256", + "avx.vpermilvar.ps" => "__builtin_ia32_vpermilvarps", + "avx.vpermilvar.ps.256" => "__builtin_ia32_vpermilvarps256", + "avx.vtestc.pd" => "__builtin_ia32_vtestcpd", + "avx.vtestc.pd.256" => "__builtin_ia32_vtestcpd256", + "avx.vtestc.ps" => "__builtin_ia32_vtestcps", + "avx.vtestc.ps.256" => "__builtin_ia32_vtestcps256", + "avx.vtestnzc.pd" => "__builtin_ia32_vtestnzcpd", + "avx.vtestnzc.pd.256" => "__builtin_ia32_vtestnzcpd256", + "avx.vtestnzc.ps" => "__builtin_ia32_vtestnzcps", + "avx.vtestnzc.ps.256" => "__builtin_ia32_vtestnzcps256", + "avx.vtestz.pd" => "__builtin_ia32_vtestzpd", + "avx.vtestz.pd.256" => 
"__builtin_ia32_vtestzpd256", + "avx.vtestz.ps" => "__builtin_ia32_vtestzps", + "avx.vtestz.ps.256" => "__builtin_ia32_vtestzps256", + "avx.vzeroall" => "__builtin_ia32_vzeroall", + "avx.vzeroupper" => "__builtin_ia32_vzeroupper", + "avx10.mask.getexp.bf16.128" => "__builtin_ia32_vgetexpbf16128_mask", + "avx10.mask.getexp.bf16.256" => "__builtin_ia32_vgetexpbf16256_mask", + "avx10.mask.getexp.bf16.512" => "__builtin_ia32_vgetexpbf16512_mask", + "avx10.mask.getmant.bf16.128" => "__builtin_ia32_vgetmantbf16128_mask", + "avx10.mask.getmant.bf16.256" => "__builtin_ia32_vgetmantbf16256_mask", + "avx10.mask.getmant.bf16.512" => "__builtin_ia32_vgetmantbf16512_mask", + "avx10.mask.rcp.bf16.128" => "__builtin_ia32_vrcpbf16128_mask", + "avx10.mask.rcp.bf16.256" => "__builtin_ia32_vrcpbf16256_mask", + "avx10.mask.rcp.bf16.512" => "__builtin_ia32_vrcpbf16512_mask", + "avx10.mask.reduce.bf16.128" => "__builtin_ia32_vreducebf16128_mask", + "avx10.mask.reduce.bf16.256" => "__builtin_ia32_vreducebf16256_mask", + "avx10.mask.reduce.bf16.512" => "__builtin_ia32_vreducebf16512_mask", + "avx10.mask.rndscale.bf16.128" => "__builtin_ia32_vrndscalebf16_128_mask", + "avx10.mask.rndscale.bf16.256" => "__builtin_ia32_vrndscalebf16_256_mask", + "avx10.mask.rndscale.bf16.512" => "__builtin_ia32_vrndscalebf16_mask", + "avx10.mask.rsqrt.bf16.128" => "__builtin_ia32_vrsqrtbf16128_mask", + "avx10.mask.rsqrt.bf16.256" => "__builtin_ia32_vrsqrtbf16256_mask", + "avx10.mask.rsqrt.bf16.512" => "__builtin_ia32_vrsqrtbf16512_mask", + "avx10.mask.scalef.bf16.128" => "__builtin_ia32_vscalefbf16128_mask", + "avx10.mask.scalef.bf16.256" => "__builtin_ia32_vscalefbf16256_mask", + "avx10.mask.scalef.bf16.512" => "__builtin_ia32_vscalefbf16512_mask", + "avx10.mask.vcvt2ps2phx.128" => "__builtin_ia32_vcvt2ps2phx128_mask", + "avx10.mask.vcvt2ps2phx.256" => "__builtin_ia32_vcvt2ps2phx256_mask", + "avx10.mask.vcvt2ps2phx.512" => "__builtin_ia32_vcvt2ps2phx512_mask", + "avx10.mask.vcvtbiasph2bf8128" => "__builtin_ia32_vcvtbiasph2bf8_128_mask", + "avx10.mask.vcvtbiasph2bf8256" => "__builtin_ia32_vcvtbiasph2bf8_256_mask", + "avx10.mask.vcvtbiasph2bf8512" => "__builtin_ia32_vcvtbiasph2bf8_512_mask", + "avx10.mask.vcvtbiasph2bf8s128" => "__builtin_ia32_vcvtbiasph2bf8s_128_mask", + "avx10.mask.vcvtbiasph2bf8s256" => "__builtin_ia32_vcvtbiasph2bf8s_256_mask", + "avx10.mask.vcvtbiasph2bf8s512" => "__builtin_ia32_vcvtbiasph2bf8s_512_mask", + "avx10.mask.vcvtbiasph2hf8128" => "__builtin_ia32_vcvtbiasph2hf8_128_mask", + "avx10.mask.vcvtbiasph2hf8256" => "__builtin_ia32_vcvtbiasph2hf8_256_mask", + "avx10.mask.vcvtbiasph2hf8512" => "__builtin_ia32_vcvtbiasph2hf8_512_mask", + "avx10.mask.vcvtbiasph2hf8s128" => "__builtin_ia32_vcvtbiasph2hf8s_128_mask", + "avx10.mask.vcvtbiasph2hf8s256" => "__builtin_ia32_vcvtbiasph2hf8s_256_mask", + "avx10.mask.vcvtbiasph2hf8s512" => "__builtin_ia32_vcvtbiasph2hf8s_512_mask", + "avx10.mask.vcvthf82ph128" => "__builtin_ia32_vcvthf8_2ph128_mask", + "avx10.mask.vcvthf82ph256" => "__builtin_ia32_vcvthf8_2ph256_mask", + "avx10.mask.vcvthf82ph512" => "__builtin_ia32_vcvthf8_2ph512_mask", + "avx10.mask.vcvtph2bf8128" => "__builtin_ia32_vcvtph2bf8_128_mask", + "avx10.mask.vcvtph2bf8256" => "__builtin_ia32_vcvtph2bf8_256_mask", + "avx10.mask.vcvtph2bf8512" => "__builtin_ia32_vcvtph2bf8_512_mask", + "avx10.mask.vcvtph2bf8s128" => "__builtin_ia32_vcvtph2bf8s_128_mask", + "avx10.mask.vcvtph2bf8s256" => "__builtin_ia32_vcvtph2bf8s_256_mask", + "avx10.mask.vcvtph2bf8s512" => "__builtin_ia32_vcvtph2bf8s_512_mask", + 
"avx10.mask.vcvtph2hf8128" => "__builtin_ia32_vcvtph2hf8_128_mask", + "avx10.mask.vcvtph2hf8256" => "__builtin_ia32_vcvtph2hf8_256_mask", + "avx10.mask.vcvtph2hf8512" => "__builtin_ia32_vcvtph2hf8_512_mask", + "avx10.mask.vcvtph2hf8s128" => "__builtin_ia32_vcvtph2hf8s_128_mask", + "avx10.mask.vcvtph2hf8s256" => "__builtin_ia32_vcvtph2hf8s_256_mask", + "avx10.mask.vcvtph2hf8s512" => "__builtin_ia32_vcvtph2hf8s_512_mask", + "avx10.mask.vcvtph2ibs128" => "__builtin_ia32_vcvtph2ibs128_mask", + "avx10.mask.vcvtph2ibs256" => "__builtin_ia32_vcvtph2ibs256_mask", + "avx10.mask.vcvtph2ibs512" => "__builtin_ia32_vcvtph2ibs512_mask", + "avx10.mask.vcvtph2iubs128" => "__builtin_ia32_vcvtph2iubs128_mask", + "avx10.mask.vcvtph2iubs256" => "__builtin_ia32_vcvtph2iubs256_mask", + "avx10.mask.vcvtph2iubs512" => "__builtin_ia32_vcvtph2iubs512_mask", + "avx10.mask.vcvtps2ibs128" => "__builtin_ia32_vcvtps2ibs128_mask", + "avx10.mask.vcvtps2ibs256" => "__builtin_ia32_vcvtps2ibs256_mask", + "avx10.mask.vcvtps2ibs512" => "__builtin_ia32_vcvtps2ibs512_mask", + "avx10.mask.vcvtps2iubs128" => "__builtin_ia32_vcvtps2iubs128_mask", + "avx10.mask.vcvtps2iubs256" => "__builtin_ia32_vcvtps2iubs256_mask", + "avx10.mask.vcvtps2iubs512" => "__builtin_ia32_vcvtps2iubs512_mask", + "avx10.mask.vcvttpd2dqs.128" => "__builtin_ia32_vcvttpd2dqs128_mask", + "avx10.mask.vcvttpd2dqs.256" => "__builtin_ia32_vcvttpd2dqs256_mask", + // [INVALID CONVERSION]: "avx10.mask.vcvttpd2dqs.round.512" => "__builtin_ia32_vcvttpd2dqs512_round_mask", + "avx10.mask.vcvttpd2qqs.128" => "__builtin_ia32_vcvttpd2qqs128_mask", + "avx10.mask.vcvttpd2qqs.256" => "__builtin_ia32_vcvttpd2qqs256_mask", + // [INVALID CONVERSION]: "avx10.mask.vcvttpd2qqs.round.512" => "__builtin_ia32_vcvttpd2qqs512_round_mask", + "avx10.mask.vcvttpd2udqs.128" => "__builtin_ia32_vcvttpd2udqs128_mask", + "avx10.mask.vcvttpd2udqs.256" => "__builtin_ia32_vcvttpd2udqs256_mask", + // [INVALID CONVERSION]: "avx10.mask.vcvttpd2udqs.round.512" => "__builtin_ia32_vcvttpd2udqs512_round_mask", + "avx10.mask.vcvttpd2uqqs.128" => "__builtin_ia32_vcvttpd2uqqs128_mask", + "avx10.mask.vcvttpd2uqqs.256" => "__builtin_ia32_vcvttpd2uqqs256_mask", + // [INVALID CONVERSION]: "avx10.mask.vcvttpd2uqqs.round.512" => "__builtin_ia32_vcvttpd2uqqs512_round_mask", + "avx10.mask.vcvttph2ibs128" => "__builtin_ia32_vcvttph2ibs128_mask", + "avx10.mask.vcvttph2ibs256" => "__builtin_ia32_vcvttph2ibs256_mask", + "avx10.mask.vcvttph2ibs512" => "__builtin_ia32_vcvttph2ibs512_mask", + "avx10.mask.vcvttph2iubs128" => "__builtin_ia32_vcvttph2iubs128_mask", + "avx10.mask.vcvttph2iubs256" => "__builtin_ia32_vcvttph2iubs256_mask", + "avx10.mask.vcvttph2iubs512" => "__builtin_ia32_vcvttph2iubs512_mask", + "avx10.mask.vcvttps2dqs.128" => "__builtin_ia32_vcvttps2dqs128_mask", + "avx10.mask.vcvttps2dqs.256" => "__builtin_ia32_vcvttps2dqs256_mask", + // [INVALID CONVERSION]: "avx10.mask.vcvttps2dqs.round.512" => "__builtin_ia32_vcvttps2dqs512_round_mask", + "avx10.mask.vcvttps2ibs128" => "__builtin_ia32_vcvttps2ibs128_mask", + "avx10.mask.vcvttps2ibs256" => "__builtin_ia32_vcvttps2ibs256_mask", + "avx10.mask.vcvttps2ibs512" => "__builtin_ia32_vcvttps2ibs512_mask", + "avx10.mask.vcvttps2iubs128" => "__builtin_ia32_vcvttps2iubs128_mask", + "avx10.mask.vcvttps2iubs256" => "__builtin_ia32_vcvttps2iubs256_mask", + "avx10.mask.vcvttps2iubs512" => "__builtin_ia32_vcvttps2iubs512_mask", + "avx10.mask.vcvttps2qqs.128" => "__builtin_ia32_vcvttps2qqs128_mask", + "avx10.mask.vcvttps2qqs.256" => "__builtin_ia32_vcvttps2qqs256_mask", + // 
[INVALID CONVERSION]: "avx10.mask.vcvttps2qqs.round.512" => "__builtin_ia32_vcvttps2qqs512_round_mask", + "avx10.mask.vcvttps2udqs.128" => "__builtin_ia32_vcvttps2udqs128_mask", + "avx10.mask.vcvttps2udqs.256" => "__builtin_ia32_vcvttps2udqs256_mask", + // [INVALID CONVERSION]: "avx10.mask.vcvttps2udqs.round.512" => "__builtin_ia32_vcvttps2udqs512_round_mask", + "avx10.mask.vcvttps2uqqs.128" => "__builtin_ia32_vcvttps2uqqs128_mask", + "avx10.mask.vcvttps2uqqs.256" => "__builtin_ia32_vcvttps2uqqs256_mask", + // [INVALID CONVERSION]: "avx10.mask.vcvttps2uqqs.round.512" => "__builtin_ia32_vcvttps2uqqs512_round_mask", + // [INVALID CONVERSION]: "avx10.mask.vminmaxpd.round" => "__builtin_ia32_vminmaxpd512_round_mask", + "avx10.mask.vminmaxpd128" => "__builtin_ia32_vminmaxpd128_mask", + "avx10.mask.vminmaxpd256" => "__builtin_ia32_vminmaxpd256_mask", + // [INVALID CONVERSION]: "avx10.mask.vminmaxph.round" => "__builtin_ia32_vminmaxph512_round_mask", + "avx10.mask.vminmaxph128" => "__builtin_ia32_vminmaxph128_mask", + "avx10.mask.vminmaxph256" => "__builtin_ia32_vminmaxph256_mask", + // [INVALID CONVERSION]: "avx10.mask.vminmaxps.round" => "__builtin_ia32_vminmaxps512_round_mask", + "avx10.mask.vminmaxps128" => "__builtin_ia32_vminmaxps128_mask", + "avx10.mask.vminmaxps256" => "__builtin_ia32_vminmaxps256_mask", + // [INVALID CONVERSION]: "avx10.mask.vminmaxsd.round" => "__builtin_ia32_vminmaxsd_round_mask", + // [INVALID CONVERSION]: "avx10.mask.vminmaxsh.round" => "__builtin_ia32_vminmaxsh_round_mask", + // [INVALID CONVERSION]: "avx10.mask.vminmaxss.round" => "__builtin_ia32_vminmaxss_round_mask", + "avx10.vaddbf16128" => "__builtin_ia32_vaddbf16128", + "avx10.vaddbf16256" => "__builtin_ia32_vaddbf16256", + "avx10.vaddbf16512" => "__builtin_ia32_vaddbf16512", + "avx10.vaddpd256" => "__builtin_ia32_vaddpd256_round", + "avx10.vaddph256" => "__builtin_ia32_vaddph256_round", + "avx10.vaddps256" => "__builtin_ia32_vaddps256_round", + "avx10.vcomisbf16eq" => "__builtin_ia32_vcomisbf16eq", + "avx10.vcomisbf16ge" => "__builtin_ia32_vcomisbf16ge", + "avx10.vcomisbf16gt" => "__builtin_ia32_vcomisbf16gt", + "avx10.vcomisbf16le" => "__builtin_ia32_vcomisbf16le", + "avx10.vcomisbf16lt" => "__builtin_ia32_vcomisbf16lt", + "avx10.vcomisbf16neq" => "__builtin_ia32_vcomisbf16neq", + "avx10.vcvt2ph2bf8128" => "__builtin_ia32_vcvt2ph2bf8_128", + "avx10.vcvt2ph2bf8256" => "__builtin_ia32_vcvt2ph2bf8_256", + "avx10.vcvt2ph2bf8512" => "__builtin_ia32_vcvt2ph2bf8_512", + "avx10.vcvt2ph2bf8s128" => "__builtin_ia32_vcvt2ph2bf8s_128", + "avx10.vcvt2ph2bf8s256" => "__builtin_ia32_vcvt2ph2bf8s_256", + "avx10.vcvt2ph2bf8s512" => "__builtin_ia32_vcvt2ph2bf8s_512", + "avx10.vcvt2ph2hf8128" => "__builtin_ia32_vcvt2ph2hf8_128", + "avx10.vcvt2ph2hf8256" => "__builtin_ia32_vcvt2ph2hf8_256", + "avx10.vcvt2ph2hf8512" => "__builtin_ia32_vcvt2ph2hf8_512", + "avx10.vcvt2ph2hf8s128" => "__builtin_ia32_vcvt2ph2hf8s_128", + "avx10.vcvt2ph2hf8s256" => "__builtin_ia32_vcvt2ph2hf8s_256", + "avx10.vcvt2ph2hf8s512" => "__builtin_ia32_vcvt2ph2hf8s_512", + "avx10.vcvtbf162ibs128" => "__builtin_ia32_vcvtbf162ibs128", + "avx10.vcvtbf162ibs256" => "__builtin_ia32_vcvtbf162ibs256", + "avx10.vcvtbf162ibs512" => "__builtin_ia32_vcvtbf162ibs512", + "avx10.vcvtbf162iubs128" => "__builtin_ia32_vcvtbf162iubs128", + "avx10.vcvtbf162iubs256" => "__builtin_ia32_vcvtbf162iubs256", + "avx10.vcvtbf162iubs512" => "__builtin_ia32_vcvtbf162iubs512", + "avx10.vcvttbf162ibs128" => "__builtin_ia32_vcvttbf162ibs128", + "avx10.vcvttbf162ibs256" => 
"__builtin_ia32_vcvttbf162ibs256", + "avx10.vcvttbf162ibs512" => "__builtin_ia32_vcvttbf162ibs512", + "avx10.vcvttbf162iubs128" => "__builtin_ia32_vcvttbf162iubs128", + "avx10.vcvttbf162iubs256" => "__builtin_ia32_vcvttbf162iubs256", + "avx10.vcvttbf162iubs512" => "__builtin_ia32_vcvttbf162iubs512", + "avx10.vcvttsd2sis" => "__builtin_ia32_vcvttsd2sis32", + "avx10.vcvttsd2sis64" => "__builtin_ia32_vcvttsd2sis64", + "avx10.vcvttsd2usis" => "__builtin_ia32_vcvttsd2usis32", + "avx10.vcvttsd2usis64" => "__builtin_ia32_vcvttsd2usis64", + "avx10.vcvttss2sis" => "__builtin_ia32_vcvttss2sis32", + "avx10.vcvttss2sis64" => "__builtin_ia32_vcvttss2sis64", + "avx10.vcvttss2usis" => "__builtin_ia32_vcvttss2usis32", + "avx10.vcvttss2usis64" => "__builtin_ia32_vcvttss2usis64", + "avx10.vdivbf16128" => "__builtin_ia32_vdivbf16128", + "avx10.vdivbf16256" => "__builtin_ia32_vdivbf16256", + "avx10.vdivbf16512" => "__builtin_ia32_vdivbf16512", + "avx10.vdpphps.128" => "__builtin_ia32_vdpphps128", + "avx10.vdpphps.256" => "__builtin_ia32_vdpphps256", + "avx10.vdpphps.512" => "__builtin_ia32_vdpphps512", + "avx10.vfmadd132bf16128" => "__builtin_ia32_vfmadd132bf16128", + "avx10.vfmadd132bf16256" => "__builtin_ia32_vfmadd132bf16256", + "avx10.vfmadd132bf16512" => "__builtin_ia32_vfmadd132bf16512", + "avx10.vfmadd213bf16128" => "__builtin_ia32_vfmadd213bf16128", + "avx10.vfmadd213bf16256" => "__builtin_ia32_vfmadd213bf16256", + "avx10.vfmadd231bf16128" => "__builtin_ia32_vfmadd231bf16128", + "avx10.vfmadd231bf16256" => "__builtin_ia32_vfmadd231bf16256", + "avx10.vfmadd231bf16512" => "__builtin_ia32_vfmadd231bf16512", + "avx10.vfmsub132bf16128" => "__builtin_ia32_vfmsub132bf16128", + "avx10.vfmsub132bf16256" => "__builtin_ia32_vfmsub132bf16256", + "avx10.vfmsub132bf16512" => "__builtin_ia32_vfmsub132bf16512", + "avx10.vfmsub213bf16128" => "__builtin_ia32_vfmsub213bf16128", + "avx10.vfmsub213bf16256" => "__builtin_ia32_vfmsub213bf16256", + "avx10.vfmsub213bf16512" => "__builtin_ia32_vfmsub213bf16512", + "avx10.vfmsub231bf16128" => "__builtin_ia32_vfmsub231bf16128", + "avx10.vfmsub231bf16256" => "__builtin_ia32_vfmsub231bf16256", + "avx10.vfmsub231bf16512" => "__builtin_ia32_vfmsub231bf16512", + "avx10.vfnmadd132bf16128" => "__builtin_ia32_vfnmadd132bf16128", + "avx10.vfnmadd132bf16256" => "__builtin_ia32_vfnmadd132bf16256", + "avx10.vfnmadd132bf16512" => "__builtin_ia32_vfnmadd132bf16512", + "avx10.vfnmadd213bf16128" => "__builtin_ia32_vfnmadd213bf16128", + "avx10.vfnmadd213bf16256" => "__builtin_ia32_vfnmadd213bf16256", + "avx10.vfnmadd213bf16512" => "__builtin_ia32_vfnmadd213bf16512", + "avx10.vfnmadd231bf16128" => "__builtin_ia32_vfnmadd231bf16128", + "avx10.vfnmadd231bf16256" => "__builtin_ia32_vfnmadd231bf16256", + "avx10.vfnmadd231bf16512" => "__builtin_ia32_vfnmadd231bf16512", + "avx10.vfnmsub132bf16128" => "__builtin_ia32_vfnmsub132bf16128", + "avx10.vfnmsub132bf16256" => "__builtin_ia32_vfnmsub132bf16256", + "avx10.vfnmsub132bf16512" => "__builtin_ia32_vfnmsub132bf16512", + "avx10.vfnmsub213bf16128" => "__builtin_ia32_vfnmsub213bf16128", + "avx10.vfnmsub213bf16256" => "__builtin_ia32_vfnmsub213bf16256", + "avx10.vfnmsub213bf16512" => "__builtin_ia32_vfnmsub213bf16512", + "avx10.vfnmsub231bf16128" => "__builtin_ia32_vfnmsub231bf16128", + "avx10.vfnmsub231bf16256" => "__builtin_ia32_vfnmsub231bf16256", + "avx10.vfnmsub231bf16512" => "__builtin_ia32_vfnmsub231bf16512", + "avx10.vmaxbf16128" => "__builtin_ia32_vmaxbf16128", + "avx10.vmaxbf16256" => "__builtin_ia32_vmaxbf16256", + "avx10.vmaxbf16512" => 
"__builtin_ia32_vmaxbf16512", + "avx10.vminbf16128" => "__builtin_ia32_vminbf16128", + "avx10.vminbf16256" => "__builtin_ia32_vminbf16256", + "avx10.vminbf16512" => "__builtin_ia32_vminbf16512", + "avx10.vminmaxbf16128" => "__builtin_ia32_vminmaxbf16128", + "avx10.vminmaxbf16256" => "__builtin_ia32_vminmaxbf16256", + "avx10.vminmaxbf16512" => "__builtin_ia32_vminmaxbf16512", + "avx10.vminmaxpd128" => "__builtin_ia32_vminmaxpd128", + "avx10.vminmaxpd256" => "__builtin_ia32_vminmaxpd256", + "avx10.vminmaxph128" => "__builtin_ia32_vminmaxph128", + "avx10.vminmaxph256" => "__builtin_ia32_vminmaxph256", + "avx10.vminmaxps128" => "__builtin_ia32_vminmaxps128", + "avx10.vminmaxps256" => "__builtin_ia32_vminmaxps256", + "avx10.vmovrsb128" => "__builtin_ia32_vmovrsb128", + "avx10.vmovrsb256" => "__builtin_ia32_vmovrsb256", + "avx10.vmovrsb512" => "__builtin_ia32_vmovrsb512", + "avx10.vmovrsd128" => "__builtin_ia32_vmovrsd128", + "avx10.vmovrsd256" => "__builtin_ia32_vmovrsd256", + "avx10.vmovrsd512" => "__builtin_ia32_vmovrsd512", + "avx10.vmovrsq128" => "__builtin_ia32_vmovrsq128", + "avx10.vmovrsq256" => "__builtin_ia32_vmovrsq256", + "avx10.vmovrsq512" => "__builtin_ia32_vmovrsq512", + "avx10.vmovrsw128" => "__builtin_ia32_vmovrsw128", + "avx10.vmovrsw256" => "__builtin_ia32_vmovrsw256", + "avx10.vmovrsw512" => "__builtin_ia32_vmovrsw512", + "avx10.vmpsadbw.512" => "__builtin_ia32_mpsadbw512", + "avx10.vmulbf16128" => "__builtin_ia32_vmulbf16128", + "avx10.vmulbf16256" => "__builtin_ia32_vmulbf16256", + "avx10.vmulbf16512" => "__builtin_ia32_vmulbf16512", + "avx10.vpdpbssd.512" => "__builtin_ia32_vpdpbssd512", + "avx10.vpdpbssds.512" => "__builtin_ia32_vpdpbssds512", + "avx10.vpdpbsud.512" => "__builtin_ia32_vpdpbsud512", + "avx10.vpdpbsuds.512" => "__builtin_ia32_vpdpbsuds512", + "avx10.vpdpbuud.512" => "__builtin_ia32_vpdpbuud512", + "avx10.vpdpbuuds.512" => "__builtin_ia32_vpdpbuuds512", + "avx10.vpdpwsud.512" => "__builtin_ia32_vpdpwsud512", + "avx10.vpdpwsuds.512" => "__builtin_ia32_vpdpwsuds512", + "avx10.vpdpwusd.512" => "__builtin_ia32_vpdpwusd512", + "avx10.vpdpwusds.512" => "__builtin_ia32_vpdpwusds512", + "avx10.vpdpwuud.512" => "__builtin_ia32_vpdpwuud512", + "avx10.vpdpwuuds.512" => "__builtin_ia32_vpdpwuuds512", + "avx10.vsubbf16128" => "__builtin_ia32_vsubbf16128", + "avx10.vsubbf16256" => "__builtin_ia32_vsubbf16256", + "avx10.vsubbf16512" => "__builtin_ia32_vsubbf16512", + "avx2.gather.d.d" => "__builtin_ia32_gatherd_d", + "avx2.gather.d.d.256" => "__builtin_ia32_gatherd_d256", + "avx2.gather.d.pd" => "__builtin_ia32_gatherd_pd", + "avx2.gather.d.pd.256" => "__builtin_ia32_gatherd_pd256", + "avx2.gather.d.ps" => "__builtin_ia32_gatherd_ps", + "avx2.gather.d.ps.256" => "__builtin_ia32_gatherd_ps256", + "avx2.gather.d.q" => "__builtin_ia32_gatherd_q", + "avx2.gather.d.q.256" => "__builtin_ia32_gatherd_q256", + "avx2.gather.q.d" => "__builtin_ia32_gatherq_d", + "avx2.gather.q.d.256" => "__builtin_ia32_gatherq_d256", + "avx2.gather.q.pd" => "__builtin_ia32_gatherq_pd", + "avx2.gather.q.pd.256" => "__builtin_ia32_gatherq_pd256", + "avx2.gather.q.ps" => "__builtin_ia32_gatherq_ps", + "avx2.gather.q.ps.256" => "__builtin_ia32_gatherq_ps256", + "avx2.gather.q.q" => "__builtin_ia32_gatherq_q", + "avx2.gather.q.q.256" => "__builtin_ia32_gatherq_q256", + "avx2.maskload.d" => "__builtin_ia32_maskloadd", + "avx2.maskload.d.256" => "__builtin_ia32_maskloadd256", + "avx2.maskload.q" => "__builtin_ia32_maskloadq", + "avx2.maskload.q.256" => "__builtin_ia32_maskloadq256", + "avx2.maskstore.d" => 
"__builtin_ia32_maskstored", + "avx2.maskstore.d.256" => "__builtin_ia32_maskstored256", + "avx2.maskstore.q" => "__builtin_ia32_maskstoreq", + "avx2.maskstore.q.256" => "__builtin_ia32_maskstoreq256", + "avx2.movntdqa" => "__builtin_ia32_movntdqa256", + "avx2.mpsadbw" => "__builtin_ia32_mpsadbw256", + "avx2.pabs.b" => "__builtin_ia32_pabsb256", + "avx2.pabs.d" => "__builtin_ia32_pabsd256", + "avx2.pabs.w" => "__builtin_ia32_pabsw256", + "avx2.packssdw" => "__builtin_ia32_packssdw256", + "avx2.packsswb" => "__builtin_ia32_packsswb256", + "avx2.packusdw" => "__builtin_ia32_packusdw256", + "avx2.packuswb" => "__builtin_ia32_packuswb256", + "avx2.padds.b" => "__builtin_ia32_paddsb256", + "avx2.padds.w" => "__builtin_ia32_paddsw256", + "avx2.paddus.b" => "__builtin_ia32_paddusb256", + "avx2.paddus.w" => "__builtin_ia32_paddusw256", + "avx2.pavg.b" => "__builtin_ia32_pavgb256", + "avx2.pavg.w" => "__builtin_ia32_pavgw256", + "avx2.pblendd.128" => "__builtin_ia32_pblendd128", + "avx2.pblendd.256" => "__builtin_ia32_pblendd256", + "avx2.pblendvb" => "__builtin_ia32_pblendvb256", + "avx2.pblendw" => "__builtin_ia32_pblendw256", + "avx2.pbroadcastb.128" => "__builtin_ia32_pbroadcastb128", + "avx2.pbroadcastb.256" => "__builtin_ia32_pbroadcastb256", + "avx2.pbroadcastd.128" => "__builtin_ia32_pbroadcastd128", + "avx2.pbroadcastd.256" => "__builtin_ia32_pbroadcastd256", + "avx2.pbroadcastq.128" => "__builtin_ia32_pbroadcastq128", + "avx2.pbroadcastq.256" => "__builtin_ia32_pbroadcastq256", + "avx2.pbroadcastw.128" => "__builtin_ia32_pbroadcastw128", + "avx2.pbroadcastw.256" => "__builtin_ia32_pbroadcastw256", + "avx2.permd" => "__builtin_ia32_permvarsi256", + "avx2.permps" => "__builtin_ia32_permvarsf256", + "avx2.phadd.d" => "__builtin_ia32_phaddd256", + "avx2.phadd.sw" => "__builtin_ia32_phaddsw256", + "avx2.phadd.w" => "__builtin_ia32_phaddw256", + "avx2.phsub.d" => "__builtin_ia32_phsubd256", + "avx2.phsub.sw" => "__builtin_ia32_phsubsw256", + "avx2.phsub.w" => "__builtin_ia32_phsubw256", + "avx2.pmadd.ub.sw" => "__builtin_ia32_pmaddubsw256", + "avx2.pmadd.wd" => "__builtin_ia32_pmaddwd256", + "avx2.pmaxs.b" => "__builtin_ia32_pmaxsb256", + "avx2.pmaxs.d" => "__builtin_ia32_pmaxsd256", + "avx2.pmaxs.w" => "__builtin_ia32_pmaxsw256", + "avx2.pmaxu.b" => "__builtin_ia32_pmaxub256", + "avx2.pmaxu.d" => "__builtin_ia32_pmaxud256", + "avx2.pmaxu.w" => "__builtin_ia32_pmaxuw256", + "avx2.pmins.b" => "__builtin_ia32_pminsb256", + "avx2.pmins.d" => "__builtin_ia32_pminsd256", + "avx2.pmins.w" => "__builtin_ia32_pminsw256", + "avx2.pminu.b" => "__builtin_ia32_pminub256", + "avx2.pminu.d" => "__builtin_ia32_pminud256", + "avx2.pminu.w" => "__builtin_ia32_pminuw256", + "avx2.pmovmskb" => "__builtin_ia32_pmovmskb256", + "avx2.pmovsxbd" => "__builtin_ia32_pmovsxbd256", + "avx2.pmovsxbq" => "__builtin_ia32_pmovsxbq256", + "avx2.pmovsxbw" => "__builtin_ia32_pmovsxbw256", + "avx2.pmovsxdq" => "__builtin_ia32_pmovsxdq256", + "avx2.pmovsxwd" => "__builtin_ia32_pmovsxwd256", + "avx2.pmovsxwq" => "__builtin_ia32_pmovsxwq256", + "avx2.pmovzxbd" => "__builtin_ia32_pmovzxbd256", + "avx2.pmovzxbq" => "__builtin_ia32_pmovzxbq256", + "avx2.pmovzxbw" => "__builtin_ia32_pmovzxbw256", + "avx2.pmovzxdq" => "__builtin_ia32_pmovzxdq256", + "avx2.pmovzxwd" => "__builtin_ia32_pmovzxwd256", + "avx2.pmovzxwq" => "__builtin_ia32_pmovzxwq256", + "avx2.pmul.dq" => "__builtin_ia32_pmuldq256", + "avx2.pmul.hr.sw" => "__builtin_ia32_pmulhrsw256", + "avx2.pmulh.w" => "__builtin_ia32_pmulhw256", + "avx2.pmulhu.w" => 
"__builtin_ia32_pmulhuw256", + "avx2.pmulu.dq" => "__builtin_ia32_pmuludq256", + "avx2.psad.bw" => "__builtin_ia32_psadbw256", + "avx2.pshuf.b" => "__builtin_ia32_pshufb256", + "avx2.psign.b" => "__builtin_ia32_psignb256", + "avx2.psign.d" => "__builtin_ia32_psignd256", + "avx2.psign.w" => "__builtin_ia32_psignw256", + "avx2.psll.d" => "__builtin_ia32_pslld256", + "avx2.psll.dq" => "__builtin_ia32_pslldqi256", + "avx2.psll.dq.bs" => "__builtin_ia32_pslldqi256_byteshift", + "avx2.psll.q" => "__builtin_ia32_psllq256", + "avx2.psll.w" => "__builtin_ia32_psllw256", + "avx2.pslli.d" => "__builtin_ia32_pslldi256", + "avx2.pslli.q" => "__builtin_ia32_psllqi256", + "avx2.pslli.w" => "__builtin_ia32_psllwi256", + "avx2.psllv.d" => "__builtin_ia32_psllv4si", + "avx2.psllv.d.256" => "__builtin_ia32_psllv8si", + "avx2.psllv.q" => "__builtin_ia32_psllv2di", + "avx2.psllv.q.256" => "__builtin_ia32_psllv4di", + "avx2.psra.d" => "__builtin_ia32_psrad256", + "avx2.psra.w" => "__builtin_ia32_psraw256", + "avx2.psrai.d" => "__builtin_ia32_psradi256", + "avx2.psrai.w" => "__builtin_ia32_psrawi256", + "avx2.psrav.d" => "__builtin_ia32_psrav4si", + "avx2.psrav.d.256" => "__builtin_ia32_psrav8si", + "avx2.psrl.d" => "__builtin_ia32_psrld256", + "avx2.psrl.dq" => "__builtin_ia32_psrldqi256", + "avx2.psrl.dq.bs" => "__builtin_ia32_psrldqi256_byteshift", + "avx2.psrl.q" => "__builtin_ia32_psrlq256", + "avx2.psrl.w" => "__builtin_ia32_psrlw256", + "avx2.psrli.d" => "__builtin_ia32_psrldi256", + "avx2.psrli.q" => "__builtin_ia32_psrlqi256", + "avx2.psrli.w" => "__builtin_ia32_psrlwi256", + "avx2.psrlv.d" => "__builtin_ia32_psrlv4si", + "avx2.psrlv.d.256" => "__builtin_ia32_psrlv8si", + "avx2.psrlv.q" => "__builtin_ia32_psrlv2di", + "avx2.psrlv.q.256" => "__builtin_ia32_psrlv4di", + "avx2.psubs.b" => "__builtin_ia32_psubsb256", + "avx2.psubs.w" => "__builtin_ia32_psubsw256", + "avx2.psubus.b" => "__builtin_ia32_psubusb256", + "avx2.psubus.w" => "__builtin_ia32_psubusw256", + "avx2.vbroadcast.sd.pd.256" => "__builtin_ia32_vbroadcastsd_pd256", + "avx2.vbroadcast.ss.ps" => "__builtin_ia32_vbroadcastss_ps", + "avx2.vbroadcast.ss.ps.256" => "__builtin_ia32_vbroadcastss_ps256", + "avx2.vextracti128" => "__builtin_ia32_extract128i256", + "avx2.vinserti128" => "__builtin_ia32_insert128i256", + "avx2.vpdpbssd.128" => "__builtin_ia32_vpdpbssd128", + "avx2.vpdpbssd.256" => "__builtin_ia32_vpdpbssd256", + "avx2.vpdpbssds.128" => "__builtin_ia32_vpdpbssds128", + "avx2.vpdpbssds.256" => "__builtin_ia32_vpdpbssds256", + "avx2.vpdpbsud.128" => "__builtin_ia32_vpdpbsud128", + "avx2.vpdpbsud.256" => "__builtin_ia32_vpdpbsud256", + "avx2.vpdpbsuds.128" => "__builtin_ia32_vpdpbsuds128", + "avx2.vpdpbsuds.256" => "__builtin_ia32_vpdpbsuds256", + "avx2.vpdpbuud.128" => "__builtin_ia32_vpdpbuud128", + "avx2.vpdpbuud.256" => "__builtin_ia32_vpdpbuud256", + "avx2.vpdpbuuds.128" => "__builtin_ia32_vpdpbuuds128", + "avx2.vpdpbuuds.256" => "__builtin_ia32_vpdpbuuds256", + "avx2.vpdpwsud.128" => "__builtin_ia32_vpdpwsud128", + "avx2.vpdpwsud.256" => "__builtin_ia32_vpdpwsud256", + "avx2.vpdpwsuds.128" => "__builtin_ia32_vpdpwsuds128", + "avx2.vpdpwsuds.256" => "__builtin_ia32_vpdpwsuds256", + "avx2.vpdpwusd.128" => "__builtin_ia32_vpdpwusd128", + "avx2.vpdpwusd.256" => "__builtin_ia32_vpdpwusd256", + "avx2.vpdpwusds.128" => "__builtin_ia32_vpdpwusds128", + "avx2.vpdpwusds.256" => "__builtin_ia32_vpdpwusds256", + "avx2.vpdpwuud.128" => "__builtin_ia32_vpdpwuud128", + "avx2.vpdpwuud.256" => "__builtin_ia32_vpdpwuud256", + "avx2.vpdpwuuds.128" => 
"__builtin_ia32_vpdpwuuds128", + "avx2.vpdpwuuds.256" => "__builtin_ia32_vpdpwuuds256", + "avx2.vperm2i128" => "__builtin_ia32_permti256", + "avx512.add.pd.512" => "__builtin_ia32_addpd512", + "avx512.add.ps.512" => "__builtin_ia32_addps512", + "avx512.broadcastmb.128" => "__builtin_ia32_broadcastmb128", + "avx512.broadcastmb.256" => "__builtin_ia32_broadcastmb256", + "avx512.broadcastmb.512" => "__builtin_ia32_broadcastmb512", + "avx512.broadcastmw.128" => "__builtin_ia32_broadcastmw128", + "avx512.broadcastmw.256" => "__builtin_ia32_broadcastmw256", + "avx512.broadcastmw.512" => "__builtin_ia32_broadcastmw512", + "avx512.conflict.d.128" => "__builtin_ia32_vpconflictsi_128", + "avx512.conflict.d.256" => "__builtin_ia32_vpconflictsi_256", + "avx512.conflict.d.512" => "__builtin_ia32_vpconflictsi_512", + "avx512.conflict.q.128" => "__builtin_ia32_vpconflictdi_128", + "avx512.conflict.q.256" => "__builtin_ia32_vpconflictdi_256", + "avx512.conflict.q.512" => "__builtin_ia32_vpconflictdi_512", + "avx512.cvtb2mask.128" => "__builtin_ia32_cvtb2mask128", + "avx512.cvtb2mask.256" => "__builtin_ia32_cvtb2mask256", + "avx512.cvtb2mask.512" => "__builtin_ia32_cvtb2mask512", + "avx512.cvtd2mask.128" => "__builtin_ia32_cvtd2mask128", + "avx512.cvtd2mask.256" => "__builtin_ia32_cvtd2mask256", + "avx512.cvtd2mask.512" => "__builtin_ia32_cvtd2mask512", + "avx512.cvtmask2b.128" => "__builtin_ia32_cvtmask2b128", + "avx512.cvtmask2b.256" => "__builtin_ia32_cvtmask2b256", + "avx512.cvtmask2b.512" => "__builtin_ia32_cvtmask2b512", + "avx512.cvtmask2d.128" => "__builtin_ia32_cvtmask2d128", + "avx512.cvtmask2d.256" => "__builtin_ia32_cvtmask2d256", + "avx512.cvtmask2d.512" => "__builtin_ia32_cvtmask2d512", + "avx512.cvtmask2q.128" => "__builtin_ia32_cvtmask2q128", + "avx512.cvtmask2q.256" => "__builtin_ia32_cvtmask2q256", + "avx512.cvtmask2q.512" => "__builtin_ia32_cvtmask2q512", + "avx512.cvtmask2w.128" => "__builtin_ia32_cvtmask2w128", + "avx512.cvtmask2w.256" => "__builtin_ia32_cvtmask2w256", + "avx512.cvtmask2w.512" => "__builtin_ia32_cvtmask2w512", + "avx512.cvtq2mask.128" => "__builtin_ia32_cvtq2mask128", + "avx512.cvtq2mask.256" => "__builtin_ia32_cvtq2mask256", + "avx512.cvtq2mask.512" => "__builtin_ia32_cvtq2mask512", + "avx512.cvtsd2usi" => "__builtin_ia32_cvtsd2usi", + "avx512.cvtsd2usi64" => "__builtin_ia32_cvtsd2usi64", + "avx512.cvtsi2sd32" => "__builtin_ia32_cvtsi2sd32", + "avx512.cvtsi2sd64" => "__builtin_ia32_cvtsi2sd64", + "avx512.cvtsi2ss32" => "__builtin_ia32_cvtsi2ss32", + "avx512.cvtsi2ss64" => "__builtin_ia32_cvtsi2ss64", + "avx512.cvtss2usi" => "__builtin_ia32_cvtss2usi", + "avx512.cvtss2usi64" => "__builtin_ia32_cvtss2usi64", + "avx512.cvttsd2si" => "__builtin_ia32_vcvttsd2si32", + "avx512.cvttsd2si64" => "__builtin_ia32_vcvttsd2si64", + "avx512.cvttsd2usi" => "__builtin_ia32_vcvttsd2usi32", + // [DUPLICATE]: "avx512.cvttsd2usi" => "__builtin_ia32_cvttsd2usi", + "avx512.cvttsd2usi64" => "__builtin_ia32_vcvttsd2usi64", + // [DUPLICATE]: "avx512.cvttsd2usi64" => "__builtin_ia32_cvttsd2usi64", + "avx512.cvttss2si" => "__builtin_ia32_vcvttss2si32", + "avx512.cvttss2si64" => "__builtin_ia32_vcvttss2si64", + "avx512.cvttss2usi" => "__builtin_ia32_vcvttss2usi32", + // [DUPLICATE]: "avx512.cvttss2usi" => "__builtin_ia32_cvttss2usi", + "avx512.cvttss2usi64" => "__builtin_ia32_vcvttss2usi64", + // [DUPLICATE]: "avx512.cvttss2usi64" => "__builtin_ia32_cvttss2usi64", + "avx512.cvtusi2sd" => "__builtin_ia32_cvtusi2sd", + // [DUPLICATE]: "avx512.cvtusi2sd" => "__builtin_ia32_cvtusi2sd32", + 
"avx512.cvtusi2ss" => "__builtin_ia32_cvtusi2ss32", + // [DUPLICATE]: "avx512.cvtusi2ss" => "__builtin_ia32_cvtusi2ss", + "avx512.cvtusi642sd" => "__builtin_ia32_cvtusi2sd64", + // [DUPLICATE]: "avx512.cvtusi642sd" => "__builtin_ia32_cvtusi642sd", + "avx512.cvtusi642ss" => "__builtin_ia32_cvtusi2ss64", + // [DUPLICATE]: "avx512.cvtusi642ss" => "__builtin_ia32_cvtusi642ss", + "avx512.cvtw2mask.128" => "__builtin_ia32_cvtw2mask128", + "avx512.cvtw2mask.256" => "__builtin_ia32_cvtw2mask256", + "avx512.cvtw2mask.512" => "__builtin_ia32_cvtw2mask512", + "avx512.dbpsadbw.128" => "__builtin_ia32_dbpsadbw128", + "avx512.dbpsadbw.256" => "__builtin_ia32_dbpsadbw256", + "avx512.dbpsadbw.512" => "__builtin_ia32_dbpsadbw512", + "avx512.div.pd.512" => "__builtin_ia32_divpd512", + "avx512.div.ps.512" => "__builtin_ia32_divps512", + "avx512.exp2.pd" => "__builtin_ia32_exp2pd_mask", + "avx512.exp2.ps" => "__builtin_ia32_exp2ps_mask", + "avx512.gather.dpd.512" => "__builtin_ia32_gathersiv8df", + "avx512.gather.dpi.512" => "__builtin_ia32_gathersiv16si", + "avx512.gather.dpq.512" => "__builtin_ia32_gathersiv8di", + "avx512.gather.dps.512" => "__builtin_ia32_gathersiv16sf", + "avx512.gather.qpd.512" => "__builtin_ia32_gatherdiv8df", + "avx512.gather.qpi.512" => "__builtin_ia32_gatherdiv16si", + "avx512.gather.qpq.512" => "__builtin_ia32_gatherdiv8di", + "avx512.gather.qps.512" => "__builtin_ia32_gatherdiv16sf", + "avx512.gather3div2.df" => "__builtin_ia32_gather3div2df", + "avx512.gather3div2.di" => "__builtin_ia32_gather3div2di", + "avx512.gather3div4.df" => "__builtin_ia32_gather3div4df", + "avx512.gather3div4.di" => "__builtin_ia32_gather3div4di", + "avx512.gather3div4.sf" => "__builtin_ia32_gather3div4sf", + "avx512.gather3div4.si" => "__builtin_ia32_gather3div4si", + "avx512.gather3div8.sf" => "__builtin_ia32_gather3div8sf", + "avx512.gather3div8.si" => "__builtin_ia32_gather3div8si", + "avx512.gather3siv2.df" => "__builtin_ia32_gather3siv2df", + "avx512.gather3siv2.di" => "__builtin_ia32_gather3siv2di", + "avx512.gather3siv4.df" => "__builtin_ia32_gather3siv4df", + "avx512.gather3siv4.di" => "__builtin_ia32_gather3siv4di", + "avx512.gather3siv4.sf" => "__builtin_ia32_gather3siv4sf", + "avx512.gather3siv4.si" => "__builtin_ia32_gather3siv4si", + "avx512.gather3siv8.sf" => "__builtin_ia32_gather3siv8sf", + "avx512.gather3siv8.si" => "__builtin_ia32_gather3siv8si", + "avx512.gatherpf.dpd.512" => "__builtin_ia32_gatherpfdpd", + "avx512.gatherpf.dps.512" => "__builtin_ia32_gatherpfdps", + "avx512.gatherpf.qpd.512" => "__builtin_ia32_gatherpfqpd", + "avx512.gatherpf.qps.512" => "__builtin_ia32_gatherpfqps", + "avx512.kand.w" => "__builtin_ia32_kandhi", + "avx512.kandn.w" => "__builtin_ia32_kandnhi", + "avx512.knot.w" => "__builtin_ia32_knothi", + "avx512.kor.w" => "__builtin_ia32_korhi", + "avx512.kortestc.w" => "__builtin_ia32_kortestchi", + "avx512.kortestz.w" => "__builtin_ia32_kortestzhi", + "avx512.kunpck.bw" => "__builtin_ia32_kunpckhi", + "avx512.kunpck.dq" => "__builtin_ia32_kunpckdi", + "avx512.kunpck.wd" => "__builtin_ia32_kunpcksi", + "avx512.kxnor.w" => "__builtin_ia32_kxnorhi", + "avx512.kxor.w" => "__builtin_ia32_kxorhi", + "avx512.mask.add.pd.128" => "__builtin_ia32_addpd128_mask", + "avx512.mask.add.pd.256" => "__builtin_ia32_addpd256_mask", + "avx512.mask.add.pd.512" => "__builtin_ia32_addpd512_mask", + "avx512.mask.add.ps.128" => "__builtin_ia32_addps128_mask", + "avx512.mask.add.ps.256" => "__builtin_ia32_addps256_mask", + "avx512.mask.add.ps.512" => "__builtin_ia32_addps512_mask", + // 
[INVALID CONVERSION]: "avx512.mask.add.sd.round" => "__builtin_ia32_addsd_round_mask", + // [INVALID CONVERSION]: "avx512.mask.add.ss.round" => "__builtin_ia32_addss_round_mask", + "avx512.mask.and.pd.128" => "__builtin_ia32_andpd128_mask", + "avx512.mask.and.pd.256" => "__builtin_ia32_andpd256_mask", + "avx512.mask.and.pd.512" => "__builtin_ia32_andpd512_mask", + "avx512.mask.and.ps.128" => "__builtin_ia32_andps128_mask", + "avx512.mask.and.ps.256" => "__builtin_ia32_andps256_mask", + "avx512.mask.and.ps.512" => "__builtin_ia32_andps512_mask", + "avx512.mask.andn.pd.128" => "__builtin_ia32_andnpd128_mask", + "avx512.mask.andn.pd.256" => "__builtin_ia32_andnpd256_mask", + "avx512.mask.andn.pd.512" => "__builtin_ia32_andnpd512_mask", + "avx512.mask.andn.ps.128" => "__builtin_ia32_andnps128_mask", + "avx512.mask.andn.ps.256" => "__builtin_ia32_andnps256_mask", + "avx512.mask.andn.ps.512" => "__builtin_ia32_andnps512_mask", + "avx512.mask.blend.d.512" => "__builtin_ia32_blendmd_512_mask", + "avx512.mask.blend.pd.512" => "__builtin_ia32_blendmpd_512_mask", + "avx512.mask.blend.ps.512" => "__builtin_ia32_blendmps_512_mask", + "avx512.mask.blend.q.512" => "__builtin_ia32_blendmq_512_mask", + "avx512.mask.broadcastf32x2.256" => "__builtin_ia32_broadcastf32x2_256_mask", + "avx512.mask.broadcastf32x2.512" => "__builtin_ia32_broadcastf32x2_512_mask", + "avx512.mask.broadcastf32x4.256" => "__builtin_ia32_broadcastf32x4_256_mask", + "avx512.mask.broadcastf32x4.512" => "__builtin_ia32_broadcastf32x4_512", + "avx512.mask.broadcastf32x8.512" => "__builtin_ia32_broadcastf32x8_512_mask", + "avx512.mask.broadcastf64x2.256" => "__builtin_ia32_broadcastf64x2_256_mask", + "avx512.mask.broadcastf64x2.512" => "__builtin_ia32_broadcastf64x2_512_mask", + "avx512.mask.broadcastf64x4.512" => "__builtin_ia32_broadcastf64x4_512", + "avx512.mask.broadcasti32x2.128" => "__builtin_ia32_broadcasti32x2_128_mask", + "avx512.mask.broadcasti32x2.256" => "__builtin_ia32_broadcasti32x2_256_mask", + "avx512.mask.broadcasti32x2.512" => "__builtin_ia32_broadcasti32x2_512_mask", + "avx512.mask.broadcasti32x4.256" => "__builtin_ia32_broadcasti32x4_256_mask", + "avx512.mask.broadcasti32x4.512" => "__builtin_ia32_broadcasti32x4_512", + "avx512.mask.broadcasti32x8.512" => "__builtin_ia32_broadcasti32x8_512_mask", + "avx512.mask.broadcasti64x2.256" => "__builtin_ia32_broadcasti64x2_256_mask", + "avx512.mask.broadcasti64x2.512" => "__builtin_ia32_broadcasti64x2_512_mask", + "avx512.mask.broadcasti64x4.512" => "__builtin_ia32_broadcasti64x4_512", + "avx512.mask.cmp.pd.128" => "__builtin_ia32_cmppd128_mask", + "avx512.mask.cmp.pd.256" => "__builtin_ia32_cmppd256_mask", + "avx512.mask.cmp.pd.512" => "__builtin_ia32_cmppd512_mask", + "avx512.mask.cmp.ps.128" => "__builtin_ia32_cmpps128_mask", + "avx512.mask.cmp.ps.256" => "__builtin_ia32_cmpps256_mask", + "avx512.mask.cmp.ps.512" => "__builtin_ia32_cmpps512_mask", + "avx512.mask.cmp.sd" => "__builtin_ia32_cmpsd_mask", + "avx512.mask.cmp.ss" => "__builtin_ia32_cmpss_mask", + "avx512.mask.compress.d.128" => "__builtin_ia32_compresssi128_mask", + "avx512.mask.compress.d.256" => "__builtin_ia32_compresssi256_mask", + "avx512.mask.compress.d.512" => "__builtin_ia32_compresssi512_mask", + "avx512.mask.compress.pd.128" => "__builtin_ia32_compressdf128_mask", + "avx512.mask.compress.pd.256" => "__builtin_ia32_compressdf256_mask", + "avx512.mask.compress.pd.512" => "__builtin_ia32_compressdf512_mask", + "avx512.mask.compress.ps.128" => "__builtin_ia32_compresssf128_mask", + 
"avx512.mask.compress.ps.256" => "__builtin_ia32_compresssf256_mask", + "avx512.mask.compress.ps.512" => "__builtin_ia32_compresssf512_mask", + "avx512.mask.compress.q.128" => "__builtin_ia32_compressdi128_mask", + "avx512.mask.compress.q.256" => "__builtin_ia32_compressdi256_mask", + "avx512.mask.compress.q.512" => "__builtin_ia32_compressdi512_mask", + "avx512.mask.compress.store.d.128" => "__builtin_ia32_compressstoresi128_mask", + "avx512.mask.compress.store.d.256" => "__builtin_ia32_compressstoresi256_mask", + "avx512.mask.compress.store.d.512" => "__builtin_ia32_compressstoresi512_mask", + "avx512.mask.compress.store.pd.128" => "__builtin_ia32_compressstoredf128_mask", + "avx512.mask.compress.store.pd.256" => "__builtin_ia32_compressstoredf256_mask", + "avx512.mask.compress.store.pd.512" => "__builtin_ia32_compressstoredf512_mask", + "avx512.mask.compress.store.ps.128" => "__builtin_ia32_compressstoresf128_mask", + "avx512.mask.compress.store.ps.256" => "__builtin_ia32_compressstoresf256_mask", + "avx512.mask.compress.store.ps.512" => "__builtin_ia32_compressstoresf512_mask", + "avx512.mask.compress.store.q.128" => "__builtin_ia32_compressstoredi128_mask", + "avx512.mask.compress.store.q.256" => "__builtin_ia32_compressstoredi256_mask", + "avx512.mask.compress.store.q.512" => "__builtin_ia32_compressstoredi512_mask", + "avx512.mask.conflict.d.128" => "__builtin_ia32_vpconflictsi_128_mask", + "avx512.mask.conflict.d.256" => "__builtin_ia32_vpconflictsi_256_mask", + "avx512.mask.conflict.d.512" => "__builtin_ia32_vpconflictsi_512_mask", + "avx512.mask.conflict.q.128" => "__builtin_ia32_vpconflictdi_128_mask", + "avx512.mask.conflict.q.256" => "__builtin_ia32_vpconflictdi_256_mask", + "avx512.mask.conflict.q.512" => "__builtin_ia32_vpconflictdi_512_mask", + "avx512.mask.cvtdq2pd.128" => "__builtin_ia32_cvtdq2pd128_mask", + "avx512.mask.cvtdq2pd.256" => "__builtin_ia32_cvtdq2pd256_mask", + "avx512.mask.cvtdq2pd.512" => "__builtin_ia32_cvtdq2pd512_mask", + "avx512.mask.cvtdq2ps.128" => "__builtin_ia32_cvtdq2ps128_mask", + "avx512.mask.cvtdq2ps.256" => "__builtin_ia32_cvtdq2ps256_mask", + "avx512.mask.cvtdq2ps.512" => "__builtin_ia32_cvtdq2ps512_mask", + "avx512.mask.cvtpd2dq.128" => "__builtin_ia32_cvtpd2dq128_mask", + "avx512.mask.cvtpd2dq.256" => "__builtin_ia32_cvtpd2dq256_mask", + "avx512.mask.cvtpd2dq.512" => "__builtin_ia32_cvtpd2dq512_mask", + "avx512.mask.cvtpd2ps" => "__builtin_ia32_cvtpd2ps_mask", + "avx512.mask.cvtpd2ps.256" => "__builtin_ia32_cvtpd2ps256_mask", + "avx512.mask.cvtpd2ps.512" => "__builtin_ia32_cvtpd2ps512_mask", + "avx512.mask.cvtpd2qq.128" => "__builtin_ia32_cvtpd2qq128_mask", + "avx512.mask.cvtpd2qq.256" => "__builtin_ia32_cvtpd2qq256_mask", + "avx512.mask.cvtpd2qq.512" => "__builtin_ia32_cvtpd2qq512_mask", + "avx512.mask.cvtpd2udq.128" => "__builtin_ia32_cvtpd2udq128_mask", + "avx512.mask.cvtpd2udq.256" => "__builtin_ia32_cvtpd2udq256_mask", + "avx512.mask.cvtpd2udq.512" => "__builtin_ia32_cvtpd2udq512_mask", + "avx512.mask.cvtpd2uqq.128" => "__builtin_ia32_cvtpd2uqq128_mask", + "avx512.mask.cvtpd2uqq.256" => "__builtin_ia32_cvtpd2uqq256_mask", + "avx512.mask.cvtpd2uqq.512" => "__builtin_ia32_cvtpd2uqq512_mask", + "avx512.mask.cvtps2dq.128" => "__builtin_ia32_cvtps2dq128_mask", + "avx512.mask.cvtps2dq.256" => "__builtin_ia32_cvtps2dq256_mask", + "avx512.mask.cvtps2dq.512" => "__builtin_ia32_cvtps2dq512_mask", + "avx512.mask.cvtps2pd.128" => "__builtin_ia32_cvtps2pd128_mask", + "avx512.mask.cvtps2pd.256" => "__builtin_ia32_cvtps2pd256_mask", + 
"avx512.mask.cvtps2pd.512" => "__builtin_ia32_cvtps2pd512_mask", + "avx512.mask.cvtps2qq.128" => "__builtin_ia32_cvtps2qq128_mask", + "avx512.mask.cvtps2qq.256" => "__builtin_ia32_cvtps2qq256_mask", + "avx512.mask.cvtps2qq.512" => "__builtin_ia32_cvtps2qq512_mask", + "avx512.mask.cvtps2udq.128" => "__builtin_ia32_cvtps2udq128_mask", + "avx512.mask.cvtps2udq.256" => "__builtin_ia32_cvtps2udq256_mask", + "avx512.mask.cvtps2udq.512" => "__builtin_ia32_cvtps2udq512_mask", + "avx512.mask.cvtps2uqq.128" => "__builtin_ia32_cvtps2uqq128_mask", + "avx512.mask.cvtps2uqq.256" => "__builtin_ia32_cvtps2uqq256_mask", + "avx512.mask.cvtps2uqq.512" => "__builtin_ia32_cvtps2uqq512_mask", + "avx512.mask.cvtqq2pd.128" => "__builtin_ia32_cvtqq2pd128_mask", + "avx512.mask.cvtqq2pd.256" => "__builtin_ia32_cvtqq2pd256_mask", + "avx512.mask.cvtqq2pd.512" => "__builtin_ia32_cvtqq2pd512_mask", + "avx512.mask.cvtqq2ps.128" => "__builtin_ia32_cvtqq2ps128_mask", + "avx512.mask.cvtqq2ps.256" => "__builtin_ia32_cvtqq2ps256_mask", + "avx512.mask.cvtqq2ps.512" => "__builtin_ia32_cvtqq2ps512_mask", + // [INVALID CONVERSION]: "avx512.mask.cvtsd2ss.round" => "__builtin_ia32_cvtsd2ss_round_mask", + // [INVALID CONVERSION]: "avx512.mask.cvtss2sd.round" => "__builtin_ia32_cvtss2sd_round_mask", + "avx512.mask.cvttpd2dq.128" => "__builtin_ia32_cvttpd2dq128_mask", + "avx512.mask.cvttpd2dq.256" => "__builtin_ia32_cvttpd2dq256_mask", + "avx512.mask.cvttpd2dq.512" => "__builtin_ia32_cvttpd2dq512_mask", + "avx512.mask.cvttpd2qq.128" => "__builtin_ia32_cvttpd2qq128_mask", + "avx512.mask.cvttpd2qq.256" => "__builtin_ia32_cvttpd2qq256_mask", + "avx512.mask.cvttpd2qq.512" => "__builtin_ia32_cvttpd2qq512_mask", + "avx512.mask.cvttpd2udq.128" => "__builtin_ia32_cvttpd2udq128_mask", + "avx512.mask.cvttpd2udq.256" => "__builtin_ia32_cvttpd2udq256_mask", + "avx512.mask.cvttpd2udq.512" => "__builtin_ia32_cvttpd2udq512_mask", + "avx512.mask.cvttpd2uqq.128" => "__builtin_ia32_cvttpd2uqq128_mask", + "avx512.mask.cvttpd2uqq.256" => "__builtin_ia32_cvttpd2uqq256_mask", + "avx512.mask.cvttpd2uqq.512" => "__builtin_ia32_cvttpd2uqq512_mask", + "avx512.mask.cvttps2dq.128" => "__builtin_ia32_cvttps2dq128_mask", + "avx512.mask.cvttps2dq.256" => "__builtin_ia32_cvttps2dq256_mask", + "avx512.mask.cvttps2dq.512" => "__builtin_ia32_cvttps2dq512_mask", + "avx512.mask.cvttps2qq.128" => "__builtin_ia32_cvttps2qq128_mask", + "avx512.mask.cvttps2qq.256" => "__builtin_ia32_cvttps2qq256_mask", + "avx512.mask.cvttps2qq.512" => "__builtin_ia32_cvttps2qq512_mask", + "avx512.mask.cvttps2udq.128" => "__builtin_ia32_cvttps2udq128_mask", + "avx512.mask.cvttps2udq.256" => "__builtin_ia32_cvttps2udq256_mask", + "avx512.mask.cvttps2udq.512" => "__builtin_ia32_cvttps2udq512_mask", + "avx512.mask.cvttps2uqq.128" => "__builtin_ia32_cvttps2uqq128_mask", + "avx512.mask.cvttps2uqq.256" => "__builtin_ia32_cvttps2uqq256_mask", + "avx512.mask.cvttps2uqq.512" => "__builtin_ia32_cvttps2uqq512_mask", + "avx512.mask.cvtudq2pd.128" => "__builtin_ia32_cvtudq2pd128_mask", + "avx512.mask.cvtudq2pd.256" => "__builtin_ia32_cvtudq2pd256_mask", + "avx512.mask.cvtudq2pd.512" => "__builtin_ia32_cvtudq2pd512_mask", + "avx512.mask.cvtudq2ps.128" => "__builtin_ia32_cvtudq2ps128_mask", + "avx512.mask.cvtudq2ps.256" => "__builtin_ia32_cvtudq2ps256_mask", + "avx512.mask.cvtudq2ps.512" => "__builtin_ia32_cvtudq2ps512_mask", + "avx512.mask.cvtuqq2pd.128" => "__builtin_ia32_cvtuqq2pd128_mask", + "avx512.mask.cvtuqq2pd.256" => "__builtin_ia32_cvtuqq2pd256_mask", + "avx512.mask.cvtuqq2pd.512" => 
"__builtin_ia32_cvtuqq2pd512_mask", + "avx512.mask.cvtuqq2ps.128" => "__builtin_ia32_cvtuqq2ps128_mask", + "avx512.mask.cvtuqq2ps.256" => "__builtin_ia32_cvtuqq2ps256_mask", + "avx512.mask.cvtuqq2ps.512" => "__builtin_ia32_cvtuqq2ps512_mask", + "avx512.mask.dbpsadbw.128" => "__builtin_ia32_dbpsadbw128_mask", + "avx512.mask.dbpsadbw.256" => "__builtin_ia32_dbpsadbw256_mask", + "avx512.mask.dbpsadbw.512" => "__builtin_ia32_dbpsadbw512_mask", + "avx512.mask.div.pd.128" => "__builtin_ia32_divpd_mask", + "avx512.mask.div.pd.256" => "__builtin_ia32_divpd256_mask", + "avx512.mask.div.pd.512" => "__builtin_ia32_divpd512_mask", + "avx512.mask.div.ps.128" => "__builtin_ia32_divps_mask", + "avx512.mask.div.ps.256" => "__builtin_ia32_divps256_mask", + "avx512.mask.div.ps.512" => "__builtin_ia32_divps512_mask", + // [INVALID CONVERSION]: "avx512.mask.div.sd.round" => "__builtin_ia32_divsd_round_mask", + // [INVALID CONVERSION]: "avx512.mask.div.ss.round" => "__builtin_ia32_divss_round_mask", + "avx512.mask.expand.d.128" => "__builtin_ia32_expandsi128_mask", + "avx512.mask.expand.d.256" => "__builtin_ia32_expandsi256_mask", + "avx512.mask.expand.d.512" => "__builtin_ia32_expandsi512_mask", + "avx512.mask.expand.load.d.128" => "__builtin_ia32_expandloadsi128_mask", + "avx512.mask.expand.load.d.256" => "__builtin_ia32_expandloadsi256_mask", + "avx512.mask.expand.load.d.512" => "__builtin_ia32_expandloadsi512_mask", + "avx512.mask.expand.load.pd.128" => "__builtin_ia32_expandloaddf128_mask", + "avx512.mask.expand.load.pd.256" => "__builtin_ia32_expandloaddf256_mask", + "avx512.mask.expand.load.pd.512" => "__builtin_ia32_expandloaddf512_mask", + "avx512.mask.expand.load.ps.128" => "__builtin_ia32_expandloadsf128_mask", + "avx512.mask.expand.load.ps.256" => "__builtin_ia32_expandloadsf256_mask", + "avx512.mask.expand.load.ps.512" => "__builtin_ia32_expandloadsf512_mask", + "avx512.mask.expand.load.q.128" => "__builtin_ia32_expandloaddi128_mask", + "avx512.mask.expand.load.q.256" => "__builtin_ia32_expandloaddi256_mask", + "avx512.mask.expand.load.q.512" => "__builtin_ia32_expandloaddi512_mask", + "avx512.mask.expand.pd.128" => "__builtin_ia32_expanddf128_mask", + "avx512.mask.expand.pd.256" => "__builtin_ia32_expanddf256_mask", + "avx512.mask.expand.pd.512" => "__builtin_ia32_expanddf512_mask", + "avx512.mask.expand.ps.128" => "__builtin_ia32_expandsf128_mask", + "avx512.mask.expand.ps.256" => "__builtin_ia32_expandsf256_mask", + "avx512.mask.expand.ps.512" => "__builtin_ia32_expandsf512_mask", + "avx512.mask.expand.q.128" => "__builtin_ia32_expanddi128_mask", + "avx512.mask.expand.q.256" => "__builtin_ia32_expanddi256_mask", + "avx512.mask.expand.q.512" => "__builtin_ia32_expanddi512_mask", + "avx512.mask.fixupimm.pd.128" => "__builtin_ia32_fixupimmpd128_mask", + "avx512.mask.fixupimm.pd.256" => "__builtin_ia32_fixupimmpd256_mask", + "avx512.mask.fixupimm.pd.512" => "__builtin_ia32_fixupimmpd512_mask", + "avx512.mask.fixupimm.ps.128" => "__builtin_ia32_fixupimmps128_mask", + "avx512.mask.fixupimm.ps.256" => "__builtin_ia32_fixupimmps256_mask", + "avx512.mask.fixupimm.ps.512" => "__builtin_ia32_fixupimmps512_mask", + "avx512.mask.fixupimm.sd" => "__builtin_ia32_fixupimmsd_mask", + "avx512.mask.fixupimm.ss" => "__builtin_ia32_fixupimmss_mask", + "avx512.mask.fpclass.pd.128" => "__builtin_ia32_fpclasspd128_mask", + "avx512.mask.fpclass.pd.256" => "__builtin_ia32_fpclasspd256_mask", + "avx512.mask.fpclass.pd.512" => "__builtin_ia32_fpclasspd512_mask", + "avx512.mask.fpclass.ps.128" => 
"__builtin_ia32_fpclassps128_mask", + "avx512.mask.fpclass.ps.256" => "__builtin_ia32_fpclassps256_mask", + "avx512.mask.fpclass.ps.512" => "__builtin_ia32_fpclassps512_mask", + "avx512.mask.fpclass.sd" => "__builtin_ia32_fpclasssd_mask", + "avx512.mask.fpclass.ss" => "__builtin_ia32_fpclassss_mask", + "avx512.mask.getexp.pd.128" => "__builtin_ia32_getexppd128_mask", + "avx512.mask.getexp.pd.256" => "__builtin_ia32_getexppd256_mask", + "avx512.mask.getexp.pd.512" => "__builtin_ia32_getexppd512_mask", + "avx512.mask.getexp.ps.128" => "__builtin_ia32_getexpps128_mask", + "avx512.mask.getexp.ps.256" => "__builtin_ia32_getexpps256_mask", + "avx512.mask.getexp.ps.512" => "__builtin_ia32_getexpps512_mask", + // [INVALID CONVERSION]: "avx512.mask.getexp.sd" => "__builtin_ia32_getexpsd128_round_mask", + // [INVALID CONVERSION]: "avx512.mask.getexp.ss" => "__builtin_ia32_getexpss128_round_mask", + "avx512.mask.getmant.pd.128" => "__builtin_ia32_getmantpd128_mask", + "avx512.mask.getmant.pd.256" => "__builtin_ia32_getmantpd256_mask", + "avx512.mask.getmant.pd.512" => "__builtin_ia32_getmantpd512_mask", + "avx512.mask.getmant.ps.128" => "__builtin_ia32_getmantps128_mask", + "avx512.mask.getmant.ps.256" => "__builtin_ia32_getmantps256_mask", + "avx512.mask.getmant.ps.512" => "__builtin_ia32_getmantps512_mask", + // [INVALID CONVERSION]: "avx512.mask.getmant.sd" => "__builtin_ia32_getmantsd_round_mask", + // [INVALID CONVERSION]: "avx512.mask.getmant.ss" => "__builtin_ia32_getmantss_round_mask", + "avx512.mask.insertf32x4.256" => "__builtin_ia32_insertf32x4_256_mask", + "avx512.mask.insertf32x4.512" => "__builtin_ia32_insertf32x4_mask", + "avx512.mask.insertf32x8.512" => "__builtin_ia32_insertf32x8_mask", + "avx512.mask.insertf64x2.256" => "__builtin_ia32_insertf64x2_256_mask", + "avx512.mask.insertf64x2.512" => "__builtin_ia32_insertf64x2_512_mask", + "avx512.mask.insertf64x4.512" => "__builtin_ia32_insertf64x4_mask", + "avx512.mask.inserti32x4.256" => "__builtin_ia32_inserti32x4_256_mask", + "avx512.mask.inserti32x4.512" => "__builtin_ia32_inserti32x4_mask", + "avx512.mask.inserti32x8.512" => "__builtin_ia32_inserti32x8_mask", + "avx512.mask.inserti64x2.256" => "__builtin_ia32_inserti64x2_256_mask", + "avx512.mask.inserti64x2.512" => "__builtin_ia32_inserti64x2_512_mask", + "avx512.mask.inserti64x4.512" => "__builtin_ia32_inserti64x4_mask", + "avx512.mask.loadu.d.512" => "__builtin_ia32_loaddqusi512_mask", + "avx512.mask.loadu.pd.512" => "__builtin_ia32_loadupd512_mask", + "avx512.mask.loadu.ps.512" => "__builtin_ia32_loadups512_mask", + "avx512.mask.loadu.q.512" => "__builtin_ia32_loaddqudi512_mask", + "avx512.mask.lzcnt.d.512" => "__builtin_ia32_vplzcntd_512_mask", + "avx512.mask.lzcnt.q.512" => "__builtin_ia32_vplzcntq_512_mask", + "avx512.mask.max.pd.128" => "__builtin_ia32_maxpd_mask", + "avx512.mask.max.pd.256" => "__builtin_ia32_maxpd256_mask", + "avx512.mask.max.pd.512" => "__builtin_ia32_maxpd512_mask", + "avx512.mask.max.ps.128" => "__builtin_ia32_maxps_mask", + "avx512.mask.max.ps.256" => "__builtin_ia32_maxps256_mask", + "avx512.mask.max.ps.512" => "__builtin_ia32_maxps512_mask", + // [INVALID CONVERSION]: "avx512.mask.max.sd.round" => "__builtin_ia32_maxsd_round_mask", + // [INVALID CONVERSION]: "avx512.mask.max.ss.round" => "__builtin_ia32_maxss_round_mask", + "avx512.mask.min.pd.128" => "__builtin_ia32_minpd_mask", + "avx512.mask.min.pd.256" => "__builtin_ia32_minpd256_mask", + "avx512.mask.min.pd.512" => "__builtin_ia32_minpd512_mask", + "avx512.mask.min.ps.128" => 
"__builtin_ia32_minps_mask", + "avx512.mask.min.ps.256" => "__builtin_ia32_minps256_mask", + "avx512.mask.min.ps.512" => "__builtin_ia32_minps512_mask", + // [INVALID CONVERSION]: "avx512.mask.min.sd.round" => "__builtin_ia32_minsd_round_mask", + // [INVALID CONVERSION]: "avx512.mask.min.ss.round" => "__builtin_ia32_minss_round_mask", + "avx512.mask.move.sd" => "__builtin_ia32_movsd_mask", + "avx512.mask.move.ss" => "__builtin_ia32_movss_mask", + "avx512.mask.mul.pd.128" => "__builtin_ia32_mulpd_mask", + "avx512.mask.mul.pd.256" => "__builtin_ia32_mulpd256_mask", + "avx512.mask.mul.pd.512" => "__builtin_ia32_mulpd512_mask", + "avx512.mask.mul.ps.128" => "__builtin_ia32_mulps_mask", + "avx512.mask.mul.ps.256" => "__builtin_ia32_mulps256_mask", + "avx512.mask.mul.ps.512" => "__builtin_ia32_mulps512_mask", + // [INVALID CONVERSION]: "avx512.mask.mul.sd.round" => "__builtin_ia32_mulsd_round_mask", + // [INVALID CONVERSION]: "avx512.mask.mul.ss.round" => "__builtin_ia32_mulss_round_mask", + "avx512.mask.or.pd.128" => "__builtin_ia32_orpd128_mask", + "avx512.mask.or.pd.256" => "__builtin_ia32_orpd256_mask", + "avx512.mask.or.pd.512" => "__builtin_ia32_orpd512_mask", + "avx512.mask.or.ps.128" => "__builtin_ia32_orps128_mask", + "avx512.mask.or.ps.256" => "__builtin_ia32_orps256_mask", + "avx512.mask.or.ps.512" => "__builtin_ia32_orps512_mask", + "avx512.mask.pabs.b.128" => "__builtin_ia32_pabsb128_mask", + "avx512.mask.pabs.b.256" => "__builtin_ia32_pabsb256_mask", + "avx512.mask.pabs.b.512" => "__builtin_ia32_pabsb512_mask", + "avx512.mask.pabs.d.128" => "__builtin_ia32_pabsd128_mask", + "avx512.mask.pabs.d.256" => "__builtin_ia32_pabsd256_mask", + "avx512.mask.pabs.d.512" => "__builtin_ia32_pabsd512_mask", + "avx512.mask.pabs.q.128" => "__builtin_ia32_pabsq128_mask", + "avx512.mask.pabs.q.256" => "__builtin_ia32_pabsq256_mask", + "avx512.mask.pabs.q.512" => "__builtin_ia32_pabsq512_mask", + "avx512.mask.pabs.w.128" => "__builtin_ia32_pabsw128_mask", + "avx512.mask.pabs.w.256" => "__builtin_ia32_pabsw256_mask", + "avx512.mask.pabs.w.512" => "__builtin_ia32_pabsw512_mask", + "avx512.mask.packssdw.128" => "__builtin_ia32_packssdw128_mask", + "avx512.mask.packssdw.256" => "__builtin_ia32_packssdw256_mask", + "avx512.mask.packssdw.512" => "__builtin_ia32_packssdw512_mask", + "avx512.mask.packsswb.128" => "__builtin_ia32_packsswb128_mask", + "avx512.mask.packsswb.256" => "__builtin_ia32_packsswb256_mask", + "avx512.mask.packsswb.512" => "__builtin_ia32_packsswb512_mask", + "avx512.mask.packusdw.128" => "__builtin_ia32_packusdw128_mask", + "avx512.mask.packusdw.256" => "__builtin_ia32_packusdw256_mask", + "avx512.mask.packusdw.512" => "__builtin_ia32_packusdw512_mask", + "avx512.mask.packuswb.128" => "__builtin_ia32_packuswb128_mask", + "avx512.mask.packuswb.256" => "__builtin_ia32_packuswb256_mask", + "avx512.mask.packuswb.512" => "__builtin_ia32_packuswb512_mask", + "avx512.mask.padd.b.128" => "__builtin_ia32_paddb128_mask", + "avx512.mask.padd.b.256" => "__builtin_ia32_paddb256_mask", + "avx512.mask.padd.b.512" => "__builtin_ia32_paddb512_mask", + "avx512.mask.padd.d.128" => "__builtin_ia32_paddd128_mask", + "avx512.mask.padd.d.256" => "__builtin_ia32_paddd256_mask", + "avx512.mask.padd.d.512" => "__builtin_ia32_paddd512_mask", + "avx512.mask.padd.q.128" => "__builtin_ia32_paddq128_mask", + "avx512.mask.padd.q.256" => "__builtin_ia32_paddq256_mask", + "avx512.mask.padd.q.512" => "__builtin_ia32_paddq512_mask", + "avx512.mask.padd.w.128" => "__builtin_ia32_paddw128_mask", + "avx512.mask.padd.w.256" 
=> "__builtin_ia32_paddw256_mask", + "avx512.mask.padd.w.512" => "__builtin_ia32_paddw512_mask", + "avx512.mask.padds.b.128" => "__builtin_ia32_paddsb128_mask", + "avx512.mask.padds.b.256" => "__builtin_ia32_paddsb256_mask", + "avx512.mask.padds.b.512" => "__builtin_ia32_paddsb512_mask", + "avx512.mask.padds.w.128" => "__builtin_ia32_paddsw128_mask", + "avx512.mask.padds.w.256" => "__builtin_ia32_paddsw256_mask", + "avx512.mask.padds.w.512" => "__builtin_ia32_paddsw512_mask", + "avx512.mask.paddus.b.128" => "__builtin_ia32_paddusb128_mask", + "avx512.mask.paddus.b.256" => "__builtin_ia32_paddusb256_mask", + "avx512.mask.paddus.b.512" => "__builtin_ia32_paddusb512_mask", + "avx512.mask.paddus.w.128" => "__builtin_ia32_paddusw128_mask", + "avx512.mask.paddus.w.256" => "__builtin_ia32_paddusw256_mask", + "avx512.mask.paddus.w.512" => "__builtin_ia32_paddusw512_mask", + "avx512.mask.pand.d.512" => "__builtin_ia32_pandd512_mask", + "avx512.mask.pand.q.512" => "__builtin_ia32_pandq512_mask", + "avx512.mask.pavg.b.128" => "__builtin_ia32_pavgb128_mask", + "avx512.mask.pavg.b.256" => "__builtin_ia32_pavgb256_mask", + "avx512.mask.pavg.b.512" => "__builtin_ia32_pavgb512_mask", + "avx512.mask.pavg.w.128" => "__builtin_ia32_pavgw128_mask", + "avx512.mask.pavg.w.256" => "__builtin_ia32_pavgw256_mask", + "avx512.mask.pavg.w.512" => "__builtin_ia32_pavgw512_mask", + "avx512.mask.pbroadcast.b.gpr.128" => "__builtin_ia32_pbroadcastb128_gpr_mask", + "avx512.mask.pbroadcast.b.gpr.256" => "__builtin_ia32_pbroadcastb256_gpr_mask", + "avx512.mask.pbroadcast.b.gpr.512" => "__builtin_ia32_pbroadcastb512_gpr_mask", + "avx512.mask.pbroadcast.d.gpr.128" => "__builtin_ia32_pbroadcastd128_gpr_mask", + "avx512.mask.pbroadcast.d.gpr.256" => "__builtin_ia32_pbroadcastd256_gpr_mask", + "avx512.mask.pbroadcast.d.gpr.512" => "__builtin_ia32_pbroadcastd512_gpr_mask", + "avx512.mask.pbroadcast.q.gpr.128" => "__builtin_ia32_pbroadcastq128_gpr_mask", + "avx512.mask.pbroadcast.q.gpr.256" => "__builtin_ia32_pbroadcastq256_gpr_mask", + "avx512.mask.pbroadcast.q.gpr.512" => "__builtin_ia32_pbroadcastq512_gpr_mask", + "avx512.mask.pbroadcast.q.mem.512" => "__builtin_ia32_pbroadcastq512_mem_mask", + "avx512.mask.pbroadcast.w.gpr.128" => "__builtin_ia32_pbroadcastw128_gpr_mask", + "avx512.mask.pbroadcast.w.gpr.256" => "__builtin_ia32_pbroadcastw256_gpr_mask", + "avx512.mask.pbroadcast.w.gpr.512" => "__builtin_ia32_pbroadcastw512_gpr_mask", + "avx512.mask.pcmpeq.b.128" => "__builtin_ia32_pcmpeqb128_mask", + "avx512.mask.pcmpeq.b.256" => "__builtin_ia32_pcmpeqb256_mask", + "avx512.mask.pcmpeq.b.512" => "__builtin_ia32_pcmpeqb512_mask", + "avx512.mask.pcmpeq.d.128" => "__builtin_ia32_pcmpeqd128_mask", + "avx512.mask.pcmpeq.d.256" => "__builtin_ia32_pcmpeqd256_mask", + "avx512.mask.pcmpeq.d.512" => "__builtin_ia32_pcmpeqd512_mask", + "avx512.mask.pcmpeq.q.128" => "__builtin_ia32_pcmpeqq128_mask", + "avx512.mask.pcmpeq.q.256" => "__builtin_ia32_pcmpeqq256_mask", + "avx512.mask.pcmpeq.q.512" => "__builtin_ia32_pcmpeqq512_mask", + "avx512.mask.pcmpeq.w.128" => "__builtin_ia32_pcmpeqw128_mask", + "avx512.mask.pcmpeq.w.256" => "__builtin_ia32_pcmpeqw256_mask", + "avx512.mask.pcmpeq.w.512" => "__builtin_ia32_pcmpeqw512_mask", + "avx512.mask.pcmpgt.b.128" => "__builtin_ia32_pcmpgtb128_mask", + "avx512.mask.pcmpgt.b.256" => "__builtin_ia32_pcmpgtb256_mask", + "avx512.mask.pcmpgt.b.512" => "__builtin_ia32_pcmpgtb512_mask", + "avx512.mask.pcmpgt.d.128" => "__builtin_ia32_pcmpgtd128_mask", + "avx512.mask.pcmpgt.d.256" => 
"__builtin_ia32_pcmpgtd256_mask", + "avx512.mask.pcmpgt.d.512" => "__builtin_ia32_pcmpgtd512_mask", + "avx512.mask.pcmpgt.q.128" => "__builtin_ia32_pcmpgtq128_mask", + "avx512.mask.pcmpgt.q.256" => "__builtin_ia32_pcmpgtq256_mask", + "avx512.mask.pcmpgt.q.512" => "__builtin_ia32_pcmpgtq512_mask", + "avx512.mask.pcmpgt.w.128" => "__builtin_ia32_pcmpgtw128_mask", + "avx512.mask.pcmpgt.w.256" => "__builtin_ia32_pcmpgtw256_mask", + "avx512.mask.pcmpgt.w.512" => "__builtin_ia32_pcmpgtw512_mask", + "avx512.mask.permvar.df.256" => "__builtin_ia32_permvardf256_mask", + "avx512.mask.permvar.df.512" => "__builtin_ia32_permvardf512_mask", + "avx512.mask.permvar.di.256" => "__builtin_ia32_permvardi256_mask", + "avx512.mask.permvar.di.512" => "__builtin_ia32_permvardi512_mask", + "avx512.mask.permvar.hi.128" => "__builtin_ia32_permvarhi128_mask", + "avx512.mask.permvar.hi.256" => "__builtin_ia32_permvarhi256_mask", + "avx512.mask.permvar.hi.512" => "__builtin_ia32_permvarhi512_mask", + "avx512.mask.permvar.qi.128" => "__builtin_ia32_permvarqi128_mask", + "avx512.mask.permvar.qi.256" => "__builtin_ia32_permvarqi256_mask", + "avx512.mask.permvar.qi.512" => "__builtin_ia32_permvarqi512_mask", + "avx512.mask.permvar.sf.256" => "__builtin_ia32_permvarsf256_mask", + "avx512.mask.permvar.sf.512" => "__builtin_ia32_permvarsf512_mask", + "avx512.mask.permvar.si.256" => "__builtin_ia32_permvarsi256_mask", + "avx512.mask.permvar.si.512" => "__builtin_ia32_permvarsi512_mask", + "avx512.mask.pmaddubs.w.128" => "__builtin_ia32_pmaddubsw128_mask", + "avx512.mask.pmaddubs.w.256" => "__builtin_ia32_pmaddubsw256_mask", + "avx512.mask.pmaddubs.w.512" => "__builtin_ia32_pmaddubsw512_mask", + "avx512.mask.pmaddw.d.128" => "__builtin_ia32_pmaddwd128_mask", + "avx512.mask.pmaddw.d.256" => "__builtin_ia32_pmaddwd256_mask", + "avx512.mask.pmaddw.d.512" => "__builtin_ia32_pmaddwd512_mask", + "avx512.mask.pmaxs.b.128" => "__builtin_ia32_pmaxsb128_mask", + "avx512.mask.pmaxs.b.256" => "__builtin_ia32_pmaxsb256_mask", + "avx512.mask.pmaxs.b.512" => "__builtin_ia32_pmaxsb512_mask", + "avx512.mask.pmaxs.d.128" => "__builtin_ia32_pmaxsd128_mask", + "avx512.mask.pmaxs.d.256" => "__builtin_ia32_pmaxsd256_mask", + "avx512.mask.pmaxs.d.512" => "__builtin_ia32_pmaxsd512_mask", + "avx512.mask.pmaxs.q.128" => "__builtin_ia32_pmaxsq128_mask", + "avx512.mask.pmaxs.q.256" => "__builtin_ia32_pmaxsq256_mask", + "avx512.mask.pmaxs.q.512" => "__builtin_ia32_pmaxsq512_mask", + "avx512.mask.pmaxs.w.128" => "__builtin_ia32_pmaxsw128_mask", + "avx512.mask.pmaxs.w.256" => "__builtin_ia32_pmaxsw256_mask", + "avx512.mask.pmaxs.w.512" => "__builtin_ia32_pmaxsw512_mask", + "avx512.mask.pmaxu.b.128" => "__builtin_ia32_pmaxub128_mask", + "avx512.mask.pmaxu.b.256" => "__builtin_ia32_pmaxub256_mask", + "avx512.mask.pmaxu.b.512" => "__builtin_ia32_pmaxub512_mask", + "avx512.mask.pmaxu.d.128" => "__builtin_ia32_pmaxud128_mask", + "avx512.mask.pmaxu.d.256" => "__builtin_ia32_pmaxud256_mask", + "avx512.mask.pmaxu.d.512" => "__builtin_ia32_pmaxud512_mask", + "avx512.mask.pmaxu.q.128" => "__builtin_ia32_pmaxuq128_mask", + "avx512.mask.pmaxu.q.256" => "__builtin_ia32_pmaxuq256_mask", + "avx512.mask.pmaxu.q.512" => "__builtin_ia32_pmaxuq512_mask", + "avx512.mask.pmaxu.w.128" => "__builtin_ia32_pmaxuw128_mask", + "avx512.mask.pmaxu.w.256" => "__builtin_ia32_pmaxuw256_mask", + "avx512.mask.pmaxu.w.512" => "__builtin_ia32_pmaxuw512_mask", + "avx512.mask.pmins.b.128" => "__builtin_ia32_pminsb128_mask", + "avx512.mask.pmins.b.256" => "__builtin_ia32_pminsb256_mask", + 
"avx512.mask.pmins.b.512" => "__builtin_ia32_pminsb512_mask", + "avx512.mask.pmins.d.128" => "__builtin_ia32_pminsd128_mask", + "avx512.mask.pmins.d.256" => "__builtin_ia32_pminsd256_mask", + "avx512.mask.pmins.d.512" => "__builtin_ia32_pminsd512_mask", + "avx512.mask.pmins.q.128" => "__builtin_ia32_pminsq128_mask", + "avx512.mask.pmins.q.256" => "__builtin_ia32_pminsq256_mask", + "avx512.mask.pmins.q.512" => "__builtin_ia32_pminsq512_mask", + "avx512.mask.pmins.w.128" => "__builtin_ia32_pminsw128_mask", + "avx512.mask.pmins.w.256" => "__builtin_ia32_pminsw256_mask", + "avx512.mask.pmins.w.512" => "__builtin_ia32_pminsw512_mask", + "avx512.mask.pminu.b.128" => "__builtin_ia32_pminub128_mask", + "avx512.mask.pminu.b.256" => "__builtin_ia32_pminub256_mask", + "avx512.mask.pminu.b.512" => "__builtin_ia32_pminub512_mask", + "avx512.mask.pminu.d.128" => "__builtin_ia32_pminud128_mask", + "avx512.mask.pminu.d.256" => "__builtin_ia32_pminud256_mask", + "avx512.mask.pminu.d.512" => "__builtin_ia32_pminud512_mask", + "avx512.mask.pminu.q.128" => "__builtin_ia32_pminuq128_mask", + "avx512.mask.pminu.q.256" => "__builtin_ia32_pminuq256_mask", + "avx512.mask.pminu.q.512" => "__builtin_ia32_pminuq512_mask", + "avx512.mask.pminu.w.128" => "__builtin_ia32_pminuw128_mask", + "avx512.mask.pminu.w.256" => "__builtin_ia32_pminuw256_mask", + "avx512.mask.pminu.w.512" => "__builtin_ia32_pminuw512_mask", + "avx512.mask.pmov.db.128" => "__builtin_ia32_pmovdb128_mask", + "avx512.mask.pmov.db.256" => "__builtin_ia32_pmovdb256_mask", + "avx512.mask.pmov.db.512" => "__builtin_ia32_pmovdb512_mask", + "avx512.mask.pmov.db.mem.128" => "__builtin_ia32_pmovdb128mem_mask", + "avx512.mask.pmov.db.mem.256" => "__builtin_ia32_pmovdb256mem_mask", + "avx512.mask.pmov.db.mem.512" => "__builtin_ia32_pmovdb512mem_mask", + "avx512.mask.pmov.dw.128" => "__builtin_ia32_pmovdw128_mask", + "avx512.mask.pmov.dw.256" => "__builtin_ia32_pmovdw256_mask", + "avx512.mask.pmov.dw.512" => "__builtin_ia32_pmovdw512_mask", + "avx512.mask.pmov.dw.mem.128" => "__builtin_ia32_pmovdw128mem_mask", + "avx512.mask.pmov.dw.mem.256" => "__builtin_ia32_pmovdw256mem_mask", + "avx512.mask.pmov.dw.mem.512" => "__builtin_ia32_pmovdw512mem_mask", + "avx512.mask.pmov.qb.128" => "__builtin_ia32_pmovqb128_mask", + "avx512.mask.pmov.qb.256" => "__builtin_ia32_pmovqb256_mask", + "avx512.mask.pmov.qb.512" => "__builtin_ia32_pmovqb512_mask", + "avx512.mask.pmov.qb.mem.128" => "__builtin_ia32_pmovqb128mem_mask", + "avx512.mask.pmov.qb.mem.256" => "__builtin_ia32_pmovqb256mem_mask", + "avx512.mask.pmov.qb.mem.512" => "__builtin_ia32_pmovqb512mem_mask", + "avx512.mask.pmov.qd.128" => "__builtin_ia32_pmovqd128_mask", + "avx512.mask.pmov.qd.256" => "__builtin_ia32_pmovqd256_mask", + "avx512.mask.pmov.qd.512" => "__builtin_ia32_pmovqd512_mask", + "avx512.mask.pmov.qd.mem.128" => "__builtin_ia32_pmovqd128mem_mask", + "avx512.mask.pmov.qd.mem.256" => "__builtin_ia32_pmovqd256mem_mask", + "avx512.mask.pmov.qd.mem.512" => "__builtin_ia32_pmovqd512mem_mask", + "avx512.mask.pmov.qw.128" => "__builtin_ia32_pmovqw128_mask", + "avx512.mask.pmov.qw.256" => "__builtin_ia32_pmovqw256_mask", + "avx512.mask.pmov.qw.512" => "__builtin_ia32_pmovqw512_mask", + "avx512.mask.pmov.qw.mem.128" => "__builtin_ia32_pmovqw128mem_mask", + "avx512.mask.pmov.qw.mem.256" => "__builtin_ia32_pmovqw256mem_mask", + "avx512.mask.pmov.qw.mem.512" => "__builtin_ia32_pmovqw512mem_mask", + "avx512.mask.pmov.wb.128" => "__builtin_ia32_pmovwb128_mask", + "avx512.mask.pmov.wb.256" => 
"__builtin_ia32_pmovwb256_mask", + "avx512.mask.pmov.wb.512" => "__builtin_ia32_pmovwb512_mask", + "avx512.mask.pmov.wb.mem.128" => "__builtin_ia32_pmovwb128mem_mask", + "avx512.mask.pmov.wb.mem.256" => "__builtin_ia32_pmovwb256mem_mask", + "avx512.mask.pmov.wb.mem.512" => "__builtin_ia32_pmovwb512mem_mask", + "avx512.mask.pmovs.db.128" => "__builtin_ia32_pmovsdb128_mask", + "avx512.mask.pmovs.db.256" => "__builtin_ia32_pmovsdb256_mask", + "avx512.mask.pmovs.db.512" => "__builtin_ia32_pmovsdb512_mask", + "avx512.mask.pmovs.db.mem.128" => "__builtin_ia32_pmovsdb128mem_mask", + "avx512.mask.pmovs.db.mem.256" => "__builtin_ia32_pmovsdb256mem_mask", + "avx512.mask.pmovs.db.mem.512" => "__builtin_ia32_pmovsdb512mem_mask", + "avx512.mask.pmovs.dw.128" => "__builtin_ia32_pmovsdw128_mask", + "avx512.mask.pmovs.dw.256" => "__builtin_ia32_pmovsdw256_mask", + "avx512.mask.pmovs.dw.512" => "__builtin_ia32_pmovsdw512_mask", + "avx512.mask.pmovs.dw.mem.128" => "__builtin_ia32_pmovsdw128mem_mask", + "avx512.mask.pmovs.dw.mem.256" => "__builtin_ia32_pmovsdw256mem_mask", + "avx512.mask.pmovs.dw.mem.512" => "__builtin_ia32_pmovsdw512mem_mask", + "avx512.mask.pmovs.qb.128" => "__builtin_ia32_pmovsqb128_mask", + "avx512.mask.pmovs.qb.256" => "__builtin_ia32_pmovsqb256_mask", + "avx512.mask.pmovs.qb.512" => "__builtin_ia32_pmovsqb512_mask", + "avx512.mask.pmovs.qb.mem.128" => "__builtin_ia32_pmovsqb128mem_mask", + "avx512.mask.pmovs.qb.mem.256" => "__builtin_ia32_pmovsqb256mem_mask", + "avx512.mask.pmovs.qb.mem.512" => "__builtin_ia32_pmovsqb512mem_mask", + "avx512.mask.pmovs.qd.128" => "__builtin_ia32_pmovsqd128_mask", + "avx512.mask.pmovs.qd.256" => "__builtin_ia32_pmovsqd256_mask", + "avx512.mask.pmovs.qd.512" => "__builtin_ia32_pmovsqd512_mask", + "avx512.mask.pmovs.qd.mem.128" => "__builtin_ia32_pmovsqd128mem_mask", + "avx512.mask.pmovs.qd.mem.256" => "__builtin_ia32_pmovsqd256mem_mask", + "avx512.mask.pmovs.qd.mem.512" => "__builtin_ia32_pmovsqd512mem_mask", + "avx512.mask.pmovs.qw.128" => "__builtin_ia32_pmovsqw128_mask", + "avx512.mask.pmovs.qw.256" => "__builtin_ia32_pmovsqw256_mask", + "avx512.mask.pmovs.qw.512" => "__builtin_ia32_pmovsqw512_mask", + "avx512.mask.pmovs.qw.mem.128" => "__builtin_ia32_pmovsqw128mem_mask", + "avx512.mask.pmovs.qw.mem.256" => "__builtin_ia32_pmovsqw256mem_mask", + "avx512.mask.pmovs.qw.mem.512" => "__builtin_ia32_pmovsqw512mem_mask", + "avx512.mask.pmovs.wb.128" => "__builtin_ia32_pmovswb128_mask", + "avx512.mask.pmovs.wb.256" => "__builtin_ia32_pmovswb256_mask", + "avx512.mask.pmovs.wb.512" => "__builtin_ia32_pmovswb512_mask", + "avx512.mask.pmovs.wb.mem.128" => "__builtin_ia32_pmovswb128mem_mask", + "avx512.mask.pmovs.wb.mem.256" => "__builtin_ia32_pmovswb256mem_mask", + "avx512.mask.pmovs.wb.mem.512" => "__builtin_ia32_pmovswb512mem_mask", + "avx512.mask.pmovsxb.d.128" => "__builtin_ia32_pmovsxbd128_mask", + "avx512.mask.pmovsxb.d.256" => "__builtin_ia32_pmovsxbd256_mask", + "avx512.mask.pmovsxb.d.512" => "__builtin_ia32_pmovsxbd512_mask", + "avx512.mask.pmovsxb.q.128" => "__builtin_ia32_pmovsxbq128_mask", + "avx512.mask.pmovsxb.q.256" => "__builtin_ia32_pmovsxbq256_mask", + "avx512.mask.pmovsxb.q.512" => "__builtin_ia32_pmovsxbq512_mask", + "avx512.mask.pmovsxb.w.128" => "__builtin_ia32_pmovsxbw128_mask", + "avx512.mask.pmovsxb.w.256" => "__builtin_ia32_pmovsxbw256_mask", + "avx512.mask.pmovsxb.w.512" => "__builtin_ia32_pmovsxbw512_mask", + "avx512.mask.pmovsxd.q.128" => "__builtin_ia32_pmovsxdq128_mask", + "avx512.mask.pmovsxd.q.256" => 
"__builtin_ia32_pmovsxdq256_mask", + "avx512.mask.pmovsxd.q.512" => "__builtin_ia32_pmovsxdq512_mask", + "avx512.mask.pmovsxw.d.128" => "__builtin_ia32_pmovsxwd128_mask", + "avx512.mask.pmovsxw.d.256" => "__builtin_ia32_pmovsxwd256_mask", + "avx512.mask.pmovsxw.d.512" => "__builtin_ia32_pmovsxwd512_mask", + "avx512.mask.pmovsxw.q.128" => "__builtin_ia32_pmovsxwq128_mask", + "avx512.mask.pmovsxw.q.256" => "__builtin_ia32_pmovsxwq256_mask", + "avx512.mask.pmovsxw.q.512" => "__builtin_ia32_pmovsxwq512_mask", + "avx512.mask.pmovus.db.128" => "__builtin_ia32_pmovusdb128_mask", + "avx512.mask.pmovus.db.256" => "__builtin_ia32_pmovusdb256_mask", + "avx512.mask.pmovus.db.512" => "__builtin_ia32_pmovusdb512_mask", + "avx512.mask.pmovus.db.mem.128" => "__builtin_ia32_pmovusdb128mem_mask", + "avx512.mask.pmovus.db.mem.256" => "__builtin_ia32_pmovusdb256mem_mask", + "avx512.mask.pmovus.db.mem.512" => "__builtin_ia32_pmovusdb512mem_mask", + "avx512.mask.pmovus.dw.128" => "__builtin_ia32_pmovusdw128_mask", + "avx512.mask.pmovus.dw.256" => "__builtin_ia32_pmovusdw256_mask", + "avx512.mask.pmovus.dw.512" => "__builtin_ia32_pmovusdw512_mask", + "avx512.mask.pmovus.dw.mem.128" => "__builtin_ia32_pmovusdw128mem_mask", + "avx512.mask.pmovus.dw.mem.256" => "__builtin_ia32_pmovusdw256mem_mask", + "avx512.mask.pmovus.dw.mem.512" => "__builtin_ia32_pmovusdw512mem_mask", + "avx512.mask.pmovus.qb.128" => "__builtin_ia32_pmovusqb128_mask", + "avx512.mask.pmovus.qb.256" => "__builtin_ia32_pmovusqb256_mask", + "avx512.mask.pmovus.qb.512" => "__builtin_ia32_pmovusqb512_mask", + "avx512.mask.pmovus.qb.mem.128" => "__builtin_ia32_pmovusqb128mem_mask", + "avx512.mask.pmovus.qb.mem.256" => "__builtin_ia32_pmovusqb256mem_mask", + "avx512.mask.pmovus.qb.mem.512" => "__builtin_ia32_pmovusqb512mem_mask", + "avx512.mask.pmovus.qd.128" => "__builtin_ia32_pmovusqd128_mask", + "avx512.mask.pmovus.qd.256" => "__builtin_ia32_pmovusqd256_mask", + "avx512.mask.pmovus.qd.512" => "__builtin_ia32_pmovusqd512_mask", + "avx512.mask.pmovus.qd.mem.128" => "__builtin_ia32_pmovusqd128mem_mask", + "avx512.mask.pmovus.qd.mem.256" => "__builtin_ia32_pmovusqd256mem_mask", + "avx512.mask.pmovus.qd.mem.512" => "__builtin_ia32_pmovusqd512mem_mask", + "avx512.mask.pmovus.qw.128" => "__builtin_ia32_pmovusqw128_mask", + "avx512.mask.pmovus.qw.256" => "__builtin_ia32_pmovusqw256_mask", + "avx512.mask.pmovus.qw.512" => "__builtin_ia32_pmovusqw512_mask", + "avx512.mask.pmovus.qw.mem.128" => "__builtin_ia32_pmovusqw128mem_mask", + "avx512.mask.pmovus.qw.mem.256" => "__builtin_ia32_pmovusqw256mem_mask", + "avx512.mask.pmovus.qw.mem.512" => "__builtin_ia32_pmovusqw512mem_mask", + "avx512.mask.pmovus.wb.128" => "__builtin_ia32_pmovuswb128_mask", + "avx512.mask.pmovus.wb.256" => "__builtin_ia32_pmovuswb256_mask", + "avx512.mask.pmovus.wb.512" => "__builtin_ia32_pmovuswb512_mask", + "avx512.mask.pmovus.wb.mem.128" => "__builtin_ia32_pmovuswb128mem_mask", + "avx512.mask.pmovus.wb.mem.256" => "__builtin_ia32_pmovuswb256mem_mask", + "avx512.mask.pmovus.wb.mem.512" => "__builtin_ia32_pmovuswb512mem_mask", + "avx512.mask.pmovzxb.d.128" => "__builtin_ia32_pmovzxbd128_mask", + "avx512.mask.pmovzxb.d.256" => "__builtin_ia32_pmovzxbd256_mask", + "avx512.mask.pmovzxb.d.512" => "__builtin_ia32_pmovzxbd512_mask", + "avx512.mask.pmovzxb.q.128" => "__builtin_ia32_pmovzxbq128_mask", + "avx512.mask.pmovzxb.q.256" => "__builtin_ia32_pmovzxbq256_mask", + "avx512.mask.pmovzxb.q.512" => "__builtin_ia32_pmovzxbq512_mask", + "avx512.mask.pmovzxb.w.128" => 
"__builtin_ia32_pmovzxbw128_mask", + "avx512.mask.pmovzxb.w.256" => "__builtin_ia32_pmovzxbw256_mask", + "avx512.mask.pmovzxb.w.512" => "__builtin_ia32_pmovzxbw512_mask", + "avx512.mask.pmovzxd.q.128" => "__builtin_ia32_pmovzxdq128_mask", + "avx512.mask.pmovzxd.q.256" => "__builtin_ia32_pmovzxdq256_mask", + "avx512.mask.pmovzxd.q.512" => "__builtin_ia32_pmovzxdq512_mask", + "avx512.mask.pmovzxw.d.128" => "__builtin_ia32_pmovzxwd128_mask", + "avx512.mask.pmovzxw.d.256" => "__builtin_ia32_pmovzxwd256_mask", + "avx512.mask.pmovzxw.d.512" => "__builtin_ia32_pmovzxwd512_mask", + "avx512.mask.pmovzxw.q.128" => "__builtin_ia32_pmovzxwq128_mask", + "avx512.mask.pmovzxw.q.256" => "__builtin_ia32_pmovzxwq256_mask", + "avx512.mask.pmovzxw.q.512" => "__builtin_ia32_pmovzxwq512_mask", + "avx512.mask.pmul.dq.128" => "__builtin_ia32_pmuldq128_mask", + "avx512.mask.pmul.dq.256" => "__builtin_ia32_pmuldq256_mask", + "avx512.mask.pmul.dq.512" => "__builtin_ia32_pmuldq512_mask", + "avx512.mask.pmul.hr.sw.128" => "__builtin_ia32_pmulhrsw128_mask", + "avx512.mask.pmul.hr.sw.256" => "__builtin_ia32_pmulhrsw256_mask", + "avx512.mask.pmul.hr.sw.512" => "__builtin_ia32_pmulhrsw512_mask", + "avx512.mask.pmulh.w.128" => "__builtin_ia32_pmulhw128_mask", + "avx512.mask.pmulh.w.256" => "__builtin_ia32_pmulhw256_mask", + "avx512.mask.pmulh.w.512" => "__builtin_ia32_pmulhw512_mask", + "avx512.mask.pmulhu.w.128" => "__builtin_ia32_pmulhuw128_mask", + "avx512.mask.pmulhu.w.256" => "__builtin_ia32_pmulhuw256_mask", + "avx512.mask.pmulhu.w.512" => "__builtin_ia32_pmulhuw512_mask", + "avx512.mask.pmull.d.128" => "__builtin_ia32_pmulld128_mask", + "avx512.mask.pmull.d.256" => "__builtin_ia32_pmulld256_mask", + "avx512.mask.pmull.d.512" => "__builtin_ia32_pmulld512_mask", + "avx512.mask.pmull.q.128" => "__builtin_ia32_pmullq128_mask", + "avx512.mask.pmull.q.256" => "__builtin_ia32_pmullq256_mask", + "avx512.mask.pmull.q.512" => "__builtin_ia32_pmullq512_mask", + "avx512.mask.pmull.w.128" => "__builtin_ia32_pmullw128_mask", + "avx512.mask.pmull.w.256" => "__builtin_ia32_pmullw256_mask", + "avx512.mask.pmull.w.512" => "__builtin_ia32_pmullw512_mask", + "avx512.mask.pmultishift.qb.128" => "__builtin_ia32_vpmultishiftqb128_mask", + "avx512.mask.pmultishift.qb.256" => "__builtin_ia32_vpmultishiftqb256_mask", + "avx512.mask.pmultishift.qb.512" => "__builtin_ia32_vpmultishiftqb512_mask", + "avx512.mask.pmulu.dq.128" => "__builtin_ia32_pmuludq128_mask", + "avx512.mask.pmulu.dq.256" => "__builtin_ia32_pmuludq256_mask", + "avx512.mask.pmulu.dq.512" => "__builtin_ia32_pmuludq512_mask", + "avx512.mask.prol.d.128" => "__builtin_ia32_prold128_mask", + "avx512.mask.prol.d.256" => "__builtin_ia32_prold256_mask", + "avx512.mask.prol.d.512" => "__builtin_ia32_prold512_mask", + "avx512.mask.prol.q.128" => "__builtin_ia32_prolq128_mask", + "avx512.mask.prol.q.256" => "__builtin_ia32_prolq256_mask", + "avx512.mask.prol.q.512" => "__builtin_ia32_prolq512_mask", + "avx512.mask.prolv.d.128" => "__builtin_ia32_prolvd128_mask", + "avx512.mask.prolv.d.256" => "__builtin_ia32_prolvd256_mask", + "avx512.mask.prolv.d.512" => "__builtin_ia32_prolvd512_mask", + "avx512.mask.prolv.q.128" => "__builtin_ia32_prolvq128_mask", + "avx512.mask.prolv.q.256" => "__builtin_ia32_prolvq256_mask", + "avx512.mask.prolv.q.512" => "__builtin_ia32_prolvq512_mask", + "avx512.mask.pror.d.128" => "__builtin_ia32_prord128_mask", + "avx512.mask.pror.d.256" => "__builtin_ia32_prord256_mask", + "avx512.mask.pror.d.512" => "__builtin_ia32_prord512_mask", + "avx512.mask.pror.q.128" => 
"__builtin_ia32_prorq128_mask", + "avx512.mask.pror.q.256" => "__builtin_ia32_prorq256_mask", + "avx512.mask.pror.q.512" => "__builtin_ia32_prorq512_mask", + "avx512.mask.prorv.d.128" => "__builtin_ia32_prorvd128_mask", + "avx512.mask.prorv.d.256" => "__builtin_ia32_prorvd256_mask", + "avx512.mask.prorv.d.512" => "__builtin_ia32_prorvd512_mask", + "avx512.mask.prorv.q.128" => "__builtin_ia32_prorvq128_mask", + "avx512.mask.prorv.q.256" => "__builtin_ia32_prorvq256_mask", + "avx512.mask.prorv.q.512" => "__builtin_ia32_prorvq512_mask", + "avx512.mask.pshuf.b.128" => "__builtin_ia32_pshufb128_mask", + "avx512.mask.pshuf.b.256" => "__builtin_ia32_pshufb256_mask", + "avx512.mask.pshuf.b.512" => "__builtin_ia32_pshufb512_mask", + "avx512.mask.psll.d" => "__builtin_ia32_pslld512_mask", + "avx512.mask.psll.d.128" => "__builtin_ia32_pslld128_mask", + "avx512.mask.psll.d.256" => "__builtin_ia32_pslld256_mask", + "avx512.mask.psll.di.128" => "__builtin_ia32_pslldi128_mask", + "avx512.mask.psll.di.256" => "__builtin_ia32_pslldi256_mask", + "avx512.mask.psll.di.512" => "__builtin_ia32_pslldi512_mask", + "avx512.mask.psll.q" => "__builtin_ia32_psllq512_mask", + "avx512.mask.psll.q.128" => "__builtin_ia32_psllq128_mask", + "avx512.mask.psll.q.256" => "__builtin_ia32_psllq256_mask", + "avx512.mask.psll.qi.128" => "__builtin_ia32_psllqi128_mask", + "avx512.mask.psll.qi.256" => "__builtin_ia32_psllqi256_mask", + "avx512.mask.psll.qi.512" => "__builtin_ia32_psllqi512_mask", + "avx512.mask.psll.w.128" => "__builtin_ia32_psllw128_mask", + "avx512.mask.psll.w.256" => "__builtin_ia32_psllw256_mask", + "avx512.mask.psll.w.512" => "__builtin_ia32_psllw512_mask", + "avx512.mask.psll.wi.128" => "__builtin_ia32_psllwi128_mask", + "avx512.mask.psll.wi.256" => "__builtin_ia32_psllwi256_mask", + "avx512.mask.psll.wi.512" => "__builtin_ia32_psllwi512_mask", + "avx512.mask.psllv.d" => "__builtin_ia32_psllv16si_mask", + "avx512.mask.psllv.q" => "__builtin_ia32_psllv8di_mask", + "avx512.mask.psllv16.hi" => "__builtin_ia32_psllv16hi_mask", + "avx512.mask.psllv2.di" => "__builtin_ia32_psllv2di_mask", + "avx512.mask.psllv32hi" => "__builtin_ia32_psllv32hi_mask", + "avx512.mask.psllv4.di" => "__builtin_ia32_psllv4di_mask", + "avx512.mask.psllv4.si" => "__builtin_ia32_psllv4si_mask", + "avx512.mask.psllv8.hi" => "__builtin_ia32_psllv8hi_mask", + "avx512.mask.psllv8.si" => "__builtin_ia32_psllv8si_mask", + "avx512.mask.psra.d" => "__builtin_ia32_psrad512_mask", + "avx512.mask.psra.d.128" => "__builtin_ia32_psrad128_mask", + "avx512.mask.psra.d.256" => "__builtin_ia32_psrad256_mask", + "avx512.mask.psra.di.128" => "__builtin_ia32_psradi128_mask", + "avx512.mask.psra.di.256" => "__builtin_ia32_psradi256_mask", + "avx512.mask.psra.di.512" => "__builtin_ia32_psradi512_mask", + "avx512.mask.psra.q" => "__builtin_ia32_psraq512_mask", + "avx512.mask.psra.q.128" => "__builtin_ia32_psraq128_mask", + "avx512.mask.psra.q.256" => "__builtin_ia32_psraq256_mask", + "avx512.mask.psra.qi.128" => "__builtin_ia32_psraqi128_mask", + "avx512.mask.psra.qi.256" => "__builtin_ia32_psraqi256_mask", + "avx512.mask.psra.qi.512" => "__builtin_ia32_psraqi512_mask", + "avx512.mask.psra.w.128" => "__builtin_ia32_psraw128_mask", + "avx512.mask.psra.w.256" => "__builtin_ia32_psraw256_mask", + "avx512.mask.psra.w.512" => "__builtin_ia32_psraw512_mask", + "avx512.mask.psra.wi.128" => "__builtin_ia32_psrawi128_mask", + "avx512.mask.psra.wi.256" => "__builtin_ia32_psrawi256_mask", + "avx512.mask.psra.wi.512" => "__builtin_ia32_psrawi512_mask", + 
"avx512.mask.psrav.d" => "__builtin_ia32_psrav16si_mask", + "avx512.mask.psrav.q" => "__builtin_ia32_psrav8di_mask", + "avx512.mask.psrav.q.128" => "__builtin_ia32_psravq128_mask", + "avx512.mask.psrav.q.256" => "__builtin_ia32_psravq256_mask", + "avx512.mask.psrav16.hi" => "__builtin_ia32_psrav16hi_mask", + "avx512.mask.psrav32.hi" => "__builtin_ia32_psrav32hi_mask", + "avx512.mask.psrav4.si" => "__builtin_ia32_psrav4si_mask", + "avx512.mask.psrav8.hi" => "__builtin_ia32_psrav8hi_mask", + "avx512.mask.psrav8.si" => "__builtin_ia32_psrav8si_mask", + "avx512.mask.psrl.d" => "__builtin_ia32_psrld512_mask", + "avx512.mask.psrl.d.128" => "__builtin_ia32_psrld128_mask", + "avx512.mask.psrl.d.256" => "__builtin_ia32_psrld256_mask", + "avx512.mask.psrl.di.128" => "__builtin_ia32_psrldi128_mask", + "avx512.mask.psrl.di.256" => "__builtin_ia32_psrldi256_mask", + "avx512.mask.psrl.di.512" => "__builtin_ia32_psrldi512_mask", + "avx512.mask.psrl.q" => "__builtin_ia32_psrlq512_mask", + "avx512.mask.psrl.q.128" => "__builtin_ia32_psrlq128_mask", + "avx512.mask.psrl.q.256" => "__builtin_ia32_psrlq256_mask", + "avx512.mask.psrl.qi.128" => "__builtin_ia32_psrlqi128_mask", + "avx512.mask.psrl.qi.256" => "__builtin_ia32_psrlqi256_mask", + "avx512.mask.psrl.qi.512" => "__builtin_ia32_psrlqi512_mask", + "avx512.mask.psrl.w.128" => "__builtin_ia32_psrlw128_mask", + "avx512.mask.psrl.w.256" => "__builtin_ia32_psrlw256_mask", + "avx512.mask.psrl.w.512" => "__builtin_ia32_psrlw512_mask", + "avx512.mask.psrl.wi.128" => "__builtin_ia32_psrlwi128_mask", + "avx512.mask.psrl.wi.256" => "__builtin_ia32_psrlwi256_mask", + "avx512.mask.psrl.wi.512" => "__builtin_ia32_psrlwi512_mask", + "avx512.mask.psrlv.d" => "__builtin_ia32_psrlv16si_mask", + "avx512.mask.psrlv.q" => "__builtin_ia32_psrlv8di_mask", + "avx512.mask.psrlv16.hi" => "__builtin_ia32_psrlv16hi_mask", + "avx512.mask.psrlv2.di" => "__builtin_ia32_psrlv2di_mask", + "avx512.mask.psrlv32hi" => "__builtin_ia32_psrlv32hi_mask", + "avx512.mask.psrlv4.di" => "__builtin_ia32_psrlv4di_mask", + "avx512.mask.psrlv4.si" => "__builtin_ia32_psrlv4si_mask", + "avx512.mask.psrlv8.hi" => "__builtin_ia32_psrlv8hi_mask", + "avx512.mask.psrlv8.si" => "__builtin_ia32_psrlv8si_mask", + "avx512.mask.psub.b.128" => "__builtin_ia32_psubb128_mask", + "avx512.mask.psub.b.256" => "__builtin_ia32_psubb256_mask", + "avx512.mask.psub.b.512" => "__builtin_ia32_psubb512_mask", + "avx512.mask.psub.d.128" => "__builtin_ia32_psubd128_mask", + "avx512.mask.psub.d.256" => "__builtin_ia32_psubd256_mask", + "avx512.mask.psub.d.512" => "__builtin_ia32_psubd512_mask", + "avx512.mask.psub.q.128" => "__builtin_ia32_psubq128_mask", + "avx512.mask.psub.q.256" => "__builtin_ia32_psubq256_mask", + "avx512.mask.psub.q.512" => "__builtin_ia32_psubq512_mask", + "avx512.mask.psub.w.128" => "__builtin_ia32_psubw128_mask", + "avx512.mask.psub.w.256" => "__builtin_ia32_psubw256_mask", + "avx512.mask.psub.w.512" => "__builtin_ia32_psubw512_mask", + "avx512.mask.psubs.b.128" => "__builtin_ia32_psubsb128_mask", + "avx512.mask.psubs.b.256" => "__builtin_ia32_psubsb256_mask", + "avx512.mask.psubs.b.512" => "__builtin_ia32_psubsb512_mask", + "avx512.mask.psubs.w.128" => "__builtin_ia32_psubsw128_mask", + "avx512.mask.psubs.w.256" => "__builtin_ia32_psubsw256_mask", + "avx512.mask.psubs.w.512" => "__builtin_ia32_psubsw512_mask", + "avx512.mask.psubus.b.128" => "__builtin_ia32_psubusb128_mask", + "avx512.mask.psubus.b.256" => "__builtin_ia32_psubusb256_mask", + "avx512.mask.psubus.b.512" => "__builtin_ia32_psubusb512_mask", 
+ "avx512.mask.psubus.w.128" => "__builtin_ia32_psubusw128_mask", + "avx512.mask.psubus.w.256" => "__builtin_ia32_psubusw256_mask", + "avx512.mask.psubus.w.512" => "__builtin_ia32_psubusw512_mask", + "avx512.mask.pternlog.d.128" => "__builtin_ia32_pternlogd128_mask", + "avx512.mask.pternlog.d.256" => "__builtin_ia32_pternlogd256_mask", + "avx512.mask.pternlog.d.512" => "__builtin_ia32_pternlogd512_mask", + "avx512.mask.pternlog.q.128" => "__builtin_ia32_pternlogq128_mask", + "avx512.mask.pternlog.q.256" => "__builtin_ia32_pternlogq256_mask", + "avx512.mask.pternlog.q.512" => "__builtin_ia32_pternlogq512_mask", + "avx512.mask.ptestm.d.512" => "__builtin_ia32_ptestmd512", + "avx512.mask.ptestm.q.512" => "__builtin_ia32_ptestmq512", + "avx512.mask.range.pd.128" => "__builtin_ia32_rangepd128_mask", + "avx512.mask.range.pd.256" => "__builtin_ia32_rangepd256_mask", + "avx512.mask.range.pd.512" => "__builtin_ia32_rangepd512_mask", + "avx512.mask.range.ps.128" => "__builtin_ia32_rangeps128_mask", + "avx512.mask.range.ps.256" => "__builtin_ia32_rangeps256_mask", + "avx512.mask.range.ps.512" => "__builtin_ia32_rangeps512_mask", + // [INVALID CONVERSION]: "avx512.mask.range.sd" => "__builtin_ia32_rangesd128_round_mask", + // [INVALID CONVERSION]: "avx512.mask.range.ss" => "__builtin_ia32_rangess128_round_mask", + "avx512.mask.reduce.pd.128" => "__builtin_ia32_reducepd128_mask", + "avx512.mask.reduce.pd.256" => "__builtin_ia32_reducepd256_mask", + "avx512.mask.reduce.pd.512" => "__builtin_ia32_reducepd512_mask", + "avx512.mask.reduce.ps.128" => "__builtin_ia32_reduceps128_mask", + "avx512.mask.reduce.ps.256" => "__builtin_ia32_reduceps256_mask", + "avx512.mask.reduce.ps.512" => "__builtin_ia32_reduceps512_mask", + "avx512.mask.reduce.sd" => "__builtin_ia32_reducesd_mask", + "avx512.mask.reduce.ss" => "__builtin_ia32_reducess_mask", + "avx512.mask.rndscale.pd.128" => "__builtin_ia32_rndscalepd_128_mask", + "avx512.mask.rndscale.pd.256" => "__builtin_ia32_rndscalepd_256_mask", + "avx512.mask.rndscale.pd.512" => "__builtin_ia32_rndscalepd_mask", + "avx512.mask.rndscale.ps.128" => "__builtin_ia32_rndscaleps_128_mask", + "avx512.mask.rndscale.ps.256" => "__builtin_ia32_rndscaleps_256_mask", + "avx512.mask.rndscale.ps.512" => "__builtin_ia32_rndscaleps_mask", + // [INVALID CONVERSION]: "avx512.mask.rndscale.sd" => "__builtin_ia32_rndscalesd_round_mask", + // [INVALID CONVERSION]: "avx512.mask.rndscale.ss" => "__builtin_ia32_rndscaless_round_mask", + "avx512.mask.scalef.pd.128" => "__builtin_ia32_scalefpd128_mask", + "avx512.mask.scalef.pd.256" => "__builtin_ia32_scalefpd256_mask", + "avx512.mask.scalef.pd.512" => "__builtin_ia32_scalefpd512_mask", + "avx512.mask.scalef.ps.128" => "__builtin_ia32_scalefps128_mask", + "avx512.mask.scalef.ps.256" => "__builtin_ia32_scalefps256_mask", + "avx512.mask.scalef.ps.512" => "__builtin_ia32_scalefps512_mask", + // [INVALID CONVERSION]: "avx512.mask.scalef.sd" => "__builtin_ia32_scalefsd_round_mask", + // [INVALID CONVERSION]: "avx512.mask.scalef.ss" => "__builtin_ia32_scalefss_round_mask", + "avx512.mask.shuf.f32x4" => "__builtin_ia32_shuf_f32x4_mask", + "avx512.mask.shuf.f32x4.256" => "__builtin_ia32_shuf_f32x4_256_mask", + "avx512.mask.shuf.f64x2" => "__builtin_ia32_shuf_f64x2_mask", + "avx512.mask.shuf.f64x2.256" => "__builtin_ia32_shuf_f64x2_256_mask", + "avx512.mask.shuf.i32x4" => "__builtin_ia32_shuf_i32x4_mask", + "avx512.mask.shuf.i32x4.256" => "__builtin_ia32_shuf_i32x4_256_mask", + "avx512.mask.shuf.i64x2" => "__builtin_ia32_shuf_i64x2_mask", + 
"avx512.mask.shuf.i64x2.256" => "__builtin_ia32_shuf_i64x2_256_mask", + "avx512.mask.shuf.pd.128" => "__builtin_ia32_shufpd128_mask", + "avx512.mask.shuf.pd.256" => "__builtin_ia32_shufpd256_mask", + "avx512.mask.shuf.pd.512" => "__builtin_ia32_shufpd512_mask", + "avx512.mask.shuf.ps.128" => "__builtin_ia32_shufps128_mask", + "avx512.mask.shuf.ps.256" => "__builtin_ia32_shufps256_mask", + "avx512.mask.shuf.ps.512" => "__builtin_ia32_shufps512_mask", + "avx512.mask.sqrt.pd.128" => "__builtin_ia32_sqrtpd128_mask", + "avx512.mask.sqrt.pd.256" => "__builtin_ia32_sqrtpd256_mask", + "avx512.mask.sqrt.pd.512" => "__builtin_ia32_sqrtpd512_mask", + "avx512.mask.sqrt.ps.128" => "__builtin_ia32_sqrtps128_mask", + "avx512.mask.sqrt.ps.256" => "__builtin_ia32_sqrtps256_mask", + "avx512.mask.sqrt.ps.512" => "__builtin_ia32_sqrtps512_mask", + // [INVALID CONVERSION]: "avx512.mask.sqrt.sd" => "__builtin_ia32_sqrtsd_round_mask", + // [INVALID CONVERSION]: "avx512.mask.sqrt.ss" => "__builtin_ia32_sqrtss_round_mask", + "avx512.mask.store.ss" => "__builtin_ia32_storess_mask", + "avx512.mask.storeu.d.512" => "__builtin_ia32_storedqusi512_mask", + "avx512.mask.storeu.pd.512" => "__builtin_ia32_storeupd512_mask", + "avx512.mask.storeu.ps.512" => "__builtin_ia32_storeups512_mask", + "avx512.mask.storeu.q.512" => "__builtin_ia32_storedqudi512_mask", + "avx512.mask.sub.pd.128" => "__builtin_ia32_subpd128_mask", + "avx512.mask.sub.pd.256" => "__builtin_ia32_subpd256_mask", + "avx512.mask.sub.pd.512" => "__builtin_ia32_subpd512_mask", + "avx512.mask.sub.ps.128" => "__builtin_ia32_subps128_mask", + "avx512.mask.sub.ps.256" => "__builtin_ia32_subps256_mask", + "avx512.mask.sub.ps.512" => "__builtin_ia32_subps512_mask", + // [INVALID CONVERSION]: "avx512.mask.sub.sd.round" => "__builtin_ia32_subsd_round_mask", + // [INVALID CONVERSION]: "avx512.mask.sub.ss.round" => "__builtin_ia32_subss_round_mask", + "avx512.mask.valign.d.128" => "__builtin_ia32_alignd128_mask", + "avx512.mask.valign.d.256" => "__builtin_ia32_alignd256_mask", + "avx512.mask.valign.d.512" => "__builtin_ia32_alignd512_mask", + "avx512.mask.valign.q.128" => "__builtin_ia32_alignq128_mask", + "avx512.mask.valign.q.256" => "__builtin_ia32_alignq256_mask", + "avx512.mask.valign.q.512" => "__builtin_ia32_alignq512_mask", + "avx512.mask.vcvtph2ps.128" => "__builtin_ia32_vcvtph2ps_mask", + "avx512.mask.vcvtph2ps.256" => "__builtin_ia32_vcvtph2ps256_mask", + "avx512.mask.vcvtph2ps.512" => "__builtin_ia32_vcvtph2ps512_mask", + "avx512.mask.vcvtps2ph.128" => "__builtin_ia32_vcvtps2ph_mask", + "avx512.mask.vcvtps2ph.256" => "__builtin_ia32_vcvtps2ph256_mask", + "avx512.mask.vcvtps2ph.512" => "__builtin_ia32_vcvtps2ph512_mask", + "avx512.mask.vextractf32x4.256" => "__builtin_ia32_extractf32x4_256_mask", + "avx512.mask.vextractf32x4.512" => "__builtin_ia32_extractf32x4_mask", + "avx512.mask.vextractf32x8.512" => "__builtin_ia32_extractf32x8_mask", + "avx512.mask.vextractf64x2.256" => "__builtin_ia32_extractf64x2_256_mask", + "avx512.mask.vextractf64x2.512" => "__builtin_ia32_extractf64x2_512_mask", + "avx512.mask.vextractf64x4.512" => "__builtin_ia32_extractf64x4_mask", + "avx512.mask.vextracti32x4.256" => "__builtin_ia32_extracti32x4_256_mask", + "avx512.mask.vextracti32x4.512" => "__builtin_ia32_extracti32x4_mask", + "avx512.mask.vextracti32x8.512" => "__builtin_ia32_extracti32x8_mask", + "avx512.mask.vextracti64x2.256" => "__builtin_ia32_extracti64x2_256_mask", + "avx512.mask.vextracti64x2.512" => "__builtin_ia32_extracti64x2_512_mask", + 
"avx512.mask.vextracti64x4.512" => "__builtin_ia32_extracti64x4_mask", + "avx512.mask.vfmadd.pd.128" => "__builtin_ia32_vfmaddpd128_mask", + "avx512.mask.vfmadd.pd.256" => "__builtin_ia32_vfmaddpd256_mask", + "avx512.mask.vfmadd.pd.512" => "__builtin_ia32_vfmaddpd512_mask", + "avx512.mask.vfmadd.ps.128" => "__builtin_ia32_vfmaddps128_mask", + "avx512.mask.vfmadd.ps.256" => "__builtin_ia32_vfmaddps256_mask", + "avx512.mask.vfmadd.ps.512" => "__builtin_ia32_vfmaddps512_mask", + "avx512.mask.vfmadd.sd" => "__builtin_ia32_vfmaddsd3_mask", + "avx512.mask.vfmadd.ss" => "__builtin_ia32_vfmaddss3_mask", + "avx512.mask.vfmaddsub.pd.128" => "__builtin_ia32_vfmaddsubpd128_mask", + "avx512.mask.vfmaddsub.pd.256" => "__builtin_ia32_vfmaddsubpd256_mask", + "avx512.mask.vfmaddsub.pd.512" => "__builtin_ia32_vfmaddsubpd512_mask", + "avx512.mask.vfmaddsub.ps.128" => "__builtin_ia32_vfmaddsubps128_mask", + "avx512.mask.vfmaddsub.ps.256" => "__builtin_ia32_vfmaddsubps256_mask", + "avx512.mask.vfmaddsub.ps.512" => "__builtin_ia32_vfmaddsubps512_mask", + "avx512.mask.vfnmadd.pd.128" => "__builtin_ia32_vfnmaddpd128_mask", + "avx512.mask.vfnmadd.pd.256" => "__builtin_ia32_vfnmaddpd256_mask", + "avx512.mask.vfnmadd.pd.512" => "__builtin_ia32_vfnmaddpd512_mask", + "avx512.mask.vfnmadd.ps.128" => "__builtin_ia32_vfnmaddps128_mask", + "avx512.mask.vfnmadd.ps.256" => "__builtin_ia32_vfnmaddps256_mask", + "avx512.mask.vfnmadd.ps.512" => "__builtin_ia32_vfnmaddps512_mask", + "avx512.mask.vfnmsub.pd.128" => "__builtin_ia32_vfnmsubpd128_mask", + "avx512.mask.vfnmsub.pd.256" => "__builtin_ia32_vfnmsubpd256_mask", + "avx512.mask.vfnmsub.pd.512" => "__builtin_ia32_vfnmsubpd512_mask", + "avx512.mask.vfnmsub.ps.128" => "__builtin_ia32_vfnmsubps128_mask", + "avx512.mask.vfnmsub.ps.256" => "__builtin_ia32_vfnmsubps256_mask", + "avx512.mask.vfnmsub.ps.512" => "__builtin_ia32_vfnmsubps512_mask", + "avx512.mask.vpermi2var.d.128" => "__builtin_ia32_vpermi2vard128_mask", + "avx512.mask.vpermi2var.d.256" => "__builtin_ia32_vpermi2vard256_mask", + "avx512.mask.vpermi2var.d.512" => "__builtin_ia32_vpermi2vard512_mask", + "avx512.mask.vpermi2var.hi.128" => "__builtin_ia32_vpermi2varhi128_mask", + "avx512.mask.vpermi2var.hi.256" => "__builtin_ia32_vpermi2varhi256_mask", + "avx512.mask.vpermi2var.hi.512" => "__builtin_ia32_vpermi2varhi512_mask", + "avx512.mask.vpermi2var.pd.128" => "__builtin_ia32_vpermi2varpd128_mask", + "avx512.mask.vpermi2var.pd.256" => "__builtin_ia32_vpermi2varpd256_mask", + "avx512.mask.vpermi2var.pd.512" => "__builtin_ia32_vpermi2varpd512_mask", + "avx512.mask.vpermi2var.ps.128" => "__builtin_ia32_vpermi2varps128_mask", + "avx512.mask.vpermi2var.ps.256" => "__builtin_ia32_vpermi2varps256_mask", + "avx512.mask.vpermi2var.ps.512" => "__builtin_ia32_vpermi2varps512_mask", + "avx512.mask.vpermi2var.q.128" => "__builtin_ia32_vpermi2varq128_mask", + "avx512.mask.vpermi2var.q.256" => "__builtin_ia32_vpermi2varq256_mask", + "avx512.mask.vpermi2var.q.512" => "__builtin_ia32_vpermi2varq512_mask", + "avx512.mask.vpermi2var.qi.128" => "__builtin_ia32_vpermi2varqi128_mask", + "avx512.mask.vpermi2var.qi.256" => "__builtin_ia32_vpermi2varqi256_mask", + "avx512.mask.vpermi2var.qi.512" => "__builtin_ia32_vpermi2varqi512_mask", + "avx512.mask.vpermilvar.pd.128" => "__builtin_ia32_vpermilvarpd_mask", + "avx512.mask.vpermilvar.pd.256" => "__builtin_ia32_vpermilvarpd256_mask", + "avx512.mask.vpermilvar.pd.512" => "__builtin_ia32_vpermilvarpd512_mask", + "avx512.mask.vpermilvar.ps.128" => "__builtin_ia32_vpermilvarps_mask", + 
"avx512.mask.vpermilvar.ps.256" => "__builtin_ia32_vpermilvarps256_mask", + "avx512.mask.vpermilvar.ps.512" => "__builtin_ia32_vpermilvarps512_mask", + "avx512.mask.vpermt.d.512" => "__builtin_ia32_vpermt2vard512_mask", + "avx512.mask.vpermt.pd.512" => "__builtin_ia32_vpermt2varpd512_mask", + "avx512.mask.vpermt.ps.512" => "__builtin_ia32_vpermt2varps512_mask", + "avx512.mask.vpermt.q.512" => "__builtin_ia32_vpermt2varq512_mask", + "avx512.mask.vpermt2var.d.128" => "__builtin_ia32_vpermt2vard128_mask", + "avx512.mask.vpermt2var.d.256" => "__builtin_ia32_vpermt2vard256_mask", + "avx512.mask.vpermt2var.d.512" => "__builtin_ia32_vpermt2vard512_mask", + "avx512.mask.vpermt2var.hi.128" => "__builtin_ia32_vpermt2varhi128_mask", + "avx512.mask.vpermt2var.hi.256" => "__builtin_ia32_vpermt2varhi256_mask", + "avx512.mask.vpermt2var.hi.512" => "__builtin_ia32_vpermt2varhi512_mask", + "avx512.mask.vpermt2var.pd.128" => "__builtin_ia32_vpermt2varpd128_mask", + "avx512.mask.vpermt2var.pd.256" => "__builtin_ia32_vpermt2varpd256_mask", + "avx512.mask.vpermt2var.pd.512" => "__builtin_ia32_vpermt2varpd512_mask", + "avx512.mask.vpermt2var.ps.128" => "__builtin_ia32_vpermt2varps128_mask", + "avx512.mask.vpermt2var.ps.256" => "__builtin_ia32_vpermt2varps256_mask", + "avx512.mask.vpermt2var.ps.512" => "__builtin_ia32_vpermt2varps512_mask", + "avx512.mask.vpermt2var.q.128" => "__builtin_ia32_vpermt2varq128_mask", + "avx512.mask.vpermt2var.q.256" => "__builtin_ia32_vpermt2varq256_mask", + "avx512.mask.vpermt2var.q.512" => "__builtin_ia32_vpermt2varq512_mask", + "avx512.mask.vpermt2var.qi.128" => "__builtin_ia32_vpermt2varqi128_mask", + "avx512.mask.vpermt2var.qi.256" => "__builtin_ia32_vpermt2varqi256_mask", + "avx512.mask.vpermt2var.qi.512" => "__builtin_ia32_vpermt2varqi512_mask", + "avx512.mask.vpmadd52h.uq.128" => "__builtin_ia32_vpmadd52huq128_mask", + "avx512.mask.vpmadd52h.uq.256" => "__builtin_ia32_vpmadd52huq256_mask", + "avx512.mask.vpmadd52h.uq.512" => "__builtin_ia32_vpmadd52huq512_mask", + "avx512.mask.vpmadd52l.uq.128" => "__builtin_ia32_vpmadd52luq128_mask", + "avx512.mask.vpmadd52l.uq.256" => "__builtin_ia32_vpmadd52luq256_mask", + "avx512.mask.vpmadd52l.uq.512" => "__builtin_ia32_vpmadd52luq512_mask", + "avx512.mask.xor.pd.128" => "__builtin_ia32_xorpd128_mask", + "avx512.mask.xor.pd.256" => "__builtin_ia32_xorpd256_mask", + "avx512.mask.xor.pd.512" => "__builtin_ia32_xorpd512_mask", + "avx512.mask.xor.ps.128" => "__builtin_ia32_xorps128_mask", + "avx512.mask.xor.ps.256" => "__builtin_ia32_xorps256_mask", + "avx512.mask.xor.ps.512" => "__builtin_ia32_xorps512_mask", + "avx512.mask3.vfmadd.pd.128" => "__builtin_ia32_vfmaddpd128_mask3", + "avx512.mask3.vfmadd.pd.256" => "__builtin_ia32_vfmaddpd256_mask3", + "avx512.mask3.vfmadd.pd.512" => "__builtin_ia32_vfmaddpd512_mask3", + "avx512.mask3.vfmadd.ps.128" => "__builtin_ia32_vfmaddps128_mask3", + "avx512.mask3.vfmadd.ps.256" => "__builtin_ia32_vfmaddps256_mask3", + "avx512.mask3.vfmadd.ps.512" => "__builtin_ia32_vfmaddps512_mask3", + "avx512.mask3.vfmadd.sd" => "__builtin_ia32_vfmaddsd3_mask3", + "avx512.mask3.vfmadd.ss" => "__builtin_ia32_vfmaddss3_mask3", + "avx512.mask3.vfmaddsub.pd.128" => "__builtin_ia32_vfmaddsubpd128_mask3", + "avx512.mask3.vfmaddsub.pd.256" => "__builtin_ia32_vfmaddsubpd256_mask3", + "avx512.mask3.vfmaddsub.pd.512" => "__builtin_ia32_vfmaddsubpd512_mask3", + "avx512.mask3.vfmaddsub.ps.128" => "__builtin_ia32_vfmaddsubps128_mask3", + "avx512.mask3.vfmaddsub.ps.256" => "__builtin_ia32_vfmaddsubps256_mask3", + 
"avx512.mask3.vfmaddsub.ps.512" => "__builtin_ia32_vfmaddsubps512_mask3", + "avx512.mask3.vfmsub.pd.128" => "__builtin_ia32_vfmsubpd128_mask3", + "avx512.mask3.vfmsub.pd.256" => "__builtin_ia32_vfmsubpd256_mask3", + "avx512.mask3.vfmsub.pd.512" => "__builtin_ia32_vfmsubpd512_mask3", + "avx512.mask3.vfmsub.ps.128" => "__builtin_ia32_vfmsubps128_mask3", + "avx512.mask3.vfmsub.ps.256" => "__builtin_ia32_vfmsubps256_mask3", + "avx512.mask3.vfmsub.ps.512" => "__builtin_ia32_vfmsubps512_mask3", + "avx512.mask3.vfmsubadd.pd.128" => "__builtin_ia32_vfmsubaddpd128_mask3", + "avx512.mask3.vfmsubadd.pd.256" => "__builtin_ia32_vfmsubaddpd256_mask3", + "avx512.mask3.vfmsubadd.pd.512" => "__builtin_ia32_vfmsubaddpd512_mask3", + "avx512.mask3.vfmsubadd.ps.128" => "__builtin_ia32_vfmsubaddps128_mask3", + "avx512.mask3.vfmsubadd.ps.256" => "__builtin_ia32_vfmsubaddps256_mask3", + "avx512.mask3.vfmsubadd.ps.512" => "__builtin_ia32_vfmsubaddps512_mask3", + "avx512.mask3.vfnmsub.pd.128" => "__builtin_ia32_vfnmsubpd128_mask3", + "avx512.mask3.vfnmsub.pd.256" => "__builtin_ia32_vfnmsubpd256_mask3", + "avx512.mask3.vfnmsub.pd.512" => "__builtin_ia32_vfnmsubpd512_mask3", + "avx512.mask3.vfnmsub.ps.128" => "__builtin_ia32_vfnmsubps128_mask3", + "avx512.mask3.vfnmsub.ps.256" => "__builtin_ia32_vfnmsubps256_mask3", + "avx512.mask3.vfnmsub.ps.512" => "__builtin_ia32_vfnmsubps512_mask3", + "avx512.maskz.fixupimm.pd.128" => "__builtin_ia32_fixupimmpd128_maskz", + "avx512.maskz.fixupimm.pd.256" => "__builtin_ia32_fixupimmpd256_maskz", + "avx512.maskz.fixupimm.pd.512" => "__builtin_ia32_fixupimmpd512_maskz", + "avx512.maskz.fixupimm.ps.128" => "__builtin_ia32_fixupimmps128_maskz", + "avx512.maskz.fixupimm.ps.256" => "__builtin_ia32_fixupimmps256_maskz", + "avx512.maskz.fixupimm.ps.512" => "__builtin_ia32_fixupimmps512_maskz", + "avx512.maskz.fixupimm.sd" => "__builtin_ia32_fixupimmsd_maskz", + "avx512.maskz.fixupimm.ss" => "__builtin_ia32_fixupimmss_maskz", + "avx512.maskz.pternlog.d.128" => "__builtin_ia32_pternlogd128_maskz", + "avx512.maskz.pternlog.d.256" => "__builtin_ia32_pternlogd256_maskz", + "avx512.maskz.pternlog.d.512" => "__builtin_ia32_pternlogd512_maskz", + "avx512.maskz.pternlog.q.128" => "__builtin_ia32_pternlogq128_maskz", + "avx512.maskz.pternlog.q.256" => "__builtin_ia32_pternlogq256_maskz", + "avx512.maskz.pternlog.q.512" => "__builtin_ia32_pternlogq512_maskz", + "avx512.maskz.vfmadd.pd.128" => "__builtin_ia32_vfmaddpd128_maskz", + "avx512.maskz.vfmadd.pd.256" => "__builtin_ia32_vfmaddpd256_maskz", + "avx512.maskz.vfmadd.pd.512" => "__builtin_ia32_vfmaddpd512_maskz", + "avx512.maskz.vfmadd.ps.128" => "__builtin_ia32_vfmaddps128_maskz", + "avx512.maskz.vfmadd.ps.256" => "__builtin_ia32_vfmaddps256_maskz", + "avx512.maskz.vfmadd.ps.512" => "__builtin_ia32_vfmaddps512_maskz", + "avx512.maskz.vfmadd.sd" => "__builtin_ia32_vfmaddsd3_maskz", + "avx512.maskz.vfmadd.ss" => "__builtin_ia32_vfmaddss3_maskz", + "avx512.maskz.vfmaddsub.pd.128" => "__builtin_ia32_vfmaddsubpd128_maskz", + "avx512.maskz.vfmaddsub.pd.256" => "__builtin_ia32_vfmaddsubpd256_maskz", + "avx512.maskz.vfmaddsub.pd.512" => "__builtin_ia32_vfmaddsubpd512_maskz", + "avx512.maskz.vfmaddsub.ps.128" => "__builtin_ia32_vfmaddsubps128_maskz", + "avx512.maskz.vfmaddsub.ps.256" => "__builtin_ia32_vfmaddsubps256_maskz", + "avx512.maskz.vfmaddsub.ps.512" => "__builtin_ia32_vfmaddsubps512_maskz", + "avx512.maskz.vpermt2var.d.128" => "__builtin_ia32_vpermt2vard128_maskz", + "avx512.maskz.vpermt2var.d.256" => "__builtin_ia32_vpermt2vard256_maskz", + 
"avx512.maskz.vpermt2var.d.512" => "__builtin_ia32_vpermt2vard512_maskz", + "avx512.maskz.vpermt2var.hi.128" => "__builtin_ia32_vpermt2varhi128_maskz", + "avx512.maskz.vpermt2var.hi.256" => "__builtin_ia32_vpermt2varhi256_maskz", + "avx512.maskz.vpermt2var.hi.512" => "__builtin_ia32_vpermt2varhi512_maskz", + "avx512.maskz.vpermt2var.pd.128" => "__builtin_ia32_vpermt2varpd128_maskz", + "avx512.maskz.vpermt2var.pd.256" => "__builtin_ia32_vpermt2varpd256_maskz", + "avx512.maskz.vpermt2var.pd.512" => "__builtin_ia32_vpermt2varpd512_maskz", + "avx512.maskz.vpermt2var.ps.128" => "__builtin_ia32_vpermt2varps128_maskz", + "avx512.maskz.vpermt2var.ps.256" => "__builtin_ia32_vpermt2varps256_maskz", + "avx512.maskz.vpermt2var.ps.512" => "__builtin_ia32_vpermt2varps512_maskz", + "avx512.maskz.vpermt2var.q.128" => "__builtin_ia32_vpermt2varq128_maskz", + "avx512.maskz.vpermt2var.q.256" => "__builtin_ia32_vpermt2varq256_maskz", + "avx512.maskz.vpermt2var.q.512" => "__builtin_ia32_vpermt2varq512_maskz", + "avx512.maskz.vpermt2var.qi.128" => "__builtin_ia32_vpermt2varqi128_maskz", + "avx512.maskz.vpermt2var.qi.256" => "__builtin_ia32_vpermt2varqi256_maskz", + "avx512.maskz.vpermt2var.qi.512" => "__builtin_ia32_vpermt2varqi512_maskz", + "avx512.maskz.vpmadd52h.uq.128" => "__builtin_ia32_vpmadd52huq128_maskz", + "avx512.maskz.vpmadd52h.uq.256" => "__builtin_ia32_vpmadd52huq256_maskz", + "avx512.maskz.vpmadd52h.uq.512" => "__builtin_ia32_vpmadd52huq512_maskz", + "avx512.maskz.vpmadd52l.uq.128" => "__builtin_ia32_vpmadd52luq128_maskz", + "avx512.maskz.vpmadd52l.uq.256" => "__builtin_ia32_vpmadd52luq256_maskz", + "avx512.maskz.vpmadd52l.uq.512" => "__builtin_ia32_vpmadd52luq512_maskz", + "avx512.max.pd.512" => "__builtin_ia32_maxpd512", + "avx512.max.ps.512" => "__builtin_ia32_maxps512", + "avx512.min.pd.512" => "__builtin_ia32_minpd512", + "avx512.min.ps.512" => "__builtin_ia32_minps512", + "avx512.movntdqa" => "__builtin_ia32_movntdqa512", + "avx512.mul.pd.512" => "__builtin_ia32_mulpd512", + "avx512.mul.ps.512" => "__builtin_ia32_mulps512", + "avx512.packssdw.512" => "__builtin_ia32_packssdw512", + "avx512.packsswb.512" => "__builtin_ia32_packsswb512", + "avx512.packusdw.512" => "__builtin_ia32_packusdw512", + "avx512.packuswb.512" => "__builtin_ia32_packuswb512", + "avx512.pavg.b.512" => "__builtin_ia32_pavgb512", + "avx512.pavg.w.512" => "__builtin_ia32_pavgw512", + "avx512.pbroadcastd.512" => "__builtin_ia32_pbroadcastd512", + "avx512.pbroadcastq.512" => "__builtin_ia32_pbroadcastq512", + "avx512.permvar.df.256" => "__builtin_ia32_permvardf256", + "avx512.permvar.df.512" => "__builtin_ia32_permvardf512", + "avx512.permvar.di.256" => "__builtin_ia32_permvardi256", + "avx512.permvar.di.512" => "__builtin_ia32_permvardi512", + "avx512.permvar.hi.128" => "__builtin_ia32_permvarhi128", + "avx512.permvar.hi.256" => "__builtin_ia32_permvarhi256", + "avx512.permvar.hi.512" => "__builtin_ia32_permvarhi512", + "avx512.permvar.qi.128" => "__builtin_ia32_permvarqi128", + "avx512.permvar.qi.256" => "__builtin_ia32_permvarqi256", + "avx512.permvar.qi.512" => "__builtin_ia32_permvarqi512", + "avx512.permvar.sf.512" => "__builtin_ia32_permvarsf512", + "avx512.permvar.si.512" => "__builtin_ia32_permvarsi512", + "avx512.pmaddubs.w.512" => "__builtin_ia32_pmaddubsw512", + "avx512.pmaddw.d.512" => "__builtin_ia32_pmaddwd512", + "avx512.pmovzxbd" => "__builtin_ia32_pmovzxbd512", + "avx512.pmovzxbq" => "__builtin_ia32_pmovzxbq512", + "avx512.pmovzxdq" => "__builtin_ia32_pmovzxdq512", + "avx512.pmovzxwd" => 
"__builtin_ia32_pmovzxwd512", + "avx512.pmovzxwq" => "__builtin_ia32_pmovzxwq512", + "avx512.pmul.hr.sw.512" => "__builtin_ia32_pmulhrsw512", + "avx512.pmulh.w.512" => "__builtin_ia32_pmulhw512", + "avx512.pmulhu.w.512" => "__builtin_ia32_pmulhuw512", + "avx512.pmultishift.qb.128" => "__builtin_ia32_vpmultishiftqb128", + "avx512.pmultishift.qb.256" => "__builtin_ia32_vpmultishiftqb256", + "avx512.pmultishift.qb.512" => "__builtin_ia32_vpmultishiftqb512", + "avx512.psad.bw.512" => "__builtin_ia32_psadbw512", + "avx512.pshuf.b.512" => "__builtin_ia32_pshufb512", + "avx512.psll.d.512" => "__builtin_ia32_pslld512", + "avx512.psll.dq" => "__builtin_ia32_pslldqi512", + "avx512.psll.dq.bs" => "__builtin_ia32_pslldqi512_byteshift", + "avx512.psll.q.512" => "__builtin_ia32_psllq512", + "avx512.psll.w.512" => "__builtin_ia32_psllw512", + "avx512.pslli.d.512" => "__builtin_ia32_pslldi512", + "avx512.pslli.q.512" => "__builtin_ia32_psllqi512", + "avx512.pslli.w.512" => "__builtin_ia32_psllwi512", + "avx512.psllv.d.512" => "__builtin_ia32_psllv16si", + "avx512.psllv.q.512" => "__builtin_ia32_psllv8di", + "avx512.psllv.w.128" => "__builtin_ia32_psllv8hi", + "avx512.psllv.w.256" => "__builtin_ia32_psllv16hi", + "avx512.psllv.w.512" => "__builtin_ia32_psllv32hi", + "avx512.psra.d.512" => "__builtin_ia32_psrad512", + "avx512.psra.q.128" => "__builtin_ia32_psraq128", + "avx512.psra.q.256" => "__builtin_ia32_psraq256", + "avx512.psra.q.512" => "__builtin_ia32_psraq512", + "avx512.psra.w.512" => "__builtin_ia32_psraw512", + "avx512.psrai.d.512" => "__builtin_ia32_psradi512", + "avx512.psrai.q.128" => "__builtin_ia32_psraqi128", + "avx512.psrai.q.256" => "__builtin_ia32_psraqi256", + "avx512.psrai.q.512" => "__builtin_ia32_psraqi512", + "avx512.psrai.w.512" => "__builtin_ia32_psrawi512", + "avx512.psrav.d.512" => "__builtin_ia32_psrav16si", + "avx512.psrav.q.128" => "__builtin_ia32_psravq128", + "avx512.psrav.q.256" => "__builtin_ia32_psravq256", + "avx512.psrav.q.512" => "__builtin_ia32_psrav8di", + "avx512.psrav.w.128" => "__builtin_ia32_psrav8hi", + "avx512.psrav.w.256" => "__builtin_ia32_psrav16hi", + "avx512.psrav.w.512" => "__builtin_ia32_psrav32hi", + "avx512.psrl.d.512" => "__builtin_ia32_psrld512", + "avx512.psrl.dq" => "__builtin_ia32_psrldqi512", + "avx512.psrl.dq.bs" => "__builtin_ia32_psrldqi512_byteshift", + "avx512.psrl.q.512" => "__builtin_ia32_psrlq512", + "avx512.psrl.w.512" => "__builtin_ia32_psrlw512", + "avx512.psrli.d.512" => "__builtin_ia32_psrldi512", + "avx512.psrli.q.512" => "__builtin_ia32_psrlqi512", + "avx512.psrli.w.512" => "__builtin_ia32_psrlwi512", + "avx512.psrlv.d.512" => "__builtin_ia32_psrlv16si", + "avx512.psrlv.q.512" => "__builtin_ia32_psrlv8di", + "avx512.psrlv.w.128" => "__builtin_ia32_psrlv8hi", + "avx512.psrlv.w.256" => "__builtin_ia32_psrlv16hi", + "avx512.psrlv.w.512" => "__builtin_ia32_psrlv32hi", + "avx512.pternlog.d.128" => "__builtin_ia32_pternlogd128", + "avx512.pternlog.d.256" => "__builtin_ia32_pternlogd256", + "avx512.pternlog.d.512" => "__builtin_ia32_pternlogd512", + "avx512.pternlog.q.128" => "__builtin_ia32_pternlogq128", + "avx512.pternlog.q.256" => "__builtin_ia32_pternlogq256", + "avx512.pternlog.q.512" => "__builtin_ia32_pternlogq512", + "avx512.ptestm.b.128" => "__builtin_ia32_ptestmb128", + "avx512.ptestm.b.256" => "__builtin_ia32_ptestmb256", + "avx512.ptestm.b.512" => "__builtin_ia32_ptestmb512", + "avx512.ptestm.d.128" => "__builtin_ia32_ptestmd128", + "avx512.ptestm.d.256" => "__builtin_ia32_ptestmd256", + "avx512.ptestm.d.512" => 
"__builtin_ia32_ptestmd512", + "avx512.ptestm.q.128" => "__builtin_ia32_ptestmq128", + "avx512.ptestm.q.256" => "__builtin_ia32_ptestmq256", + "avx512.ptestm.q.512" => "__builtin_ia32_ptestmq512", + "avx512.ptestm.w.128" => "__builtin_ia32_ptestmw128", + "avx512.ptestm.w.256" => "__builtin_ia32_ptestmw256", + "avx512.ptestm.w.512" => "__builtin_ia32_ptestmw512", + "avx512.ptestnm.b.128" => "__builtin_ia32_ptestnmb128", + "avx512.ptestnm.b.256" => "__builtin_ia32_ptestnmb256", + "avx512.ptestnm.b.512" => "__builtin_ia32_ptestnmb512", + "avx512.ptestnm.d.128" => "__builtin_ia32_ptestnmd128", + "avx512.ptestnm.d.256" => "__builtin_ia32_ptestnmd256", + "avx512.ptestnm.d.512" => "__builtin_ia32_ptestnmd512", + "avx512.ptestnm.q.128" => "__builtin_ia32_ptestnmq128", + "avx512.ptestnm.q.256" => "__builtin_ia32_ptestnmq256", + "avx512.ptestnm.q.512" => "__builtin_ia32_ptestnmq512", + "avx512.ptestnm.w.128" => "__builtin_ia32_ptestnmw128", + "avx512.ptestnm.w.256" => "__builtin_ia32_ptestnmw256", + "avx512.ptestnm.w.512" => "__builtin_ia32_ptestnmw512", + "avx512.rcp14.pd.128" => "__builtin_ia32_rcp14pd128_mask", + "avx512.rcp14.pd.256" => "__builtin_ia32_rcp14pd256_mask", + "avx512.rcp14.pd.512" => "__builtin_ia32_rcp14pd512_mask", + "avx512.rcp14.ps.128" => "__builtin_ia32_rcp14ps128_mask", + "avx512.rcp14.ps.256" => "__builtin_ia32_rcp14ps256_mask", + "avx512.rcp14.ps.512" => "__builtin_ia32_rcp14ps512_mask", + "avx512.rcp14.sd" => "__builtin_ia32_rcp14sd_mask", + "avx512.rcp14.ss" => "__builtin_ia32_rcp14ss_mask", + "avx512.rcp28.pd" => "__builtin_ia32_rcp28pd_mask", + "avx512.rcp28.ps" => "__builtin_ia32_rcp28ps_mask", + "avx512.rcp28.sd" => "__builtin_ia32_rcp28sd_mask", + // [DUPLICATE]: "avx512.rcp28.sd" => "__builtin_ia32_rcp28sd_round_mask", + "avx512.rcp28.ss" => "__builtin_ia32_rcp28ss_mask", + // [DUPLICATE]: "avx512.rcp28.ss" => "__builtin_ia32_rcp28ss_round_mask", + "avx512.rndscale.sd" => "__builtin_ia32_rndscalesd", + "avx512.rndscale.ss" => "__builtin_ia32_rndscaless", + "avx512.rsqrt14.pd.128" => "__builtin_ia32_rsqrt14pd128_mask", + "avx512.rsqrt14.pd.256" => "__builtin_ia32_rsqrt14pd256_mask", + "avx512.rsqrt14.pd.512" => "__builtin_ia32_rsqrt14pd512_mask", + "avx512.rsqrt14.ps.128" => "__builtin_ia32_rsqrt14ps128_mask", + "avx512.rsqrt14.ps.256" => "__builtin_ia32_rsqrt14ps256_mask", + "avx512.rsqrt14.ps.512" => "__builtin_ia32_rsqrt14ps512_mask", + "avx512.rsqrt14.sd" => "__builtin_ia32_rsqrt14sd_mask", + "avx512.rsqrt14.ss" => "__builtin_ia32_rsqrt14ss_mask", + "avx512.rsqrt28.pd" => "__builtin_ia32_rsqrt28pd_mask", + "avx512.rsqrt28.ps" => "__builtin_ia32_rsqrt28ps_mask", + "avx512.rsqrt28.sd" => "__builtin_ia32_rsqrt28sd_mask", + // [DUPLICATE]: "avx512.rsqrt28.sd" => "__builtin_ia32_rsqrt28sd_round_mask", + "avx512.rsqrt28.ss" => "__builtin_ia32_rsqrt28ss_mask", + // [DUPLICATE]: "avx512.rsqrt28.ss" => "__builtin_ia32_rsqrt28ss_round_mask", + "avx512.scatter.dpd.512" => "__builtin_ia32_scattersiv8df", + "avx512.scatter.dpi.512" => "__builtin_ia32_scattersiv16si", + "avx512.scatter.dpq.512" => "__builtin_ia32_scattersiv8di", + "avx512.scatter.dps.512" => "__builtin_ia32_scattersiv16sf", + "avx512.scatter.qpd.512" => "__builtin_ia32_scatterdiv8df", + "avx512.scatter.qpi.512" => "__builtin_ia32_scatterdiv16si", + "avx512.scatter.qpq.512" => "__builtin_ia32_scatterdiv8di", + "avx512.scatter.qps.512" => "__builtin_ia32_scatterdiv16sf", + "avx512.scatterdiv2.df" => "__builtin_ia32_scatterdiv2df", + "avx512.scatterdiv2.di" => "__builtin_ia32_scatterdiv2di", + 
"avx512.scatterdiv4.df" => "__builtin_ia32_scatterdiv4df", + "avx512.scatterdiv4.di" => "__builtin_ia32_scatterdiv4di", + "avx512.scatterdiv4.sf" => "__builtin_ia32_scatterdiv4sf", + "avx512.scatterdiv4.si" => "__builtin_ia32_scatterdiv4si", + "avx512.scatterdiv8.sf" => "__builtin_ia32_scatterdiv8sf", + "avx512.scatterdiv8.si" => "__builtin_ia32_scatterdiv8si", + "avx512.scatterpf.dpd.512" => "__builtin_ia32_scatterpfdpd", + "avx512.scatterpf.dps.512" => "__builtin_ia32_scatterpfdps", + "avx512.scatterpf.qpd.512" => "__builtin_ia32_scatterpfqpd", + "avx512.scatterpf.qps.512" => "__builtin_ia32_scatterpfqps", + "avx512.scattersiv2.df" => "__builtin_ia32_scattersiv2df", + "avx512.scattersiv2.di" => "__builtin_ia32_scattersiv2di", + "avx512.scattersiv4.df" => "__builtin_ia32_scattersiv4df", + "avx512.scattersiv4.di" => "__builtin_ia32_scattersiv4di", + "avx512.scattersiv4.sf" => "__builtin_ia32_scattersiv4sf", + "avx512.scattersiv4.si" => "__builtin_ia32_scattersiv4si", + "avx512.scattersiv8.sf" => "__builtin_ia32_scattersiv8sf", + "avx512.scattersiv8.si" => "__builtin_ia32_scattersiv8si", + "avx512.sqrt.pd.512" => "__builtin_ia32_sqrtpd512_mask", + "avx512.sqrt.ps.512" => "__builtin_ia32_sqrtps512_mask", + "avx512.sqrt.sd" => "__builtin_ia32_sqrtrndsd", + "avx512.sqrt.ss" => "__builtin_ia32_sqrtrndss", + "avx512.sub.pd.512" => "__builtin_ia32_subpd512", + "avx512.sub.ps.512" => "__builtin_ia32_subps512", + "avx512.vbroadcast.sd.512" => "__builtin_ia32_vbroadcastsd512", + "avx512.vbroadcast.sd.pd.512" => "__builtin_ia32_vbroadcastsd_pd512", + "avx512.vbroadcast.ss.512" => "__builtin_ia32_vbroadcastss512", + "avx512.vbroadcast.ss.ps.512" => "__builtin_ia32_vbroadcastss_ps512", + "avx512.vcomi.sd" => "__builtin_ia32_vcomisd", + "avx512.vcomi.ss" => "__builtin_ia32_vcomiss", + "avx512.vcvtsd2si32" => "__builtin_ia32_vcvtsd2si32", + "avx512.vcvtsd2si64" => "__builtin_ia32_vcvtsd2si64", + "avx512.vcvtsd2usi32" => "__builtin_ia32_vcvtsd2usi32", + "avx512.vcvtsd2usi64" => "__builtin_ia32_vcvtsd2usi64", + "avx512.vcvtss2si32" => "__builtin_ia32_vcvtss2si32", + "avx512.vcvtss2si64" => "__builtin_ia32_vcvtss2si64", + "avx512.vcvtss2usi32" => "__builtin_ia32_vcvtss2usi32", + "avx512.vcvtss2usi64" => "__builtin_ia32_vcvtss2usi64", + "avx512.vpdpbusd.128" => "__builtin_ia32_vpdpbusd128", + "avx512.vpdpbusd.256" => "__builtin_ia32_vpdpbusd256", + "avx512.vpdpbusd.512" => "__builtin_ia32_vpdpbusd512", + "avx512.vpdpbusds.128" => "__builtin_ia32_vpdpbusds128", + "avx512.vpdpbusds.256" => "__builtin_ia32_vpdpbusds256", + "avx512.vpdpbusds.512" => "__builtin_ia32_vpdpbusds512", + "avx512.vpdpwssd.128" => "__builtin_ia32_vpdpwssd128", + "avx512.vpdpwssd.256" => "__builtin_ia32_vpdpwssd256", + "avx512.vpdpwssd.512" => "__builtin_ia32_vpdpwssd512", + "avx512.vpdpwssds.128" => "__builtin_ia32_vpdpwssds128", + "avx512.vpdpwssds.256" => "__builtin_ia32_vpdpwssds256", + "avx512.vpdpwssds.512" => "__builtin_ia32_vpdpwssds512", + "avx512.vpermi2var.d.128" => "__builtin_ia32_vpermi2vard128", + "avx512.vpermi2var.d.256" => "__builtin_ia32_vpermi2vard256", + "avx512.vpermi2var.d.512" => "__builtin_ia32_vpermi2vard512", + "avx512.vpermi2var.hi.128" => "__builtin_ia32_vpermi2varhi128", + "avx512.vpermi2var.hi.256" => "__builtin_ia32_vpermi2varhi256", + "avx512.vpermi2var.hi.512" => "__builtin_ia32_vpermi2varhi512", + "avx512.vpermi2var.pd.128" => "__builtin_ia32_vpermi2varpd128", + "avx512.vpermi2var.pd.256" => "__builtin_ia32_vpermi2varpd256", + "avx512.vpermi2var.pd.512" => "__builtin_ia32_vpermi2varpd512", + 
"avx512.vpermi2var.ps.128" => "__builtin_ia32_vpermi2varps128", + "avx512.vpermi2var.ps.256" => "__builtin_ia32_vpermi2varps256", + "avx512.vpermi2var.ps.512" => "__builtin_ia32_vpermi2varps512", + "avx512.vpermi2var.q.128" => "__builtin_ia32_vpermi2varq128", + "avx512.vpermi2var.q.256" => "__builtin_ia32_vpermi2varq256", + "avx512.vpermi2var.q.512" => "__builtin_ia32_vpermi2varq512", + "avx512.vpermi2var.qi.128" => "__builtin_ia32_vpermi2varqi128", + "avx512.vpermi2var.qi.256" => "__builtin_ia32_vpermi2varqi256", + "avx512.vpermi2var.qi.512" => "__builtin_ia32_vpermi2varqi512", + "avx512.vpermilvar.pd.512" => "__builtin_ia32_vpermilvarpd512", + "avx512.vpermilvar.ps.512" => "__builtin_ia32_vpermilvarps512", + "avx512.vpmadd52h.uq.128" => "__builtin_ia32_vpmadd52huq128", + "avx512.vpmadd52h.uq.256" => "__builtin_ia32_vpmadd52huq256", + "avx512.vpmadd52h.uq.512" => "__builtin_ia32_vpmadd52huq512", + "avx512.vpmadd52l.uq.128" => "__builtin_ia32_vpmadd52luq128", + "avx512.vpmadd52l.uq.256" => "__builtin_ia32_vpmadd52luq256", + "avx512.vpmadd52l.uq.512" => "__builtin_ia32_vpmadd52luq512", + "avx512bf16.cvtne2ps2bf16.128" => "__builtin_ia32_cvtne2ps2bf16_128", + "avx512bf16.cvtne2ps2bf16.256" => "__builtin_ia32_cvtne2ps2bf16_256", + "avx512bf16.cvtne2ps2bf16.512" => "__builtin_ia32_cvtne2ps2bf16_512", + "avx512bf16.cvtneps2bf16.256" => "__builtin_ia32_cvtneps2bf16_256", + "avx512bf16.cvtneps2bf16.512" => "__builtin_ia32_cvtneps2bf16_512", + "avx512bf16.dpbf16ps.128" => "__builtin_ia32_dpbf16ps_128", + "avx512bf16.dpbf16ps.256" => "__builtin_ia32_dpbf16ps_256", + "avx512bf16.dpbf16ps.512" => "__builtin_ia32_dpbf16ps_512", + "avx512fp16.add.ph.512" => "__builtin_ia32_addph512", + "avx512fp16.div.ph.512" => "__builtin_ia32_divph512", + // [INVALID CONVERSION]: "avx512fp16.mask.add.sh.round" => "__builtin_ia32_addsh_round_mask", + "avx512fp16.mask.cmp.sh" => "__builtin_ia32_cmpsh_mask", + // [INVALID CONVERSION]: "avx512fp16.mask.div.sh.round" => "__builtin_ia32_divsh_round_mask", + "avx512fp16.mask.fpclass.sh" => "__builtin_ia32_fpclasssh_mask", + "avx512fp16.mask.getexp.ph.128" => "__builtin_ia32_getexpph128_mask", + "avx512fp16.mask.getexp.ph.256" => "__builtin_ia32_getexpph256_mask", + "avx512fp16.mask.getexp.ph.512" => "__builtin_ia32_getexpph512_mask", + // [INVALID CONVERSION]: "avx512fp16.mask.getexp.sh" => "__builtin_ia32_getexpsh128_round_mask", + "avx512fp16.mask.getmant.ph.128" => "__builtin_ia32_getmantph128_mask", + "avx512fp16.mask.getmant.ph.256" => "__builtin_ia32_getmantph256_mask", + "avx512fp16.mask.getmant.ph.512" => "__builtin_ia32_getmantph512_mask", + // [INVALID CONVERSION]: "avx512fp16.mask.getmant.sh" => "__builtin_ia32_getmantsh_round_mask", + // [INVALID CONVERSION]: "avx512fp16.mask.max.sh.round" => "__builtin_ia32_maxsh_round_mask", + // [INVALID CONVERSION]: "avx512fp16.mask.min.sh.round" => "__builtin_ia32_minsh_round_mask", + // [INVALID CONVERSION]: "avx512fp16.mask.mul.sh.round" => "__builtin_ia32_mulsh_round_mask", + "avx512fp16.mask.rcp.ph.128" => "__builtin_ia32_rcpph128_mask", + "avx512fp16.mask.rcp.ph.256" => "__builtin_ia32_rcpph256_mask", + "avx512fp16.mask.rcp.ph.512" => "__builtin_ia32_rcpph512_mask", + "avx512fp16.mask.rcp.sh" => "__builtin_ia32_rcpsh_mask", + "avx512fp16.mask.reduce.ph.128" => "__builtin_ia32_reduceph128_mask", + "avx512fp16.mask.reduce.ph.256" => "__builtin_ia32_reduceph256_mask", + "avx512fp16.mask.reduce.ph.512" => "__builtin_ia32_reduceph512_mask", + "avx512fp16.mask.reduce.sh" => "__builtin_ia32_reducesh_mask", + 
"avx512fp16.mask.rndscale.ph.128" => "__builtin_ia32_rndscaleph_128_mask", + "avx512fp16.mask.rndscale.ph.256" => "__builtin_ia32_rndscaleph_256_mask", + "avx512fp16.mask.rndscale.ph.512" => "__builtin_ia32_rndscaleph_mask", + // [INVALID CONVERSION]: "avx512fp16.mask.rndscale.sh" => "__builtin_ia32_rndscalesh_round_mask", + "avx512fp16.mask.rsqrt.ph.128" => "__builtin_ia32_rsqrtph128_mask", + "avx512fp16.mask.rsqrt.ph.256" => "__builtin_ia32_rsqrtph256_mask", + "avx512fp16.mask.rsqrt.ph.512" => "__builtin_ia32_rsqrtph512_mask", + "avx512fp16.mask.rsqrt.sh" => "__builtin_ia32_rsqrtsh_mask", + "avx512fp16.mask.scalef.ph.128" => "__builtin_ia32_scalefph128_mask", + "avx512fp16.mask.scalef.ph.256" => "__builtin_ia32_scalefph256_mask", + "avx512fp16.mask.scalef.ph.512" => "__builtin_ia32_scalefph512_mask", + // [INVALID CONVERSION]: "avx512fp16.mask.scalef.sh" => "__builtin_ia32_scalefsh_round_mask", + // [INVALID CONVERSION]: "avx512fp16.mask.sub.sh.round" => "__builtin_ia32_subsh_round_mask", + "avx512fp16.mask.vcvtdq2ph.128" => "__builtin_ia32_vcvtdq2ph128_mask", + "avx512fp16.mask.vcvtpd2ph.128" => "__builtin_ia32_vcvtpd2ph128_mask", + "avx512fp16.mask.vcvtpd2ph.256" => "__builtin_ia32_vcvtpd2ph256_mask", + "avx512fp16.mask.vcvtpd2ph.512" => "__builtin_ia32_vcvtpd2ph512_mask", + "avx512fp16.mask.vcvtph2dq.128" => "__builtin_ia32_vcvtph2dq128_mask", + "avx512fp16.mask.vcvtph2dq.256" => "__builtin_ia32_vcvtph2dq256_mask", + "avx512fp16.mask.vcvtph2dq.512" => "__builtin_ia32_vcvtph2dq512_mask", + "avx512fp16.mask.vcvtph2pd.128" => "__builtin_ia32_vcvtph2pd128_mask", + "avx512fp16.mask.vcvtph2pd.256" => "__builtin_ia32_vcvtph2pd256_mask", + "avx512fp16.mask.vcvtph2pd.512" => "__builtin_ia32_vcvtph2pd512_mask", + "avx512fp16.mask.vcvtph2psx.128" => "__builtin_ia32_vcvtph2psx128_mask", + "avx512fp16.mask.vcvtph2psx.256" => "__builtin_ia32_vcvtph2psx256_mask", + "avx512fp16.mask.vcvtph2psx.512" => "__builtin_ia32_vcvtph2psx512_mask", + "avx512fp16.mask.vcvtph2qq.128" => "__builtin_ia32_vcvtph2qq128_mask", + "avx512fp16.mask.vcvtph2qq.256" => "__builtin_ia32_vcvtph2qq256_mask", + "avx512fp16.mask.vcvtph2qq.512" => "__builtin_ia32_vcvtph2qq512_mask", + "avx512fp16.mask.vcvtph2udq.128" => "__builtin_ia32_vcvtph2udq128_mask", + "avx512fp16.mask.vcvtph2udq.256" => "__builtin_ia32_vcvtph2udq256_mask", + "avx512fp16.mask.vcvtph2udq.512" => "__builtin_ia32_vcvtph2udq512_mask", + "avx512fp16.mask.vcvtph2uqq.128" => "__builtin_ia32_vcvtph2uqq128_mask", + "avx512fp16.mask.vcvtph2uqq.256" => "__builtin_ia32_vcvtph2uqq256_mask", + "avx512fp16.mask.vcvtph2uqq.512" => "__builtin_ia32_vcvtph2uqq512_mask", + "avx512fp16.mask.vcvtph2uw.128" => "__builtin_ia32_vcvtph2uw128_mask", + "avx512fp16.mask.vcvtph2uw.256" => "__builtin_ia32_vcvtph2uw256_mask", + "avx512fp16.mask.vcvtph2uw.512" => "__builtin_ia32_vcvtph2uw512_mask", + "avx512fp16.mask.vcvtph2w.128" => "__builtin_ia32_vcvtph2w128_mask", + "avx512fp16.mask.vcvtph2w.256" => "__builtin_ia32_vcvtph2w256_mask", + "avx512fp16.mask.vcvtph2w.512" => "__builtin_ia32_vcvtph2w512_mask", + "avx512fp16.mask.vcvtps2phx.128" => "__builtin_ia32_vcvtps2phx128_mask", + "avx512fp16.mask.vcvtps2phx.256" => "__builtin_ia32_vcvtps2phx256_mask", + "avx512fp16.mask.vcvtps2phx.512" => "__builtin_ia32_vcvtps2phx512_mask", + "avx512fp16.mask.vcvtqq2ph.128" => "__builtin_ia32_vcvtqq2ph128_mask", + "avx512fp16.mask.vcvtqq2ph.256" => "__builtin_ia32_vcvtqq2ph256_mask", + // [INVALID CONVERSION]: "avx512fp16.mask.vcvtsd2sh.round" => "__builtin_ia32_vcvtsd2sh_round_mask", + // [INVALID 
CONVERSION]: "avx512fp16.mask.vcvtsh2sd.round" => "__builtin_ia32_vcvtsh2sd_round_mask", + // [INVALID CONVERSION]: "avx512fp16.mask.vcvtsh2ss.round" => "__builtin_ia32_vcvtsh2ss_round_mask", + // [INVALID CONVERSION]: "avx512fp16.mask.vcvtss2sh.round" => "__builtin_ia32_vcvtss2sh_round_mask", + "avx512fp16.mask.vcvttph2dq.128" => "__builtin_ia32_vcvttph2dq128_mask", + "avx512fp16.mask.vcvttph2dq.256" => "__builtin_ia32_vcvttph2dq256_mask", + "avx512fp16.mask.vcvttph2dq.512" => "__builtin_ia32_vcvttph2dq512_mask", + "avx512fp16.mask.vcvttph2qq.128" => "__builtin_ia32_vcvttph2qq128_mask", + "avx512fp16.mask.vcvttph2qq.256" => "__builtin_ia32_vcvttph2qq256_mask", + "avx512fp16.mask.vcvttph2qq.512" => "__builtin_ia32_vcvttph2qq512_mask", + "avx512fp16.mask.vcvttph2udq.128" => "__builtin_ia32_vcvttph2udq128_mask", + "avx512fp16.mask.vcvttph2udq.256" => "__builtin_ia32_vcvttph2udq256_mask", + "avx512fp16.mask.vcvttph2udq.512" => "__builtin_ia32_vcvttph2udq512_mask", + "avx512fp16.mask.vcvttph2uqq.128" => "__builtin_ia32_vcvttph2uqq128_mask", + "avx512fp16.mask.vcvttph2uqq.256" => "__builtin_ia32_vcvttph2uqq256_mask", + "avx512fp16.mask.vcvttph2uqq.512" => "__builtin_ia32_vcvttph2uqq512_mask", + "avx512fp16.mask.vcvttph2uw.128" => "__builtin_ia32_vcvttph2uw128_mask", + "avx512fp16.mask.vcvttph2uw.256" => "__builtin_ia32_vcvttph2uw256_mask", + "avx512fp16.mask.vcvttph2uw.512" => "__builtin_ia32_vcvttph2uw512_mask", + "avx512fp16.mask.vcvttph2w.128" => "__builtin_ia32_vcvttph2w128_mask", + "avx512fp16.mask.vcvttph2w.256" => "__builtin_ia32_vcvttph2w256_mask", + "avx512fp16.mask.vcvttph2w.512" => "__builtin_ia32_vcvttph2w512_mask", + "avx512fp16.mask.vcvtudq2ph.128" => "__builtin_ia32_vcvtudq2ph128_mask", + "avx512fp16.mask.vcvtuqq2ph.128" => "__builtin_ia32_vcvtuqq2ph128_mask", + "avx512fp16.mask.vcvtuqq2ph.256" => "__builtin_ia32_vcvtuqq2ph256_mask", + "avx512fp16.mask.vfcmadd.cph.128" => "__builtin_ia32_vfcmaddcph128_mask", + "avx512fp16.mask.vfcmadd.cph.256" => "__builtin_ia32_vfcmaddcph256_mask", + "avx512fp16.mask.vfcmadd.cph.512" => "__builtin_ia32_vfcmaddcph512_mask3", + "avx512fp16.mask.vfcmadd.csh" => "__builtin_ia32_vfcmaddcsh_mask", + "avx512fp16.mask.vfcmul.cph.128" => "__builtin_ia32_vfcmulcph128_mask", + "avx512fp16.mask.vfcmul.cph.256" => "__builtin_ia32_vfcmulcph256_mask", + "avx512fp16.mask.vfcmul.cph.512" => "__builtin_ia32_vfcmulcph512_mask", + "avx512fp16.mask.vfcmul.csh" => "__builtin_ia32_vfcmulcsh_mask", + "avx512fp16.mask.vfmadd.cph.128" => "__builtin_ia32_vfmaddcph128_mask", + "avx512fp16.mask.vfmadd.cph.256" => "__builtin_ia32_vfmaddcph256_mask", + "avx512fp16.mask.vfmadd.cph.512" => "__builtin_ia32_vfmaddcph512_mask3", + "avx512fp16.mask.vfmadd.csh" => "__builtin_ia32_vfmaddcsh_mask", + "avx512fp16.mask.vfmul.cph.128" => "__builtin_ia32_vfmulcph128_mask", + "avx512fp16.mask.vfmul.cph.256" => "__builtin_ia32_vfmulcph256_mask", + "avx512fp16.mask.vfmul.cph.512" => "__builtin_ia32_vfmulcph512_mask", + "avx512fp16.mask.vfmul.csh" => "__builtin_ia32_vfmulcsh_mask", + "avx512fp16.maskz.vfcmadd.cph.128" => "__builtin_ia32_vfcmaddcph128_maskz", + "avx512fp16.maskz.vfcmadd.cph.256" => "__builtin_ia32_vfcmaddcph256_maskz", + "avx512fp16.maskz.vfcmadd.cph.512" => "__builtin_ia32_vfcmaddcph512_maskz", + "avx512fp16.maskz.vfcmadd.csh" => "__builtin_ia32_vfcmaddcsh_maskz", + "avx512fp16.maskz.vfmadd.cph.128" => "__builtin_ia32_vfmaddcph128_maskz", + "avx512fp16.maskz.vfmadd.cph.256" => "__builtin_ia32_vfmaddcph256_maskz", + "avx512fp16.maskz.vfmadd.cph.512" => 
"__builtin_ia32_vfmaddcph512_maskz", + "avx512fp16.maskz.vfmadd.csh" => "__builtin_ia32_vfmaddcsh_maskz", + "avx512fp16.max.ph.128" => "__builtin_ia32_maxph128", + "avx512fp16.max.ph.256" => "__builtin_ia32_maxph256", + "avx512fp16.max.ph.512" => "__builtin_ia32_maxph512", + "avx512fp16.min.ph.128" => "__builtin_ia32_minph128", + "avx512fp16.min.ph.256" => "__builtin_ia32_minph256", + "avx512fp16.min.ph.512" => "__builtin_ia32_minph512", + "avx512fp16.mul.ph.512" => "__builtin_ia32_mulph512", + "avx512fp16.sub.ph.512" => "__builtin_ia32_subph512", + "avx512fp16.vcomi.sh" => "__builtin_ia32_vcomish", + "avx512fp16.vcvtsh2si32" => "__builtin_ia32_vcvtsh2si32", + "avx512fp16.vcvtsh2si64" => "__builtin_ia32_vcvtsh2si64", + "avx512fp16.vcvtsh2usi32" => "__builtin_ia32_vcvtsh2usi32", + "avx512fp16.vcvtsh2usi64" => "__builtin_ia32_vcvtsh2usi64", + "avx512fp16.vcvtsi2sh" => "__builtin_ia32_vcvtsi2sh", + "avx512fp16.vcvtsi642sh" => "__builtin_ia32_vcvtsi642sh", + "avx512fp16.vcvttsh2si32" => "__builtin_ia32_vcvttsh2si32", + "avx512fp16.vcvttsh2si64" => "__builtin_ia32_vcvttsh2si64", + "avx512fp16.vcvttsh2usi32" => "__builtin_ia32_vcvttsh2usi32", + "avx512fp16.vcvttsh2usi64" => "__builtin_ia32_vcvttsh2usi64", + "avx512fp16.vcvtusi2sh" => "__builtin_ia32_vcvtusi2sh", + "avx512fp16.vcvtusi642sh" => "__builtin_ia32_vcvtusi642sh", + "avx512fp16.vfmaddsub.ph.128" => "__builtin_ia32_vfmaddsubph", + "avx512fp16.vfmaddsub.ph.256" => "__builtin_ia32_vfmaddsubph256", + "axor32" => "__builtin_ia32_axor32", + "axor64" => "__builtin_ia32_axor64", + "bmi.bextr.32" => "__builtin_ia32_bextr_u32", + "bmi.bextr.64" => "__builtin_ia32_bextr_u64", + "bmi.bzhi.32" => "__builtin_ia32_bzhi_si", + "bmi.bzhi.64" => "__builtin_ia32_bzhi_di", + "bmi.pdep.32" => "__builtin_ia32_pdep_si", + "bmi.pdep.64" => "__builtin_ia32_pdep_di", + "bmi.pext.32" => "__builtin_ia32_pext_si", + "bmi.pext.64" => "__builtin_ia32_pext_di", + "cldemote" => "__builtin_ia32_cldemote", + "clflushopt" => "__builtin_ia32_clflushopt", + "clrssbsy" => "__builtin_ia32_clrssbsy", + "clui" => "__builtin_ia32_clui", + "clwb" => "__builtin_ia32_clwb", + "clzero" => "__builtin_ia32_clzero", + "cmpccxadd32" => "__builtin_ia32_cmpccxadd32", + "cmpccxadd64" => "__builtin_ia32_cmpccxadd64", + "directstore32" => "__builtin_ia32_directstore_u32", + "directstore64" => "__builtin_ia32_directstore_u64", + "enqcmd" => "__builtin_ia32_enqcmd", + "enqcmds" => "__builtin_ia32_enqcmds", + "flags.read.u32" => "__builtin_ia32_readeflags_u32", + "flags.read.u64" => "__builtin_ia32_readeflags_u64", + "flags.write.u32" => "__builtin_ia32_writeeflags_u32", + "flags.write.u64" => "__builtin_ia32_writeeflags_u64", + "fma.mask.vfmadd.pd.512" => "__builtin_ia32_vfmaddpd512_mask", + "fma.mask.vfmadd.ps.512" => "__builtin_ia32_vfmaddps512_mask", + "fma.mask.vfmaddsub.pd.512" => "__builtin_ia32_vfmaddsubpd512_mask", + "fma.mask.vfmaddsub.ps.512" => "__builtin_ia32_vfmaddsubps512_mask", + "fma.mask.vfmsub.pd.512" => "__builtin_ia32_vfmsubpd512_mask", + "fma.mask.vfmsub.ps.512" => "__builtin_ia32_vfmsubps512_mask", + "fma.mask.vfmsubadd.pd.512" => "__builtin_ia32_vfmsubaddpd512_mask", + "fma.mask.vfmsubadd.ps.512" => "__builtin_ia32_vfmsubaddps512_mask", + "fma.mask.vfnmadd.pd.512" => "__builtin_ia32_vfnmaddpd512_mask", + "fma.mask.vfnmadd.ps.512" => "__builtin_ia32_vfnmaddps512_mask", + "fma.mask.vfnmsub.pd.512" => "__builtin_ia32_vfnmsubpd512_mask", + "fma.mask.vfnmsub.ps.512" => "__builtin_ia32_vfnmsubps512_mask", + "fma.vfmadd.pd" => "__builtin_ia32_vfmaddpd", + "fma.vfmadd.pd.256" => 
"__builtin_ia32_vfmaddpd256", + "fma.vfmadd.ps" => "__builtin_ia32_vfmaddps", + "fma.vfmadd.ps.256" => "__builtin_ia32_vfmaddps256", + "fma.vfmadd.sd" => "__builtin_ia32_vfmaddsd", + "fma.vfmadd.ss" => "__builtin_ia32_vfmaddss", + "fma.vfmaddsub.pd" => "__builtin_ia32_vfmaddsubpd", + "fma.vfmaddsub.pd.256" => "__builtin_ia32_vfmaddsubpd256", + "fma.vfmaddsub.ps" => "__builtin_ia32_vfmaddsubps", + "fma.vfmaddsub.ps.256" => "__builtin_ia32_vfmaddsubps256", + "fma.vfmsub.pd" => "__builtin_ia32_vfmsubpd", + "fma.vfmsub.pd.256" => "__builtin_ia32_vfmsubpd256", + "fma.vfmsub.ps" => "__builtin_ia32_vfmsubps", + "fma.vfmsub.ps.256" => "__builtin_ia32_vfmsubps256", + "fma.vfmsub.sd" => "__builtin_ia32_vfmsubsd", + "fma.vfmsub.ss" => "__builtin_ia32_vfmsubss", + "fma.vfmsubadd.pd" => "__builtin_ia32_vfmsubaddpd", + "fma.vfmsubadd.pd.256" => "__builtin_ia32_vfmsubaddpd256", + "fma.vfmsubadd.ps" => "__builtin_ia32_vfmsubaddps", + "fma.vfmsubadd.ps.256" => "__builtin_ia32_vfmsubaddps256", + "fma.vfnmadd.pd" => "__builtin_ia32_vfnmaddpd", + "fma.vfnmadd.pd.256" => "__builtin_ia32_vfnmaddpd256", + "fma.vfnmadd.ps" => "__builtin_ia32_vfnmaddps", + "fma.vfnmadd.ps.256" => "__builtin_ia32_vfnmaddps256", + "fma.vfnmadd.sd" => "__builtin_ia32_vfnmaddsd", + "fma.vfnmadd.ss" => "__builtin_ia32_vfnmaddss", + "fma.vfnmsub.pd" => "__builtin_ia32_vfnmsubpd", + "fma.vfnmsub.pd.256" => "__builtin_ia32_vfnmsubpd256", + "fma.vfnmsub.ps" => "__builtin_ia32_vfnmsubps", + "fma.vfnmsub.ps.256" => "__builtin_ia32_vfnmsubps256", + "fma.vfnmsub.sd" => "__builtin_ia32_vfnmsubsd", + "fma.vfnmsub.ss" => "__builtin_ia32_vfnmsubss", + "fxrstor" => "__builtin_ia32_fxrstor", + "fxrstor64" => "__builtin_ia32_fxrstor64", + "fxsave" => "__builtin_ia32_fxsave", + "fxsave64" => "__builtin_ia32_fxsave64", + "incsspd" => "__builtin_ia32_incsspd", + "incsspq" => "__builtin_ia32_incsspq", + "invpcid" => "__builtin_ia32_invpcid", + "ldtilecfg" => "__builtin_ia32_tile_loadconfig", + "ldtilecfg.internal" => "__builtin_ia32_tile_loadconfig_internal", + "llwpcb" => "__builtin_ia32_llwpcb", + "loadiwkey" => "__builtin_ia32_loadiwkey", + "lwpins32" => "__builtin_ia32_lwpins32", + "lwpins64" => "__builtin_ia32_lwpins64", + "lwpval32" => "__builtin_ia32_lwpval32", + "lwpval64" => "__builtin_ia32_lwpval64", + "mmx.emms" => "__builtin_ia32_emms", + "mmx.femms" => "__builtin_ia32_femms", + "monitorx" => "__builtin_ia32_monitorx", + "movdir64b" => "__builtin_ia32_movdir64b", + "movrsdi" => "__builtin_ia32_movrsdi", + "movrshi" => "__builtin_ia32_movrshi", + "movrsqi" => "__builtin_ia32_movrsqi", + "movrssi" => "__builtin_ia32_movrssi", + "mwaitx" => "__builtin_ia32_mwaitx", + "pclmulqdq" => "__builtin_ia32_pclmulqdq128", + "pclmulqdq.256" => "__builtin_ia32_pclmulqdq256", + "pclmulqdq.512" => "__builtin_ia32_pclmulqdq512", + "prefetchrs" => "__builtin_ia32_prefetchrs", + "ptwrite32" => "__builtin_ia32_ptwrite32", + "ptwrite64" => "__builtin_ia32_ptwrite64", + "rdfsbase.32" => "__builtin_ia32_rdfsbase32", + "rdfsbase.64" => "__builtin_ia32_rdfsbase64", + "rdgsbase.32" => "__builtin_ia32_rdgsbase32", + "rdgsbase.64" => "__builtin_ia32_rdgsbase64", + "rdpid" => "__builtin_ia32_rdpid", + "rdpkru" => "__builtin_ia32_rdpkru", + "rdpmc" => "__builtin_ia32_rdpmc", + "rdpru" => "__builtin_ia32_rdpru", + "rdsspd" => "__builtin_ia32_rdsspd", + "rdsspq" => "__builtin_ia32_rdsspq", + "rdtsc" => "__builtin_ia32_rdtsc", + "rdtscp" => "__builtin_ia32_rdtscp", + "rstorssp" => "__builtin_ia32_rstorssp", + "saveprevssp" => "__builtin_ia32_saveprevssp", + "senduipi" => 
"__builtin_ia32_senduipi", + "serialize" => "__builtin_ia32_serialize", + "setssbsy" => "__builtin_ia32_setssbsy", + "sha1msg1" => "__builtin_ia32_sha1msg1", + "sha1msg2" => "__builtin_ia32_sha1msg2", + "sha1nexte" => "__builtin_ia32_sha1nexte", + "sha1rnds4" => "__builtin_ia32_sha1rnds4", + "sha256msg1" => "__builtin_ia32_sha256msg1", + "sha256msg2" => "__builtin_ia32_sha256msg2", + "sha256rnds2" => "__builtin_ia32_sha256rnds2", + "slwpcb" => "__builtin_ia32_slwpcb", + "sse.add.ss" => "__builtin_ia32_addss", + "sse.cmp.ps" => "__builtin_ia32_cmpps", + "sse.cmp.ss" => "__builtin_ia32_cmpss", + "sse.comieq.ss" => "__builtin_ia32_comieq", + "sse.comige.ss" => "__builtin_ia32_comige", + "sse.comigt.ss" => "__builtin_ia32_comigt", + "sse.comile.ss" => "__builtin_ia32_comile", + "sse.comilt.ss" => "__builtin_ia32_comilt", + "sse.comineq.ss" => "__builtin_ia32_comineq", + "sse.cvtsi2ss" => "__builtin_ia32_cvtsi2ss", + "sse.cvtsi642ss" => "__builtin_ia32_cvtsi642ss", + "sse.cvtss2si" => "__builtin_ia32_cvtss2si", + "sse.cvtss2si64" => "__builtin_ia32_cvtss2si64", + "sse.cvttss2si" => "__builtin_ia32_cvttss2si", + "sse.cvttss2si64" => "__builtin_ia32_cvttss2si64", + "sse.div.ss" => "__builtin_ia32_divss", + "sse.max.ps" => "__builtin_ia32_maxps", + "sse.max.ss" => "__builtin_ia32_maxss", + "sse.min.ps" => "__builtin_ia32_minps", + "sse.min.ss" => "__builtin_ia32_minss", + "sse.movmsk.ps" => "__builtin_ia32_movmskps", + "sse.mul.ss" => "__builtin_ia32_mulss", + "sse.rcp.ps" => "__builtin_ia32_rcpps", + "sse.rcp.ss" => "__builtin_ia32_rcpss", + "sse.rsqrt.ps" => "__builtin_ia32_rsqrtps", + "sse.rsqrt.ss" => "__builtin_ia32_rsqrtss", + "sse.sfence" => "__builtin_ia32_sfence", + "sse.sqrt.ps" => "__builtin_ia32_sqrtps", + "sse.sqrt.ss" => "__builtin_ia32_sqrtss", + "sse.storeu.ps" => "__builtin_ia32_storeups", + "sse.sub.ss" => "__builtin_ia32_subss", + "sse.ucomieq.ss" => "__builtin_ia32_ucomieq", + "sse.ucomige.ss" => "__builtin_ia32_ucomige", + "sse.ucomigt.ss" => "__builtin_ia32_ucomigt", + "sse.ucomile.ss" => "__builtin_ia32_ucomile", + "sse.ucomilt.ss" => "__builtin_ia32_ucomilt", + "sse.ucomineq.ss" => "__builtin_ia32_ucomineq", + "sse2.add.sd" => "__builtin_ia32_addsd", + "sse2.clflush" => "__builtin_ia32_clflush", + "sse2.cmp.pd" => "__builtin_ia32_cmppd", + "sse2.cmp.sd" => "__builtin_ia32_cmpsd", + "sse2.comieq.sd" => "__builtin_ia32_comisdeq", + "sse2.comige.sd" => "__builtin_ia32_comisdge", + "sse2.comigt.sd" => "__builtin_ia32_comisdgt", + "sse2.comile.sd" => "__builtin_ia32_comisdle", + "sse2.comilt.sd" => "__builtin_ia32_comisdlt", + "sse2.comineq.sd" => "__builtin_ia32_comisdneq", + "sse2.cvtdq2pd" => "__builtin_ia32_cvtdq2pd", + "sse2.cvtdq2ps" => "__builtin_ia32_cvtdq2ps", + "sse2.cvtpd2dq" => "__builtin_ia32_cvtpd2dq", + "sse2.cvtpd2ps" => "__builtin_ia32_cvtpd2ps", + "sse2.cvtps2dq" => "__builtin_ia32_cvtps2dq", + "sse2.cvtps2pd" => "__builtin_ia32_cvtps2pd", + "sse2.cvtsd2si" => "__builtin_ia32_cvtsd2si", + "sse2.cvtsd2si64" => "__builtin_ia32_cvtsd2si64", + "sse2.cvtsd2ss" => "__builtin_ia32_cvtsd2ss", + "sse2.cvtsi2sd" => "__builtin_ia32_cvtsi2sd", + "sse2.cvtsi642sd" => "__builtin_ia32_cvtsi642sd", + "sse2.cvtss2sd" => "__builtin_ia32_cvtss2sd", + "sse2.cvttpd2dq" => "__builtin_ia32_cvttpd2dq", + "sse2.cvttps2dq" => "__builtin_ia32_cvttps2dq", + "sse2.cvttsd2si" => "__builtin_ia32_cvttsd2si", + "sse2.cvttsd2si64" => "__builtin_ia32_cvttsd2si64", + "sse2.div.sd" => "__builtin_ia32_divsd", + "sse2.lfence" => "__builtin_ia32_lfence", + "sse2.maskmov.dqu" => 
"__builtin_ia32_maskmovdqu", + "sse2.max.pd" => "__builtin_ia32_maxpd", + "sse2.max.sd" => "__builtin_ia32_maxsd", + "sse2.mfence" => "__builtin_ia32_mfence", + "sse2.min.pd" => "__builtin_ia32_minpd", + "sse2.min.sd" => "__builtin_ia32_minsd", + "sse2.movmsk.pd" => "__builtin_ia32_movmskpd", + "sse2.mul.sd" => "__builtin_ia32_mulsd", + "sse2.packssdw.128" => "__builtin_ia32_packssdw128", + "sse2.packsswb.128" => "__builtin_ia32_packsswb128", + "sse2.packuswb.128" => "__builtin_ia32_packuswb128", + "sse2.padds.b" => "__builtin_ia32_paddsb128", + "sse2.padds.w" => "__builtin_ia32_paddsw128", + "sse2.paddus.b" => "__builtin_ia32_paddusb128", + "sse2.paddus.w" => "__builtin_ia32_paddusw128", + "sse2.pause" => "__builtin_ia32_pause", + "sse2.pavg.b" => "__builtin_ia32_pavgb128", + "sse2.pavg.w" => "__builtin_ia32_pavgw128", + "sse2.pmadd.wd" => "__builtin_ia32_pmaddwd128", + "sse2.pmaxs.w" => "__builtin_ia32_pmaxsw128", + "sse2.pmaxu.b" => "__builtin_ia32_pmaxub128", + "sse2.pmins.w" => "__builtin_ia32_pminsw128", + "sse2.pminu.b" => "__builtin_ia32_pminub128", + "sse2.pmovmskb.128" => "__builtin_ia32_pmovmskb128", + "sse2.pmulh.w" => "__builtin_ia32_pmulhw128", + "sse2.pmulhu.w" => "__builtin_ia32_pmulhuw128", + "sse2.pmulu.dq" => "__builtin_ia32_pmuludq128", + "sse2.psad.bw" => "__builtin_ia32_psadbw128", + "sse2.pshuf.d" => "__builtin_ia32_pshufd", + "sse2.pshufh.w" => "__builtin_ia32_pshufhw", + "sse2.pshufl.w" => "__builtin_ia32_pshuflw", + "sse2.psll.d" => "__builtin_ia32_pslld128", + "sse2.psll.dq" => "__builtin_ia32_pslldqi128", + "sse2.psll.dq.bs" => "__builtin_ia32_pslldqi128_byteshift", + "sse2.psll.q" => "__builtin_ia32_psllq128", + "sse2.psll.w" => "__builtin_ia32_psllw128", + "sse2.pslli.d" => "__builtin_ia32_pslldi128", + "sse2.pslli.q" => "__builtin_ia32_psllqi128", + "sse2.pslli.w" => "__builtin_ia32_psllwi128", + "sse2.psra.d" => "__builtin_ia32_psrad128", + "sse2.psra.w" => "__builtin_ia32_psraw128", + "sse2.psrai.d" => "__builtin_ia32_psradi128", + "sse2.psrai.w" => "__builtin_ia32_psrawi128", + "sse2.psrl.d" => "__builtin_ia32_psrld128", + "sse2.psrl.dq" => "__builtin_ia32_psrldqi128", + "sse2.psrl.dq.bs" => "__builtin_ia32_psrldqi128_byteshift", + "sse2.psrl.q" => "__builtin_ia32_psrlq128", + "sse2.psrl.w" => "__builtin_ia32_psrlw128", + "sse2.psrli.d" => "__builtin_ia32_psrldi128", + "sse2.psrli.q" => "__builtin_ia32_psrlqi128", + "sse2.psrli.w" => "__builtin_ia32_psrlwi128", + "sse2.psubs.b" => "__builtin_ia32_psubsb128", + "sse2.psubs.w" => "__builtin_ia32_psubsw128", + "sse2.psubus.b" => "__builtin_ia32_psubusb128", + "sse2.psubus.w" => "__builtin_ia32_psubusw128", + "sse2.sqrt.pd" => "__builtin_ia32_sqrtpd", + "sse2.sqrt.sd" => "__builtin_ia32_sqrtsd", + "sse2.storel.dq" => "__builtin_ia32_storelv4si", + "sse2.storeu.dq" => "__builtin_ia32_storedqu", + "sse2.storeu.pd" => "__builtin_ia32_storeupd", + "sse2.sub.sd" => "__builtin_ia32_subsd", + "sse2.ucomieq.sd" => "__builtin_ia32_ucomisdeq", + "sse2.ucomige.sd" => "__builtin_ia32_ucomisdge", + "sse2.ucomigt.sd" => "__builtin_ia32_ucomisdgt", + "sse2.ucomile.sd" => "__builtin_ia32_ucomisdle", + "sse2.ucomilt.sd" => "__builtin_ia32_ucomisdlt", + "sse2.ucomineq.sd" => "__builtin_ia32_ucomisdneq", + "sse3.addsub.pd" => "__builtin_ia32_addsubpd", + "sse3.addsub.ps" => "__builtin_ia32_addsubps", + "sse3.hadd.pd" => "__builtin_ia32_haddpd", + "sse3.hadd.ps" => "__builtin_ia32_haddps", + "sse3.hsub.pd" => "__builtin_ia32_hsubpd", + "sse3.hsub.ps" => "__builtin_ia32_hsubps", + "sse3.ldu.dq" => "__builtin_ia32_lddqu", + 
"sse3.monitor" => "__builtin_ia32_monitor", + "sse3.mwait" => "__builtin_ia32_mwait", + "sse41.blendpd" => "__builtin_ia32_blendpd", + "sse41.blendps" => "__builtin_ia32_blendps", + "sse41.blendvpd" => "__builtin_ia32_blendvpd", + "sse41.blendvps" => "__builtin_ia32_blendvps", + "sse41.dppd" => "__builtin_ia32_dppd", + "sse41.dpps" => "__builtin_ia32_dpps", + "sse41.extractps" => "__builtin_ia32_extractps128", + "sse41.insertps" => "__builtin_ia32_insertps128", + "sse41.movntdqa" => "__builtin_ia32_movntdqa", + "sse41.mpsadbw" => "__builtin_ia32_mpsadbw128", + "sse41.packusdw" => "__builtin_ia32_packusdw128", + "sse41.pblendvb" => "__builtin_ia32_pblendvb128", + "sse41.pblendw" => "__builtin_ia32_pblendw128", + "sse41.phminposuw" => "__builtin_ia32_phminposuw128", + "sse41.pmaxsb" => "__builtin_ia32_pmaxsb128", + "sse41.pmaxsd" => "__builtin_ia32_pmaxsd128", + "sse41.pmaxud" => "__builtin_ia32_pmaxud128", + "sse41.pmaxuw" => "__builtin_ia32_pmaxuw128", + "sse41.pminsb" => "__builtin_ia32_pminsb128", + "sse41.pminsd" => "__builtin_ia32_pminsd128", + "sse41.pminud" => "__builtin_ia32_pminud128", + "sse41.pminuw" => "__builtin_ia32_pminuw128", + "sse41.pmovsxbd" => "__builtin_ia32_pmovsxbd128", + "sse41.pmovsxbq" => "__builtin_ia32_pmovsxbq128", + "sse41.pmovsxbw" => "__builtin_ia32_pmovsxbw128", + "sse41.pmovsxdq" => "__builtin_ia32_pmovsxdq128", + "sse41.pmovsxwd" => "__builtin_ia32_pmovsxwd128", + "sse41.pmovsxwq" => "__builtin_ia32_pmovsxwq128", + "sse41.pmovzxbd" => "__builtin_ia32_pmovzxbd128", + "sse41.pmovzxbq" => "__builtin_ia32_pmovzxbq128", + "sse41.pmovzxbw" => "__builtin_ia32_pmovzxbw128", + "sse41.pmovzxdq" => "__builtin_ia32_pmovzxdq128", + "sse41.pmovzxwd" => "__builtin_ia32_pmovzxwd128", + "sse41.pmovzxwq" => "__builtin_ia32_pmovzxwq128", + "sse41.pmuldq" => "__builtin_ia32_pmuldq128", + "sse41.ptestc" => "__builtin_ia32_ptestc128", + "sse41.ptestnzc" => "__builtin_ia32_ptestnzc128", + "sse41.ptestz" => "__builtin_ia32_ptestz128", + "sse41.round.pd" => "__builtin_ia32_roundpd", + "sse41.round.ps" => "__builtin_ia32_roundps", + "sse41.round.sd" => "__builtin_ia32_roundsd", + "sse41.round.ss" => "__builtin_ia32_roundss", + "sse42.crc32.32.16" => "__builtin_ia32_crc32hi", + "sse42.crc32.32.32" => "__builtin_ia32_crc32si", + "sse42.crc32.32.8" => "__builtin_ia32_crc32qi", + "sse42.crc32.64.64" => "__builtin_ia32_crc32di", + "sse42.pcmpestri128" => "__builtin_ia32_pcmpestri128", + "sse42.pcmpestria128" => "__builtin_ia32_pcmpestria128", + "sse42.pcmpestric128" => "__builtin_ia32_pcmpestric128", + "sse42.pcmpestrio128" => "__builtin_ia32_pcmpestrio128", + "sse42.pcmpestris128" => "__builtin_ia32_pcmpestris128", + "sse42.pcmpestriz128" => "__builtin_ia32_pcmpestriz128", + "sse42.pcmpestrm128" => "__builtin_ia32_pcmpestrm128", + "sse42.pcmpistri128" => "__builtin_ia32_pcmpistri128", + "sse42.pcmpistria128" => "__builtin_ia32_pcmpistria128", + "sse42.pcmpistric128" => "__builtin_ia32_pcmpistric128", + "sse42.pcmpistrio128" => "__builtin_ia32_pcmpistrio128", + "sse42.pcmpistris128" => "__builtin_ia32_pcmpistris128", + "sse42.pcmpistriz128" => "__builtin_ia32_pcmpistriz128", + "sse42.pcmpistrm128" => "__builtin_ia32_pcmpistrm128", + "sse4a.extrq" => "__builtin_ia32_extrq", + "sse4a.extrqi" => "__builtin_ia32_extrqi", + "sse4a.insertq" => "__builtin_ia32_insertq", + "sse4a.insertqi" => "__builtin_ia32_insertqi", + "sse4a.movnt.sd" => "__builtin_ia32_movntsd", + "sse4a.movnt.ss" => "__builtin_ia32_movntss", + "ssse3.pabs.b.128" => "__builtin_ia32_pabsb128", + "ssse3.pabs.d.128" => 
"__builtin_ia32_pabsd128", + "ssse3.pabs.w.128" => "__builtin_ia32_pabsw128", + "ssse3.phadd.d.128" => "__builtin_ia32_phaddd128", + "ssse3.phadd.sw.128" => "__builtin_ia32_phaddsw128", + "ssse3.phadd.w.128" => "__builtin_ia32_phaddw128", + "ssse3.phsub.d.128" => "__builtin_ia32_phsubd128", + "ssse3.phsub.sw.128" => "__builtin_ia32_phsubsw128", + "ssse3.phsub.w.128" => "__builtin_ia32_phsubw128", + "ssse3.pmadd.ub.sw.128" => "__builtin_ia32_pmaddubsw128", + "ssse3.pmul.hr.sw.128" => "__builtin_ia32_pmulhrsw128", + "ssse3.pshuf.b.128" => "__builtin_ia32_pshufb128", + "ssse3.psign.b.128" => "__builtin_ia32_psignb128", + "ssse3.psign.d.128" => "__builtin_ia32_psignd128", + "ssse3.psign.w.128" => "__builtin_ia32_psignw128", + "sttilecfg" => "__builtin_ia32_tile_storeconfig", + "stui" => "__builtin_ia32_stui", + "subborrow.u32" => "__builtin_ia32_subborrow_u32", + "subborrow.u64" => "__builtin_ia32_subborrow_u64", + "t2rpntlvwz0" => "__builtin_ia32_t2rpntlvwz0", + "t2rpntlvwz0rs" => "__builtin_ia32_t2rpntlvwz0rs", + "t2rpntlvwz0rst1" => "__builtin_ia32_t2rpntlvwz0rst1", + "t2rpntlvwz0t1" => "__builtin_ia32_t2rpntlvwz0t1", + "t2rpntlvwz1" => "__builtin_ia32_t2rpntlvwz1", + "t2rpntlvwz1rs" => "__builtin_ia32_t2rpntlvwz1rs", + "t2rpntlvwz1rst1" => "__builtin_ia32_t2rpntlvwz1rst1", + "t2rpntlvwz1t1" => "__builtin_ia32_t2rpntlvwz1t1", + "tbm.bextri.u32" => "__builtin_ia32_bextri_u32", + "tbm.bextri.u64" => "__builtin_ia32_bextri_u64", + "tcmmimfp16ps" => "__builtin_ia32_tcmmimfp16ps", + "tcmmimfp16ps.internal" => "__builtin_ia32_tcmmimfp16ps_internal", + "tcmmrlfp16ps" => "__builtin_ia32_tcmmrlfp16ps", + "tcmmrlfp16ps.internal" => "__builtin_ia32_tcmmrlfp16ps_internal", + "tconjtcmmimfp16ps" => "__builtin_ia32_tconjtcmmimfp16ps", + "tconjtcmmimfp16ps.internal" => "__builtin_ia32_tconjtcmmimfp16ps_internal", + "tconjtfp16" => "__builtin_ia32_tconjtfp16", + "tconjtfp16.internal" => "__builtin_ia32_tconjtfp16_internal", + "tcvtrowd2ps" => "__builtin_ia32_tcvtrowd2ps", + "tcvtrowd2ps.internal" => "__builtin_ia32_tcvtrowd2ps_internal", + "tcvtrowps2bf16h" => "__builtin_ia32_tcvtrowps2bf16h", + "tcvtrowps2bf16h.internal" => "__builtin_ia32_tcvtrowps2bf16h_internal", + "tcvtrowps2bf16l" => "__builtin_ia32_tcvtrowps2bf16l", + "tcvtrowps2bf16l.internal" => "__builtin_ia32_tcvtrowps2bf16l_internal", + "tcvtrowps2phh" => "__builtin_ia32_tcvtrowps2phh", + "tcvtrowps2phh.internal" => "__builtin_ia32_tcvtrowps2phh_internal", + "tcvtrowps2phl" => "__builtin_ia32_tcvtrowps2phl", + "tcvtrowps2phl.internal" => "__builtin_ia32_tcvtrowps2phl_internal", + "tdpbf16ps" => "__builtin_ia32_tdpbf16ps", + "tdpbf16ps.internal" => "__builtin_ia32_tdpbf16ps_internal", + "tdpbf8ps" => "__builtin_ia32_tdpbf8ps", + "tdpbf8ps.internal" => "__builtin_ia32_tdpbf8ps_internal", + "tdpbhf8ps" => "__builtin_ia32_tdpbhf8ps", + "tdpbhf8ps.internal" => "__builtin_ia32_tdpbhf8ps_internal", + "tdpbssd" => "__builtin_ia32_tdpbssd", + "tdpbssd.internal" => "__builtin_ia32_tdpbssd_internal", + "tdpbsud" => "__builtin_ia32_tdpbsud", + "tdpbsud.internal" => "__builtin_ia32_tdpbsud_internal", + "tdpbusd" => "__builtin_ia32_tdpbusd", + "tdpbusd.internal" => "__builtin_ia32_tdpbusd_internal", + "tdpbuud" => "__builtin_ia32_tdpbuud", + "tdpbuud.internal" => "__builtin_ia32_tdpbuud_internal", + "tdpfp16ps" => "__builtin_ia32_tdpfp16ps", + "tdpfp16ps.internal" => "__builtin_ia32_tdpfp16ps_internal", + "tdphbf8ps" => "__builtin_ia32_tdphbf8ps", + "tdphbf8ps.internal" => "__builtin_ia32_tdphbf8ps_internal", + "tdphf8ps" => "__builtin_ia32_tdphf8ps", + 
"tdphf8ps.internal" => "__builtin_ia32_tdphf8ps_internal", + "testui" => "__builtin_ia32_testui", + "tileloadd64" => "__builtin_ia32_tileloadd64", + "tileloadd64.internal" => "__builtin_ia32_tileloadd64_internal", + "tileloaddrs64" => "__builtin_ia32_tileloaddrs64", + "tileloaddrs64.internal" => "__builtin_ia32_tileloaddrs64_internal", + "tileloaddrst164" => "__builtin_ia32_tileloaddrst164", + "tileloaddrst164.internal" => "__builtin_ia32_tileloaddrst164_internal", + "tileloaddt164" => "__builtin_ia32_tileloaddt164", + "tileloaddt164.internal" => "__builtin_ia32_tileloaddt164_internal", + "tilemovrow" => "__builtin_ia32_tilemovrow", + "tilemovrow.internal" => "__builtin_ia32_tilemovrow_internal", + "tilerelease" => "__builtin_ia32_tilerelease", + "tilestored64" => "__builtin_ia32_tilestored64", + "tilestored64.internal" => "__builtin_ia32_tilestored64_internal", + "tilezero" => "__builtin_ia32_tilezero", + "tilezero.internal" => "__builtin_ia32_tilezero_internal", + "tmmultf32ps" => "__builtin_ia32_tmmultf32ps", + "tmmultf32ps.internal" => "__builtin_ia32_tmmultf32ps_internal", + "tpause" => "__builtin_ia32_tpause", + "ttcmmimfp16ps" => "__builtin_ia32_ttcmmimfp16ps", + "ttcmmimfp16ps.internal" => "__builtin_ia32_ttcmmimfp16ps_internal", + "ttcmmrlfp16ps" => "__builtin_ia32_ttcmmrlfp16ps", + "ttcmmrlfp16ps.internal" => "__builtin_ia32_ttcmmrlfp16ps_internal", + "ttdpbf16ps" => "__builtin_ia32_ttdpbf16ps", + "ttdpbf16ps.internal" => "__builtin_ia32_ttdpbf16ps_internal", + "ttdpfp16ps" => "__builtin_ia32_ttdpfp16ps", + "ttdpfp16ps.internal" => "__builtin_ia32_ttdpfp16ps_internal", + "ttmmultf32ps" => "__builtin_ia32_ttmmultf32ps", + "ttmmultf32ps.internal" => "__builtin_ia32_ttmmultf32ps_internal", + "ttransposed" => "__builtin_ia32_ttransposed", + "ttransposed.internal" => "__builtin_ia32_ttransposed_internal", + "umonitor" => "__builtin_ia32_umonitor", + "umwait" => "__builtin_ia32_umwait", + "urdmsr" => "__builtin_ia32_urdmsr", + "uwrmsr" => "__builtin_ia32_uwrmsr", + "vbcstnebf162ps128" => "__builtin_ia32_vbcstnebf162ps128", + "vbcstnebf162ps256" => "__builtin_ia32_vbcstnebf162ps256", + "vbcstnesh2ps128" => "__builtin_ia32_vbcstnesh2ps128", + "vbcstnesh2ps256" => "__builtin_ia32_vbcstnesh2ps256", + "vcvtneebf162ps128" => "__builtin_ia32_vcvtneebf162ps128", + "vcvtneebf162ps256" => "__builtin_ia32_vcvtneebf162ps256", + "vcvtneeph2ps128" => "__builtin_ia32_vcvtneeph2ps128", + "vcvtneeph2ps256" => "__builtin_ia32_vcvtneeph2ps256", + "vcvtneobf162ps128" => "__builtin_ia32_vcvtneobf162ps128", + "vcvtneobf162ps256" => "__builtin_ia32_vcvtneobf162ps256", + "vcvtneoph2ps128" => "__builtin_ia32_vcvtneoph2ps128", + "vcvtneoph2ps256" => "__builtin_ia32_vcvtneoph2ps256", + "vcvtneps2bf16128" => "__builtin_ia32_vcvtneps2bf16128", + "vcvtneps2bf16256" => "__builtin_ia32_vcvtneps2bf16256", + "vcvtph2ps.128" => "__builtin_ia32_vcvtph2ps", + "vcvtph2ps.256" => "__builtin_ia32_vcvtph2ps256", + "vcvtps2ph.128" => "__builtin_ia32_vcvtps2ph", + "vcvtps2ph.256" => "__builtin_ia32_vcvtps2ph256", + "vgf2p8affineinvqb.128" => "__builtin_ia32_vgf2p8affineinvqb_v16qi", + "vgf2p8affineinvqb.256" => "__builtin_ia32_vgf2p8affineinvqb_v32qi", + "vgf2p8affineinvqb.512" => "__builtin_ia32_vgf2p8affineinvqb_v64qi", + "vgf2p8affineqb.128" => "__builtin_ia32_vgf2p8affineqb_v16qi", + "vgf2p8affineqb.256" => "__builtin_ia32_vgf2p8affineqb_v32qi", + "vgf2p8affineqb.512" => "__builtin_ia32_vgf2p8affineqb_v64qi", + "vgf2p8mulb.128" => "__builtin_ia32_vgf2p8mulb_v16qi", + "vgf2p8mulb.256" => "__builtin_ia32_vgf2p8mulb_v32qi", + 
"vgf2p8mulb.512" => "__builtin_ia32_vgf2p8mulb_v64qi", + "vsha512msg1" => "__builtin_ia32_vsha512msg1", + "vsha512msg2" => "__builtin_ia32_vsha512msg2", + "vsha512rnds2" => "__builtin_ia32_vsha512rnds2", + "vsm3msg1" => "__builtin_ia32_vsm3msg1", + "vsm3msg2" => "__builtin_ia32_vsm3msg2", + "vsm3rnds2" => "__builtin_ia32_vsm3rnds2", + "vsm4key4128" => "__builtin_ia32_vsm4key4128", + "vsm4key4256" => "__builtin_ia32_vsm4key4256", + "vsm4key4512" => "__builtin_ia32_vsm4key4512", + "vsm4rnds4128" => "__builtin_ia32_vsm4rnds4128", + "vsm4rnds4256" => "__builtin_ia32_vsm4rnds4256", + "vsm4rnds4512" => "__builtin_ia32_vsm4rnds4512", + "wbinvd" => "__builtin_ia32_wbinvd", + "wbnoinvd" => "__builtin_ia32_wbnoinvd", + "wrfsbase.32" => "__builtin_ia32_wrfsbase32", + "wrfsbase.64" => "__builtin_ia32_wrfsbase64", + "wrgsbase.32" => "__builtin_ia32_wrgsbase32", + "wrgsbase.64" => "__builtin_ia32_wrgsbase64", + "wrpkru" => "__builtin_ia32_wrpkru", + "wrssd" => "__builtin_ia32_wrssd", + "wrssq" => "__builtin_ia32_wrssq", + "wrussd" => "__builtin_ia32_wrussd", + "wrussq" => "__builtin_ia32_wrussq", + "xabort" => "__builtin_ia32_xabort", + "xbegin" => "__builtin_ia32_xbegin", + "xend" => "__builtin_ia32_xend", + "xop.vfrcz.pd" => "__builtin_ia32_vfrczpd", + "xop.vfrcz.pd.256" => "__builtin_ia32_vfrczpd256", + "xop.vfrcz.ps" => "__builtin_ia32_vfrczps", + "xop.vfrcz.ps.256" => "__builtin_ia32_vfrczps256", + "xop.vfrcz.sd" => "__builtin_ia32_vfrczsd", + "xop.vfrcz.ss" => "__builtin_ia32_vfrczss", + "xop.vpcmov" => "__builtin_ia32_vpcmov", + "xop.vpcmov.256" => "__builtin_ia32_vpcmov_256", + "xop.vpcomb" => "__builtin_ia32_vpcomb", + "xop.vpcomd" => "__builtin_ia32_vpcomd", + "xop.vpcomq" => "__builtin_ia32_vpcomq", + "xop.vpcomub" => "__builtin_ia32_vpcomub", + "xop.vpcomud" => "__builtin_ia32_vpcomud", + "xop.vpcomuq" => "__builtin_ia32_vpcomuq", + "xop.vpcomuw" => "__builtin_ia32_vpcomuw", + "xop.vpcomw" => "__builtin_ia32_vpcomw", + "xop.vpermil2pd" => "__builtin_ia32_vpermil2pd", + "xop.vpermil2pd.256" => "__builtin_ia32_vpermil2pd256", + "xop.vpermil2ps" => "__builtin_ia32_vpermil2ps", + "xop.vpermil2ps.256" => "__builtin_ia32_vpermil2ps256", + "xop.vphaddbd" => "__builtin_ia32_vphaddbd", + "xop.vphaddbq" => "__builtin_ia32_vphaddbq", + "xop.vphaddbw" => "__builtin_ia32_vphaddbw", + "xop.vphadddq" => "__builtin_ia32_vphadddq", + "xop.vphaddubd" => "__builtin_ia32_vphaddubd", + "xop.vphaddubq" => "__builtin_ia32_vphaddubq", + "xop.vphaddubw" => "__builtin_ia32_vphaddubw", + "xop.vphaddudq" => "__builtin_ia32_vphaddudq", + "xop.vphadduwd" => "__builtin_ia32_vphadduwd", + "xop.vphadduwq" => "__builtin_ia32_vphadduwq", + "xop.vphaddwd" => "__builtin_ia32_vphaddwd", + "xop.vphaddwq" => "__builtin_ia32_vphaddwq", + "xop.vphsubbw" => "__builtin_ia32_vphsubbw", + "xop.vphsubdq" => "__builtin_ia32_vphsubdq", + "xop.vphsubwd" => "__builtin_ia32_vphsubwd", + "xop.vpmacsdd" => "__builtin_ia32_vpmacsdd", + "xop.vpmacsdqh" => "__builtin_ia32_vpmacsdqh", + "xop.vpmacsdql" => "__builtin_ia32_vpmacsdql", + "xop.vpmacssdd" => "__builtin_ia32_vpmacssdd", + "xop.vpmacssdqh" => "__builtin_ia32_vpmacssdqh", + "xop.vpmacssdql" => "__builtin_ia32_vpmacssdql", + "xop.vpmacsswd" => "__builtin_ia32_vpmacsswd", + "xop.vpmacssww" => "__builtin_ia32_vpmacssww", + "xop.vpmacswd" => "__builtin_ia32_vpmacswd", + "xop.vpmacsww" => "__builtin_ia32_vpmacsww", + "xop.vpmadcsswd" => "__builtin_ia32_vpmadcsswd", + "xop.vpmadcswd" => "__builtin_ia32_vpmadcswd", + "xop.vpperm" => "__builtin_ia32_vpperm", + "xop.vprotb" => 
"__builtin_ia32_vprotb", + "xop.vprotbi" => "__builtin_ia32_vprotbi", + "xop.vprotd" => "__builtin_ia32_vprotd", + "xop.vprotdi" => "__builtin_ia32_vprotdi", + "xop.vprotq" => "__builtin_ia32_vprotq", + "xop.vprotqi" => "__builtin_ia32_vprotqi", + "xop.vprotw" => "__builtin_ia32_vprotw", + "xop.vprotwi" => "__builtin_ia32_vprotwi", + "xop.vpshab" => "__builtin_ia32_vpshab", + "xop.vpshad" => "__builtin_ia32_vpshad", + "xop.vpshaq" => "__builtin_ia32_vpshaq", + "xop.vpshaw" => "__builtin_ia32_vpshaw", + "xop.vpshlb" => "__builtin_ia32_vpshlb", + "xop.vpshld" => "__builtin_ia32_vpshld", + "xop.vpshlq" => "__builtin_ia32_vpshlq", + "xop.vpshlw" => "__builtin_ia32_vpshlw", + "xresldtrk" => "__builtin_ia32_xresldtrk", + "xsusldtrk" => "__builtin_ia32_xsusldtrk", + "xtest" => "__builtin_ia32_xtest", + _ => unimplemented!("***** unsupported LLVM intrinsic {}", name), + } + } + x86(name) + } + "xcore" => { + #[allow(non_snake_case)] + fn xcore(name: &str) -> &str { + match name { + // xcore + "bitrev" => "__builtin_bitrev", + "getid" => "__builtin_getid", + "getps" => "__builtin_getps", + "setps" => "__builtin_setps", + _ => unimplemented!("***** unsupported LLVM intrinsic {}", name), + } + } + xcore(name) + } + _ => unimplemented!("***** unsupported LLVM intrinsic {}", name), + } } diff --git a/compiler/rustc_codegen_gcc/src/intrinsic/llvm.rs b/compiler/rustc_codegen_gcc/src/intrinsic/llvm.rs index 0eebd21001a..0b77694f115 100644 --- a/compiler/rustc_codegen_gcc/src/intrinsic/llvm.rs +++ b/compiler/rustc_codegen_gcc/src/intrinsic/llvm.rs @@ -1012,7 +1012,7 @@ pub fn intrinsic<'gcc, 'tcx>(name: &str, cx: &CodegenCx<'gcc, 'tcx>) -> Function }; let func = cx.context.get_builtin_function(gcc_name); cx.functions.borrow_mut().insert(gcc_name.to_string(), func); - return func; + func } #[cfg(feature = "master")] @@ -1548,10 +1548,13 @@ pub fn intrinsic<'gcc, 'tcx>(name: &str, cx: &CodegenCx<'gcc, 'tcx>) -> Function "llvm.x86.tcmmrlfp16ps" => "__builtin_trap", // NOTE: this file is generated by https://github.com/GuillaumeGomez/llvmint/blob/master/generate_list.py - _ => include!("archs.rs"), + _ => map_arch_intrinsic(name), }; let func = cx.context.get_target_builtin_function(gcc_name); cx.functions.borrow_mut().insert(gcc_name.to_string(), func); func } + +#[cfg(feature = "master")] +include!("archs.rs"); diff --git a/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs b/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs index c921851b42b..09132c34aae 100644 --- a/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs +++ b/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs @@ -196,6 +196,95 @@ fn get_simple_function<'gcc, 'tcx>( )) } +fn get_simple_function_f128<'gcc, 'tcx>( + cx: &CodegenCx<'gcc, 'tcx>, + name: Symbol, +) -> Option<Function<'gcc>> { + if !cx.supports_f128_type { + return None; + } + + let f128_type = cx.type_f128(); + let func_name = match name { + sym::ceilf128 => "ceilf128", + sym::floorf128 => "floorf128", + sym::truncf128 => "truncf128", + sym::roundf128 => "roundf128", + sym::round_ties_even_f128 => "roundevenf128", + sym::sqrtf128 => "sqrtf128", + _ => return None, + }; + Some(cx.context.new_function( + None, + FunctionType::Extern, + f128_type, + &[cx.context.new_parameter(None, f128_type, "a")], + func_name, + false, + )) +} + +fn get_simple_function_f128_2args<'gcc, 'tcx>( + cx: &CodegenCx<'gcc, 'tcx>, + name: Symbol, +) -> Option<Function<'gcc>> { + if !cx.supports_f128_type { + return None; + } + + let f128_type = cx.type_f128(); + let func_name = match name { + sym::maxnumf128 => 
"fmaxf128", + sym::minnumf128 => "fminf128", + _ => return None, + }; + Some(cx.context.new_function( + None, + FunctionType::Extern, + f128_type, + &[ + cx.context.new_parameter(None, f128_type, "a"), + cx.context.new_parameter(None, f128_type, "b"), + ], + func_name, + false, + )) +} + +fn f16_builtin<'gcc, 'tcx>( + cx: &CodegenCx<'gcc, 'tcx>, + name: Symbol, + args: &[OperandRef<'tcx, RValue<'gcc>>], +) -> RValue<'gcc> { + let f32_type = cx.type_f32(); + let builtin_name = match name { + sym::ceilf16 => "__builtin_ceilf", + sym::floorf16 => "__builtin_floorf", + sym::fmaf16 => "fmaf", + sym::maxnumf16 => "__builtin_fmaxf", + sym::minnumf16 => "__builtin_fminf", + sym::powf16 => "__builtin_powf", + sym::powif16 => { + let func = cx.context.get_builtin_function("__builtin_powif"); + let arg0 = cx.context.new_cast(None, args[0].immediate(), f32_type); + let args = [arg0, args[1].immediate()]; + let result = cx.context.new_call(None, func, &args); + return cx.context.new_cast(None, result, cx.type_f16()); + } + sym::roundf16 => "__builtin_roundf", + sym::round_ties_even_f16 => "__builtin_rintf", + sym::sqrtf16 => "__builtin_sqrtf", + sym::truncf16 => "__builtin_truncf", + _ => unreachable!(), + }; + + let func = cx.context.get_builtin_function(builtin_name); + let args: Vec<_> = + args.iter().map(|arg| cx.context.new_cast(None, arg.immediate(), f32_type)).collect(); + let result = cx.context.new_call(None, func, &args); + cx.context.new_cast(None, result, cx.type_f16()) +} + impl<'a, 'gcc, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> { fn codegen_intrinsic_call( &mut self, @@ -211,7 +300,9 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tc let fn_args = instance.args; let simple = get_simple_intrinsic(self, name); - let simple_func = get_simple_function(self, name); + let simple_func = get_simple_function(self, name) + .or_else(|| get_simple_function_f128(self, name)) + .or_else(|| get_simple_function_f128_2args(self, name)); // FIXME(tempdragon): Re-enable `clippy::suspicious_else_formatting` if the following issue is solved: // https://github.com/rust-lang/rust-clippy/issues/12497 @@ -234,17 +325,55 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tc &args.iter().map(|arg| arg.immediate()).collect::<Vec<_>>(), ) } - sym::fmaf16 => { - // TODO(antoyo): use the correct builtin for f16. 
- let func = self.cx.context.get_builtin_function("fmaf"); - let args: Vec<_> = args - .iter() - .map(|arg| { - self.cx.context.new_cast(self.location, arg.immediate(), self.cx.type_f32()) - }) - .collect(); - let result = self.cx.context.new_call(self.location, func, &args); - self.cx.context.new_cast(self.location, result, self.cx.type_f16()) + sym::ceilf16 + | sym::floorf16 + | sym::fmaf16 + | sym::maxnumf16 + | sym::minnumf16 + | sym::powf16 + | sym::powif16 + | sym::roundf16 + | sym::round_ties_even_f16 + | sym::sqrtf16 + | sym::truncf16 => f16_builtin(self, name, args), + sym::fmaf128 => { + let f128_type = self.cx.type_f128(); + let func = self.cx.context.new_function( + None, + FunctionType::Extern, + f128_type, + &[ + self.cx.context.new_parameter(None, f128_type, "a"), + self.cx.context.new_parameter(None, f128_type, "b"), + self.cx.context.new_parameter(None, f128_type, "c"), + ], + "fmaf128", + false, + ); + self.cx.context.new_call( + self.location, + func, + &args.iter().map(|arg| arg.immediate()).collect::<Vec<_>>(), + ) + } + sym::powif128 => { + let f128_type = self.cx.type_f128(); + let func = self.cx.context.new_function( + None, + FunctionType::Extern, + f128_type, + &[ + self.cx.context.new_parameter(None, f128_type, "a"), + self.cx.context.new_parameter(None, self.int_type, "b"), + ], + "__powitf2", + false, + ); + self.cx.context.new_call( + self.location, + func, + &args.iter().map(|arg| arg.immediate()).collect::<Vec<_>>(), + ) } sym::is_val_statically_known => { let a = args[0].immediate(); @@ -526,7 +655,7 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tc fn type_checked_load( &mut self, - _llvtable: Self::Value, + _vtable: Self::Value, _vtable_byte_offset: u64, _typeid: Self::Value, ) -> Self::Value { @@ -622,23 +751,23 @@ impl<'gcc, 'tcx> ArgAbiExt<'gcc, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> { // We instead thus allocate some scratch space... let scratch_size = cast.size(bx); let scratch_align = cast.align(bx); - let llscratch = bx.alloca(scratch_size, scratch_align); - bx.lifetime_start(llscratch, scratch_size); + let scratch = bx.alloca(scratch_size, scratch_align); + bx.lifetime_start(scratch, scratch_size); // ... where we first store the value... - rustc_codegen_ssa::mir::store_cast(bx, cast, val, llscratch, scratch_align); + rustc_codegen_ssa::mir::store_cast(bx, cast, val, scratch, scratch_align); // ... and then memcpy it to the intended destination. 
bx.memcpy( dst.val.llval, self.layout.align.abi, - llscratch, + scratch, scratch_align, bx.const_usize(self.layout.size.bytes()), MemFlags::empty(), ); - bx.lifetime_end(llscratch, scratch_size); + bx.lifetime_end(scratch, scratch_size); } } else { OperandValue::Immediate(val).store(bx, dst); diff --git a/compiler/rustc_codegen_gcc/src/intrinsic/simd.rs b/compiler/rustc_codegen_gcc/src/intrinsic/simd.rs index 82ef0d0b13a..6f6bc93b8b2 100644 --- a/compiler/rustc_codegen_gcc/src/intrinsic/simd.rs +++ b/compiler/rustc_codegen_gcc/src/intrinsic/simd.rs @@ -1081,7 +1081,9 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>( let (_, element_ty1) = args[1].layout.ty.simd_size_and_type(bx.tcx()); let (_, element_ty2) = args[2].layout.ty.simd_size_and_type(bx.tcx()); let (pointer_count, underlying_ty) = match *element_ty1.kind() { - ty::RawPtr(p_ty, mutbl) if p_ty == in_elem && mutbl == hir::Mutability::Mut => { + ty::RawPtr(p_ty, mutability) + if p_ty == in_elem && mutability == hir::Mutability::Mut => + { (ptr_count(element_ty1), non_ptr(element_ty1)) } _ => { diff --git a/compiler/rustc_codegen_gcc/src/lib.rs b/compiler/rustc_codegen_gcc/src/lib.rs index dbecbc42618..a912678ef2a 100644 --- a/compiler/rustc_codegen_gcc/src/lib.rs +++ b/compiler/rustc_codegen_gcc/src/lib.rs @@ -3,10 +3,12 @@ * TODO(antoyo): support #[inline] attributes. * TODO(antoyo): support LTO (gcc's equivalent to Full LTO is -flto -flto-partition=one — https://documentation.suse.com/sbp/all/html/SBP-GCC-10/index.html). * For Thin LTO, this might be helpful: +// cspell:disable-next-line * In gcc 4.6 -fwhopr was removed and became default with -flto. The non-whopr path can still be executed via -flto-partition=none. * Or the new incremental LTO (https://www.phoronix.com/news/GCC-Incremental-LTO-Patches)? * - * Maybe some missing optizations enabled by rustc's LTO is in there: https://gcc.gnu.org/onlinedocs/gcc/Optimize-Options.html + * Maybe some missing optimizations enabled by rustc's LTO is in there: https://gcc.gnu.org/onlinedocs/gcc/Optimize-Options.html +// cspell:disable-next-line * Like -fipa-icf (should be already enabled) and maybe -fdevirtualize-at-ltrans. * TODO: disable debug info always being emitted. Perhaps this slows down things? 
* @@ -100,6 +102,7 @@ use rustc_codegen_ssa::back::write::{ CodegenContext, FatLtoInput, ModuleConfig, TargetMachineFactoryFn, }; use rustc_codegen_ssa::base::codegen_crate; +use rustc_codegen_ssa::target_features::cfg_target_feature; use rustc_codegen_ssa::traits::{CodegenBackend, ExtraBackendMethods, WriteBackendMethods}; use rustc_codegen_ssa::{CodegenResults, CompiledModule, ModuleCodegen, TargetConfig}; use rustc_data_structures::fx::FxIndexMap; @@ -206,7 +209,7 @@ impl CodegenBackend for GccCodegenBackend { #[cfg(not(feature = "master"))] { let temp_dir = TempDir::new().expect("cannot create temporary directory"); - let temp_file = temp_dir.into_path().join("result.asm"); + let temp_file = temp_dir.keep().join("result.asm"); let check_context = Context::default(); check_context.set_print_errors_to_stderr(false); let _int128_ty = check_context.new_c_type(CType::UInt128t); @@ -430,10 +433,11 @@ impl WriteBackendMethods for GccCodegenBackend { ) -> Result<ModuleCodegen<Self::Module>, FatalError> { back::write::link(cgcx, dcx, modules) } + fn autodiff( _cgcx: &CodegenContext<Self>, _module: &ModuleCodegen<Self::Module>, - _diff_fncs: Vec<AutoDiffItem>, + _diff_functions: Vec<AutoDiffItem>, _config: &ModuleConfig, ) -> Result<(), FatalError> { unimplemented!() @@ -473,48 +477,32 @@ fn to_gcc_opt_level(optlevel: Option<OptLevel>) -> OptimizationLevel { /// Returns the features that should be set in `cfg(target_feature)`. fn target_config(sess: &Session, target_info: &LockedTargetInfo) -> TargetConfig { - // TODO(antoyo): use global_gcc_features. - let f = |allow_unstable| { - sess.target - .rust_target_features() - .iter() - .filter_map(|&(feature, gate, _)| { - if allow_unstable - || (gate.in_cfg() - && (sess.is_nightly_build() || gate.requires_nightly().is_none())) - { - Some(feature) - } else { - None - } - }) - .filter(|feature| { - // TODO: we disable Neon for now since we don't support the LLVM intrinsics for it. - if *feature == "neon" { - return false; - } - target_info.cpu_supports(feature) - /* - adx, aes, avx, avx2, avx512bf16, avx512bitalg, avx512bw, avx512cd, avx512dq, avx512er, avx512f, avx512fp16, avx512ifma, - avx512pf, avx512vbmi, avx512vbmi2, avx512vl, avx512vnni, avx512vp2intersect, avx512vpopcntdq, - bmi1, bmi2, cmpxchg16b, ermsb, f16c, fma, fxsr, gfni, lzcnt, movbe, pclmulqdq, popcnt, rdrand, rdseed, rtm, - sha, sse, sse2, sse3, sse4.1, sse4.2, sse4a, ssse3, tbm, vaes, vpclmulqdq, xsave, xsavec, xsaveopt, xsaves - */ - }) - .map(Symbol::intern) - .collect() - }; - - let target_features = f(false); - let unstable_target_features = f(true); + let (unstable_target_features, target_features) = cfg_target_feature(sess, |feature| { + // TODO: we disable Neon for now since we don't support the LLVM intrinsics for it. 
+ if feature == "neon" { + return false; + } + target_info.cpu_supports(feature) + // cSpell:disable + /* + adx, aes, avx, avx2, avx512bf16, avx512bitalg, avx512bw, avx512cd, avx512dq, avx512er, avx512f, avx512fp16, avx512ifma, + avx512pf, avx512vbmi, avx512vbmi2, avx512vl, avx512vnni, avx512vp2intersect, avx512vpopcntdq, + bmi1, bmi2, cmpxchg16b, ermsb, f16c, fma, fxsr, gfni, lzcnt, movbe, pclmulqdq, popcnt, rdrand, rdseed, rtm, + sha, sse, sse2, sse3, sse4.1, sse4.2, sse4a, ssse3, tbm, vaes, vpclmulqdq, xsave, xsavec, xsaveopt, xsaves + */ + // cSpell:enable + }); + + let has_reliable_f16 = target_info.supports_target_dependent_type(CType::Float16); + let has_reliable_f128 = target_info.supports_target_dependent_type(CType::Float128); TargetConfig { target_features, unstable_target_features, // There are no known bugs with GCC support for f16 or f128 - has_reliable_f16: true, - has_reliable_f16_math: true, - has_reliable_f128: true, - has_reliable_f128_math: true, + has_reliable_f16, + has_reliable_f16_math: has_reliable_f16, + has_reliable_f128, + has_reliable_f128_math: has_reliable_f128, } } diff --git a/compiler/rustc_codegen_gcc/src/type_.rs b/compiler/rustc_codegen_gcc/src/type_.rs index 4e0a250b550..15a0206607e 100644 --- a/compiler/rustc_codegen_gcc/src/type_.rs +++ b/compiler/rustc_codegen_gcc/src/type_.rs @@ -302,13 +302,13 @@ impl<'gcc, 'tcx> BaseTypeCodegenMethods for CodegenCx<'gcc, 'tcx> { #[cfg_attr(feature = "master", allow(unused_mut))] fn type_array(&self, ty: Type<'gcc>, mut len: u64) -> Type<'gcc> { #[cfg(not(feature = "master"))] - if let Some(struct_type) = ty.is_struct() { - if struct_type.get_field_count() == 0 { - // NOTE: since gccjit only supports i32 for the array size and libcore's tests uses a - // size of usize::MAX in test_binary_search, we workaround this by setting the size to - // zero for ZSTs. - len = 0; - } + if let Some(struct_type) = ty.is_struct() + && struct_type.get_field_count() == 0 + { + // NOTE: since gccjit only supports i32 for the array size and libcore's tests uses a + // size of usize::MAX in test_binary_search, we workaround this by setting the size to + // zero for ZSTs. + len = 0; } self.context.new_array_type(None, ty, len) diff --git a/compiler/rustc_codegen_gcc/src/type_of.rs b/compiler/rustc_codegen_gcc/src/type_of.rs index 5745acce6fe..093f902bc3d 100644 --- a/compiler/rustc_codegen_gcc/src/type_of.rs +++ b/compiler/rustc_codegen_gcc/src/type_of.rs @@ -217,7 +217,7 @@ impl<'tcx> LayoutGccExt<'tcx> for TyAndLayout<'tcx> { let ty = match *self.ty.kind() { // NOTE: we cannot remove this match like in the LLVM codegen because the call // to fn_ptr_backend_type handle the on-stack attribute. - // TODO(antoyo): find a less hackish way to hande the on-stack attribute. + // TODO(antoyo): find a less hackish way to handle the on-stack attribute. 
ty::FnPtr(sig_tys, hdr) => cx .fn_ptr_backend_type(cx.fn_abi_of_fn_ptr(sig_tys.with(hdr), ty::List::empty())), _ => self.scalar_gcc_type_at(cx, scalar, Size::ZERO), diff --git a/compiler/rustc_codegen_gcc/tests/failing-ui-tests.txt b/compiler/rustc_codegen_gcc/tests/failing-ui-tests.txt index 0a01a661c35..d931f0d3b5e 100644 --- a/compiler/rustc_codegen_gcc/tests/failing-ui-tests.txt +++ b/compiler/rustc_codegen_gcc/tests/failing-ui-tests.txt @@ -1,26 +1,12 @@ tests/ui/allocator/no_std-alloc-error-handler-custom.rs tests/ui/allocator/no_std-alloc-error-handler-default.rs tests/ui/asm/may_unwind.rs -tests/ui/functions-closures/parallel-codegen-closures.rs -tests/ui/linkage-attr/linkage1.rs -tests/ui/lto/dylib-works.rs -tests/ui/sepcomp/sepcomp-cci.rs -tests/ui/sepcomp/sepcomp-extern.rs -tests/ui/sepcomp/sepcomp-fns-backwards.rs -tests/ui/sepcomp/sepcomp-fns.rs -tests/ui/sepcomp/sepcomp-statics.rs tests/ui/asm/x86_64/may_unwind.rs -tests/ui/panics/catch-unwind-bang.rs tests/ui/drop/dynamic-drop-async.rs tests/ui/cfg/cfg-panic-abort.rs -tests/ui/drop/repeat-drop.rs -tests/ui/coroutine/panic-drops-resume.rs -tests/ui/fmt/format-args-capture.rs -tests/ui/coroutine/panic-drops.rs tests/ui/intrinsics/panic-uninitialized-zeroed.rs tests/ui/iterators/iter-sum-overflow-debug.rs tests/ui/iterators/iter-sum-overflow-overflow-checks.rs -tests/ui/mir/mir_calls_to_shims.rs tests/ui/mir/mir_drop_order.rs tests/ui/mir/mir_let_chains_drop_order.rs tests/ui/oom_unwind.rs @@ -31,27 +17,15 @@ tests/ui/unwind-no-uwtable.rs tests/ui/parser/unclosed-delimiter-in-dep.rs tests/ui/consts/missing_span_in_backtrace.rs tests/ui/drop/dynamic-drop.rs -tests/ui/issues/issue-43853.rs -tests/ui/issues/issue-47364.rs -tests/ui/macros/rfc-2011-nicer-assert-messages/assert-without-captures-does-not-create-unnecessary-code.rs -tests/ui/rfcs/rfc-1857-stabilize-drop-order/drop-order.rs tests/ui/rfcs/rfc-2091-track-caller/std-panic-locations.rs tests/ui/simd/issue-17170.rs tests/ui/simd/issue-39720.rs -tests/ui/alloc-error/default-alloc-error-hook.rs -tests/ui/coroutine/panic-safe.rs tests/ui/issues/issue-14875.rs tests/ui/issues/issue-29948.rs -tests/ui/panics/nested_panic_caught.rs tests/ui/process/println-with-broken-pipe.rs tests/ui/lto/thin-lto-inlines2.rs -tests/ui/lto/weak-works.rs -tests/ui/panic-runtime/lto-abort.rs -tests/ui/lto/thin-lto-inlines.rs -tests/ui/lto/thin-lto-global-allocator.rs -tests/ui/lto/msvc-imp-present.rs +tests/ui/panic-runtime/lto-abort.rs tests/ui/lto/lto-thin-rustc-loads-linker-plugin.rs -tests/ui/lto/all-crates.rs tests/ui/async-await/deep-futures-are-freeze.rs tests/ui/coroutine/resume-after-return.rs tests/ui/simd/masked-load-store.rs @@ -59,15 +33,11 @@ tests/ui/simd/repr_packed.rs tests/ui/async-await/in-trait/dont-project-to-specializable-projection.rs tests/ui/consts/try-operator.rs tests/ui/coroutine/unwind-abort-mix.rs -tests/ui/type-alias-impl-trait/rpit_tait_equality_in_canonical_query.rs -tests/ui/impl-trait/equality-in-canonical-query.rs tests/ui/consts/issue-miri-1910.rs -tests/ui/mir/mir_heavy_promoted.rs tests/ui/consts/const_cmp_type_id.rs tests/ui/consts/issue-73976-monomorphic.rs tests/ui/consts/issue-94675.rs tests/ui/traits/const-traits/const-drop-fail.rs -tests/ui/traits/const-traits/const-drop.rs tests/ui/runtime/on-broken-pipe/child-processes.rs tests/ui/sanitizer/cfi/assoc-ty-lifetime-issue-123053.rs tests/ui/sanitizer/cfi/async-closures.rs @@ -85,14 +55,9 @@ tests/ui/sanitizer/cfi/can-reveal-opaques.rs tests/ui/sanitizer/kcfi-mangling.rs tests/ui/statics/const_generics.rs 
tests/ui/backtrace/dylib-dep.rs -tests/ui/errors/pic-linker.rs tests/ui/delegation/fn-header.rs -tests/ui/consts/zst_no_llvm_alloc.rs tests/ui/consts/const-eval/parse_ints.rs -tests/ui/simd/intrinsic/generic-arithmetic-pass.rs tests/ui/simd/intrinsic/generic-as.rs -tests/ui/backtrace/backtrace.rs -tests/ui/lifetimes/tail-expr-lock-poisoning.rs tests/ui/runtime/rt-explody-panic-payloads.rs tests/ui/codegen/equal-pointers-unequal/as-cast/inline1.rs tests/ui/codegen/equal-pointers-unequal/as-cast/inline2.rs @@ -108,4 +73,9 @@ tests/ui/codegen/equal-pointers-unequal/strict-provenance/segfault.rs tests/ui/codegen/equal-pointers-unequal/strict-provenance/zero.rs tests/ui/simd/simd-bitmask-notpow2.rs tests/ui/codegen/StackColoring-not-blowup-stack-issue-40883.rs +tests/ui/numbers-arithmetic/u128-as-f32.rs +tests/ui/lto/all-crates.rs tests/ui/uninhabited/uninhabited-transparent-return-abi.rs +tests/ui/coroutine/panic-drops-resume.rs +tests/ui/coroutine/panic-drops.rs +tests/ui/coroutine/panic-safe.rs diff --git a/compiler/rustc_codegen_gcc/tests/lang_tests_common.rs b/compiler/rustc_codegen_gcc/tests/lang_tests_common.rs index bdcf14b4b26..9abe97b1087 100644 --- a/compiler/rustc_codegen_gcc/tests/lang_tests_common.rs +++ b/compiler/rustc_codegen_gcc/tests/lang_tests_common.rs @@ -57,10 +57,10 @@ pub fn main_inner(profile: Profile) { #[cfg(not(feature = "master"))] fn filter(filename: &Path) -> bool { - if let Some(filename) = filename.to_str() { - if filename.ends_with("gep.rs") { - return false; - } + if let Some(filename) = filename.to_str() + && filename.ends_with("gep.rs") + { + return false; } rust_filter(filename) } diff --git a/compiler/rustc_codegen_gcc/tests/run/packed_u128.rs b/compiler/rustc_codegen_gcc/tests/run/packed_u128.rs new file mode 100644 index 00000000000..b7cc6e21023 --- /dev/null +++ b/compiler/rustc_codegen_gcc/tests/run/packed_u128.rs @@ -0,0 +1,31 @@ +// Compiler: +// +// Run-time: +// status: 0 + +#![feature(no_core)] +#![no_std] +#![no_core] +#![no_main] + +extern crate mini_core; +use intrinsics::black_box; +use mini_core::*; +#[repr(packed(1))] +pub struct ScalarInt { + data: u128, + size: u8, +} +#[inline(never)] +#[no_mangle] +fn read_data(a: &ScalarInt) { + black_box(a.data); +} + +#[no_mangle] +extern "C" fn main(argc: i32, _argv: *const *const u8) -> i32 { + let data = + [black_box(ScalarInt { data: 0, size: 1 }), black_box(ScalarInt { data: 0, size: 1 })]; + read_data(&data[1]); + 0 +} diff --git a/compiler/rustc_codegen_gcc/tools/cspell_dicts/rust.txt b/compiler/rustc_codegen_gcc/tools/cspell_dicts/rust.txt new file mode 100644 index 00000000000..379cbd77eef --- /dev/null +++ b/compiler/rustc_codegen_gcc/tools/cspell_dicts/rust.txt @@ -0,0 +1,2 @@ +lateout +repr diff --git a/compiler/rustc_codegen_gcc/tools/cspell_dicts/rustc_codegen_gcc.txt b/compiler/rustc_codegen_gcc/tools/cspell_dicts/rustc_codegen_gcc.txt new file mode 100644 index 00000000000..31023e50ffa --- /dev/null +++ b/compiler/rustc_codegen_gcc/tools/cspell_dicts/rustc_codegen_gcc.txt @@ -0,0 +1,75 @@ +aapcs +addo +archs +ashl +ashr +cgcx +clzll +cmse +codegened +csky +ctlz +ctpop +cttz +ctzll +flto +fmaximumf +fmuladd +fmuladdf +fminimumf +fmul +fptosi +fptosui +fptoui +fwrapv +gimple +hrtb +immediates +liblto +llbb +llcx +llextra +llfn +lgcc +llmod +llresult +llret +ltrans +llty +llval +llvals +loong +lshr +masm +maximumf +maxnumf +mavx +mcmodel +minimumf +minnumf +monomorphization +monomorphizations +monomorphized +monomorphizing +movnt +mulo +nvptx +pointee +powitf +reassoc +riscv +rlib 
+roundevenf +rustc +sitofp +sizet +spir +subo +sysv +tbaa +uitofp +unord +uninlined +utrunc +xabort +zext diff --git a/compiler/rustc_codegen_gcc/tools/generate_intrinsics.py b/compiler/rustc_codegen_gcc/tools/generate_intrinsics.py index 181f1e501a4..ed0ebf00719 100644 --- a/compiler/rustc_codegen_gcc/tools/generate_intrinsics.py +++ b/compiler/rustc_codegen_gcc/tools/generate_intrinsics.py @@ -168,25 +168,39 @@ def update_intrinsics(llvm_path, llvmint, llvmint2): os.path.dirname(os.path.abspath(__file__)), "../src/intrinsic/archs.rs", ) + # A hashmap of all architectures. This allows us to first match on the architecture, and then on the intrinsics. + # This speeds up the comparison, and makes our code considerably smaller. + # Since all intrinsic names start with "llvm.", we skip that prefix. print("Updating content of `{}`...".format(output_file)) with open(output_file, "w", encoding="utf8") as out: out.write("// File generated by `rustc_codegen_gcc/tools/generate_intrinsics.py`\n") out.write("// DO NOT EDIT IT!\n") - out.write("match name {\n") + out.write("/// Translate a given LLVM intrinsic name to an equivalent GCC one.\n") + out.write("fn map_arch_intrinsic(name:&str)->&str{\n") + out.write('let Some(name) = name.strip_prefix("llvm.") else { unimplemented!("***** unsupported LLVM intrinsic {}", name) };\n') + out.write('let Some((arch, name)) = name.split_once(\'.\') else { unimplemented!("***** unsupported LLVM intrinsic {}", name) };\n') + out.write("match arch {\n") for arch in archs: if len(intrinsics[arch]) == 0: continue + out.write("\"{}\" => {{ #[allow(non_snake_case)] fn {}(name: &str) -> &str {{ match name {{".format(arch,arch)) intrinsics[arch].sort(key=lambda x: (x[0], x[2])) out.write(' // {}\n'.format(arch)) for entry in intrinsics[arch]: + llvm_name = entry[0].removeprefix("llvm."); + llvm_name = llvm_name.removeprefix(arch); + llvm_name = llvm_name.removeprefix("."); if entry[2] is True: # if it is a duplicate - out.write(' // [DUPLICATE]: "{}" => "{}",\n'.format(entry[0], entry[1])) + out.write(' // [DUPLICATE]: "{}" => "{}",\n'.format(llvm_name, entry[1])) elif "_round_mask" in entry[1]: - out.write(' // [INVALID CONVERSION]: "{}" => "{}",\n'.format(entry[0], entry[1])) + out.write(' // [INVALID CONVERSION]: "{}" => "{}",\n'.format(llvm_name, entry[1])) else: - out.write(' "{}" => "{}",\n'.format(entry[0], entry[1])) - out.write(' _ => unimplemented!("***** unsupported LLVM intrinsic {}", name),\n') - out.write("}\n") + out.write(' "{}" => "{}",\n'.format(llvm_name, entry[1])) + out.write(' _ => unimplemented!("***** unsupported LLVM intrinsic {}", name),\n') + out.write("}} }} {}(name) }}\n,".format(arch)) + out.write(' _ => unimplemented!("***** unsupported LLVM architecture {}", name),\n') + out.write("}\n}") + subprocess.call(["rustfmt", output_file]) print("Done!") diff --git a/compiler/rustc_codegen_llvm/messages.ftl b/compiler/rustc_codegen_llvm/messages.ftl index 3faeb9b3b22..3885f18271f 100644 --- a/compiler/rustc_codegen_llvm/messages.ftl +++ b/compiler/rustc_codegen_llvm/messages.ftl @@ -59,16 +59,6 @@ codegen_llvm_symbol_already_defined = codegen_llvm_target_machine = could not create LLVM TargetMachine for triple: {$triple} codegen_llvm_target_machine_with_llvm_err = could not create LLVM TargetMachine for triple: {$triple}: {$llvm_err} -codegen_llvm_unknown_ctarget_feature = - unknown and unstable feature specified for `-Ctarget-feature`: `{$feature}` - .note = it is still passed through to the codegen backend, but use of this feature might be 
unsound and the behavior of this feature can change in the future - .possible_feature = you might have meant: `{$rust_feature}` - .consider_filing_feature_request = consider filing a feature request - -codegen_llvm_unknown_ctarget_feature_prefix = - unknown feature specified for `-Ctarget-feature`: `{$feature}` - .note = features must begin with a `+` to enable or `-` to disable it - codegen_llvm_unknown_debuginfo_compression = unknown debuginfo compression algorithm {$algorithm} - will fall back to uncompressed debuginfo codegen_llvm_write_bytecode = failed to write bytecode to {$path}: {$err} diff --git a/compiler/rustc_codegen_llvm/src/allocator.rs b/compiler/rustc_codegen_llvm/src/allocator.rs index 4a78e694979..9dca63cfc8d 100644 --- a/compiler/rustc_codegen_llvm/src/allocator.rs +++ b/compiler/rustc_codegen_llvm/src/allocator.rs @@ -57,7 +57,7 @@ pub(crate) unsafe fn codegen( let from_name = mangle_internal_symbol(tcx, &global_fn_name(method.name)); let to_name = mangle_internal_symbol(tcx, &default_fn_name(method.name)); - create_wrapper_function(tcx, &cx, &from_name, &to_name, &args, output, false); + create_wrapper_function(tcx, &cx, &from_name, Some(&to_name), &args, output, false); } } @@ -66,7 +66,7 @@ pub(crate) unsafe fn codegen( tcx, &cx, &mangle_internal_symbol(tcx, "__rust_alloc_error_handler"), - &mangle_internal_symbol(tcx, alloc_error_handler_name(alloc_error_handler_kind)), + Some(&mangle_internal_symbol(tcx, alloc_error_handler_name(alloc_error_handler_kind))), &[usize, usize], // size, align None, true, @@ -81,11 +81,16 @@ pub(crate) unsafe fn codegen( let llval = llvm::LLVMConstInt(i8, val as u64, False); llvm::set_initializer(ll_g, llval); - let name = mangle_internal_symbol(tcx, NO_ALLOC_SHIM_IS_UNSTABLE); - let ll_g = cx.declare_global(&name, i8); - llvm::set_visibility(ll_g, llvm::Visibility::from_generic(tcx.sess.default_visibility())); - let llval = llvm::LLVMConstInt(i8, 0, False); - llvm::set_initializer(ll_g, llval); + // __rust_no_alloc_shim_is_unstable_v2 + create_wrapper_function( + tcx, + &cx, + &mangle_internal_symbol(tcx, NO_ALLOC_SHIM_IS_UNSTABLE), + None, + &[], + None, + false, + ); } if tcx.sess.opts.debuginfo != DebugInfo::None { @@ -99,7 +104,7 @@ fn create_wrapper_function( tcx: TyCtxt<'_>, cx: &SimpleCx<'_>, from_name: &str, - to_name: &str, + to_name: Option<&str>, args: &[&Type], output: Option<&Type>, no_return: bool, @@ -128,33 +133,38 @@ fn create_wrapper_function( attributes::apply_to_llfn(llfn, llvm::AttributePlace::Function, &[uwtable]); } - let callee = declare_simple_fn( - &cx, - to_name, - llvm::CallConv::CCallConv, - llvm::UnnamedAddr::Global, - llvm::Visibility::Hidden, - ty, - ); - if let Some(no_return) = no_return { - // -> ! DIFlagNoReturn - attributes::apply_to_llfn(callee, llvm::AttributePlace::Function, &[no_return]); - } - llvm::set_visibility(callee, llvm::Visibility::Hidden); - let llbb = unsafe { llvm::LLVMAppendBasicBlockInContext(cx.llcx, llfn, c"entry".as_ptr()) }; - let mut bx = SBuilder::build(&cx, llbb); - let args = args - .iter() - .enumerate() - .map(|(i, _)| llvm::get_param(llfn, i as c_uint)) - .collect::<Vec<_>>(); - let ret = bx.call(ty, callee, &args, None); - llvm::LLVMSetTailCall(ret, True); - if output.is_some() { - bx.ret(ret); + + if let Some(to_name) = to_name { + let callee = declare_simple_fn( + &cx, + to_name, + llvm::CallConv::CCallConv, + llvm::UnnamedAddr::Global, + llvm::Visibility::Hidden, + ty, + ); + if let Some(no_return) = no_return { + // -> ! 
DIFlagNoReturn + attributes::apply_to_llfn(callee, llvm::AttributePlace::Function, &[no_return]); + } + llvm::set_visibility(callee, llvm::Visibility::Hidden); + + let args = args + .iter() + .enumerate() + .map(|(i, _)| llvm::get_param(llfn, i as c_uint)) + .collect::<Vec<_>>(); + let ret = bx.call(ty, callee, &args, None); + llvm::LLVMSetTailCall(ret, True); + if output.is_some() { + bx.ret(ret); + } else { + bx.ret_void() + } } else { + assert!(output.is_none()); bx.ret_void() } } diff --git a/compiler/rustc_codegen_llvm/src/attributes.rs b/compiler/rustc_codegen_llvm/src/attributes.rs index 27fd09745ff..adb53e0b66c 100644 --- a/compiler/rustc_codegen_llvm/src/attributes.rs +++ b/compiler/rustc_codegen_llvm/src/attributes.rs @@ -491,11 +491,7 @@ pub(crate) fn llfn_attrs_from_instance<'ll, 'tcx>( let allocated_pointer = AttributeKind::AllocatedPointer.create_attr(cx.llcx); attributes::apply_to_llfn(llfn, AttributePlace::Argument(0), &[allocated_pointer]); } - // function alignment can be set globally with the `-Zmin-function-alignment=<n>` flag; - // the alignment from a `#[repr(align(<n>))]` is used if it specifies a higher alignment. - if let Some(align) = - Ord::max(cx.tcx.sess.opts.unstable_opts.min_function_alignment, codegen_fn_attrs.alignment) - { + if let Some(align) = codegen_fn_attrs.alignment { llvm::set_alignment(llfn, align); } if let Some(backchain) = backchain_attr(cx) { diff --git a/compiler/rustc_codegen_llvm/src/errors.rs b/compiler/rustc_codegen_llvm/src/errors.rs index 8bc74fbec7e..d50ad8a1a9c 100644 --- a/compiler/rustc_codegen_llvm/src/errors.rs +++ b/compiler/rustc_codegen_llvm/src/errors.rs @@ -3,36 +3,12 @@ use std::path::Path; use rustc_data_structures::small_c_str::SmallCStr; use rustc_errors::{Diag, DiagCtxtHandle, Diagnostic, EmissionGuarantee, Level}; -use rustc_macros::{Diagnostic, Subdiagnostic}; +use rustc_macros::Diagnostic; use rustc_span::Span; use crate::fluent_generated as fluent; #[derive(Diagnostic)] -#[diag(codegen_llvm_unknown_ctarget_feature_prefix)] -#[note] -pub(crate) struct UnknownCTargetFeaturePrefix<'a> { - pub feature: &'a str, -} - -#[derive(Diagnostic)] -#[diag(codegen_llvm_unknown_ctarget_feature)] -#[note] -pub(crate) struct UnknownCTargetFeature<'a> { - pub feature: &'a str, - #[subdiagnostic] - pub rust_feature: PossibleFeature<'a>, -} - -#[derive(Subdiagnostic)] -pub(crate) enum PossibleFeature<'a> { - #[help(codegen_llvm_possible_feature)] - Some { rust_feature: &'a str }, - #[help(codegen_llvm_consider_filing_feature_request)] - None, -} - -#[derive(Diagnostic)] #[diag(codegen_llvm_symbol_already_defined)] pub(crate) struct SymbolAlreadyDefined<'a> { #[primary_span] diff --git a/compiler/rustc_codegen_llvm/src/llvm_util.rs b/compiler/rustc_codegen_llvm/src/llvm_util.rs index 0e77bc43df8..6fd07d562af 100644 --- a/compiler/rustc_codegen_llvm/src/llvm_util.rs +++ b/compiler/rustc_codegen_llvm/src/llvm_util.rs @@ -6,27 +6,20 @@ use std::sync::Once; use std::{ptr, slice, str}; use libc::c_int; -use rustc_codegen_ssa::TargetConfig; use rustc_codegen_ssa::base::wants_wasm_eh; -use rustc_codegen_ssa::codegen_attrs::check_tied_features; -use rustc_data_structures::fx::{FxHashMap, FxHashSet}; +use rustc_codegen_ssa::target_features::cfg_target_feature; +use rustc_codegen_ssa::{TargetConfig, target_features}; +use rustc_data_structures::fx::FxHashSet; use rustc_data_structures::small_c_str::SmallCStr; -use rustc_data_structures::unord::UnordSet; use rustc_fs_util::path_to_c_string; use rustc_middle::bug; use rustc_session::Session; use 
rustc_session::config::{PrintKind, PrintRequest}; -use rustc_session::features::{StabilityExt, retpoline_features_by_flags}; -use rustc_span::Symbol; use rustc_target::spec::{MergeFunctions, PanicStrategy, SmallDataThresholdSupport}; -use rustc_target::target_features::{RUSTC_SPECIAL_FEATURES, RUSTC_SPECIFIC_FEATURES}; use smallvec::{SmallVec, smallvec}; use crate::back::write::create_informational_target_machine; -use crate::errors::{ - FixedX18InvalidArch, PossibleFeature, UnknownCTargetFeature, UnknownCTargetFeaturePrefix, -}; -use crate::llvm; +use crate::{errors, llvm}; static INIT: Once = Once::new(); @@ -195,15 +188,6 @@ impl<'a> LLVMFeature<'a> { ) -> Self { Self { llvm_feature_name, dependencies } } - - fn contains(&'a self, feat: &str) -> bool { - self.iter().any(|dep| dep == feat) - } - - fn iter(&'a self) -> impl Iterator<Item = &'a str> { - let dependencies = self.dependencies.iter().map(|feat| feat.as_str()); - std::iter::once(self.llvm_feature_name).chain(dependencies) - } } impl<'a> IntoIterator for LLVMFeature<'a> { @@ -216,18 +200,22 @@ impl<'a> IntoIterator for LLVMFeature<'a> { } } -// WARNING: the features after applying `to_llvm_features` must be known -// to LLVM or the feature detection code will walk past the end of the feature -// array, leading to crashes. -// -// To find a list of LLVM's names, see llvm-project/llvm/lib/Target/{ARCH}/*.td -// where `{ARCH}` is the architecture name. Look for instances of `SubtargetFeature`. -// -// Check the current rustc fork of LLVM in the repo at https://github.com/rust-lang/llvm-project/. -// The commit in use can be found via the `llvm-project` submodule in -// https://github.com/rust-lang/rust/tree/master/src Though note that Rust can also be build with -// an external precompiled version of LLVM which might lead to failures if the oldest tested / -// supported LLVM version doesn't yet support the relevant intrinsics. +/// Convert a Rust feature name to an LLVM feature name. Returning `None` means the +/// feature should be skipped, usually because it is not supported by the current +/// LLVM version. +/// +/// WARNING: the features after applying `to_llvm_features` must be known +/// to LLVM or the feature detection code will walk past the end of the feature +/// array, leading to crashes. +/// +/// To find a list of LLVM's names, see llvm-project/llvm/lib/Target/{ARCH}/*.td +/// where `{ARCH}` is the architecture name. Look for instances of `SubtargetFeature`. +/// +/// Check the current rustc fork of LLVM in the repo at +/// <https://github.com/rust-lang/llvm-project/>. The commit in use can be found via the +/// `llvm-project` submodule in <https://github.com/rust-lang/rust/tree/master/src> Though note that +/// Rust can also be build with an external precompiled version of LLVM which might lead to failures +/// if the oldest tested / supported LLVM version doesn't yet support the relevant intrinsics. pub(crate) fn to_llvm_features<'a>(sess: &Session, s: &'a str) -> Option<LLVMFeature<'a>> { let arch = if sess.target.arch == "x86_64" { "x86" @@ -343,98 +331,25 @@ pub(crate) fn target_config(sess: &Session) -> TargetConfig { // the target CPU, that is still expanded to target features (with all their implied features) // by LLVM. let target_machine = create_informational_target_machine(sess, true); - // Compute which of the known target features are enabled in the 'base' target machine. We only - // consider "supported" features; "forbidden" features are not reflected in `cfg` as of now. 
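Aside (illustrative, not part of the patch): in the LLVM backend, the closure passed to cfg_target_feature first expands a Rust feature name through to_llvm_features and only reports the feature as enabled when every expanded LLVM feature is enabled in the informational target machine. A hedged sketch of that rule, with a plain closure standing in for to_llvm_features and a HashSet standing in for LLVMRustHasFeature:

use std::collections::HashSet;

fn rust_feature_enabled(
    rust_feature: &str,
    to_llvm: impl Fn(&str) -> Option<Vec<&'static str>>,
    enabled_in_llvm: &HashSet<&str>,
) -> bool {
    match to_llvm(rust_feature) {
        // Every LLVM-level feature the Rust feature expands to must be enabled.
        Some(llvm_features) => llvm_features.iter().all(|f| enabled_in_llvm.contains(f)),
        // No mapping (e.g. unsupported on this LLVM version): treat as unavailable.
        None => false,
    }
}

fn main() {
    let enabled: HashSet<&str> = ["sse2", "crc32"].into_iter().collect();
    // Hypothetical mapping for illustration: one Rust feature may expand to several LLVM ones.
    let to_llvm = |f: &str| match f {
        "sse4.2" => Some(vec!["sse4.2", "crc32"]),
        "sse2" => Some(vec!["sse2"]),
        _ => None,
    };
    assert!(rust_feature_enabled("sse2", to_llvm, &enabled));
    assert!(!rust_feature_enabled("sse4.2", to_llvm, &enabled)); // "sse4.2" itself not enabled
}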
- let mut features: FxHashSet<Symbol> = sess - .target - .rust_target_features() - .iter() - .filter(|(feature, _, _)| { - // skip checking special features, as LLVM may not understand them - if RUSTC_SPECIAL_FEATURES.contains(feature) { - return true; - } - if let Some(feat) = to_llvm_features(sess, feature) { - for llvm_feature in feat { - let cstr = SmallCStr::new(llvm_feature); - // `LLVMRustHasFeature` is moderately expensive. On targets with many - // features (e.g. x86) these calls take a non-trivial fraction of runtime - // when compiling very small programs. - if !unsafe { llvm::LLVMRustHasFeature(target_machine.raw(), cstr.as_ptr()) } { - return false; - } + + let (unstable_target_features, target_features) = cfg_target_feature(sess, |feature| { + if let Some(feat) = to_llvm_features(sess, feature) { + // All the LLVM features this expands to must be enabled. + for llvm_feature in feat { + let cstr = SmallCStr::new(llvm_feature); + // `LLVMRustHasFeature` is moderately expensive. On targets with many + // features (e.g. x86) these calls take a non-trivial fraction of runtime + // when compiling very small programs. + if !unsafe { llvm::LLVMRustHasFeature(target_machine.raw(), cstr.as_ptr()) } { + return false; } - true - } else { - false } - }) - .map(|(feature, _, _)| Symbol::intern(feature)) - .collect(); - - // Add enabled and remove disabled features. - for (enabled, feature) in - sess.opts.cg.target_feature.split(',').filter_map(|s| match s.chars().next() { - Some('+') => Some((true, Symbol::intern(&s[1..]))), - Some('-') => Some((false, Symbol::intern(&s[1..]))), - _ => None, - }) - { - if enabled { - // Also add all transitively implied features. - - // We don't care about the order in `features` since the only thing we use it for is the - // `features.contains` below. - #[allow(rustc::potential_query_instability)] - features.extend( - sess.target - .implied_target_features(feature.as_str()) - .iter() - .map(|s| Symbol::intern(s)), - ); + true } else { - // Remove transitively reverse-implied features. - - // We don't care about the order in `features` since the only thing we use it for is the - // `features.contains` below. - #[allow(rustc::potential_query_instability)] - features.retain(|f| { - if sess.target.implied_target_features(f.as_str()).contains(&feature.as_str()) { - // If `f` if implies `feature`, then `!feature` implies `!f`, so we have to - // remove `f`. (This is the standard logical contraposition principle.) - false - } else { - // We can keep `f`. - true - } - }); + false } - } - - // Filter enabled features based on feature gates. - let f = |allow_unstable| { - sess.target - .rust_target_features() - .iter() - .filter_map(|(feature, gate, _)| { - // The `allow_unstable` set is used by rustc internally to determined which target - // features are truly available, so we want to return even perma-unstable - // "forbidden" features. 
- if allow_unstable - || (gate.in_cfg() - && (sess.is_nightly_build() || gate.requires_nightly().is_none())) - { - Some(Symbol::intern(feature)) - } else { - None - } - }) - .filter(|feature| features.contains(&feature)) - .collect() - }; + }); - let target_features = f(false); - let unstable_target_features = f(true); let mut cfg = TargetConfig { target_features, unstable_target_features, @@ -707,10 +622,18 @@ pub(crate) fn target_cpu(sess: &Session) -> &str { handle_native(cpu_name) } -fn llvm_features_by_flags(sess: &Session) -> Vec<&str> { - let mut features: Vec<&str> = Vec::new(); - retpoline_features_by_flags(sess, &mut features); - features +/// The target features for compiler flags other than `-Ctarget-features`. +fn llvm_features_by_flags(sess: &Session, features: &mut Vec<String>) { + target_features::retpoline_features_by_flags(sess, features); + + // -Zfixed-x18 + if sess.opts.unstable_opts.fixed_x18 { + if sess.target.arch != "aarch64" { + sess.dcx().emit_fatal(errors::FixedX18InvalidArch { arch: &sess.target.arch }); + } else { + features.push("+reserve-x18".into()); + } + } } /// The list of LLVM features computed from CLI flags (`-Ctarget-cpu`, `-Ctarget-feature`, @@ -777,6 +700,8 @@ pub(crate) fn global_llvm_features( .split(',') .filter(|v| !v.is_empty()) // Drop +v8plus feature introduced in LLVM 20. + // (Hard-coded target features do not go through `to_llvm_feature` since they already + // are LLVM feature names, hence we need a special case here.) .filter(|v| *v != "+v8plus" || get_version() >= (20, 0, 0)) .map(String::from), ); @@ -787,86 +712,23 @@ pub(crate) fn global_llvm_features( // -Ctarget-features if !only_base_features { - let known_features = sess.target.rust_target_features(); - // Will only be filled when `diagnostics` is set! - let mut featsmap = FxHashMap::default(); - - // Compute implied features - let mut all_rust_features = vec![]; - for feature in sess.opts.cg.target_feature.split(',').chain(llvm_features_by_flags(sess)) { - if let Some(feature) = feature.strip_prefix('+') { - all_rust_features.extend( - UnordSet::from(sess.target.implied_target_features(feature)) - .to_sorted_stable_ord() - .iter() - .map(|&&s| (true, s)), - ) - } else if let Some(feature) = feature.strip_prefix('-') { - // FIXME: Why do we not remove implied features on "-" here? - // We do the equivalent above in `target_config`. - // See <https://github.com/rust-lang/rust/issues/134792>. - all_rust_features.push((false, feature)); - } else if !feature.is_empty() { - if diagnostics { - sess.dcx().emit_warn(UnknownCTargetFeaturePrefix { feature }); - } - } - } - // Remove features that are meant for rustc, not LLVM. - all_rust_features.retain(|(_, feature)| { - // Retain if it is not a rustc feature - !RUSTC_SPECIFIC_FEATURES.contains(feature) - }); - - // Check feature validity. 
- if diagnostics { - for &(enable, feature) in &all_rust_features { - let feature_state = known_features.iter().find(|&&(v, _, _)| v == feature); - match feature_state { - None => { - let rust_feature = - known_features.iter().find_map(|&(rust_feature, _, _)| { - let llvm_features = to_llvm_features(sess, rust_feature)?; - if llvm_features.contains(feature) - && !llvm_features.contains(rust_feature) - { - Some(rust_feature) - } else { - None - } - }); - let unknown_feature = if let Some(rust_feature) = rust_feature { - UnknownCTargetFeature { - feature, - rust_feature: PossibleFeature::Some { rust_feature }, - } - } else { - UnknownCTargetFeature { feature, rust_feature: PossibleFeature::None } - }; - sess.dcx().emit_warn(unknown_feature); - } - Some((_, stability, _)) => { - stability.verify_feature_enabled_by_flag(sess, enable, feature); - } - } - - // FIXME(nagisa): figure out how to not allocate a full hashset here. - featsmap.insert(feature, enable); - } - } - - // Translate this into LLVM features. - let feats = all_rust_features - .iter() - .filter_map(|&(enable, feature)| { + target_features::flag_to_backend_features( + sess, + diagnostics, + |feature| { + to_llvm_features(sess, feature) + .map(|f| SmallVec::<[&str; 2]>::from_iter(f.into_iter())) + .unwrap_or_default() + }, + |feature, enable| { let enable_disable = if enable { '+' } else { '-' }; // We run through `to_llvm_features` when // passing requests down to LLVM. This means that all in-language // features also work on the command line instead of having two // different names when the LLVM name and the Rust name differ. - let llvm_feature = to_llvm_features(sess, feature)?; + let Some(llvm_feature) = to_llvm_features(sess, feature) else { return }; - Some( + features.extend( std::iter::once(format!( "{}{}", enable_disable, llvm_feature.llvm_feature_name @@ -881,27 +743,12 @@ pub(crate) fn global_llvm_features( }, )), ) - }) - .flatten(); - features.extend(feats); - - if diagnostics && let Some(f) = check_tied_features(sess, &featsmap) { - sess.dcx().emit_err(rustc_codegen_ssa::errors::TargetFeatureDisableOrEnable { - features: f, - span: None, - missing_features: None, - }); - } + }, + ); } - // -Zfixed-x18 - if sess.opts.unstable_opts.fixed_x18 { - if sess.target.arch != "aarch64" { - sess.dcx().emit_fatal(FixedX18InvalidArch { arch: &sess.target.arch }); - } else { - features.push("+reserve-x18".into()); - } - } + // We add this in the "base target" so that these show up in `sess.unstable_target_features`. + llvm_features_by_flags(sess, &mut features); features } diff --git a/compiler/rustc_codegen_ssa/Cargo.toml b/compiler/rustc_codegen_ssa/Cargo.toml index e9c4c255bce..cfae1b3ec98 100644 --- a/compiler/rustc_codegen_ssa/Cargo.toml +++ b/compiler/rustc_codegen_ssa/Cargo.toml @@ -6,13 +6,11 @@ edition = "2024" [dependencies] # tidy-alphabetical-start ar_archive_writer = "0.4.2" -arrayvec = { version = "0.7", default-features = false } bitflags = "2.4.1" bstr = "1.11.3" # Pinned so `cargo update` bumps don't cause breakage. Please also update the # `cc` in `rustc_llvm` if you update the `cc` here. 
cc = "=1.2.16" -either = "1.5.0" itertools = "0.12" pathdiff = "0.2.0" regex = "1.4" diff --git a/compiler/rustc_codegen_ssa/messages.ftl b/compiler/rustc_codegen_ssa/messages.ftl index 5322fe58cf3..b2e86414d90 100644 --- a/compiler/rustc_codegen_ssa/messages.ftl +++ b/compiler/rustc_codegen_ssa/messages.ftl @@ -48,8 +48,6 @@ codegen_ssa_error_writing_def_file = codegen_ssa_expected_name_value_pair = expected name value pair -codegen_ssa_expected_one_argument = expected one argument - codegen_ssa_expected_used_symbol = expected `used`, `used(compiler)` or `used(linker)` codegen_ssa_extern_funcs_not_found = some `extern` functions couldn't be found; some native libraries may need to be installed or have their path specified @@ -68,6 +66,11 @@ codegen_ssa_failed_to_write = failed to write {$path}: {$error} codegen_ssa_field_associated_value_expected = associated value expected for `{$name}` +codegen_ssa_forbidden_ctarget_feature = + target feature `{$feature}` cannot be {$enabled} with `-Ctarget-feature`: {$reason} + .note = this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! +codegen_ssa_forbidden_ctarget_feature_issue = for more information, see issue #116344 <https://github.com/rust-lang/rust/issues/116344> + codegen_ssa_forbidden_target_feature_attr = target feature `{$feature}` cannot be enabled with `#[target_feature]`: {$reason} @@ -86,9 +89,6 @@ codegen_ssa_incorrect_cgu_reuse_type = codegen_ssa_insufficient_vs_code_product = VS Code is a different product, and is not sufficient. -codegen_ssa_invalid_argument = invalid argument - .help = valid inline arguments are `always` and `never` - codegen_ssa_invalid_instruction_set = invalid instruction set specified codegen_ssa_invalid_link_ordinal_nargs = incorrect number of arguments to `#[link_ordinal]` @@ -221,6 +221,8 @@ codegen_ssa_multiple_main_functions = entry symbol `main` declared multiple time codegen_ssa_no_field = no field `{$name}` +codegen_ssa_no_mangle_nameless = `#[no_mangle]` cannot be used on {$definition} as it has no name + codegen_ssa_no_module_named = no module named `{$user_path}` (mangled: {$cgu_name}). 
available modules: {$cgu_names} @@ -368,8 +370,22 @@ codegen_ssa_unexpected_parameter_name = unexpected parameter name codegen_ssa_unknown_archive_kind = Don't know how to build archive of type: {$kind} +codegen_ssa_unknown_ctarget_feature = + unknown and unstable feature specified for `-Ctarget-feature`: `{$feature}` + .note = it is still passed through to the codegen backend, but use of this feature might be unsound and the behavior of this feature can change in the future + .possible_feature = you might have meant: `{$rust_feature}` + .consider_filing_feature_request = consider filing a feature request + +codegen_ssa_unknown_ctarget_feature_prefix = + unknown feature specified for `-Ctarget-feature`: `{$feature}` + .note = features must begin with a `+` to enable or `-` to disable it + codegen_ssa_unknown_reuse_kind = unknown cgu-reuse-kind `{$kind}` specified +codegen_ssa_unstable_ctarget_feature = + unstable feature specified for `-Ctarget-feature`: `{$feature}` + .note = this feature is not stably supported; its behavior can change in the future + codegen_ssa_unsupported_instruction_set = target does not support `#[instruction_set]` codegen_ssa_unsupported_link_self_contained = option `-C link-self-contained` is not supported on this target diff --git a/compiler/rustc_codegen_ssa/src/back/symbol_export.rs b/compiler/rustc_codegen_ssa/src/back/symbol_export.rs index 92b9b6e132e..d0b6c7470fb 100644 --- a/compiler/rustc_codegen_ssa/src/back/symbol_export.rs +++ b/compiler/rustc_codegen_ssa/src/back/symbol_export.rs @@ -219,6 +219,7 @@ fn exported_symbols_provider_local<'tcx>( .chain([ mangle_internal_symbol(tcx, "__rust_alloc_error_handler"), mangle_internal_symbol(tcx, OomStrategy::SYMBOL), + mangle_internal_symbol(tcx, NO_ALLOC_SHIM_IS_UNSTABLE), ]) { let exported_symbol = ExportedSymbol::NoDefId(SymbolName::new(tcx, &symbol_name)); @@ -232,19 +233,6 @@ fn exported_symbols_provider_local<'tcx>( }, )); } - - let exported_symbol = ExportedSymbol::NoDefId(SymbolName::new( - tcx, - &mangle_internal_symbol(tcx, NO_ALLOC_SHIM_IS_UNSTABLE), - )); - symbols.push(( - exported_symbol, - SymbolExportInfo { - level: SymbolExportLevel::Rust, - kind: SymbolExportKind::Data, - used: false, - }, - )) } if tcx.sess.instrument_coverage() || tcx.sess.opts.cg.profile_generate.enabled() { diff --git a/compiler/rustc_codegen_ssa/src/back/write.rs b/compiler/rustc_codegen_ssa/src/back/write.rs index bbf9cceef2a..c3bfe4c13cd 100644 --- a/compiler/rustc_codegen_ssa/src/back/write.rs +++ b/compiler/rustc_codegen_ssa/src/back/write.rs @@ -14,10 +14,10 @@ use rustc_data_structures::jobserver::{self, Acquired}; use rustc_data_structures::memmap::Mmap; use rustc_data_structures::profiling::{SelfProfilerRef, VerboseTimingGuard}; use rustc_errors::emitter::Emitter; -use rustc_errors::translation::Translate; +use rustc_errors::translation::Translator; use rustc_errors::{ - Diag, DiagArgMap, DiagCtxt, DiagMessage, ErrCode, FatalError, FluentBundle, Level, MultiSpan, - Style, Suggestions, + Diag, DiagArgMap, DiagCtxt, DiagMessage, ErrCode, FatalError, Level, MultiSpan, Style, + Suggestions, }; use rustc_fs_util::link_or_copy; use rustc_hir::def_id::{CrateNum, LOCAL_CRATE}; @@ -1889,16 +1889,6 @@ impl SharedEmitter { } } -impl Translate for SharedEmitter { - fn fluent_bundle(&self) -> Option<&FluentBundle> { - None - } - - fn fallback_fluent_bundle(&self) -> &FluentBundle { - panic!("shared emitter attempted to translate a diagnostic"); - } -} - impl Emitter for SharedEmitter { fn emit_diagnostic( &mut self, @@ -1932,6 
+1922,10 @@ impl Emitter for SharedEmitter { fn source_map(&self) -> Option<&SourceMap> { None } + + fn translator(&self) -> &Translator { + panic!("shared emitter attempted to translate a diagnostic"); + } } impl SharedEmitterMain { diff --git a/compiler/rustc_codegen_ssa/src/codegen_attrs.rs b/compiler/rustc_codegen_ssa/src/codegen_attrs.rs index 0b31fa8fa88..b006fdbb658 100644 --- a/compiler/rustc_codegen_ssa/src/codegen_attrs.rs +++ b/compiler/rustc_codegen_ssa/src/codegen_attrs.rs @@ -3,9 +3,9 @@ use std::str::FromStr; use rustc_abi::ExternAbi; use rustc_ast::expand::autodiff_attrs::{AutoDiffAttrs, DiffActivity, DiffMode}; use rustc_ast::{LitKind, MetaItem, MetaItemInner, attr}; -use rustc_attr_data_structures::ReprAttr::ReprAlign; -use rustc_attr_data_structures::{AttributeKind, InlineAttr, InstructionSetAttr, OptimizeAttr}; -use rustc_data_structures::fx::FxHashMap; +use rustc_attr_data_structures::{ + AttributeKind, InlineAttr, InstructionSetAttr, OptimizeAttr, ReprAttr, find_attr, +}; use rustc_hir::def::DefKind; use rustc_hir::def_id::{DefId, LOCAL_CRATE, LocalDefId}; use rustc_hir::weak_lang_items::WEAK_LANG_ITEMS; @@ -17,14 +17,16 @@ use rustc_middle::mir::mono::Linkage; use rustc_middle::query::Providers; use rustc_middle::span_bug; use rustc_middle::ty::{self as ty, TyCtxt}; +use rustc_session::lint; use rustc_session::parse::feature_err; -use rustc_session::{Session, lint}; use rustc_span::{Ident, Span, sym}; use rustc_target::spec::SanitizerSet; -use tracing::debug; use crate::errors; -use crate::target_features::{check_target_feature_trait_unsafe, from_target_feature_attr}; +use crate::errors::NoMangleNameless; +use crate::target_features::{ + check_target_feature_trait_unsafe, check_tied_features, from_target_feature_attr, +}; fn linkage_by_name(tcx: TyCtxt<'_>, def_id: LocalDefId, name: &str) -> Linkage { use rustc_middle::mir::mono::Linkage::*; @@ -83,11 +85,9 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { let rust_target_features = tcx.rust_target_features(LOCAL_CRATE); - let mut inline_span = None; let mut link_ordinal_span = None; let mut no_sanitize_span = None; let mut mixed_export_name_no_mangle_lint_state = MixedExportNameAndNoMangleState::default(); - let mut no_mangle_span = None; for attr in attrs.iter() { // In some cases, attribute are only valid on functions, but it's the `check_attr` @@ -115,20 +115,47 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { AttributeKind::Repr(reprs) => { codegen_fn_attrs.alignment = reprs .iter() - .filter_map(|(r, _)| if let ReprAlign(x) = r { Some(*x) } else { None }) + .filter_map( + |(r, _)| if let ReprAttr::ReprAlign(x) = r { Some(*x) } else { None }, + ) .max(); } - + AttributeKind::Cold(_) => codegen_fn_attrs.flags |= CodegenFnAttrFlags::COLD, + AttributeKind::Align { align, .. } => codegen_fn_attrs.alignment = Some(*align), + AttributeKind::NoMangle(attr_span) => { + if tcx.opt_item_name(did.to_def_id()).is_some() { + codegen_fn_attrs.flags |= CodegenFnAttrFlags::NO_MANGLE; + mixed_export_name_no_mangle_lint_state.track_no_mangle( + *attr_span, + tcx.local_def_id_to_hir_id(did), + attr, + ); + } else { + tcx.dcx().emit_err(NoMangleNameless { + span: *attr_span, + definition: format!( + "{} {}", + tcx.def_descr_article(did.to_def_id()), + tcx.def_descr(did.to_def_id()) + ), + }); + } + } _ => {} } } + // Apply the minimum function alignment here, so that individual backends don't have to. 
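The `Ord::max` over two `Option`s in the hunk above relies on `None` ordering below every `Some`, so the flag-provided minimum and the attribute-provided alignment combine to whichever is larger. A small sketch with a plain `u64` standing in for `Align` (the real types and option names are in the surrounding diff; the values here are made up):

```rust
fn main() {
    // Stand-ins for `Option<Align>`: alignment in bytes, `None` = not requested.
    let attr_align: Option<u64> = Some(32); // e.g. from an alignment attribute
    let min_fn_align: Option<u64> = Some(16); // e.g. from -Zmin-function-alignment=16

    // `None < Some(_)`, so max() keeps whichever side actually asks for alignment,
    // and the larger of the two when both do.
    assert_eq!(Ord::max(attr_align, min_fn_align), Some(32));
    assert_eq!(Ord::max(None, min_fn_align), Some(16));
    assert_eq!(Ord::max(None::<u64>, None), None);
}
```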
+ codegen_fn_attrs.alignment = Ord::max( + codegen_fn_attrs.alignment, + tcx.sess.opts.unstable_opts.min_function_alignment, + ); + let Some(Ident { name, .. }) = attr.ident() else { continue; }; match name { - sym::cold => codegen_fn_attrs.flags |= CodegenFnAttrFlags::COLD, sym::rustc_allocator => codegen_fn_attrs.flags |= CodegenFnAttrFlags::ALLOCATOR, sym::ffi_pure => codegen_fn_attrs.flags |= CodegenFnAttrFlags::FFI_PURE, sym::ffi_const => codegen_fn_attrs.flags |= CodegenFnAttrFlags::FFI_CONST, @@ -139,28 +166,6 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { codegen_fn_attrs.flags |= CodegenFnAttrFlags::ALLOCATOR_ZEROED } sym::naked => codegen_fn_attrs.flags |= CodegenFnAttrFlags::NAKED, - sym::no_mangle => { - no_mangle_span = Some(attr.span()); - if tcx.opt_item_name(did.to_def_id()).is_some() { - codegen_fn_attrs.flags |= CodegenFnAttrFlags::NO_MANGLE; - mixed_export_name_no_mangle_lint_state.track_no_mangle( - attr.span(), - tcx.local_def_id_to_hir_id(did), - attr, - ); - } else { - tcx.dcx() - .struct_span_err( - attr.span(), - format!( - "`#[no_mangle]` cannot be used on {} {} as it has no name", - tcx.def_descr_article(did.to_def_id()), - tcx.def_descr(did.to_def_id()), - ), - ) - .emit(); - } - } sym::rustc_std_internal_symbol => { codegen_fn_attrs.flags |= CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL } @@ -449,48 +454,14 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { mixed_export_name_no_mangle_lint_state.lint_if_mixed(tcx); - codegen_fn_attrs.inline = attrs.iter().fold(InlineAttr::None, |ia, attr| { - if !attr.has_name(sym::inline) { - return ia; - } - - if attr.is_word() { - return InlineAttr::Hint; - } - let Some(ref items) = attr.meta_item_list() else { - return ia; - }; - inline_span = Some(attr.span()); - - let [item] = &items[..] else { - tcx.dcx().emit_err(errors::ExpectedOneArgument { span: attr.span() }); - return InlineAttr::None; - }; - - if item.has_name(sym::always) { - InlineAttr::Always - } else if item.has_name(sym::never) { - InlineAttr::Never - } else { - tcx.dcx().emit_err(errors::InvalidArgument { span: items[0].span() }); - - InlineAttr::None - } - }); - codegen_fn_attrs.inline = attrs.iter().fold(codegen_fn_attrs.inline, |ia, attr| { - if !attr.has_name(sym::rustc_force_inline) || !tcx.features().rustc_attrs() { - return ia; - } - - if attr.is_word() { - InlineAttr::Force { attr_span: attr.span(), reason: None } - } else if let Some(val) = attr.value_str() { - InlineAttr::Force { attr_span: attr.span(), reason: Some(val) } - } else { - debug!("`rustc_force_inline` not checked by attribute validation"); - ia - } - }); + let inline_span; + (codegen_fn_attrs.inline, inline_span) = if let Some((inline_attr, span)) = + find_attr!(attrs, AttributeKind::Inline(i, span) => (*i, *span)) + { + (inline_attr, Some(span)) + } else { + (InlineAttr::None, None) + }; // naked function MUST NOT be inlined! 
This attribute is required for the rust compiler itself, // but not for the code generation backend because at that point the naked function will just be @@ -499,34 +470,8 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { codegen_fn_attrs.inline = InlineAttr::Never; } - codegen_fn_attrs.optimize = attrs.iter().fold(OptimizeAttr::Default, |ia, attr| { - if !attr.has_name(sym::optimize) { - return ia; - } - if attr.is_word() { - tcx.dcx().emit_err(errors::ExpectedOneArgumentOptimize { span: attr.span() }); - return ia; - } - let Some(ref items) = attr.meta_item_list() else { - return OptimizeAttr::Default; - }; - - inline_span = Some(attr.span()); - let [item] = &items[..] else { - tcx.dcx().emit_err(errors::ExpectedOneArgumentOptimize { span: attr.span() }); - return OptimizeAttr::Default; - }; - if item.has_name(sym::size) { - OptimizeAttr::Size - } else if item.has_name(sym::speed) { - OptimizeAttr::Speed - } else if item.has_name(sym::none) { - OptimizeAttr::DoNotOptimize - } else { - tcx.dcx().emit_err(errors::InvalidArgumentOptimize { span: item.span() }); - OptimizeAttr::Default - } - }); + codegen_fn_attrs.optimize = + find_attr!(attrs, AttributeKind::Optimize(i, _) => *i).unwrap_or(OptimizeAttr::Default); // #73631: closures inherit `#[target_feature]` annotations // @@ -602,12 +547,15 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL) && codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::NO_MANGLE) { + let no_mangle_span = + find_attr!(attrs, AttributeKind::NoMangle(no_mangle_span) => *no_mangle_span) + .unwrap_or_default(); let lang_item = lang_items::extract(attrs).map_or(None, |(name, _span)| LangItem::from_name(name)); let mut err = tcx .dcx() .struct_span_err( - no_mangle_span.unwrap_or_default(), + no_mangle_span, "`#[no_mangle]` cannot be used on internal language items", ) .with_note("Rustc requires this item to have a specific mangled name.") @@ -660,25 +608,6 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { codegen_fn_attrs } -/// Given a map from target_features to whether they are enabled or disabled, ensure only valid -/// combinations are allowed. -pub fn check_tied_features( - sess: &Session, - features: &FxHashMap<&str, bool>, -) -> Option<&'static [&'static str]> { - if !features.is_empty() { - for tied in sess.target.tied_target_features() { - // Tied features must be set to the same value, or not set at all - let mut tied_iter = tied.iter(); - let enabled = features.get(tied_iter.next().unwrap()); - if tied_iter.any(|f| enabled != features.get(f)) { - return Some(tied); - } - } - } - None -} - /// Checks if the provided DefId is a method in a trait impl for a trait which has track_caller /// applied to the method prototype. 
fn should_inherit_track_caller(tcx: TyCtxt<'_>, def_id: DefId) -> bool { diff --git a/compiler/rustc_codegen_ssa/src/errors.rs b/compiler/rustc_codegen_ssa/src/errors.rs index bac02bdf983..caac0f83f9d 100644 --- a/compiler/rustc_codegen_ssa/src/errors.rs +++ b/compiler/rustc_codegen_ssa/src/errors.rs @@ -209,35 +209,6 @@ pub(crate) struct OutOfRangeInteger { } #[derive(Diagnostic)] -#[diag(codegen_ssa_expected_one_argument, code = E0534)] -pub(crate) struct ExpectedOneArgument { - #[primary_span] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(codegen_ssa_expected_one_argument, code = E0722)] -pub(crate) struct ExpectedOneArgumentOptimize { - #[primary_span] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(codegen_ssa_invalid_argument, code = E0535)] -#[help] -pub(crate) struct InvalidArgument { - #[primary_span] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(codegen_ssa_invalid_argument, code = E0722)] -pub(crate) struct InvalidArgumentOptimize { - #[primary_span] - pub span: Span, -} - -#[derive(Diagnostic)] #[diag(codegen_ssa_copy_path_buf)] pub(crate) struct CopyPathBuf { pub source_file: PathBuf, @@ -1232,30 +1203,6 @@ pub(crate) struct ErrorCreatingImportLibrary<'a> { pub error: String, } -pub struct TargetFeatureDisableOrEnable<'a> { - pub features: &'a [&'a str], - pub span: Option<Span>, - pub missing_features: Option<MissingFeatures>, -} - -#[derive(Subdiagnostic)] -#[help(codegen_ssa_missing_features)] -pub struct MissingFeatures; - -impl<G: EmissionGuarantee> Diagnostic<'_, G> for TargetFeatureDisableOrEnable<'_> { - fn into_diag(self, dcx: DiagCtxtHandle<'_>, level: Level) -> Diag<'_, G> { - let mut diag = Diag::new(dcx, level, fluent::codegen_ssa_target_feature_disable_or_enable); - if let Some(span) = self.span { - diag.span(span); - }; - if let Some(missing_features) = self.missing_features { - diag.subdiagnostic(missing_features); - } - diag.arg("features", self.features.join(", ")); - diag - } -} - #[derive(Diagnostic)] #[diag(codegen_ssa_aix_strip_not_used)] pub(crate) struct AixStripNotUsed; @@ -1298,3 +1245,76 @@ pub(crate) struct XcrunSdkPathWarning { #[derive(LintDiagnostic)] #[diag(codegen_ssa_aarch64_softfloat_neon)] pub(crate) struct Aarch64SoftfloatNeon; + +#[derive(Diagnostic)] +#[diag(codegen_ssa_unknown_ctarget_feature_prefix)] +#[note] +pub(crate) struct UnknownCTargetFeaturePrefix<'a> { + pub feature: &'a str, +} + +#[derive(Subdiagnostic)] +pub(crate) enum PossibleFeature<'a> { + #[help(codegen_ssa_possible_feature)] + Some { rust_feature: &'a str }, + #[help(codegen_ssa_consider_filing_feature_request)] + None, +} + +#[derive(Diagnostic)] +#[diag(codegen_ssa_unknown_ctarget_feature)] +#[note] +pub(crate) struct UnknownCTargetFeature<'a> { + pub feature: &'a str, + #[subdiagnostic] + pub rust_feature: PossibleFeature<'a>, +} + +#[derive(Diagnostic)] +#[diag(codegen_ssa_unstable_ctarget_feature)] +#[note] +pub(crate) struct UnstableCTargetFeature<'a> { + pub feature: &'a str, +} + +#[derive(Diagnostic)] +#[diag(codegen_ssa_forbidden_ctarget_feature)] +#[note] +#[note(codegen_ssa_forbidden_ctarget_feature_issue)] +pub(crate) struct ForbiddenCTargetFeature<'a> { + pub feature: &'a str, + pub enabled: &'a str, + pub reason: &'a str, +} + +pub struct TargetFeatureDisableOrEnable<'a> { + pub features: &'a [&'a str], + pub span: Option<Span>, + pub missing_features: Option<MissingFeatures>, +} + +#[derive(Subdiagnostic)] +#[help(codegen_ssa_missing_features)] +pub struct MissingFeatures; + +impl<G: EmissionGuarantee> Diagnostic<'_, G> for 
TargetFeatureDisableOrEnable<'_> { + fn into_diag(self, dcx: DiagCtxtHandle<'_>, level: Level) -> Diag<'_, G> { + let mut diag = Diag::new(dcx, level, fluent::codegen_ssa_target_feature_disable_or_enable); + if let Some(span) = self.span { + diag.span(span); + }; + if let Some(missing_features) = self.missing_features { + diag.subdiagnostic(missing_features); + } + diag.arg("features", self.features.join(", ")); + diag + } +} + +#[derive(Diagnostic)] +#[diag(codegen_ssa_no_mangle_nameless)] +pub(crate) struct NoMangleNameless { + #[primary_span] + pub span: Span, + pub definition: String, +} diff --git a/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs b/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs index f731613d67e..025f5fb54f4 100644 --- a/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs +++ b/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs @@ -317,7 +317,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { let name = if bx.sess().fewer_names() { None } else { - Some(match whole_local_var.or(fallback_var.clone()) { + Some(match whole_local_var.or_else(|| fallback_var.clone()) { Some(var) if var.name != sym::empty => var.name.to_string(), _ => format!("{local:?}"), }) diff --git a/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs b/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs index b805dc094e9..9da4b8cc8fd 100644 --- a/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs +++ b/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs @@ -131,12 +131,8 @@ fn prefix_and_suffix<'tcx>( let attrs = tcx.codegen_fn_attrs(instance.def_id()); let link_section = attrs.link_section.map(|symbol| symbol.as_str().to_string()); - // function alignment can be set globally with the `-Zmin-function-alignment=<n>` flag; - // the alignment from a `#[repr(align(<n>))]` is used if it specifies a higher alignment. - // if no alignment is specified, an alignment of 4 bytes is used. - let min_function_alignment = tcx.sess.opts.unstable_opts.min_function_alignment; - let align_bytes = - Ord::max(min_function_alignment, attrs.alignment).map(|a| a.bytes()).unwrap_or(4); + // If no alignment is specified, an alignment of 4 bytes is used. + let align_bytes = attrs.alignment.map(|a| a.bytes()).unwrap_or(4); // In particular, `.arm` can also be written `.code 32` and `.thumb` as `.code 16`. 
let (arch_prefix, arch_suffix) = if is_arm { @@ -205,7 +201,7 @@ fn prefix_and_suffix<'tcx>( let mut end = String::new(); match asm_binary_format { BinaryFormat::Elf => { - let section = link_section.unwrap_or(format!(".text.{asm_name}")); + let section = link_section.unwrap_or_else(|| format!(".text.{asm_name}")); let progbits = match is_arm { true => "%progbits", @@ -239,7 +235,7 @@ fn prefix_and_suffix<'tcx>( } } BinaryFormat::MachO => { - let section = link_section.unwrap_or("__TEXT,__text".to_string()); + let section = link_section.unwrap_or_else(|| "__TEXT,__text".to_string()); writeln!(begin, ".pushsection {},regular,pure_instructions", section).unwrap(); writeln!(begin, ".balign {align_bytes}").unwrap(); write_linkage(&mut begin).unwrap(); @@ -256,7 +252,7 @@ fn prefix_and_suffix<'tcx>( } } BinaryFormat::Coff => { - let section = link_section.unwrap_or(format!(".text.{asm_name}")); + let section = link_section.unwrap_or_else(|| format!(".text.{asm_name}")); writeln!(begin, ".pushsection {},\"xr\"", section).unwrap(); writeln!(begin, ".balign {align_bytes}").unwrap(); write_linkage(&mut begin).unwrap(); @@ -273,7 +269,7 @@ fn prefix_and_suffix<'tcx>( } } BinaryFormat::Wasm => { - let section = link_section.unwrap_or(format!(".text.{asm_name}")); + let section = link_section.unwrap_or_else(|| format!(".text.{asm_name}")); writeln!(begin, ".section {section},\"\",@").unwrap(); // wasm functions cannot be aligned, so skip diff --git a/compiler/rustc_codegen_ssa/src/mir/operand.rs b/compiler/rustc_codegen_ssa/src/mir/operand.rs index e9389ddf93b..99957c67708 100644 --- a/compiler/rustc_codegen_ssa/src/mir/operand.rs +++ b/compiler/rustc_codegen_ssa/src/mir/operand.rs @@ -1,9 +1,9 @@ use std::fmt; -use arrayvec::ArrayVec; -use either::Either; use rustc_abi as abi; -use rustc_abi::{Align, BackendRepr, FIRST_VARIANT, Primitive, Size, TagEncoding, Variants}; +use rustc_abi::{ + Align, BackendRepr, FIRST_VARIANT, FieldIdx, Primitive, Size, TagEncoding, VariantIdx, Variants, +}; use rustc_middle::mir::interpret::{Pointer, Scalar, alloc_range}; use rustc_middle::mir::{self, ConstValue}; use rustc_middle::ty::Ty; @@ -13,6 +13,7 @@ use rustc_session::config::OptLevel; use tracing::{debug, instrument}; use super::place::{PlaceRef, PlaceValue}; +use super::rvalue::transmute_immediate; use super::{FunctionCx, LocalRef}; use crate::common::IntPredicate; use crate::traits::*; @@ -69,31 +70,6 @@ pub enum OperandValue<V> { } impl<V: CodegenObject> OperandValue<V> { - /// If this is ZeroSized/Immediate/Pair, return an array of the 0/1/2 values. - /// If this is Ref, return the place. - #[inline] - pub(crate) fn immediates_or_place(self) -> Either<ArrayVec<V, 2>, PlaceValue<V>> { - match self { - OperandValue::ZeroSized => Either::Left(ArrayVec::new()), - OperandValue::Immediate(a) => Either::Left(ArrayVec::from_iter([a])), - OperandValue::Pair(a, b) => Either::Left([a, b].into()), - OperandValue::Ref(p) => Either::Right(p), - } - } - - /// Given an array of 0/1/2 immediate values, return ZeroSized/Immediate/Pair. - #[inline] - pub(crate) fn from_immediates(immediates: ArrayVec<V, 2>) -> Self { - let mut it = immediates.into_iter(); - let Some(a) = it.next() else { - return OperandValue::ZeroSized; - }; - let Some(b) = it.next() else { - return OperandValue::Immediate(a); - }; - OperandValue::Pair(a, b) - } - /// Treat this value as a pointer and return the data pointer and /// optional metadata as backend values. 
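Several hunks above replace `unwrap_or(format!(...))` with `unwrap_or_else(|| format!(...))`. The behaviour is identical; the difference is that the eager form allocates the fallback `String` even when the `Option` is `Some`. A minimal sketch of the idiom (the section and symbol names are placeholders):

```rust
fn main() {
    let link_section: Option<String> = Some(".text.custom".to_string());
    let asm_name = "my_fn";

    // Eager: the fallback string is built and then immediately discarded.
    let eager = link_section.clone().unwrap_or(format!(".text.{asm_name}"));
    // Lazy: the closure only runs (and allocates) when the Option is None.
    let lazy = link_section.unwrap_or_else(|| format!(".text.{asm_name}"));
    assert_eq!(eager, lazy);

    assert_eq!(None::<String>.unwrap_or_else(|| format!(".text.{asm_name}")), ".text.my_fn");
}
```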
/// @@ -595,6 +571,105 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> { } } } + + /// Creates an incomplete operand containing the [`abi::Scalar`]s expected based + /// on the `layout` passed. This is for use with [`OperandRef::insert_field`] + /// later to set the necessary immediate(s). + /// + /// Returns `None` for `layout`s which cannot be built this way. + pub(crate) fn builder( + layout: TyAndLayout<'tcx>, + ) -> Option<OperandRef<'tcx, Result<V, abi::Scalar>>> { + let val = match layout.backend_repr { + BackendRepr::Memory { .. } if layout.is_zst() => OperandValue::ZeroSized, + BackendRepr::Scalar(s) => OperandValue::Immediate(Err(s)), + BackendRepr::ScalarPair(a, b) => OperandValue::Pair(Err(a), Err(b)), + BackendRepr::Memory { .. } | BackendRepr::SimdVector { .. } => return None, + }; + Some(OperandRef { val, layout }) + } +} + +impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, Result<V, abi::Scalar>> { + pub(crate) fn insert_field<Bx: BuilderMethods<'a, 'tcx, Value = V>>( + &mut self, + bx: &mut Bx, + v: VariantIdx, + f: FieldIdx, + operand: OperandRef<'tcx, V>, + ) { + let (expect_zst, is_zero_offset) = if let abi::FieldsShape::Primitive = self.layout.fields { + // The other branch looking at field layouts ICEs for primitives, + // so we need to handle them separately. + // Multiple fields is possible for cases such as aggregating + // a thin pointer, where the second field is the unit. + assert!(!self.layout.is_zst()); + assert_eq!(v, FIRST_VARIANT); + let first_field = f == FieldIdx::ZERO; + (!first_field, first_field) + } else { + let variant_layout = self.layout.for_variant(bx.cx(), v); + let field_layout = variant_layout.field(bx.cx(), f.as_usize()); + let field_offset = variant_layout.fields.offset(f.as_usize()); + (field_layout.is_zst(), field_offset == Size::ZERO) + }; + + let mut update = |tgt: &mut Result<V, abi::Scalar>, src, from_scalar| { + let from_bty = bx.cx().type_from_scalar(from_scalar); + let to_scalar = tgt.unwrap_err(); + let to_bty = bx.cx().type_from_scalar(to_scalar); + let imm = transmute_immediate(bx, src, from_scalar, from_bty, to_scalar, to_bty); + *tgt = Ok(imm); + }; + + match (operand.val, operand.layout.backend_repr) { + (OperandValue::ZeroSized, _) if expect_zst => {} + (OperandValue::Immediate(v), BackendRepr::Scalar(from_scalar)) => match &mut self.val { + OperandValue::Immediate(val @ Err(_)) if is_zero_offset => { + update(val, v, from_scalar); + } + OperandValue::Pair(fst @ Err(_), _) if is_zero_offset => { + update(fst, v, from_scalar); + } + OperandValue::Pair(_, snd @ Err(_)) if !is_zero_offset => { + update(snd, v, from_scalar); + } + _ => bug!("Tried to insert {operand:?} into {v:?}.{f:?} of {self:?}"), + }, + (OperandValue::Pair(a, b), BackendRepr::ScalarPair(from_sa, from_sb)) => { + match &mut self.val { + OperandValue::Pair(fst @ Err(_), snd @ Err(_)) => { + update(fst, a, from_sa); + update(snd, b, from_sb); + } + _ => bug!("Tried to insert {operand:?} into {v:?}.{f:?} of {self:?}"), + } + } + _ => bug!("Unsupported operand {operand:?} inserting into {v:?}.{f:?} of {self:?}"), + } + } + + /// After having set all necessary fields, this converts the + /// `OperandValue<Result<V, _>>` (as obtained from [`OperandRef::builder`]) + /// to the normal `OperandValue<V>`. + /// + /// ICEs if any required fields were not set. 
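The builder introduced above keeps each immediate slot as `Result<V, Scalar>`, where `Err` holds the expected scalar until the slot is filled in, and `build` panics (the compiler ICEs) if anything is still missing. A self-contained sketch of that pattern with plain stand-in types; this is not the compiler's API, only the shape of the idea:

```rust
/// Stand-ins: `V` is a backend value, `Scalar` describes a slot's expected layout.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Scalar { size: u64 }
#[derive(Clone, Copy, Debug, PartialEq)]
struct V(u64);

/// A pair-shaped operand under construction: each slot is either a filled-in
/// value (`Ok`) or the scalar description of what is still missing (`Err`).
struct PairBuilder {
    fst: Result<V, Scalar>,
    snd: Result<V, Scalar>,
}

impl PairBuilder {
    fn new(a: Scalar, b: Scalar) -> Self {
        PairBuilder { fst: Err(a), snd: Err(b) }
    }
    fn insert(&mut self, zero_offset: bool, v: V) {
        let slot = if zero_offset { &mut self.fst } else { &mut self.snd };
        assert!(slot.is_err(), "slot filled twice");
        *slot = Ok(v);
    }
    /// Panics if a required slot was never set.
    fn build(self) -> (V, V) {
        (self.fst.expect("first slot missing"), self.snd.expect("second slot missing"))
    }
}

fn main() {
    let mut b = PairBuilder::new(Scalar { size: 8 }, Scalar { size: 8 });
    b.insert(true, V(1));
    b.insert(false, V(2));
    assert_eq!(b.build(), (V(1), V(2)));
}
```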
+ pub fn build(&self) -> OperandRef<'tcx, V> { + let OperandRef { val, layout } = *self; + + let unwrap = |r: Result<V, abi::Scalar>| match r { + Ok(v) => v, + Err(_) => bug!("OperandRef::build called while fields are missing {self:?}"), + }; + + let val = match val { + OperandValue::ZeroSized => OperandValue::ZeroSized, + OperandValue::Immediate(v) => OperandValue::Immediate(unwrap(v)), + OperandValue::Pair(a, b) => OperandValue::Pair(unwrap(a), unwrap(b)), + OperandValue::Ref(_) => bug!(), + }; + OperandRef { val, layout } + } } impl<'a, 'tcx, V: CodegenObject> OperandValue<V> { diff --git a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs index b62ac89661f..e1d8b7546cf 100644 --- a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs +++ b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs @@ -1,7 +1,6 @@ use std::assert_matches::assert_matches; -use arrayvec::ArrayVec; -use rustc_abi::{self as abi, FIRST_VARIANT, FieldIdx}; +use rustc_abi::{self as abi, FIRST_VARIANT}; use rustc_middle::ty::adjustment::PointerCoercion; use rustc_middle::ty::layout::{HasTyCtxt, HasTypingEnv, LayoutOf, TyAndLayout}; use rustc_middle::ty::{self, Instance, Ty, TyCtxt}; @@ -708,38 +707,15 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { // `rvalue_creates_operand` has arranged that we only get here if // we can build the aggregate immediate from the field immediates. - let mut inputs = ArrayVec::<Bx::Value, 2>::new(); - let mut input_scalars = ArrayVec::<abi::Scalar, 2>::new(); - for field_idx in layout.fields.index_by_increasing_offset() { - let field_idx = FieldIdx::from_usize(field_idx); - let op = self.codegen_operand(bx, &fields[field_idx]); - let values = op.val.immediates_or_place().left_or_else(|p| { - bug!("Field {field_idx:?} is {p:?} making {layout:?}"); - }); - let scalars = self.value_kind(op.layout).scalars().unwrap(); - assert_eq!(values.len(), scalars.len()); - inputs.extend(values); - input_scalars.extend(scalars); + let Some(mut builder) = OperandRef::builder(layout) else { + bug!("Cannot use type in operand builder: {layout:?}") + }; + for (field_idx, field) in fields.iter_enumerated() { + let op = self.codegen_operand(bx, field); + builder.insert_field(bx, FIRST_VARIANT, field_idx, op); } - let output_scalars = self.value_kind(layout).scalars().unwrap(); - itertools::izip!(&mut inputs, input_scalars, output_scalars).for_each( - |(v, in_s, out_s)| { - if in_s != out_s { - // We have to be really careful about bool here, because - // `(bool,)` stays i1 but `Cell<bool>` becomes i8. - *v = bx.from_immediate(*v); - *v = bx.to_immediate_scalar(*v, out_s); - } - }, - ); - - let val = OperandValue::from_immediates(inputs); - assert!( - val.is_expected_variant_for_type(self.cx, layout), - "Made wrong variant {val:?} for type {layout:?}", - ); - OperandRef { val, layout } + builder.build() } mir::Rvalue::ShallowInitBox(ref operand, content_ty) => { let operand = self.codegen_operand(bx, operand); @@ -1082,10 +1058,10 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { mir::AggregateKind::Coroutine(..) | mir::AggregateKind::CoroutineClosure(..) 
=> false, }; allowed_kind && { - let ty = rvalue.ty(self.mir, self.cx.tcx()); - let ty = self.monomorphize(ty); + let ty = rvalue.ty(self.mir, self.cx.tcx()); + let ty = self.monomorphize(ty); let layout = self.cx.spanned_layout_of(ty, span); - !self.cx.is_backend_ref(layout) + OperandRef::<Bx::Value>::builder(layout).is_some() } } } @@ -1129,23 +1105,12 @@ enum OperandValueKind { ZeroSized, } -impl OperandValueKind { - fn scalars(self) -> Option<ArrayVec<abi::Scalar, 2>> { - Some(match self { - OperandValueKind::ZeroSized => ArrayVec::new(), - OperandValueKind::Immediate(a) => ArrayVec::from_iter([a]), - OperandValueKind::Pair(a, b) => [a, b].into(), - OperandValueKind::Ref => return None, - }) - } -} - /// Transmutes one of the immediates from an [`OperandValue::Immediate`] /// or an [`OperandValue::Pair`] to an immediate of the target type. /// /// `to_backend_ty` must be the *non*-immediate backend type (so it will be /// `i8`, not `i1`, for `bool`-like types.) -fn transmute_immediate<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( +pub(super) fn transmute_immediate<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( bx: &mut Bx, mut imm: Bx::Value, from_scalar: abi::Scalar, diff --git a/compiler/rustc_codegen_ssa/src/target_features.rs b/compiler/rustc_codegen_ssa/src/target_features.rs index 640d197c219..67ac619091b 100644 --- a/compiler/rustc_codegen_ssa/src/target_features.rs +++ b/compiler/rustc_codegen_ssa/src/target_features.rs @@ -1,5 +1,5 @@ use rustc_attr_data_structures::InstructionSetAttr; -use rustc_data_structures::fx::FxIndexSet; +use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet}; use rustc_data_structures::unord::{UnordMap, UnordSet}; use rustc_errors::Applicability; use rustc_hir as hir; @@ -8,11 +8,12 @@ use rustc_hir::def_id::{DefId, LOCAL_CRATE, LocalDefId}; use rustc_middle::middle::codegen_fn_attrs::TargetFeature; use rustc_middle::query::Providers; use rustc_middle::ty::TyCtxt; -use rustc_session::features::StabilityExt; +use rustc_session::Session; use rustc_session::lint::builtin::AARCH64_SOFTFLOAT_NEON; use rustc_session::parse::feature_err; use rustc_span::{Span, Symbol, sym}; -use rustc_target::target_features::{self, Stability}; +use rustc_target::target_features::{self, RUSTC_SPECIFIC_FEATURES, Stability}; +use smallvec::SmallVec; use crate::errors; @@ -67,7 +68,7 @@ pub(crate) fn from_target_feature_attr( // Only allow target features whose feature gates have been enabled // and which are permitted to be toggled. - if let Err(reason) = stability.is_toggle_permitted(tcx.sess) { + if let Err(reason) = stability.toggle_allowed() { tcx.dcx().emit_err(errors::ForbiddenTargetFeatureAttr { span: item.span(), feature, @@ -88,7 +89,7 @@ pub(crate) fn from_target_feature_attr( let feature_sym = Symbol::intern(feature); for &name in tcx.implied_target_features(feature_sym) { // But ensure the ABI does not forbid enabling this. - // Here we do assume that LLVM doesn't add even more implied features + // Here we do assume that the backend doesn't add even more implied features // we don't know about, at least no features that would have ABI effects! // We skip this logic in rustdoc, where we want to allow all target features of // all targets, so we can't check their ABI compatibility and anyway we are not @@ -156,6 +157,276 @@ pub(crate) fn check_target_feature_trait_unsafe(tcx: TyCtxt<'_>, id: LocalDefId, } } +/// Parse the value of `-Ctarget-feature`, also expanding implied features, +/// and call the closure for each (expanded) Rust feature. 
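Regarding the `-Ctarget-feature` parsing described in the doc comment above, a minimal standalone sketch of the splitting convention (comma-separated items, `+`/`-` prefixes, anything else handed to the error callback); implied-feature expansion and the rustc-specific feature filter are deliberately left out:

```rust
/// Illustrative splitter for a `-Ctarget-feature`-style string: `+feat` enables,
/// `-feat` disables, empty items are ignored, anything else is reported.
fn parse_flag<'a>(
    flag: &'a str,
    mut on_err: impl FnMut(&'a str),
    mut on_feature: impl FnMut(&'a str, bool),
) {
    for item in flag.split(',') {
        if let Some(f) = item.strip_prefix('+') {
            on_feature(f, true);
        } else if let Some(f) = item.strip_prefix('-') {
            on_feature(f, false);
        } else if !item.is_empty() {
            on_err(item); // e.g. "avx2" written without a +/- prefix
        }
    }
}

fn main() {
    let mut seen = Vec::new();
    let mut errs = Vec::new();
    parse_flag("+avx2,-sse4.2,,bogus", |e| errs.push(e), |f, enable| seen.push((f, enable)));
    assert_eq!(seen, vec![("avx2", true), ("sse4.2", false)]);
    assert_eq!(errs, vec!["bogus"]);
}
```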
If the list contains +/// a syntactically invalid item (not starting with `+`/`-`), the error callback is invoked. +fn parse_rust_feature_flag<'a>( + sess: &'a Session, + err_callback: impl Fn(&'a str), + mut callback: impl FnMut( + /* base_feature */ &'a str, + /* with_implied */ FxHashSet<&'a str>, + /* enable */ bool, + ), +) { + // A cache for the backwards implication map. + let mut inverse_implied_features: Option<FxHashMap<&str, FxHashSet<&str>>> = None; + + for feature in sess.opts.cg.target_feature.split(',') { + if let Some(base_feature) = feature.strip_prefix('+') { + // Skip features that are not target features, but rustc features. + if RUSTC_SPECIFIC_FEATURES.contains(&base_feature) { + return; + } + + callback(base_feature, sess.target.implied_target_features(base_feature), true) + } else if let Some(base_feature) = feature.strip_prefix('-') { + // Skip features that are not target features, but rustc features. + if RUSTC_SPECIFIC_FEATURES.contains(&base_feature) { + return; + } + + // If `f1` implies `f2`, then `!f2` implies `!f1` -- this is standard logical + // contraposition. So we have to find all the reverse implications of `base_feature` and + // disable them, too. + + let inverse_implied_features = inverse_implied_features.get_or_insert_with(|| { + let mut set: FxHashMap<&str, FxHashSet<&str>> = FxHashMap::default(); + for (f, _, is) in sess.target.rust_target_features() { + for i in is.iter() { + set.entry(i).or_default().insert(f); + } + } + set + }); + + // Inverse implied target features have their own inverse implied target features, so we + // traverse the map until there are no more features to add. + let mut features = FxHashSet::default(); + let mut new_features = vec![base_feature]; + while let Some(new_feature) = new_features.pop() { + if features.insert(new_feature) { + if let Some(implied_features) = inverse_implied_features.get(&new_feature) { + new_features.extend(implied_features) + } + } + } + + callback(base_feature, features, false) + } else if !feature.is_empty() { + err_callback(feature) + } + } +} + +/// Utility function for a codegen backend to compute `cfg(target_feature)`, or more specifically, +/// to populate `sess.unstable_target_features` and `sess.target_features` (these are the first and +/// 2nd component of the return value, respectively). +/// +/// `target_base_has_feature` should check whether the given feature (a Rust feature name!) is +/// enabled in the "base" target machine, i.e., without applying `-Ctarget-feature`. +/// +/// We do not have to worry about RUSTC_SPECIFIC_FEATURES here, those are handled elsewhere. +pub fn cfg_target_feature( + sess: &Session, + mut target_base_has_feature: impl FnMut(&str) -> bool, +) -> (Vec<Symbol>, Vec<Symbol>) { + // Compute which of the known target features are enabled in the 'base' target machine. We only + // consider "supported" features; "forbidden" features are not reflected in `cfg` as of now. + let mut features: UnordSet<Symbol> = sess + .target + .rust_target_features() + .iter() + .filter(|(feature, _, _)| target_base_has_feature(feature)) + .map(|(feature, _, _)| Symbol::intern(feature)) + .collect(); + + // Add enabled and remove disabled features. + parse_rust_feature_flag( + sess, + /* err_callback */ + |_| { + // Errors are already emitted in `flag_to_backend_features`; avoid duplicates. + }, + |_base_feature, new_features, enabled| { + // Iteration order is irrelevant since this only influences an `UnordSet`. 
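The disable path above is a transitive closure over reversed implication edges: if `f1` implies `f2`, then disabling `f2` must also disable `f1`, and those newly disabled features may in turn have their own reverse implications. A small standalone worklist sketch with a made-up implication map:

```rust
use std::collections::{HashMap, HashSet};

fn main() {
    // Made-up forward implications: enabling the key also enables the values.
    let implies: HashMap<&str, Vec<&str>> = HashMap::from([
        ("avx2", vec!["avx"]),
        ("avx", vec!["sse4.2"]),
        ("sse4.2", vec!["sse4.1"]),
    ]);

    // Invert the edges: if `f` implies `i`, then disabling `i` must disable `f`.
    let mut inverse: HashMap<&str, HashSet<&str>> = HashMap::new();
    for (f, is) in &implies {
        for i in is {
            inverse.entry(*i).or_default().insert(*f);
        }
    }

    // Worklist: collect everything that transitively implies the disabled feature.
    let disabled = "sse4.2";
    let mut to_disable: HashSet<&str> = HashSet::new();
    let mut work = vec![disabled];
    while let Some(f) = work.pop() {
        if to_disable.insert(f) {
            if let Some(rev) = inverse.get(f) {
                work.extend(rev);
            }
        }
    }

    assert!(to_disable.contains("avx") && to_disable.contains("avx2"));
    assert!(!to_disable.contains("sse4.1")); // only forward-implied, stays enabled
}
```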
+ #[allow(rustc::potential_query_instability)] + if enabled { + features.extend(new_features.into_iter().map(|f| Symbol::intern(f))); + } else { + // Remove `new_features` from `features`. + for new in new_features { + features.remove(&Symbol::intern(new)); + } + } + }, + ); + + // Filter enabled features based on feature gates. + let f = |allow_unstable| { + sess.target + .rust_target_features() + .iter() + .filter_map(|(feature, gate, _)| { + // The `allow_unstable` set is used by rustc internally to determine which target + // features are truly available, so we want to return even perma-unstable + // "forbidden" features. + if allow_unstable + || (gate.in_cfg() + && (sess.is_nightly_build() || gate.requires_nightly().is_none())) + { + Some(Symbol::intern(feature)) + } else { + None + } + }) + .filter(|feature| features.contains(&feature)) + .collect() + }; + + (f(true), f(false)) +} + +/// Given a map from target_features to whether they are enabled or disabled, ensure only valid +/// combinations are allowed. +pub fn check_tied_features( + sess: &Session, + features: &FxHashMap<&str, bool>, +) -> Option<&'static [&'static str]> { + if !features.is_empty() { + for tied in sess.target.tied_target_features() { + // Tied features must be set to the same value, or not set at all + let mut tied_iter = tied.iter(); + let enabled = features.get(tied_iter.next().unwrap()); + if tied_iter.any(|f| enabled != features.get(f)) { + return Some(tied); + } + } + } + None +} + +/// Translates the `-Ctarget-feature` flag into a backend target feature list. +/// +/// `to_backend_features` converts a Rust feature name into a list of backend feature names; this is +/// used for diagnostic purposes only. +/// +/// `extend_backend_features` extends the set of backend features (assumed to be in mutable state +/// accessible by that closure) to enable/disable the given Rust feature name. +pub fn flag_to_backend_features<'a, const N: usize>( + sess: &'a Session, + diagnostics: bool, + to_backend_features: impl Fn(&'a str) -> SmallVec<[&'a str; N]>, + mut extend_backend_features: impl FnMut(&'a str, /* enable */ bool), +) { + let known_features = sess.target.rust_target_features(); + + // Compute implied features + let mut rust_features = vec![]; + parse_rust_feature_flag( + sess, + /* err_callback */ + |feature| { + if diagnostics { + sess.dcx().emit_warn(errors::UnknownCTargetFeaturePrefix { feature }); + } + }, + |base_feature, new_features, enable| { + rust_features.extend( + UnordSet::from(new_features).to_sorted_stable_ord().iter().map(|&&s| (enable, s)), + ); + // Check feature validity. + if diagnostics { + let feature_state = known_features.iter().find(|&&(v, _, _)| v == base_feature); + match feature_state { + None => { + // This is definitely not a valid Rust feature name. Maybe it is a backend + // feature name? If so, give a better error message. 
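`check_tied_features` (defined just above `flag_to_backend_features`) enforces that every member of a tied group is toggled identically or left alone. A hedged usage-style sketch of that rule follows; the group below is modeled on the AArch64 pointer-authentication pair but should be read as an example, not an authoritative list:

```rust
use std::collections::HashMap;

/// Tied features must all be set to the same value, or all be left unset.
/// Returns the offending group, if any.
fn check_tied<'a>(
    tied_groups: &[&'a [&'a str]],
    toggles: &HashMap<&str, bool>,
) -> Option<&'a [&'a str]> {
    for &group in tied_groups {
        let mut iter = group.iter();
        let first = toggles.get(iter.next().unwrap());
        if iter.any(|f| toggles.get(f) != first) {
            return Some(group);
        }
    }
    None
}

fn main() {
    // A hypothetical tied group: both members must be toggled together.
    let tied: &[&[&str]] = &[&["paca", "pacg"][..]];

    let consistent = HashMap::from([("paca", true), ("pacg", true)]);
    let inconsistent = HashMap::from([("paca", true)]); // second member left unset

    assert_eq!(check_tied(tied, &consistent), None);
    assert_eq!(check_tied(tied, &inconsistent), Some(&["paca", "pacg"][..]));
}
```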
+ let rust_feature = + known_features.iter().find_map(|&(rust_feature, _, _)| { + let backend_features = to_backend_features(rust_feature); + if backend_features.contains(&base_feature) + && !backend_features.contains(&rust_feature) + { + Some(rust_feature) + } else { + None + } + }); + let unknown_feature = if let Some(rust_feature) = rust_feature { + errors::UnknownCTargetFeature { + feature: base_feature, + rust_feature: errors::PossibleFeature::Some { rust_feature }, + } + } else { + errors::UnknownCTargetFeature { + feature: base_feature, + rust_feature: errors::PossibleFeature::None, + } + }; + sess.dcx().emit_warn(unknown_feature); + } + Some((_, stability, _)) => { + if let Err(reason) = stability.toggle_allowed() { + sess.dcx().emit_warn(errors::ForbiddenCTargetFeature { + feature: base_feature, + enabled: if enable { "enabled" } else { "disabled" }, + reason, + }); + } else if stability.requires_nightly().is_some() { + // An unstable feature. Warn about using it. It makes little sense + // to hard-error here since we just warn about fully unknown + // features above. + sess.dcx().emit_warn(errors::UnstableCTargetFeature { + feature: base_feature, + }); + } + } + } + } + }, + ); + + if diagnostics { + // FIXME(nagisa): figure out how to not allocate a full hashmap here. + if let Some(f) = check_tied_features( + sess, + &FxHashMap::from_iter(rust_features.iter().map(|&(enable, feature)| (feature, enable))), + ) { + sess.dcx().emit_err(errors::TargetFeatureDisableOrEnable { + features: f, + span: None, + missing_features: None, + }); + } + } + + // Add this to the backend features. + for (enable, feature) in rust_features { + extend_backend_features(feature, enable); + } +} + +/// Computes the backend target features to be added to account for retpoline flags. +/// Used by both LLVM and GCC since their target features are, conveniently, the same. +pub fn retpoline_features_by_flags(sess: &Session, features: &mut Vec<String>) { + // -Zretpoline without -Zretpoline-external-thunk enables + // retpoline-indirect-branches and retpoline-indirect-calls target features + let unstable_opts = &sess.opts.unstable_opts; + if unstable_opts.retpoline && !unstable_opts.retpoline_external_thunk { + features.push("+retpoline-indirect-branches".into()); + features.push("+retpoline-indirect-calls".into()); + } + // -Zretpoline-external-thunk (maybe, with -Zretpoline too) enables + // retpoline-external-thunk, retpoline-indirect-branches and + // retpoline-indirect-calls target features + if unstable_opts.retpoline_external_thunk { + features.push("+retpoline-external-thunk".into()); + features.push("+retpoline-indirect-branches".into()); + features.push("+retpoline-indirect-calls".into()); + } +} + pub(crate) fn provide(providers: &mut Providers) { *providers = Providers { rust_target_features: |tcx, cnum| { @@ -182,7 +453,8 @@ pub(crate) fn provide(providers: &mut Providers) { Stability::Unstable { .. } | Stability::Forbidden { .. }, ) | (Stability::Forbidden { .. }, Stability::Forbidden { .. }) => { - // The stability in the entry is at least as good as the new one, just keep it. + // The stability in the entry is at least as good as the new + // one, just keep it. } _ => { // Overwrite stabilite. 
diff --git a/compiler/rustc_codegen_ssa/src/traits/type_.rs b/compiler/rustc_codegen_ssa/src/traits/type_.rs index 70331b72353..dcd9e25b2c9 100644 --- a/compiler/rustc_codegen_ssa/src/traits/type_.rs +++ b/compiler/rustc_codegen_ssa/src/traits/type_.rs @@ -1,4 +1,4 @@ -use rustc_abi::{AddressSpace, Float, Integer, Reg}; +use rustc_abi::{AddressSpace, Float, Integer, Primitive, Reg, Scalar}; use rustc_middle::bug; use rustc_middle::ty::Ty; use rustc_middle::ty::layout::{HasTyCtxt, HasTypingEnv, TyAndLayout}; @@ -84,6 +84,24 @@ pub trait DerivedTypeCodegenMethods<'tcx>: fn type_is_freeze(&self, ty: Ty<'tcx>) -> bool { ty.is_freeze(self.tcx(), self.typing_env()) } + + fn type_from_primitive(&self, p: Primitive) -> Self::Type { + use Primitive::*; + match p { + Int(i, _) => self.type_from_integer(i), + Float(f) => self.type_from_float(f), + Pointer(address_space) => self.type_ptr_ext(address_space), + } + } + + fn type_from_scalar(&self, s: Scalar) -> Self::Type { + // `MaybeUninit` being `repr(transparent)` somewhat implies that the type + // of a scalar has to be the type of its primitive (which is true in LLVM, + // where noundef is a parameter attribute or metadata) but if we ever get + // a backend where that's no longer true, every use of this will need to + // to carefully scrutinized and re-evaluated. + self.type_from_primitive(s.primitive()) + } } impl<'tcx, T> DerivedTypeCodegenMethods<'tcx> for T where diff --git a/compiler/rustc_const_eval/src/check_consts/check.rs b/compiler/rustc_const_eval/src/check_consts/check.rs index 4f252f3ccd4..576b174369d 100644 --- a/compiler/rustc_const_eval/src/check_consts/check.rs +++ b/compiler/rustc_const_eval/src/check_consts/check.rs @@ -463,12 +463,6 @@ impl<'mir, 'tcx> Checker<'mir, 'tcx> { ); } - fn crate_inject_span(&self) -> Option<Span> { - self.tcx.hir_crate_items(()).definitions().next().and_then(|id| { - self.tcx.crate_level_attribute_injection_span(self.tcx.local_def_id_to_hir_id(id)) - }) - } - /// Check the const stability of the given item (fn or trait). fn check_callee_stability(&mut self, def_id: DefId) { match self.tcx.lookup_const_stability(def_id) { @@ -543,7 +537,6 @@ impl<'mir, 'tcx> Checker<'mir, 'tcx> { feature, feature_enabled, safe_to_expose_on_stable: callee_safe_to_expose_on_stable, - suggestion_span: self.crate_inject_span(), is_function_call: self.tcx.def_kind(def_id) != DefKind::Trait, }); } @@ -919,7 +912,6 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> { name: intrinsic.name, feature, const_stable_indirect: is_const_stable, - suggestion: self.crate_inject_span(), }); } Some(attrs::ConstStability { diff --git a/compiler/rustc_const_eval/src/check_consts/ops.rs b/compiler/rustc_const_eval/src/check_consts/ops.rs index 9c30dbff99e..887275e7294 100644 --- a/compiler/rustc_const_eval/src/check_consts/ops.rs +++ b/compiler/rustc_const_eval/src/check_consts/ops.rs @@ -1,8 +1,8 @@ //! Concrete error types for all operations which may be invalid in a certain const context. use hir::{ConstContext, LangItem}; +use rustc_errors::Diag; use rustc_errors::codes::*; -use rustc_errors::{Applicability, Diag}; use rustc_hir as hir; use rustc_hir::def_id::DefId; use rustc_infer::infer::TyCtxtInferExt; @@ -384,7 +384,6 @@ pub(crate) struct CallUnstable { /// expose on stable. pub feature_enabled: bool, pub safe_to_expose_on_stable: bool, - pub suggestion_span: Option<Span>, /// true if `def_id` is the function we are calling, false if `def_id` is an unstable trait. 
pub is_function_call: bool, } @@ -412,20 +411,7 @@ impl<'tcx> NonConstOp<'tcx> for CallUnstable { def_path: ccx.tcx.def_path_str(self.def_id), }) }; - // FIXME: make this translatable - let msg = format!("add `#![feature({})]` to the crate attributes to enable", self.feature); - #[allow(rustc::untranslatable_diagnostic)] - if let Some(span) = self.suggestion_span { - err.span_suggestion_verbose( - span, - msg, - format!("#![feature({})]\n", self.feature), - Applicability::MachineApplicable, - ); - } else { - err.help(msg); - } - + ccx.tcx.disabled_nightly_features(&mut err, [(String::new(), self.feature)]); err } } @@ -452,7 +438,6 @@ pub(crate) struct IntrinsicUnstable { pub name: Symbol, pub feature: Symbol, pub const_stable_indirect: bool, - pub suggestion: Option<Span>, } impl<'tcx> NonConstOp<'tcx> for IntrinsicUnstable { @@ -472,8 +457,7 @@ impl<'tcx> NonConstOp<'tcx> for IntrinsicUnstable { span, name: self.name, feature: self.feature, - suggestion: self.suggestion, - help: self.suggestion.is_none(), + suggestion: ccx.tcx.crate_level_attribute_injection_span(), }) } } diff --git a/compiler/rustc_const_eval/src/errors.rs b/compiler/rustc_const_eval/src/errors.rs index 037cbf777e7..69c71aef9f3 100644 --- a/compiler/rustc_const_eval/src/errors.rs +++ b/compiler/rustc_const_eval/src/errors.rs @@ -136,9 +136,7 @@ pub(crate) struct UnstableIntrinsic { code = "#![feature({feature})]\n", applicability = "machine-applicable" )] - pub suggestion: Option<Span>, - #[help(const_eval_unstable_intrinsic_suggestion)] - pub help: bool, + pub suggestion: Span, } #[derive(Diagnostic)] diff --git a/compiler/rustc_const_eval/src/interpret/memory.rs b/compiler/rustc_const_eval/src/interpret/memory.rs index 99a4bc1b7d6..57bf867e389 100644 --- a/compiler/rustc_const_eval/src/interpret/memory.rs +++ b/compiler/rustc_const_eval/src/interpret/memory.rs @@ -877,12 +877,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { if let Some(fn_val) = self.get_fn_alloc(id) { let align = match fn_val { FnVal::Instance(instance) => { - // Function alignment can be set globally with the `-Zmin-function-alignment=<n>` flag; - // the alignment from a `#[repr(align(<n>))]` is used if it specifies a higher alignment. - let fn_align = self.tcx.codegen_fn_attrs(instance.def_id()).alignment; - let global_align = self.tcx.sess.opts.unstable_opts.min_function_alignment; - - Ord::max(global_align, fn_align).unwrap_or(Align::ONE) + self.tcx.codegen_fn_attrs(instance.def_id()).alignment.unwrap_or(Align::ONE) } // Machine-specific extra functions currently do not support alignment restrictions. FnVal::Other(_) => Align::ONE, @@ -1412,8 +1407,13 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { let src_alloc = self.get_alloc_raw(src_alloc_id)?; let src_range = alloc_range(src_offset, size); assert!(!self.memory.validation_in_progress.get(), "we can't be copying during validation"); - // For the overlapping case, it is crucial that we trigger the read hook + + // Trigger read hooks. + // For the overlapping case, it is crucial that we trigger the read hooks // before the write hook -- the aliasing model cares about the order. 
+ if let Ok((alloc_id, ..)) = self.ptr_try_get_alloc_id(src, size.bytes() as i64) { + M::before_alloc_read(self, alloc_id)?; + } M::before_memory_read( tcx, &self.machine, diff --git a/compiler/rustc_data_structures/Cargo.toml b/compiler/rustc_data_structures/Cargo.toml index f6a02011618..17204883fb0 100644 --- a/compiler/rustc_data_structures/Cargo.toml +++ b/compiler/rustc_data_structures/Cargo.toml @@ -14,7 +14,6 @@ indexmap = "2.4.0" jobserver_crate = { version = "0.1.28", package = "jobserver" } measureme = "12.0.1" rustc-hash = "2.0.0" -rustc-rayon-core = { version = "0.5.0" } rustc-stable-hash = { version = "0.1.0", features = ["nightly"] } rustc_arena = { path = "../rustc_arena" } rustc_graphviz = { path = "../rustc_graphviz" } @@ -22,6 +21,7 @@ rustc_hashes = { path = "../rustc_hashes" } rustc_index = { path = "../rustc_index", package = "rustc_index" } rustc_macros = { path = "../rustc_macros" } rustc_serialize = { path = "../rustc_serialize" } +rustc_thread_pool = { path = "../rustc_thread_pool" } smallvec = { version = "1.8.1", features = ["const_generics", "union", "may_dangle"] } stacker = "0.1.17" tempfile = "3.2" diff --git a/compiler/rustc_data_structures/src/sync.rs b/compiler/rustc_data_structures/src/sync.rs index b28c333d860..3881f3c2aa8 100644 --- a/compiler/rustc_data_structures/src/sync.rs +++ b/compiler/rustc_data_structures/src/sync.rs @@ -22,8 +22,6 @@ //! | | | `parking_lot::Mutex<T>` | //! | `RwLock<T>` | `RefCell<T>` | `parking_lot::RwLock<T>` | //! | `MTLock<T>` [^1] | `T` | `Lock<T>` | -//! | | | | -//! | `ParallelIterator` | `Iterator` | `rayon::iter::ParallelIterator` | //! //! [^1]: `MTLock` is similar to `Lock`, but the serial version avoids the cost //! of a `RefCell`. This is appropriate when interior mutability is not diff --git a/compiler/rustc_data_structures/src/sync/lock.rs b/compiler/rustc_data_structures/src/sync/lock.rs index 2ccf06ccd4f..a8161c51511 100644 --- a/compiler/rustc_data_structures/src/sync/lock.rs +++ b/compiler/rustc_data_structures/src/sync/lock.rs @@ -1,8 +1,6 @@ //! This module implements a lock which only uses synchronization if `might_be_dyn_thread_safe` is true. //! It implements `DynSend` and `DynSync` instead of the typical `Send` and `Sync` traits. -#![allow(dead_code)] - use std::fmt; #[derive(Clone, Copy, PartialEq)] diff --git a/compiler/rustc_data_structures/src/sync/parallel.rs b/compiler/rustc_data_structures/src/sync/parallel.rs index ab65c7f3a6b..b515c0bee8a 100644 --- a/compiler/rustc_data_structures/src/sync/parallel.rs +++ b/compiler/rustc_data_structures/src/sync/parallel.rs @@ -1,8 +1,6 @@ //! This module defines parallel operations that are implemented in //! one way for the serial compiler, and another way the parallel compiler. -#![allow(dead_code)] - use std::any::Any; use std::panic::{AssertUnwindSafe, catch_unwind, resume_unwind}; @@ -96,7 +94,7 @@ macro_rules! parallel { pub fn spawn(func: impl FnOnce() + DynSend + 'static) { if mode::is_dyn_thread_safe() { let func = FromDyn::from(func); - rayon_core::spawn(|| { + rustc_thread_pool::spawn(|| { (func.into_inner())(); }); } else { @@ -107,11 +105,11 @@ pub fn spawn(func: impl FnOnce() + DynSend + 'static) { // This function only works when `mode::is_dyn_thread_safe()`. 
pub fn scope<'scope, OP, R>(op: OP) -> R where - OP: FnOnce(&rayon_core::Scope<'scope>) -> R + DynSend, + OP: FnOnce(&rustc_thread_pool::Scope<'scope>) -> R + DynSend, R: DynSend, { let op = FromDyn::from(op); - rayon_core::scope(|s| FromDyn::from(op.into_inner()(s))).into_inner() + rustc_thread_pool::scope(|s| FromDyn::from(op.into_inner()(s))).into_inner() } #[inline] @@ -124,7 +122,7 @@ where let oper_a = FromDyn::from(oper_a); let oper_b = FromDyn::from(oper_b); let (a, b) = parallel_guard(|guard| { - rayon_core::join( + rustc_thread_pool::join( move || guard.run(move || FromDyn::from(oper_a.into_inner()())), move || guard.run(move || FromDyn::from(oper_b.into_inner()())), ) @@ -158,7 +156,7 @@ fn par_slice<I: DynSend>( let (left, right) = items.split_at_mut(items.len() / 2); let mut left = state.for_each.derive(left); let mut right = state.for_each.derive(right); - rayon_core::join(move || par_rec(*left, state), move || par_rec(*right, state)); + rustc_thread_pool::join(move || par_rec(*left, state), move || par_rec(*right, state)); } } @@ -241,7 +239,7 @@ pub fn par_map<I: DynSend, T: IntoIterator<Item = I>, R: DynSend, C: FromIterato pub fn broadcast<R: DynSend>(op: impl Fn(usize) -> R + DynSync) -> Vec<R> { if mode::is_dyn_thread_safe() { let op = FromDyn::from(op); - let results = rayon_core::broadcast(|context| op.derive(op(context.index()))); + let results = rustc_thread_pool::broadcast(|context| op.derive(op(context.index()))); results.into_iter().map(|r| r.into_inner()).collect() } else { vec![op(0)] diff --git a/compiler/rustc_driver_impl/src/lib.rs b/compiler/rustc_driver_impl/src/lib.rs index d53126d0414..daeca43169d 100644 --- a/compiler/rustc_driver_impl/src/lib.rs +++ b/compiler/rustc_driver_impl/src/lib.rs @@ -38,6 +38,7 @@ use rustc_data_structures::profiling::{ }; use rustc_errors::emitter::stderr_destination; use rustc_errors::registry::Registry; +use rustc_errors::translation::Translator; use rustc_errors::{ColorConfig, DiagCtxt, ErrCode, FatalError, PResult, markdown}; use rustc_feature::find_gated_cfg; // This avoids a false positive with `-Wunused_crate_dependencies`. @@ -109,6 +110,10 @@ use crate::session_diagnostics::{ rustc_fluent_macro::fluent_messages! 
{ "../messages.ftl" } +pub fn default_translator() -> Translator { + Translator::with_fallback_bundle(DEFAULT_LOCALE_RESOURCES.to_vec(), false) +} + pub static DEFAULT_LOCALE_RESOURCES: &[&str] = &[ // tidy-alphabetical-start crate::DEFAULT_LOCALE_RESOURCE, @@ -1413,11 +1418,10 @@ fn report_ice( extra_info: fn(&DiagCtxt), using_internal_features: &AtomicBool, ) { - let fallback_bundle = - rustc_errors::fallback_fluent_bundle(crate::DEFAULT_LOCALE_RESOURCES.to_vec(), false); + let translator = default_translator(); let emitter = Box::new(rustc_errors::emitter::HumanEmitter::new( stderr_destination(rustc_errors::ColorConfig::Auto), - fallback_bundle, + translator, )); let dcx = rustc_errors::DiagCtxt::new(emitter); let dcx = dcx.handle(); diff --git a/compiler/rustc_driver_impl/src/pretty.rs b/compiler/rustc_driver_impl/src/pretty.rs index ec77043cd12..688307a941f 100644 --- a/compiler/rustc_driver_impl/src/pretty.rs +++ b/compiler/rustc_driver_impl/src/pretty.rs @@ -292,7 +292,11 @@ pub fn print<'tcx>(sess: &Session, ppm: PpMode, ex: PrintExtra<'tcx>) { } HirTree => { debug!("pretty printing HIR tree"); - format!("{:#?}", ex.tcx().hir_crate(())) + ex.tcx() + .hir_crate_items(()) + .owners() + .map(|owner| format!("{:#?} => {:#?}\n", owner, ex.tcx().hir_owner_nodes(owner))) + .collect() } Mir => { let mut out = Vec::new(); diff --git a/compiler/rustc_error_codes/src/error_codes/E0534.md b/compiler/rustc_error_codes/src/error_codes/E0534.md index 1ca9411b8d4..023c38c730c 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0534.md +++ b/compiler/rustc_error_codes/src/error_codes/E0534.md @@ -1,8 +1,14 @@ +#### Note: this error code is no longer emitted by the compiler + +This is because it was too specific to the `inline` attribute. +Similar diagnostics occur for other attributes too. +The example here will now emit `E0805` + The `inline` attribute was malformed. Erroneous code example: -```compile_fail,E0534 +```compile_fail,E0805 #[inline()] // error: expected one argument pub fn something() {} diff --git a/compiler/rustc_error_codes/src/error_codes/E0535.md b/compiler/rustc_error_codes/src/error_codes/E0535.md index 0cf3118b02c..93e2ba53826 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0535.md +++ b/compiler/rustc_error_codes/src/error_codes/E0535.md @@ -1,8 +1,13 @@ -An unknown argument was given to the `inline` attribute. +#### Note: this error code is no longer emitted by the compiler + +This is because it was too specific to the `inline` attribute. +Similar diagnostics occur for other attributes too. +The example here will now emit `E0539` + Erroneous code example: -```compile_fail,E0535 +```compile_fail,E0539 #[inline(unknown)] // error: invalid argument pub fn something() {} diff --git a/compiler/rustc_error_codes/src/error_codes/E0539.md b/compiler/rustc_error_codes/src/error_codes/E0539.md index 6b2e23ba2d8..c76b60ac108 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0539.md +++ b/compiler/rustc_error_codes/src/error_codes/E0539.md @@ -24,8 +24,7 @@ struct Stable; const fn stable_fn() {} ``` -Meta items are the key-value pairs inside of an attribute. -To fix these issues you need to give required key-value pairs. +To fix the above example, you can write the following: ``` #![feature(staged_api)] @@ -49,3 +48,29 @@ struct Stable; #[rustc_const_stable(feature = "stable_fn", since = "1.39.0")] // ok! 
const fn stable_fn() {} ``` + +There are several possible causes of this. +An attribute may have expected you to give a list, but you gave a +`name = value` pair: + +```compile_fail,E0539 +// wrong, should be `#[repr(C)]` +#[repr = "C"] +struct Foo {} +``` + +Or a `name = value` pair, but you gave a list: + +```compile_fail,E0539 +// wrong, should be `note = "reason"` +#[deprecated(since = "1.0.0", note("reason"))] +struct Foo {} +``` + +Or it may have expected a specific word, but you gave an unexpected one: + +```compile_fail,E0539 +// should be `always` or `never` +#[inline(maybe_if_you_feel_like_it)] +fn foo() {} +``` diff --git a/compiler/rustc_error_codes/src/error_codes/E0565.md b/compiler/rustc_error_codes/src/error_codes/E0565.md index d5bba941c1d..34152eb7cfe 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0565.md +++ b/compiler/rustc_error_codes/src/error_codes/E0565.md @@ -9,10 +9,9 @@ struct Repr {} fn main() {} ``` -Literals in attributes are new and largely unsupported in built-in attributes. -Work to support literals where appropriate is ongoing. Try using an unquoted -name instead: - +Not all attributes support literals in their input, +and in some cases they expect an identifier instead. +That would be the solution in the case of `repr`: ``` #[repr(C)] // ok! struct Repr {} diff --git a/compiler/rustc_error_codes/src/error_codes/E0722.md b/compiler/rustc_error_codes/src/error_codes/E0722.md index 570717a92bd..1799458d46c 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0722.md +++ b/compiler/rustc_error_codes/src/error_codes/E0722.md @@ -1,8 +1,14 @@ +#### Note: this error code is no longer emitted by the compiler + +This is because it was too specific to the `optimize` attribute. +Similar diagnostics occur for other attributes too. +The example here will now emit `E0539` + The `optimize` attribute was malformed. Erroneous code example: -```compile_fail,E0722 +```compile_fail,E0539 #![feature(optimize_attribute)] #[optimize(something)] // error: invalid argument diff --git a/compiler/rustc_error_codes/src/error_codes/E0805.md b/compiler/rustc_error_codes/src/error_codes/E0805.md new file mode 100644 index 00000000000..b1ed3a11d48 --- /dev/null +++ b/compiler/rustc_error_codes/src/error_codes/E0805.md @@ -0,0 +1,26 @@ +An attribute was given an invalid number of arguments. + +Erroneous code example: + +```compile_fail,E0805 +#[inline()] // error! should either have a single argument, or no parentheses +fn foo() {} + +#[inline(always, never)] // error! should have only one argument, not two +fn bar() {} +``` + +To fix this, give the attribute the number of arguments it expects. +In the case of `inline`, this could be none at all: + +``` +#[inline] +fn foo() {} +``` + +or only one: + +``` +#[inline(always)] +fn foo() {} +``` diff --git a/compiler/rustc_error_codes/src/lib.rs b/compiler/rustc_error_codes/src/lib.rs index 2488d870899..22cc1e894da 100644 --- a/compiler/rustc_error_codes/src/lib.rs +++ b/compiler/rustc_error_codes/src/lib.rs @@ -547,6 +547,7 @@ E0801: 0801, E0802: 0802, E0803: 0803, E0804: 0804, +E0805: 0805, ); ) } @@ -685,6 +686,7 @@ E0804: 0804, // E0707, // multiple elided lifetimes used in arguments of `async fn` // E0709, // multiple different lifetimes used in arguments of `async fn` // E0721, // `await` keyword +// E0722, // replaced with a generic attribute input check // E0723, // unstable feature in `const` context // E0738, // Removed; errored on `#[track_caller] fn`s in `extern "Rust" { ... }`.
// E0744, // merged into E0728 diff --git a/compiler/rustc_error_messages/src/lib.rs b/compiler/rustc_error_messages/src/lib.rs index 1d3b5b20751..194fc2450ba 100644 --- a/compiler/rustc_error_messages/src/lib.rs +++ b/compiler/rustc_error_messages/src/lib.rs @@ -18,7 +18,7 @@ pub use fluent_bundle::{self, FluentArgs, FluentError, FluentValue}; use fluent_syntax::parser::ParserError; use icu_provider_adapters::fallback::{LocaleFallbackProvider, LocaleFallbacker}; use intl_memoizer::concurrent::IntlLangMemoizer; -use rustc_data_structures::sync::IntoDynSyncSend; +use rustc_data_structures::sync::{DynSend, IntoDynSyncSend}; use rustc_macros::{Decodable, Encodable}; use rustc_span::Span; use smallvec::SmallVec; @@ -204,16 +204,16 @@ fn register_functions(bundle: &mut FluentBundle) { /// Type alias for the result of `fallback_fluent_bundle` - a reference-counted pointer to a lazily /// evaluated fluent bundle. -pub type LazyFallbackBundle = Arc<LazyLock<FluentBundle, impl FnOnce() -> FluentBundle>>; +pub type LazyFallbackBundle = + Arc<LazyLock<FluentBundle, Box<dyn FnOnce() -> FluentBundle + DynSend>>>; /// Return the default `FluentBundle` with standard "en-US" diagnostic messages. #[instrument(level = "trace", skip(resources))] -#[define_opaque(LazyFallbackBundle)] pub fn fallback_fluent_bundle( resources: Vec<&'static str>, with_directionality_markers: bool, ) -> LazyFallbackBundle { - Arc::new(LazyLock::new(move || { + Arc::new(LazyLock::new(Box::new(move || { let mut fallback_bundle = new_bundle(vec![langid!("en-US")]); register_functions(&mut fallback_bundle); @@ -228,7 +228,7 @@ pub fn fallback_fluent_bundle( } fallback_bundle - })) + }))) } /// Identifier for the Fluent message/attribute corresponding to a diagnostic message. diff --git a/compiler/rustc_errors/src/annotate_snippet_emitter_writer.rs b/compiler/rustc_errors/src/annotate_snippet_emitter_writer.rs index f3aeb8d224b..2eb3c23259f 100644 --- a/compiler/rustc_errors/src/annotate_snippet_emitter_writer.rs +++ b/compiler/rustc_errors/src/annotate_snippet_emitter_writer.rs @@ -15,17 +15,15 @@ use rustc_span::source_map::SourceMap; use crate::emitter::FileWithAnnotatedLines; use crate::registry::Registry; use crate::snippet::Line; -use crate::translation::{Translate, to_fluent_args}; +use crate::translation::{Translator, to_fluent_args}; use crate::{ - CodeSuggestion, DiagInner, DiagMessage, Emitter, ErrCode, FluentBundle, LazyFallbackBundle, - Level, MultiSpan, Style, Subdiag, + CodeSuggestion, DiagInner, DiagMessage, Emitter, ErrCode, Level, MultiSpan, Style, Subdiag, }; /// Generates diagnostics using annotate-snippet pub struct AnnotateSnippetEmitter { source_map: Option<Arc<SourceMap>>, - fluent_bundle: Option<Arc<FluentBundle>>, - fallback_bundle: LazyFallbackBundle, + translator: Translator, /// If true, hides the longer explanation text short_message: bool, @@ -35,16 +33,6 @@ pub struct AnnotateSnippetEmitter { macro_backtrace: bool, } -impl Translate for AnnotateSnippetEmitter { - fn fluent_bundle(&self) -> Option<&FluentBundle> { - self.fluent_bundle.as_deref() - } - - fn fallback_fluent_bundle(&self) -> &FluentBundle { - &self.fallback_bundle - } -} - impl Emitter for AnnotateSnippetEmitter { /// The entry point for the diagnostics generation fn emit_diagnostic(&mut self, mut diag: DiagInner, _registry: &Registry) { @@ -78,6 +66,10 @@ impl Emitter for AnnotateSnippetEmitter { fn should_show_explain(&self) -> bool { !self.short_message } + + fn translator(&self) -> &Translator { + &self.translator + } } /// Provides the 
source string for the given `line` of `file` @@ -104,19 +96,11 @@ fn annotation_level_for_level(level: Level) -> annotate_snippets::Level { impl AnnotateSnippetEmitter { pub fn new( source_map: Option<Arc<SourceMap>>, - fluent_bundle: Option<Arc<FluentBundle>>, - fallback_bundle: LazyFallbackBundle, + translator: Translator, short_message: bool, macro_backtrace: bool, ) -> Self { - Self { - source_map, - fluent_bundle, - fallback_bundle, - short_message, - ui_testing: false, - macro_backtrace, - } + Self { source_map, translator, short_message, ui_testing: false, macro_backtrace } } /// Allows to modify `Self` to enable or disable the `ui_testing` flag. @@ -137,7 +121,7 @@ impl AnnotateSnippetEmitter { _children: &[Subdiag], _suggestions: &[CodeSuggestion], ) { - let message = self.translate_messages(messages, args); + let message = self.translator.translate_messages(messages, args); if let Some(source_map) = &self.source_map { // Make sure our primary file comes first let primary_lo = if let Some(primary_span) = msp.primary_span().as_ref() { diff --git a/compiler/rustc_errors/src/emitter.rs b/compiler/rustc_errors/src/emitter.rs index fe01e289334..e333de4b660 100644 --- a/compiler/rustc_errors/src/emitter.rs +++ b/compiler/rustc_errors/src/emitter.rs @@ -34,10 +34,11 @@ use crate::snippet::{ Annotation, AnnotationColumn, AnnotationType, Line, MultilineAnnotation, Style, StyledString, }; use crate::styled_buffer::StyledBuffer; -use crate::translation::{Translate, to_fluent_args}; +use crate::timings::TimingRecord; +use crate::translation::{Translator, to_fluent_args}; use crate::{ - CodeSuggestion, DiagInner, DiagMessage, ErrCode, FluentBundle, LazyFallbackBundle, Level, - MultiSpan, Subdiag, SubstitutionHighlight, SuggestionStyle, TerminalUrl, + CodeSuggestion, DiagInner, DiagMessage, ErrCode, Level, MultiSpan, Subdiag, + SubstitutionHighlight, SuggestionStyle, TerminalUrl, }; /// Default column width, used in tests and when terminal dimensions cannot be determined. @@ -164,12 +165,17 @@ impl Margin { } } +pub enum TimingEvent { + Start, + End, +} + const ANONYMIZED_LINE_NUM: &str = "LL"; pub type DynEmitter = dyn Emitter + DynSend; -/// Emitter trait for emitting errors. -pub trait Emitter: Translate { +/// Emitter trait for emitting errors and other structured information. +pub trait Emitter { /// Emit a structured diagnostic. fn emit_diagnostic(&mut self, diag: DiagInner, registry: &Registry); @@ -177,6 +183,10 @@ pub trait Emitter: Translate { /// Currently only supported for the JSON format. fn emit_artifact_notification(&mut self, _path: &Path, _artifact_type: &str) {} + /// Emit a timestamp with start/end of a timing section. + /// Currently only supported for the JSON format. + fn emit_timing_section(&mut self, _record: TimingRecord, _event: TimingEvent) {} + /// Emit a report about future breakage. /// Currently only supported for the JSON format. fn emit_future_breakage_report(&mut self, _diags: Vec<DiagInner>, _registry: &Registry) {} @@ -202,6 +212,8 @@ pub trait Emitter: Translate { fn source_map(&self) -> Option<&SourceMap>; + fn translator(&self) -> &Translator; + /// Formats the substitutions of the primary_span /// /// There are a lot of conditions to this method, but in short: @@ -214,13 +226,17 @@ pub trait Emitter: Translate { /// * If the current `DiagInner` has multiple suggestions, /// we leave `primary_span` and the suggestions untouched. 
fn primary_span_formatted( - &mut self, + &self, primary_span: &mut MultiSpan, suggestions: &mut Vec<CodeSuggestion>, fluent_args: &FluentArgs<'_>, ) { if let Some((sugg, rest)) = suggestions.split_first() { - let msg = self.translate_message(&sugg.msg, fluent_args).map_err(Report::new).unwrap(); + let msg = self + .translator() + .translate_message(&sugg.msg, fluent_args) + .map_err(Report::new) + .unwrap(); if rest.is_empty() // ^ if there is only one suggestion // don't display multi-suggestions as labels @@ -481,16 +497,6 @@ pub trait Emitter: Translate { } } -impl Translate for HumanEmitter { - fn fluent_bundle(&self) -> Option<&FluentBundle> { - self.fluent_bundle.as_deref() - } - - fn fallback_fluent_bundle(&self) -> &FluentBundle { - &self.fallback_bundle - } -} - impl Emitter for HumanEmitter { fn source_map(&self) -> Option<&SourceMap> { self.sm.as_deref() @@ -528,39 +534,52 @@ impl Emitter for HumanEmitter { fn supports_color(&self) -> bool { self.dst.supports_color() } + + fn translator(&self) -> &Translator { + &self.translator + } } /// An emitter that does nothing when emitting a non-fatal diagnostic. /// Fatal diagnostics are forwarded to `fatal_emitter` to avoid silent /// failures of rustc, as witnessed e.g. in issue #89358. -pub struct SilentEmitter { +pub struct FatalOnlyEmitter { pub fatal_emitter: Box<dyn Emitter + DynSend>, pub fatal_note: Option<String>, - pub emit_fatal_diagnostic: bool, } -impl Translate for SilentEmitter { - fn fluent_bundle(&self) -> Option<&FluentBundle> { +impl Emitter for FatalOnlyEmitter { + fn source_map(&self) -> Option<&SourceMap> { None } - fn fallback_fluent_bundle(&self) -> &FluentBundle { - self.fatal_emitter.fallback_fluent_bundle() + fn emit_diagnostic(&mut self, mut diag: DiagInner, registry: &Registry) { + if diag.level == Level::Fatal { + if let Some(fatal_note) = &self.fatal_note { + diag.sub(Level::Note, fatal_note.clone(), MultiSpan::new()); + } + self.fatal_emitter.emit_diagnostic(diag, registry); + } + } + + fn translator(&self) -> &Translator { + self.fatal_emitter.translator() } } +pub struct SilentEmitter { + pub translator: Translator, +} + impl Emitter for SilentEmitter { fn source_map(&self) -> Option<&SourceMap> { None } - fn emit_diagnostic(&mut self, mut diag: DiagInner, registry: &Registry) { - if self.emit_fatal_diagnostic && diag.level == Level::Fatal { - if let Some(fatal_note) = &self.fatal_note { - diag.sub(Level::Note, fatal_note.clone(), MultiSpan::new()); - } - self.fatal_emitter.emit_diagnostic(diag, registry); - } + fn emit_diagnostic(&mut self, _diag: DiagInner, _registry: &Registry) {} + + fn translator(&self) -> &Translator { + &self.translator } } @@ -605,9 +624,8 @@ pub struct HumanEmitter { #[setters(skip)] dst: IntoDynSyncSend<Destination>, sm: Option<Arc<SourceMap>>, - fluent_bundle: Option<Arc<FluentBundle>>, #[setters(skip)] - fallback_bundle: LazyFallbackBundle, + translator: Translator, short_message: bool, ui_testing: bool, ignored_directories_in_source_blocks: Vec<String>, @@ -627,12 +645,11 @@ pub(crate) struct FileWithAnnotatedLines { } impl HumanEmitter { - pub fn new(dst: Destination, fallback_bundle: LazyFallbackBundle) -> HumanEmitter { + pub fn new(dst: Destination, translator: Translator) -> HumanEmitter { HumanEmitter { dst: IntoDynSyncSend(dst), sm: None, - fluent_bundle: None, - fallback_bundle, + translator, short_message: false, ui_testing: false, ignored_directories_in_source_blocks: Vec::new(), @@ -1423,7 +1440,7 @@ impl HumanEmitter { // very *weird* formats // see? 
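(Editorial aside, not part of the patch: with the `Translate` supertrait gone, an emitter now owns a `Translator` directly and exposes it through the new `translator()` method, as the `FatalOnlyEmitter`/`SilentEmitter` split above shows. A minimal sketch of a custom emitter under the new trait shape — `CountingEmitter` is a hypothetical type and the import paths are approximate:)

```rust
use rustc_errors::DiagInner;
use rustc_errors::emitter::Emitter;
use rustc_errors::registry::Registry;
use rustc_errors::translation::Translator;
use rustc_span::source_map::SourceMap;

/// Hypothetical emitter that just counts diagnostics; it holds a `Translator`
/// instead of implementing the removed `Translate` trait.
struct CountingEmitter {
    translator: Translator,
    count: usize,
}

impl Emitter for CountingEmitter {
    fn emit_diagnostic(&mut self, _diag: DiagInner, _registry: &Registry) {
        // Swallow the diagnostic; only keep a tally.
        self.count += 1;
    }

    fn source_map(&self) -> Option<&SourceMap> {
        None
    }

    fn translator(&self) -> &Translator {
        &self.translator
    }
}
```

Constructing such an emitter would mirror the new `default_translator()` helper, e.g. `CountingEmitter { translator: Translator::with_fallback_bundle(DEFAULT_LOCALE_RESOURCES.to_vec(), false), count: 0 }`.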
for (text, style) in msgs.iter() { - let text = self.translate_message(text, args).map_err(Report::new).unwrap(); + let text = self.translator.translate_message(text, args).map_err(Report::new).unwrap(); let text = &normalize_whitespace(&text); let lines = text.split('\n').collect::<Vec<_>>(); if lines.len() > 1 { @@ -1518,7 +1535,8 @@ impl HumanEmitter { } let mut line = 0; for (text, style) in msgs.iter() { - let text = self.translate_message(text, args).map_err(Report::new).unwrap(); + let text = + self.translator.translate_message(text, args).map_err(Report::new).unwrap(); // Account for newlines to align output to its label. for text in normalize_whitespace(&text).lines() { buffer.append( @@ -1550,7 +1568,7 @@ impl HumanEmitter { .into_iter() .filter_map(|label| match label.label { Some(msg) if label.is_primary => { - let text = self.translate_message(&msg, args).ok()?; + let text = self.translator.translate_message(&msg, args).ok()?; if !text.trim().is_empty() { Some(text.to_string()) } else { None } } _ => None, @@ -3094,7 +3112,11 @@ impl FileWithAnnotatedLines { let label = label.as_ref().map(|m| { normalize_whitespace( - &emitter.translate_message(m, args).map_err(Report::new).unwrap(), + &emitter + .translator() + .translate_message(m, args) + .map_err(Report::new) + .unwrap(), ) }); diff --git a/compiler/rustc_errors/src/json.rs b/compiler/rustc_errors/src/json.rs index a6583407b7e..6d600f896a0 100644 --- a/compiler/rustc_errors/src/json.rs +++ b/compiler/rustc_errors/src/json.rs @@ -28,14 +28,12 @@ use termcolor::{ColorSpec, WriteColor}; use crate::diagnostic::IsLint; use crate::emitter::{ ColorConfig, Destination, Emitter, HumanEmitter, HumanReadableErrorType, OutputTheme, - should_show_source_code, + TimingEvent, should_show_source_code, }; use crate::registry::Registry; -use crate::translation::{Translate, to_fluent_args}; -use crate::{ - CodeSuggestion, FluentBundle, LazyFallbackBundle, MultiSpan, SpanLabel, Subdiag, Suggestions, - TerminalUrl, -}; +use crate::timings::{TimingRecord, TimingSection}; +use crate::translation::{Translator, to_fluent_args}; +use crate::{CodeSuggestion, MultiSpan, SpanLabel, Subdiag, Suggestions, TerminalUrl}; #[cfg(test)] mod tests; @@ -46,9 +44,8 @@ pub struct JsonEmitter { dst: IntoDynSyncSend<Box<dyn Write + Send>>, #[setters(skip)] sm: Option<Arc<SourceMap>>, - fluent_bundle: Option<Arc<FluentBundle>>, #[setters(skip)] - fallback_bundle: LazyFallbackBundle, + translator: Translator, #[setters(skip)] pretty: bool, ui_testing: bool, @@ -66,7 +63,7 @@ impl JsonEmitter { pub fn new( dst: Box<dyn Write + Send>, sm: Option<Arc<SourceMap>>, - fallback_bundle: LazyFallbackBundle, + translator: Translator, pretty: bool, json_rendered: HumanReadableErrorType, color_config: ColorConfig, @@ -74,8 +71,7 @@ impl JsonEmitter { JsonEmitter { dst: IntoDynSyncSend(dst), sm, - fluent_bundle: None, - fallback_bundle, + translator, pretty, ui_testing: false, ignored_directories_in_source_blocks: Vec::new(), @@ -104,20 +100,11 @@ impl JsonEmitter { enum EmitTyped<'a> { Diagnostic(Diagnostic), Artifact(ArtifactNotification<'a>), + SectionTiming(SectionTimestamp<'a>), FutureIncompat(FutureIncompatReport<'a>), UnusedExtern(UnusedExterns<'a>), } -impl Translate for JsonEmitter { - fn fluent_bundle(&self) -> Option<&FluentBundle> { - self.fluent_bundle.as_deref() - } - - fn fallback_fluent_bundle(&self) -> &FluentBundle { - &self.fallback_bundle - } -} - impl Emitter for JsonEmitter { fn emit_diagnostic(&mut self, diag: crate::DiagInner, registry: &Registry) { let 
data = Diagnostic::from_errors_diagnostic(diag, self, registry); @@ -135,6 +122,21 @@ impl Emitter for JsonEmitter { } } + fn emit_timing_section(&mut self, record: TimingRecord, event: TimingEvent) { + let event = match event { + TimingEvent::Start => "start", + TimingEvent::End => "end", + }; + let name = match record.section { + TimingSection::Linking => "link", + }; + let data = SectionTimestamp { name, event, timestamp: record.timestamp }; + let result = self.emit(EmitTyped::SectionTiming(data)); + if let Err(e) = result { + panic!("failed to print timing section: {e:?}"); + } + } + fn emit_future_breakage_report(&mut self, diags: Vec<crate::DiagInner>, registry: &Registry) { let data: Vec<FutureBreakageItem<'_>> = diags .into_iter() @@ -177,6 +179,10 @@ impl Emitter for JsonEmitter { fn should_show_explain(&self) -> bool { !self.json_rendered.short() } + + fn translator(&self) -> &Translator { + &self.translator + } } // The following data types are provided just for serialisation. @@ -264,6 +270,16 @@ struct ArtifactNotification<'a> { } #[derive(Serialize)] +struct SectionTimestamp<'a> { + /// Name of the section + name: &'a str, + /// Start/end of the section + event: &'a str, + /// Opaque timestamp. + timestamp: u128, +} + +#[derive(Serialize)] struct FutureBreakageItem<'a> { // Always EmitTyped::Diagnostic, but we want to make sure it gets serialized // with "$message_type". @@ -297,7 +313,7 @@ impl Diagnostic { let args = to_fluent_args(diag.args.iter()); let sugg_to_diag = |sugg: &CodeSuggestion| { let translated_message = - je.translate_message(&sugg.msg, &args).map_err(Report::new).unwrap(); + je.translator.translate_message(&sugg.msg, &args).map_err(Report::new).unwrap(); Diagnostic { message: translated_message.to_string(), code: None, @@ -341,7 +357,7 @@ impl Diagnostic { } } - let translated_message = je.translate_messages(&diag.messages, &args); + let translated_message = je.translator.translate_messages(&diag.messages, &args); let code = if let Some(code) = diag.code { Some(DiagnosticCode { @@ -369,10 +385,9 @@ impl Diagnostic { ColorConfig::Always | ColorConfig::Auto => dst = Box::new(termcolor::Ansi::new(dst)), ColorConfig::Never => {} } - HumanEmitter::new(dst, Arc::clone(&je.fallback_bundle)) + HumanEmitter::new(dst, je.translator.clone()) .short_message(short) .sm(je.sm.clone()) - .fluent_bundle(je.fluent_bundle.clone()) .diagnostic_width(je.diagnostic_width) .macro_backtrace(je.macro_backtrace) .track_diagnostics(je.track_diagnostics) @@ -403,7 +418,7 @@ impl Diagnostic { args: &FluentArgs<'_>, je: &JsonEmitter, ) -> Diagnostic { - let translated_message = je.translate_messages(&subdiag.messages, args); + let translated_message = je.translator.translate_messages(&subdiag.messages, args); Diagnostic { message: translated_message.to_string(), code: None, @@ -427,7 +442,7 @@ impl DiagnosticSpan { span.is_primary, span.label .as_ref() - .map(|m| je.translate_message(m, args).unwrap()) + .map(|m| je.translator.translate_message(m, args).unwrap()) .map(|m| m.to_string()), suggestion, je, diff --git a/compiler/rustc_errors/src/json/tests.rs b/compiler/rustc_errors/src/json/tests.rs index 40973e8e5d8..8cf81f467d8 100644 --- a/compiler/rustc_errors/src/json/tests.rs +++ b/compiler/rustc_errors/src/json/tests.rs @@ -41,14 +41,14 @@ fn test_positions(code: &str, span: (u32, u32), expected_output: SpanTestData) { rustc_span::create_default_session_globals_then(|| { let sm = Arc::new(SourceMap::new(FilePathMapping::empty())); 
sm.new_source_file(Path::new("test.rs").to_owned().into(), code.to_owned()); - let fallback_bundle = - crate::fallback_fluent_bundle(vec![crate::DEFAULT_LOCALE_RESOURCE], false); + let translator = + Translator::with_fallback_bundle(vec![crate::DEFAULT_LOCALE_RESOURCE], false); let output = Arc::new(Mutex::new(Vec::new())); let je = JsonEmitter::new( Box::new(Shared { data: output.clone() }), Some(sm), - fallback_bundle, + translator, true, // pretty HumanReadableErrorType::Short, ColorConfig::Never, diff --git a/compiler/rustc_errors/src/lib.rs b/compiler/rustc_errors/src/lib.rs index 9f72fc4705a..207aed8c755 100644 --- a/compiler/rustc_errors/src/lib.rs +++ b/compiler/rustc_errors/src/lib.rs @@ -7,6 +7,7 @@ #![allow(internal_features)] #![allow(rustc::diagnostic_outside_of_impl)] #![allow(rustc::untranslatable_diagnostic)] +#![cfg_attr(not(bootstrap), allow(rustc::direct_use_of_rustc_type_ir))] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] #![feature(array_windows)] @@ -74,7 +75,9 @@ pub use snippet::Style; pub use termcolor::{Color, ColorSpec, WriteColor}; use tracing::debug; +use crate::emitter::TimingEvent; use crate::registry::Registry; +use crate::timings::TimingRecord; pub mod annotate_snippet_emitter_writer; pub mod codes; @@ -90,6 +93,7 @@ mod snippet; mod styled_buffer; #[cfg(test)] mod tests; +pub mod timings; pub mod translation; pub type PResult<'a, T> = Result<T, Diag<'a>>; @@ -744,40 +748,10 @@ impl DiagCtxt { Self { inner: Lock::new(DiagCtxtInner::new(emitter)) } } - pub fn make_silent(&self, fatal_note: Option<String>, emit_fatal_diagnostic: bool) { - // An empty type that implements `Emitter` to temporarily swap in place of the real one, - // which will be used in constructing its replacement. 
- struct FalseEmitter; - - impl Emitter for FalseEmitter { - fn emit_diagnostic(&mut self, _: DiagInner, _: &Registry) { - unimplemented!("false emitter must only used during `make_silent`") - } - - fn source_map(&self) -> Option<&SourceMap> { - unimplemented!("false emitter must only used during `make_silent`") - } - } - - impl translation::Translate for FalseEmitter { - fn fluent_bundle(&self) -> Option<&FluentBundle> { - unimplemented!("false emitter must only used during `make_silent`") - } - - fn fallback_fluent_bundle(&self) -> &FluentBundle { - unimplemented!("false emitter must only used during `make_silent`") - } - } - + pub fn make_silent(&self) { let mut inner = self.inner.borrow_mut(); - let mut prev_emitter = Box::new(FalseEmitter) as Box<dyn Emitter + DynSend>; - std::mem::swap(&mut inner.emitter, &mut prev_emitter); - let new_emitter = Box::new(emitter::SilentEmitter { - fatal_emitter: prev_emitter, - fatal_note, - emit_fatal_diagnostic, - }); - inner.emitter = new_emitter; + let translator = inner.emitter.translator().clone(); + inner.emitter = Box::new(emitter::SilentEmitter { translator }); } pub fn set_emitter(&self, emitter: Box<dyn Emitter + DynSend>) { @@ -1156,6 +1130,14 @@ impl<'a> DiagCtxtHandle<'a> { self.inner.borrow_mut().emitter.emit_artifact_notification(path, artifact_type); } + pub fn emit_timing_section_start(&self, record: TimingRecord) { + self.inner.borrow_mut().emitter.emit_timing_section(record, TimingEvent::Start); + } + + pub fn emit_timing_section_end(&self, record: TimingRecord) { + self.inner.borrow_mut().emitter.emit_timing_section(record, TimingEvent::End); + } + pub fn emit_future_breakage_report(&self) { let inner = &mut *self.inner.borrow_mut(); let diags = std::mem::take(&mut inner.future_breakage_diagnostics); @@ -1759,7 +1741,12 @@ impl DiagCtxtInner { args: impl Iterator<Item = DiagArg<'a>>, ) -> String { let args = crate::translation::to_fluent_args(args); - self.emitter.translate_message(&message, &args).map_err(Report::new).unwrap().to_string() + self.emitter + .translator() + .translate_message(&message, &args) + .map_err(Report::new) + .unwrap() + .to_string() } fn eagerly_translate_for_subdiag( diff --git a/compiler/rustc_errors/src/tests.rs b/compiler/rustc_errors/src/tests.rs index 376fd24d57b..34ebac0fde1 100644 --- a/compiler/rustc_errors/src/tests.rs +++ b/compiler/rustc_errors/src/tests.rs @@ -1,3 +1,5 @@ +use std::sync::{Arc, LazyLock}; + use rustc_data_structures::sync::IntoDynSyncSend; use rustc_error_messages::fluent_bundle::resolver::errors::{ReferenceKind, ResolverError}; use rustc_error_messages::{DiagMessage, langid}; @@ -5,23 +7,9 @@ use rustc_error_messages::{DiagMessage, langid}; use crate::FluentBundle; use crate::error::{TranslateError, TranslateErrorKind}; use crate::fluent_bundle::*; -use crate::translation::Translate; - -struct Dummy { - bundle: FluentBundle, -} - -impl Translate for Dummy { - fn fluent_bundle(&self) -> Option<&FluentBundle> { - None - } +use crate::translation::Translator; - fn fallback_fluent_bundle(&self) -> &FluentBundle { - &self.bundle - } -} - -fn make_dummy(ftl: &'static str) -> Dummy { +fn make_translator(ftl: &'static str) -> Translator { let resource = FluentResource::try_new(ftl.into()).expect("Failed to parse an FTL string."); let langid_en = langid!("en-US"); @@ -33,12 +21,15 @@ fn make_dummy(ftl: &'static str) -> Dummy { bundle.add_resource(resource).expect("Failed to add FTL resources to the bundle."); - Dummy { bundle } + Translator { + fluent_bundle: None, + 
fallback_fluent_bundle: Arc::new(LazyLock::new(Box::new(|| bundle))), + } } #[test] fn wellformed_fluent() { - let dummy = make_dummy("mir_build_borrow_of_moved_value = borrow of moved value + let translator = make_translator("mir_build_borrow_of_moved_value = borrow of moved value .label = value moved into `{$name}` here .occurs_because_label = move occurs because `{$name}` has type `{$ty}` which does not implement the `Copy` trait .value_borrowed_label = value borrowed here after move @@ -54,7 +45,7 @@ fn wellformed_fluent() { ); assert_eq!( - dummy.translate_message(&message, &args).unwrap(), + translator.translate_message(&message, &args).unwrap(), "borrow this binding in the pattern to avoid moving the value" ); } @@ -66,7 +57,7 @@ fn wellformed_fluent() { ); assert_eq!( - dummy.translate_message(&message, &args).unwrap(), + translator.translate_message(&message, &args).unwrap(), "value borrowed here after move" ); } @@ -78,7 +69,7 @@ fn wellformed_fluent() { ); assert_eq!( - dummy.translate_message(&message, &args).unwrap(), + translator.translate_message(&message, &args).unwrap(), "move occurs because `\u{2068}Foo\u{2069}` has type `\u{2068}std::string::String\u{2069}` which does not implement the `Copy` trait" ); @@ -89,7 +80,7 @@ fn wellformed_fluent() { ); assert_eq!( - dummy.translate_message(&message, &args).unwrap(), + translator.translate_message(&message, &args).unwrap(), "value moved into `\u{2068}Foo\u{2069}` here" ); } @@ -98,7 +89,7 @@ fn wellformed_fluent() { #[test] fn misformed_fluent() { - let dummy = make_dummy("mir_build_borrow_of_moved_value = borrow of moved value + let translator = make_translator("mir_build_borrow_of_moved_value = borrow of moved value .label = value moved into `{name}` here .occurs_because_label = move occurs because `{$oops}` has type `{$ty}` which does not implement the `Copy` trait .suggestion = borrow this binding in the pattern to avoid moving the value"); @@ -112,7 +103,7 @@ fn misformed_fluent() { Some("value_borrowed_label".into()), ); - let err = dummy.translate_message(&message, &args).unwrap_err(); + let err = translator.translate_message(&message, &args).unwrap_err(); assert!( matches!( &err, @@ -141,7 +132,7 @@ fn misformed_fluent() { Some("label".into()), ); - let err = dummy.translate_message(&message, &args).unwrap_err(); + let err = translator.translate_message(&message, &args).unwrap_err(); if let TranslateError::Two { primary: box TranslateError::One { kind: TranslateErrorKind::PrimaryBundleMissing, .. }, fallback: box TranslateError::One { kind: TranslateErrorKind::Fluent { errs }, .. }, @@ -168,7 +159,7 @@ fn misformed_fluent() { Some("occurs_because_label".into()), ); - let err = dummy.translate_message(&message, &args).unwrap_err(); + let err = translator.translate_message(&message, &args).unwrap_err(); if let TranslateError::Two { primary: box TranslateError::One { kind: TranslateErrorKind::PrimaryBundleMissing, .. }, fallback: box TranslateError::One { kind: TranslateErrorKind::Fluent { errs }, .. }, diff --git a/compiler/rustc_errors/src/timings.rs b/compiler/rustc_errors/src/timings.rs new file mode 100644 index 00000000000..27fc9df8d79 --- /dev/null +++ b/compiler/rustc_errors/src/timings.rs @@ -0,0 +1,80 @@ +use std::time::Instant; + +use crate::DiagCtxtHandle; + +/// A high-level section of the compilation process. +#[derive(Copy, Clone, Debug)] +pub enum TimingSection { + /// Time spent linking. 
+ Linking, +} + +/// Section with attached timestamp +#[derive(Copy, Clone, Debug)] +pub struct TimingRecord { + pub section: TimingSection, + /// Microseconds elapsed since some predetermined point in time (~start of the rustc process). + pub timestamp: u128, +} + +impl TimingRecord { + fn from_origin(origin: Instant, section: TimingSection) -> Self { + Self { section, timestamp: Instant::now().duration_since(origin).as_micros() } + } + + pub fn section(&self) -> TimingSection { + self.section + } + + pub fn timestamp(&self) -> u128 { + self.timestamp + } +} + +/// Manages emission of start/end section timings, enabled through `--json=timings`. +pub struct TimingSectionHandler { + /// Time when the compilation session started. + /// If `None`, timing is disabled. + origin: Option<Instant>, +} + +impl TimingSectionHandler { + pub fn new(enabled: bool) -> Self { + let origin = if enabled { Some(Instant::now()) } else { None }; + Self { origin } + } + + /// Returns a RAII guard that will immediately emit a start the provided section, and then emit + /// its end when it is dropped. + pub fn start_section<'a>( + &self, + diag_ctxt: DiagCtxtHandle<'a>, + section: TimingSection, + ) -> TimingSectionGuard<'a> { + TimingSectionGuard::create(diag_ctxt, section, self.origin) + } +} + +/// RAII wrapper for starting and ending section timings. +pub struct TimingSectionGuard<'a> { + dcx: DiagCtxtHandle<'a>, + section: TimingSection, + origin: Option<Instant>, +} + +impl<'a> TimingSectionGuard<'a> { + fn create(dcx: DiagCtxtHandle<'a>, section: TimingSection, origin: Option<Instant>) -> Self { + if let Some(origin) = origin { + dcx.emit_timing_section_start(TimingRecord::from_origin(origin, section)); + } + Self { dcx, section, origin } + } +} + +impl<'a> Drop for TimingSectionGuard<'a> { + fn drop(&mut self) { + if let Some(origin) = self.origin { + self.dcx.emit_timing_section_end(TimingRecord::from_origin(origin, self.section)); + } + } +} diff --git a/compiler/rustc_errors/src/translation.rs b/compiler/rustc_errors/src/translation.rs index 156f5e5d26e..c0bcec093c7 100644 --- a/compiler/rustc_errors/src/translation.rs +++ b/compiler/rustc_errors/src/translation.rs @@ -1,8 +1,9 @@ use std::borrow::Cow; use std::env; use std::error::Report; +use std::sync::Arc; -pub use rustc_error_messages::FluentArgs; +pub use rustc_error_messages::{FluentArgs, LazyFallbackBundle}; use tracing::{debug, trace}; use crate::error::{TranslateError, TranslateErrorKind}; @@ -28,19 +29,33 @@ pub fn to_fluent_args<'iter>(iter: impl Iterator<Item = DiagArg<'iter>>) -> Flue args } -pub trait Translate { - /// Return `FluentBundle` with localized diagnostics for the locale requested by the user. If no - /// language was requested by the user then this will be `None` and `fallback_fluent_bundle` - /// should be used. - fn fluent_bundle(&self) -> Option<&FluentBundle>; - +#[derive(Clone)] +pub struct Translator { + /// Localized diagnostics for the locale requested by the user. If no language was requested by + /// the user then this will be `None` and `fallback_fluent_bundle` should be used. + pub fluent_bundle: Option<Arc<FluentBundle>>, /// Return `FluentBundle` with localized diagnostics for the default locale of the compiler. /// Used when the user has not requested a specific language or when a localized diagnostic is /// unavailable for the requested locale. 
- fn fallback_fluent_bundle(&self) -> &FluentBundle; + pub fallback_fluent_bundle: LazyFallbackBundle, +} + +impl Translator { + pub fn with_fallback_bundle( + resources: Vec<&'static str>, + with_directionality_markers: bool, + ) -> Translator { + Translator { + fluent_bundle: None, + fallback_fluent_bundle: crate::fallback_fluent_bundle( + resources, + with_directionality_markers, + ), + } + } /// Convert `DiagMessage`s to a string, performing translation if necessary. - fn translate_messages( + pub fn translate_messages( &self, messages: &[(DiagMessage, Style)], args: &FluentArgs<'_>, @@ -54,7 +69,7 @@ pub trait Translate { } /// Convert a `DiagMessage` to a string, performing translation if necessary. - fn translate_message<'a>( + pub fn translate_message<'a>( &'a self, message: &'a DiagMessage, args: &'a FluentArgs<'_>, @@ -91,7 +106,7 @@ pub trait Translate { }; try { - match self.fluent_bundle().map(|b| translate_with_bundle(b)) { + match self.fluent_bundle.as_ref().map(|b| translate_with_bundle(b)) { // The primary bundle was present and translation succeeded Some(Ok(t)) => t, @@ -102,7 +117,7 @@ pub trait Translate { primary @ TranslateError::One { kind: TranslateErrorKind::MessageMissing, .. }, - )) => translate_with_bundle(self.fallback_fluent_bundle()) + )) => translate_with_bundle(&self.fallback_fluent_bundle) .map_err(|fallback| primary.and(fallback))?, // Always yeet out for errors on debug (unless @@ -118,11 +133,11 @@ pub trait Translate { // ..otherwise, for end users, an error about this wouldn't be useful or actionable, so // just hide it and try with the fallback bundle. - Some(Err(primary)) => translate_with_bundle(self.fallback_fluent_bundle()) + Some(Err(primary)) => translate_with_bundle(&self.fallback_fluent_bundle) .map_err(|fallback| primary.and(fallback))?, // The primary bundle is missing, proceed to the fallback bundle - None => translate_with_bundle(self.fallback_fluent_bundle()) + None => translate_with_bundle(&self.fallback_fluent_bundle) .map_err(|fallback| TranslateError::primary(identifier, args).and(fallback))?, } } diff --git a/compiler/rustc_expand/src/errors.rs b/compiler/rustc_expand/src/errors.rs index ec0af67c046..714ba3bf0f4 100644 --- a/compiler/rustc_expand/src/errors.rs +++ b/compiler/rustc_expand/src/errors.rs @@ -183,12 +183,12 @@ pub(crate) struct FeatureNotAllowed { #[derive(Diagnostic)] #[diag(expand_recursion_limit_reached)] #[help] -pub(crate) struct RecursionLimitReached<'a> { +pub(crate) struct RecursionLimitReached { #[primary_span] pub span: Span, pub descr: String, pub suggested_limit: Limit, - pub crate_name: &'a str, + pub crate_name: Symbol, } #[derive(Diagnostic)] diff --git a/compiler/rustc_expand/src/expand.rs b/compiler/rustc_expand/src/expand.rs index 9fd524ef45c..0474413e762 100644 --- a/compiler/rustc_expand/src/expand.rs +++ b/compiler/rustc_expand/src/expand.rs @@ -26,7 +26,7 @@ use rustc_session::lint::builtin::{UNUSED_ATTRIBUTES, UNUSED_DOC_COMMENTS}; use rustc_session::parse::feature_err; use rustc_session::{Limit, Session}; use rustc_span::hygiene::SyntaxContext; -use rustc_span::{ErrorGuaranteed, FileName, Ident, LocalExpnId, Span, sym}; +use rustc_span::{ErrorGuaranteed, FileName, Ident, LocalExpnId, Span, Symbol, sym}; use smallvec::SmallVec; use crate::base::*; @@ -86,7 +86,7 @@ macro_rules! 
ast_fragments { } } - fn make_from<'a>(self, result: Box<dyn MacResult + 'a>) -> Option<AstFragment> { + fn make_from(self, result: Box<dyn MacResult + '_>) -> Option<AstFragment> { match self { AstFragmentKind::OptExpr => result.make_expr().map(Some).map(AstFragment::OptExpr), @@ -136,7 +136,7 @@ macro_rules! ast_fragments { T::fragment_to_output(self) } - pub(crate) fn mut_visit_with<F: MutVisitor>(&mut self, vis: &mut F) { + pub(crate) fn mut_visit_with(&mut self, vis: &mut impl MutVisitor) { match self { AstFragment::OptExpr(opt_expr) => { if let Some(expr) = opt_expr.take() { @@ -316,9 +316,9 @@ impl AstFragmentKind { } } - pub(crate) fn expect_from_annotatables<I: IntoIterator<Item = Annotatable>>( + pub(crate) fn expect_from_annotatables( self, - items: I, + items: impl IntoIterator<Item = Annotatable>, ) -> AstFragment { let mut items = items.into_iter(); match self { @@ -473,7 +473,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { let dir_path = file_path.parent().unwrap_or(&file_path).to_owned(); self.cx.root_path = dir_path.clone(); self.cx.current_expansion.module = Rc::new(ModuleData { - mod_path: vec![Ident::from_str(&self.cx.ecfg.crate_name)], + mod_path: vec![Ident::with_dummy_span(self.cx.ecfg.crate_name)], file_path_stack: vec![file_path], dir_path, }); @@ -689,7 +689,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { span: expn_data.call_site, descr: expn_data.kind.descr(), suggested_limit, - crate_name: &self.cx.ecfg.crate_name, + crate_name: self.cx.ecfg.crate_name, }); self.cx.trace_macros_diag(); @@ -1218,10 +1218,10 @@ trait InvocationCollectorNode: HasAttrs + HasNodeId + Sized { fn descr() -> &'static str { unreachable!() } - fn walk_flat_map<V: MutVisitor>(self, _visitor: &mut V) -> Self::OutputTy { + fn walk_flat_map(self, _collector: &mut InvocationCollector<'_, '_>) -> Self::OutputTy { unreachable!() } - fn walk<V: MutVisitor>(&mut self, _visitor: &mut V) { + fn walk(&mut self, _collector: &mut InvocationCollector<'_, '_>) { unreachable!() } fn is_mac_call(&self) -> bool { @@ -1276,8 +1276,8 @@ impl InvocationCollectorNode for P<ast::Item> { fn fragment_to_output(fragment: AstFragment) -> Self::OutputTy { fragment.make_items() } - fn walk_flat_map<V: MutVisitor>(self, visitor: &mut V) -> Self::OutputTy { - walk_flat_map_item(visitor, self) + fn walk_flat_map(self, collector: &mut InvocationCollector<'_, '_>) -> Self::OutputTy { + walk_flat_map_item(collector, self) } fn is_mac_call(&self) -> bool { matches!(self.kind, ItemKind::MacCall(..)) @@ -1431,8 +1431,8 @@ impl InvocationCollectorNode for AstNodeWrapper<P<ast::AssocItem>, TraitItemTag> fn fragment_to_output(fragment: AstFragment) -> Self::OutputTy { fragment.make_trait_items() } - fn walk_flat_map<V: MutVisitor>(self, visitor: &mut V) -> Self::OutputTy { - walk_flat_map_assoc_item(visitor, self.wrapped, AssocCtxt::Trait) + fn walk_flat_map(self, collector: &mut InvocationCollector<'_, '_>) -> Self::OutputTy { + walk_flat_map_assoc_item(collector, self.wrapped, AssocCtxt::Trait) } fn is_mac_call(&self) -> bool { matches!(self.wrapped.kind, AssocItemKind::MacCall(..)) @@ -1472,8 +1472,8 @@ impl InvocationCollectorNode for AstNodeWrapper<P<ast::AssocItem>, ImplItemTag> fn fragment_to_output(fragment: AstFragment) -> Self::OutputTy { fragment.make_impl_items() } - fn walk_flat_map<V: MutVisitor>(self, visitor: &mut V) -> Self::OutputTy { - walk_flat_map_assoc_item(visitor, self.wrapped, AssocCtxt::Impl { of_trait: false }) + fn walk_flat_map(self, collector: &mut InvocationCollector<'_, '_>) -> Self::OutputTy { + 
walk_flat_map_assoc_item(collector, self.wrapped, AssocCtxt::Impl { of_trait: false }) } fn is_mac_call(&self) -> bool { matches!(self.wrapped.kind, AssocItemKind::MacCall(..)) @@ -1513,8 +1513,8 @@ impl InvocationCollectorNode for AstNodeWrapper<P<ast::AssocItem>, TraitImplItem fn fragment_to_output(fragment: AstFragment) -> Self::OutputTy { fragment.make_trait_impl_items() } - fn walk_flat_map<V: MutVisitor>(self, visitor: &mut V) -> Self::OutputTy { - walk_flat_map_assoc_item(visitor, self.wrapped, AssocCtxt::Impl { of_trait: true }) + fn walk_flat_map(self, collector: &mut InvocationCollector<'_, '_>) -> Self::OutputTy { + walk_flat_map_assoc_item(collector, self.wrapped, AssocCtxt::Impl { of_trait: true }) } fn is_mac_call(&self) -> bool { matches!(self.wrapped.kind, AssocItemKind::MacCall(..)) @@ -1551,8 +1551,8 @@ impl InvocationCollectorNode for P<ast::ForeignItem> { fn fragment_to_output(fragment: AstFragment) -> Self::OutputTy { fragment.make_foreign_items() } - fn walk_flat_map<V: MutVisitor>(self, visitor: &mut V) -> Self::OutputTy { - walk_flat_map_foreign_item(visitor, self) + fn walk_flat_map(self, collector: &mut InvocationCollector<'_, '_>) -> Self::OutputTy { + walk_flat_map_foreign_item(collector, self) } fn is_mac_call(&self) -> bool { matches!(self.kind, ForeignItemKind::MacCall(..)) @@ -1573,8 +1573,8 @@ impl InvocationCollectorNode for ast::Variant { fn fragment_to_output(fragment: AstFragment) -> Self::OutputTy { fragment.make_variants() } - fn walk_flat_map<V: MutVisitor>(self, visitor: &mut V) -> Self::OutputTy { - walk_flat_map_variant(visitor, self) + fn walk_flat_map(self, collector: &mut InvocationCollector<'_, '_>) -> Self::OutputTy { + walk_flat_map_variant(collector, self) } } @@ -1586,8 +1586,8 @@ impl InvocationCollectorNode for ast::WherePredicate { fn fragment_to_output(fragment: AstFragment) -> Self::OutputTy { fragment.make_where_predicates() } - fn walk_flat_map<V: MutVisitor>(self, visitor: &mut V) -> Self::OutputTy { - walk_flat_map_where_predicate(visitor, self) + fn walk_flat_map(self, collector: &mut InvocationCollector<'_, '_>) -> Self::OutputTy { + walk_flat_map_where_predicate(collector, self) } } @@ -1599,8 +1599,8 @@ impl InvocationCollectorNode for ast::FieldDef { fn fragment_to_output(fragment: AstFragment) -> Self::OutputTy { fragment.make_field_defs() } - fn walk_flat_map<V: MutVisitor>(self, visitor: &mut V) -> Self::OutputTy { - walk_flat_map_field_def(visitor, self) + fn walk_flat_map(self, collector: &mut InvocationCollector<'_, '_>) -> Self::OutputTy { + walk_flat_map_field_def(collector, self) } } @@ -1612,8 +1612,8 @@ impl InvocationCollectorNode for ast::PatField { fn fragment_to_output(fragment: AstFragment) -> Self::OutputTy { fragment.make_pat_fields() } - fn walk_flat_map<V: MutVisitor>(self, visitor: &mut V) -> Self::OutputTy { - walk_flat_map_pat_field(visitor, self) + fn walk_flat_map(self, collector: &mut InvocationCollector<'_, '_>) -> Self::OutputTy { + walk_flat_map_pat_field(collector, self) } } @@ -1625,8 +1625,8 @@ impl InvocationCollectorNode for ast::ExprField { fn fragment_to_output(fragment: AstFragment) -> Self::OutputTy { fragment.make_expr_fields() } - fn walk_flat_map<V: MutVisitor>(self, visitor: &mut V) -> Self::OutputTy { - walk_flat_map_expr_field(visitor, self) + fn walk_flat_map(self, collector: &mut InvocationCollector<'_, '_>) -> Self::OutputTy { + walk_flat_map_expr_field(collector, self) } } @@ -1638,8 +1638,8 @@ impl InvocationCollectorNode for ast::Param { fn fragment_to_output(fragment: 
AstFragment) -> Self::OutputTy { fragment.make_params() } - fn walk_flat_map<V: MutVisitor>(self, visitor: &mut V) -> Self::OutputTy { - walk_flat_map_param(visitor, self) + fn walk_flat_map(self, collector: &mut InvocationCollector<'_, '_>) -> Self::OutputTy { + walk_flat_map_param(collector, self) } } @@ -1651,8 +1651,8 @@ impl InvocationCollectorNode for ast::GenericParam { fn fragment_to_output(fragment: AstFragment) -> Self::OutputTy { fragment.make_generic_params() } - fn walk_flat_map<V: MutVisitor>(self, visitor: &mut V) -> Self::OutputTy { - walk_flat_map_generic_param(visitor, self) + fn walk_flat_map(self, collector: &mut InvocationCollector<'_, '_>) -> Self::OutputTy { + walk_flat_map_generic_param(collector, self) } } @@ -1664,8 +1664,8 @@ impl InvocationCollectorNode for ast::Arm { fn fragment_to_output(fragment: AstFragment) -> Self::OutputTy { fragment.make_arms() } - fn walk_flat_map<V: MutVisitor>(self, visitor: &mut V) -> Self::OutputTy { - walk_flat_map_arm(visitor, self) + fn walk_flat_map(self, collector: &mut InvocationCollector<'_, '_>) -> Self::OutputTy { + walk_flat_map_arm(collector, self) } } @@ -1677,8 +1677,8 @@ impl InvocationCollectorNode for ast::Stmt { fn fragment_to_output(fragment: AstFragment) -> Self::OutputTy { fragment.make_stmts() } - fn walk_flat_map<V: MutVisitor>(self, visitor: &mut V) -> Self::OutputTy { - walk_flat_map_stmt(visitor, self) + fn walk_flat_map(self, collector: &mut InvocationCollector<'_, '_>) -> Self::OutputTy { + walk_flat_map_stmt(collector, self) } fn is_mac_call(&self) -> bool { match &self.kind { @@ -1751,8 +1751,8 @@ impl InvocationCollectorNode for ast::Crate { fn fragment_to_output(fragment: AstFragment) -> Self::OutputTy { fragment.make_crate() } - fn walk<V: MutVisitor>(&mut self, visitor: &mut V) { - walk_crate(visitor, self) + fn walk(&mut self, collector: &mut InvocationCollector<'_, '_>) { + walk_crate(collector, self) } fn expand_cfg_false( &mut self, @@ -1777,8 +1777,8 @@ impl InvocationCollectorNode for ast::Ty { fn fragment_to_output(fragment: AstFragment) -> Self::OutputTy { fragment.make_ty() } - fn walk<V: MutVisitor>(&mut self, visitor: &mut V) { - walk_ty(visitor, self) + fn walk(&mut self, collector: &mut InvocationCollector<'_, '_>) { + walk_ty(collector, self) } fn is_mac_call(&self) -> bool { matches!(self.kind, ast::TyKind::MacCall(..)) @@ -1800,8 +1800,8 @@ impl InvocationCollectorNode for ast::Pat { fn fragment_to_output(fragment: AstFragment) -> Self::OutputTy { fragment.make_pat() } - fn walk<V: MutVisitor>(&mut self, visitor: &mut V) { - walk_pat(visitor, self) + fn walk(&mut self, collector: &mut InvocationCollector<'_, '_>) { + walk_pat(collector, self) } fn is_mac_call(&self) -> bool { matches!(self.kind, PatKind::MacCall(..)) @@ -1826,8 +1826,8 @@ impl InvocationCollectorNode for ast::Expr { fn descr() -> &'static str { "an expression" } - fn walk<V: MutVisitor>(&mut self, visitor: &mut V) { - walk_expr(visitor, self) + fn walk(&mut self, collector: &mut InvocationCollector<'_, '_>) { + walk_expr(collector, self) } fn is_mac_call(&self) -> bool { matches!(self.kind, ExprKind::MacCall(..)) @@ -1850,8 +1850,8 @@ impl InvocationCollectorNode for AstNodeWrapper<P<ast::Expr>, OptExprTag> { fn fragment_to_output(fragment: AstFragment) -> Self::OutputTy { fragment.make_opt_expr() } - fn walk_flat_map<V: MutVisitor>(mut self, visitor: &mut V) -> Self::OutputTy { - walk_expr(visitor, &mut self.wrapped); + fn walk_flat_map(mut self, collector: &mut InvocationCollector<'_, '_>) -> Self::OutputTy { + 
walk_expr(collector, &mut self.wrapped); Some(self.wrapped) } fn is_mac_call(&self) -> bool { @@ -1873,20 +1873,20 @@ impl InvocationCollectorNode for AstNodeWrapper<P<ast::Expr>, OptExprTag> { /// It can be removed once that feature is stabilized. struct MethodReceiverTag; -impl InvocationCollectorNode for AstNodeWrapper<P<ast::Expr>, MethodReceiverTag> { - type OutputTy = Self; +impl InvocationCollectorNode for AstNodeWrapper<ast::Expr, MethodReceiverTag> { + type OutputTy = AstNodeWrapper<P<ast::Expr>, MethodReceiverTag>; const KIND: AstFragmentKind = AstFragmentKind::MethodReceiverExpr; fn descr() -> &'static str { "an expression" } fn to_annotatable(self) -> Annotatable { - Annotatable::Expr(self.wrapped) + Annotatable::Expr(P(self.wrapped)) } fn fragment_to_output(fragment: AstFragment) -> Self::OutputTy { AstNodeWrapper::new(fragment.make_method_receiver_expr(), MethodReceiverTag) } - fn walk<V: MutVisitor>(&mut self, visitor: &mut V) { - walk_expr(visitor, &mut self.wrapped) + fn walk(&mut self, collector: &mut InvocationCollector<'_, '_>) { + walk_expr(collector, &mut self.wrapped) } fn is_mac_call(&self) -> bool { matches!(self.wrapped.kind, ast::ExprKind::MacCall(..)) @@ -1983,9 +1983,9 @@ impl DummyAstNode for ast::Expr { } } -impl DummyAstNode for AstNodeWrapper<P<ast::Expr>, MethodReceiverTag> { +impl DummyAstNode for AstNodeWrapper<ast::Expr, MethodReceiverTag> { fn dummy() -> Self { - AstNodeWrapper::new(P(ast::Expr::dummy()), MethodReceiverTag) + AstNodeWrapper::new(ast::Expr::dummy(), MethodReceiverTag) } } @@ -2431,7 +2431,7 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> { self.visit_node(node) } - fn visit_method_receiver_expr(&mut self, node: &mut P<ast::Expr>) { + fn visit_method_receiver_expr(&mut self, node: &mut ast::Expr) { self.visit_node(AstNodeWrapper::from_mut(node, MethodReceiverTag)) } @@ -2458,7 +2458,7 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> { } pub struct ExpansionConfig<'feat> { - pub crate_name: String, + pub crate_name: Symbol, pub features: &'feat Features, pub recursion_limit: Limit, pub trace_mac: bool, @@ -2471,7 +2471,7 @@ pub struct ExpansionConfig<'feat> { } impl ExpansionConfig<'_> { - pub fn default(crate_name: String, features: &Features) -> ExpansionConfig<'_> { + pub fn default(crate_name: Symbol, features: &Features) -> ExpansionConfig<'_> { ExpansionConfig { crate_name, features, diff --git a/compiler/rustc_expand/src/mbe/transcribe.rs b/compiler/rustc_expand/src/mbe/transcribe.rs index 2d3fd7702da..0520be5fbae 100644 --- a/compiler/rustc_expand/src/mbe/transcribe.rs +++ b/compiler/rustc_expand/src/mbe/transcribe.rs @@ -9,7 +9,7 @@ use rustc_data_structures::fx::FxHashMap; use rustc_errors::{Diag, DiagCtxtHandle, PResult, pluralize}; use rustc_parse::lexer::nfc_normalize; use rustc_parse::parser::ParseNtResult; -use rustc_session::parse::{ParseSess, SymbolGallery}; +use rustc_session::parse::ParseSess; use rustc_span::hygiene::{LocalExpnId, Transparency}; use rustc_span::{ Ident, MacroRulesNormalizedIdent, Span, Symbol, SyntaxContext, sym, with_metavar_spans, @@ -25,20 +25,77 @@ use crate::mbe::macro_parser::NamedMatch::*; use crate::mbe::metavar_expr::{MetaVarExprConcatElem, RAW_IDENT_ERR}; use crate::mbe::{self, KleeneOp, MetaVarExpr}; -// A Marker adds the given mark to the syntax context. -struct Marker(LocalExpnId, Transparency, FxHashMap<SyntaxContext, SyntaxContext>); +/// Context needed to perform transcription of metavariable expressions. 
+struct TranscrCtx<'psess, 'itp> { + psess: &'psess ParseSess, + + /// Map from metavars to matched tokens. + interp: &'itp FxHashMap<MacroRulesNormalizedIdent, NamedMatch>, + + /// Allow marking spans. + marker: Marker, + + /// The stack of things yet to be completely expanded. + /// + /// We descend into the RHS (`src`), expanding things as we go. This stack contains the things + /// we have yet to expand/are still expanding. We start the stack off with the whole RHS. The + /// choice of spacing values doesn't matter. + stack: SmallVec<[Frame<'itp>; 1]>, + + /// A stack of where we are in the repeat expansion. + /// + /// As we descend in the RHS, we will need to be able to match nested sequences of matchers. + /// `repeats` keeps track of where we are in matching at each level, with the last element + /// being the most deeply nested sequence. This is used as a stack. + repeats: Vec<(usize, usize)>, + + /// The resulting token stream from the `TokenTree` we just finished processing. + /// + /// At the end, this will contain the full result of transcription, but at arbitrary points + /// during `transcribe`, `result` will contain subsets of the final result. + /// + /// Specifically, as we descend into each TokenTree, we will push the existing results onto the + /// `result_stack` and clear `results`. We will then produce the results of transcribing the + /// TokenTree into `results`. Then, as we unwind back out of the `TokenTree`, we will pop the + /// `result_stack` and append `results` to it to produce the new `results` up to that point. + /// + /// Thus, if we try to pop the `result_stack` and it is empty, we have reached the top-level + /// again, and we are done transcribing. + result: Vec<TokenTree>, + + /// The in-progress `result` lives at the top of this stack. Each entered `TokenTree` adds a + /// new entry. + result_stack: Vec<Vec<TokenTree>>, +} + +impl<'psess> TranscrCtx<'psess, '_> { + /// Span marked with the correct expansion and transparency. + fn visited_dspan(&mut self, dspan: DelimSpan) -> Span { + let mut span = dspan.entire(); + self.marker.mark_span(&mut span); + span + } +} + +/// A Marker adds the given mark to the syntax context. +struct Marker { + expand_id: LocalExpnId, + transparency: Transparency, + cache: FxHashMap<SyntaxContext, SyntaxContext>, +} impl Marker { + /// Mark a span with the stored expansion ID and transparency. fn mark_span(&mut self, span: &mut Span) { // `apply_mark` is a relatively expensive operation, both due to taking hygiene lock, and // by itself. All tokens in a macro body typically have the same syntactic context, unless // it's some advanced case with macro-generated macros. So if we cache the marked version // of that context once, we'll typically have a 100% cache hit rate after that. - let Marker(expn_id, transparency, ref mut cache) = *self; *span = span.map_ctxt(|ctxt| { - *cache + *self + .cache .entry(ctxt) - .or_insert_with(|| ctxt.apply_mark(expn_id.to_expn_id(), transparency)) + .or_insert_with(|| ctxt.apply_mark(self.expand_id.to_expn_id(), self.transparency)) }); } } @@ -116,52 +173,36 @@ pub(super) fn transcribe<'a>( return Ok(TokenStream::default()); } - // We descend into the RHS (`src`), expanding things as we go. This stack contains the things - // we have yet to expand/are still expanding. We start the stack off with the whole RHS. The - // choice of spacing values doesn't matter.
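(Editorial aside, not part of the patch: the rewritten `Marker` keeps the same memoization trick as the old tuple struct — `apply_mark` is relatively expensive, and almost every token in a macro body shares one syntax context, so the marked context is cached per `SyntaxContext`. A self-contained sketch of that pattern, with `Ctxt` and `mark` standing in for `SyntaxContext` and `apply_mark`:)

```rust
use std::collections::HashMap;
use std::hash::Hash;

// Memoize an expensive, pure `mark` operation per context value, as
// `Marker::mark_span` does with its `cache` field.
fn mark_cached<Ctxt: Copy + Eq + Hash>(
    cache: &mut HashMap<Ctxt, Ctxt>,
    ctxt: Ctxt,
    mark: impl Fn(Ctxt) -> Ctxt,
) -> Ctxt {
    *cache.entry(ctxt).or_insert_with(|| mark(ctxt))
}

fn main() {
    let mut cache = HashMap::new();
    // The first call computes the marked value; the second hits the cache.
    assert_eq!(mark_cached(&mut cache, 7u32, |c| c + 1), 8);
    assert_eq!(mark_cached(&mut cache, 7u32, |c| c + 1), 8);
}
```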
- let mut stack: SmallVec<[Frame<'_>; 1]> = smallvec![Frame::new_delimited( - src, - src_span, - DelimSpacing::new(Spacing::Alone, Spacing::Alone) - )]; - - // As we descend in the RHS, we will need to be able to match nested sequences of matchers. - // `repeats` keeps track of where we are in matching at each level, with the last element being - // the most deeply nested sequence. This is used as a stack. - let mut repeats: Vec<(usize, usize)> = Vec::new(); - - // `result` contains resulting token stream from the TokenTree we just finished processing. At - // the end, this will contain the full result of transcription, but at arbitrary points during - // `transcribe`, `result` will contain subsets of the final result. - // - // Specifically, as we descend into each TokenTree, we will push the existing results onto the - // `result_stack` and clear `results`. We will then produce the results of transcribing the - // TokenTree into `results`. Then, as we unwind back out of the `TokenTree`, we will pop the - // `result_stack` and append `results` too it to produce the new `results` up to that point. - // - // Thus, if we try to pop the `result_stack` and it is empty, we have reached the top-level - // again, and we are done transcribing. - let mut result: Vec<TokenTree> = Vec::new(); - let mut result_stack = Vec::new(); - let mut marker = Marker(expand_id, transparency, Default::default()); - - let dcx = psess.dcx(); + let mut tscx = TranscrCtx { + psess, + interp, + marker: Marker { expand_id, transparency, cache: Default::default() }, + repeats: Vec::new(), + stack: smallvec![Frame::new_delimited( + src, + src_span, + DelimSpacing::new(Spacing::Alone, Spacing::Alone) + )], + result: Vec::new(), + result_stack: Vec::new(), + }; + loop { // Look at the last frame on the stack. // If it still has a TokenTree we have not looked at yet, use that tree. - let Some(tree) = stack.last_mut().unwrap().next() else { + let Some(tree) = tscx.stack.last_mut().unwrap().next() else { // This else-case never produces a value for `tree` (it `continue`s or `return`s). // Otherwise, if we have just reached the end of a sequence and we can keep repeating, // go back to the beginning of the sequence. - let frame = stack.last_mut().unwrap(); + let frame = tscx.stack.last_mut().unwrap(); if let FrameKind::Sequence { sep, .. } = &frame.kind { - let (repeat_idx, repeat_len) = repeats.last_mut().unwrap(); + let (repeat_idx, repeat_len) = tscx.repeats.last_mut().unwrap(); *repeat_idx += 1; if repeat_idx < repeat_len { frame.idx = 0; if let Some(sep) = sep { - result.push(TokenTree::Token(*sep, Spacing::Alone)); + tscx.result.push(TokenTree::Token(*sep, Spacing::Alone)); } continue; } @@ -170,10 +211,10 @@ pub(super) fn transcribe<'a>( // We are done with the top of the stack. Pop it. Depending on what it was, we do // different things. Note that the outermost item must be the delimited, wrapped RHS // that was passed in originally to `transcribe`. - match stack.pop().unwrap().kind { + match tscx.stack.pop().unwrap().kind { // Done with a sequence. Pop from repeats. FrameKind::Sequence { .. } => { - repeats.pop(); + tscx.repeats.pop(); } // We are done processing a Delimited. If this is the top-level delimited, we are @@ -185,15 +226,16 @@ pub(super) fn transcribe<'a>( if delim == Delimiter::Bracket { spacing.close = Spacing::Alone; } - if result_stack.is_empty() { + if tscx.result_stack.is_empty() { // No results left to compute! We are back at the top-level. 
- return Ok(TokenStream::new(result)); + return Ok(TokenStream::new(tscx.result)); } // Step back into the parent Delimited. - let tree = TokenTree::Delimited(span, spacing, delim, TokenStream::new(result)); - result = result_stack.pop().unwrap(); - result.push(tree); + let tree = + TokenTree::Delimited(span, spacing, delim, TokenStream::new(tscx.result)); + tscx.result = tscx.result_stack.pop().unwrap(); + tscx.result.push(tree); } } continue; @@ -202,223 +244,19 @@ pub(super) fn transcribe<'a>( // At this point, we know we are in the middle of a TokenTree (the last one on `stack`). // `tree` contains the next `TokenTree` to be processed. match tree { - // We are descending into a sequence. We first make sure that the matchers in the RHS - // and the matches in `interp` have the same shape. Otherwise, either the caller or the - // macro writer has made a mistake. + // Replace the sequence with its expansion. seq @ mbe::TokenTree::Sequence(_, seq_rep) => { - match lockstep_iter_size(seq, interp, &repeats) { - LockstepIterSize::Unconstrained => { - return Err(dcx.create_err(NoSyntaxVarsExprRepeat { span: seq.span() })); - } - - LockstepIterSize::Contradiction(msg) => { - // FIXME: this really ought to be caught at macro definition time... It - // happens when two meta-variables are used in the same repetition in a - // sequence, but they come from different sequence matchers and repeat - // different amounts. - return Err( - dcx.create_err(MetaVarsDifSeqMatchers { span: seq.span(), msg }) - ); - } - - LockstepIterSize::Constraint(len, _) => { - // We do this to avoid an extra clone above. We know that this is a - // sequence already. - let mbe::TokenTree::Sequence(sp, seq) = seq else { unreachable!() }; - - // Is the repetition empty? - if len == 0 { - if seq.kleene.op == KleeneOp::OneOrMore { - // FIXME: this really ought to be caught at macro definition - // time... It happens when the Kleene operator in the matcher and - // the body for the same meta-variable do not match. - return Err(dcx.create_err(MustRepeatOnce { span: sp.entire() })); - } - } else { - // 0 is the initial counter (we have done 0 repetitions so far). `len` - // is the total number of repetitions we should generate. - repeats.push((0, len)); - - // The first time we encounter the sequence we push it to the stack. It - // then gets reused (see the beginning of the loop) until we are done - // repeating. - stack.push(Frame::new_sequence( - seq_rep, - seq.separator.clone(), - seq.kleene.op, - )); - } - } - } + transcribe_sequence(&mut tscx, seq, seq_rep)?; } // Replace the meta-var with the matched token tree from the invocation. - &mbe::TokenTree::MetaVar(mut sp, mut original_ident) => { - // Find the matched nonterminal from the macro invocation, and use it to replace - // the meta-var. - // - // We use `Spacing::Alone` everywhere here, because that's the conservative choice - // and spacing of declarative macros is tricky. E.g. in this macro: - // ``` - // macro_rules! idents { - // ($($a:ident,)*) => { stringify!($($a)*) } - // } - // ``` - // `$a` has no whitespace after it and will be marked `JointHidden`. If you then - // call `idents!(x,y,z,)`, each of `x`, `y`, and `z` will be marked as `Joint`. So - // if you choose to use `$x`'s spacing or the identifier's spacing, you'll end up - // producing "xyz", which is bad because it effectively merges tokens. - // `Spacing::Alone` is the safer option. Fortunately, `space_between` will avoid - // some of the unnecessary whitespace. 
- let ident = MacroRulesNormalizedIdent::new(original_ident); - if let Some(cur_matched) = lookup_cur_matched(ident, interp, &repeats) { - // We wrap the tokens in invisible delimiters, unless they are already wrapped - // in invisible delimiters with the same `MetaVarKind`. Because some proc - // macros can't handle multiple layers of invisible delimiters of the same - // `MetaVarKind`. This loses some span info, though it hopefully won't matter. - let mut mk_delimited = |mk_span, mv_kind, mut stream: TokenStream| { - if stream.len() == 1 { - let tree = stream.iter().next().unwrap(); - if let TokenTree::Delimited(_, _, delim, inner) = tree - && let Delimiter::Invisible(InvisibleOrigin::MetaVar(mvk)) = delim - && mv_kind == *mvk - { - stream = inner.clone(); - } - } - - // Emit as a token stream within `Delimiter::Invisible` to maintain - // parsing priorities. - marker.mark_span(&mut sp); - with_metavar_spans(|mspans| mspans.insert(mk_span, sp)); - // Both the open delim and close delim get the same span, which covers the - // `$foo` in the decl macro RHS. - TokenTree::Delimited( - DelimSpan::from_single(sp), - DelimSpacing::new(Spacing::Alone, Spacing::Alone), - Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind)), - stream, - ) - }; - let tt = match cur_matched { - MatchedSingle(ParseNtResult::Tt(tt)) => { - // `tt`s are emitted into the output stream directly as "raw tokens", - // without wrapping them into groups. Other variables are emitted into - // the output stream as groups with `Delimiter::Invisible` to maintain - // parsing priorities. - maybe_use_metavar_location(psess, &stack, sp, tt, &mut marker) - } - MatchedSingle(ParseNtResult::Ident(ident, is_raw)) => { - marker.mark_span(&mut sp); - with_metavar_spans(|mspans| mspans.insert(ident.span, sp)); - let kind = token::NtIdent(*ident, *is_raw); - TokenTree::token_alone(kind, sp) - } - MatchedSingle(ParseNtResult::Lifetime(ident, is_raw)) => { - marker.mark_span(&mut sp); - with_metavar_spans(|mspans| mspans.insert(ident.span, sp)); - let kind = token::NtLifetime(*ident, *is_raw); - TokenTree::token_alone(kind, sp) - } - MatchedSingle(ParseNtResult::Item(item)) => { - mk_delimited(item.span, MetaVarKind::Item, TokenStream::from_ast(item)) - } - MatchedSingle(ParseNtResult::Block(block)) => mk_delimited( - block.span, - MetaVarKind::Block, - TokenStream::from_ast(block), - ), - MatchedSingle(ParseNtResult::Stmt(stmt)) => { - let stream = if let StmtKind::Empty = stmt.kind { - // FIXME: Properly collect tokens for empty statements. 
- TokenStream::token_alone(token::Semi, stmt.span) - } else { - TokenStream::from_ast(stmt) - }; - mk_delimited(stmt.span, MetaVarKind::Stmt, stream) - } - MatchedSingle(ParseNtResult::Pat(pat, pat_kind)) => mk_delimited( - pat.span, - MetaVarKind::Pat(*pat_kind), - TokenStream::from_ast(pat), - ), - MatchedSingle(ParseNtResult::Expr(expr, kind)) => { - let (can_begin_literal_maybe_minus, can_begin_string_literal) = - match &expr.kind { - ExprKind::Lit(_) => (true, true), - ExprKind::Unary(UnOp::Neg, e) - if matches!(&e.kind, ExprKind::Lit(_)) => - { - (true, false) - } - _ => (false, false), - }; - mk_delimited( - expr.span, - MetaVarKind::Expr { - kind: *kind, - can_begin_literal_maybe_minus, - can_begin_string_literal, - }, - TokenStream::from_ast(expr), - ) - } - MatchedSingle(ParseNtResult::Literal(lit)) => { - mk_delimited(lit.span, MetaVarKind::Literal, TokenStream::from_ast(lit)) - } - MatchedSingle(ParseNtResult::Ty(ty)) => { - let is_path = matches!(&ty.kind, TyKind::Path(None, _path)); - mk_delimited( - ty.span, - MetaVarKind::Ty { is_path }, - TokenStream::from_ast(ty), - ) - } - MatchedSingle(ParseNtResult::Meta(attr_item)) => { - let has_meta_form = attr_item.meta_kind().is_some(); - mk_delimited( - attr_item.span(), - MetaVarKind::Meta { has_meta_form }, - TokenStream::from_ast(attr_item), - ) - } - MatchedSingle(ParseNtResult::Path(path)) => { - mk_delimited(path.span, MetaVarKind::Path, TokenStream::from_ast(path)) - } - MatchedSingle(ParseNtResult::Vis(vis)) => { - mk_delimited(vis.span, MetaVarKind::Vis, TokenStream::from_ast(vis)) - } - MatchedSeq(..) => { - // We were unable to descend far enough. This is an error. - return Err(dcx.create_err(VarStillRepeating { span: sp, ident })); - } - }; - result.push(tt) - } else { - // If we aren't able to match the meta-var, we push it back into the result but - // with modified syntax context. (I believe this supports nested macros). - marker.mark_span(&mut sp); - marker.mark_span(&mut original_ident.span); - result.push(TokenTree::token_joint_hidden(token::Dollar, sp)); - result.push(TokenTree::Token( - Token::from_ast_ident(original_ident), - Spacing::Alone, - )); - } + &mbe::TokenTree::MetaVar(sp, original_ident) => { + transcribe_metavar(&mut tscx, sp, original_ident)?; } // Replace meta-variable expressions with the result of their expansion. - mbe::TokenTree::MetaVarExpr(sp, expr) => { - transcribe_metavar_expr( - dcx, - expr, - interp, - &mut marker, - &repeats, - &mut result, - sp, - &psess.symbol_gallery, - )?; + mbe::TokenTree::MetaVarExpr(dspan, expr) => { + transcribe_metavar_expr(&mut tscx, *dspan, expr)?; } // If we are entering a new delimiter, we push its contents to the `stack` to be @@ -427,21 +265,21 @@ pub(super) fn transcribe<'a>( // jump back out of the Delimited, pop the result_stack and add the new results back to // the previous results (from outside the Delimited). &mbe::TokenTree::Delimited(mut span, ref spacing, ref delimited) => { - marker.mark_span(&mut span.open); - marker.mark_span(&mut span.close); - stack.push(Frame::new_delimited(delimited, span, *spacing)); - result_stack.push(mem::take(&mut result)); + tscx.marker.mark_span(&mut span.open); + tscx.marker.mark_span(&mut span.close); + tscx.stack.push(Frame::new_delimited(delimited, span, *spacing)); + tscx.result_stack.push(mem::take(&mut tscx.result)); } // Nothing much to do here. Just push the token to the result, being careful to // preserve syntax context. 
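            // A small sketch of what preserving the context buys, assuming ordinary
            // `macro_rules!` hygiene:
            //
            //     macro_rules! m { () => { let x = 1; } }
            //     let x = 2;
            //     m!(); // the macro's `x` carries the marked context and cannot
            //           // collide with the caller's `x`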
&mbe::TokenTree::Token(mut token) => { - marker.mark_span(&mut token.span); + tscx.marker.mark_span(&mut token.span); if let token::NtIdent(ident, _) | token::NtLifetime(ident, _) = &mut token.kind { - marker.mark_span(&mut ident.span); + tscx.marker.mark_span(&mut ident.span); } let tt = TokenTree::Token(token, Spacing::Alone); - result.push(tt); + tscx.result.push(tt); } // There should be no meta-var declarations in the invocation of a macro. @@ -450,6 +288,305 @@ pub(super) fn transcribe<'a>( } } +/// Turn `$(...)*` sequences into tokens. +fn transcribe_sequence<'tx, 'itp>( + tscx: &mut TranscrCtx<'tx, 'itp>, + seq: &mbe::TokenTree, + seq_rep: &'itp mbe::SequenceRepetition, +) -> PResult<'tx, ()> { + let dcx = tscx.psess.dcx(); + + // We are descending into a sequence. We first make sure that the matchers in the RHS + // and the matches in `interp` have the same shape. Otherwise, either the caller or the + // macro writer has made a mistake. + match lockstep_iter_size(seq, tscx.interp, &tscx.repeats) { + LockstepIterSize::Unconstrained => { + return Err(dcx.create_err(NoSyntaxVarsExprRepeat { span: seq.span() })); + } + + LockstepIterSize::Contradiction(msg) => { + // FIXME: this really ought to be caught at macro definition time... It + // happens when two meta-variables are used in the same repetition in a + // sequence, but they come from different sequence matchers and repeat + // different amounts. + return Err(dcx.create_err(MetaVarsDifSeqMatchers { span: seq.span(), msg })); + } + + LockstepIterSize::Constraint(len, _) => { + // We do this to avoid an extra clone above. We know that this is a + // sequence already. + let mbe::TokenTree::Sequence(sp, seq) = seq else { unreachable!() }; + + // Is the repetition empty? + if len == 0 { + if seq.kleene.op == KleeneOp::OneOrMore { + // FIXME: this really ought to be caught at macro definition + // time... It happens when the Kleene operator in the matcher and + // the body for the same meta-variable do not match. + return Err(dcx.create_err(MustRepeatOnce { span: sp.entire() })); + } + } else { + // 0 is the initial counter (we have done 0 repetitions so far). `len` + // is the total number of repetitions we should generate. + tscx.repeats.push((0, len)); + + // The first time we encounter the sequence we push it to the stack. It + // then gets reused (see the beginning of the loop) until we are done + // repeating. + tscx.stack.push(Frame::new_sequence(seq_rep, seq.separator.clone(), seq.kleene.op)); + } + } + } + + Ok(()) +} + +/// Find the matched nonterminal from the macro invocation, and use it to replace +/// the meta-var. +/// +/// We use `Spacing::Alone` everywhere here, because that's the conservative choice +/// and spacing of declarative macros is tricky. E.g. in this macro: +/// ``` +/// macro_rules! idents { +/// ($($a:ident,)*) => { stringify!($($a)*) } +/// } +/// ``` +/// `$a` has no whitespace after it and will be marked `JointHidden`. If you then +/// call `idents!(x,y,z,)`, each of `x`, `y`, and `z` will be marked as `Joint`. So +/// if you choose to use `$x`'s spacing or the identifier's spacing, you'll end up +/// producing "xyz", which is bad because it effectively merges tokens. +/// `Spacing::Alone` is the safer option. Fortunately, `space_between` will avoid +/// some of the unnecessary whitespace. 
+fn transcribe_metavar<'tx>( + tscx: &mut TranscrCtx<'tx, '_>, + mut sp: Span, + mut original_ident: Ident, +) -> PResult<'tx, ()> { + let dcx = tscx.psess.dcx(); + + let ident = MacroRulesNormalizedIdent::new(original_ident); + let Some(cur_matched) = lookup_cur_matched(ident, tscx.interp, &tscx.repeats) else { + // If we aren't able to match the meta-var, we push it back into the result but + // with modified syntax context. (I believe this supports nested macros). + tscx.marker.mark_span(&mut sp); + tscx.marker.mark_span(&mut original_ident.span); + tscx.result.push(TokenTree::token_joint_hidden(token::Dollar, sp)); + tscx.result.push(TokenTree::Token(Token::from_ast_ident(original_ident), Spacing::Alone)); + return Ok(()); + }; + + // We wrap the tokens in invisible delimiters, unless they are already wrapped + // in invisible delimiters with the same `MetaVarKind`. Because some proc + // macros can't handle multiple layers of invisible delimiters of the same + // `MetaVarKind`. This loses some span info, though it hopefully won't matter. + let mut mk_delimited = |mk_span, mv_kind, mut stream: TokenStream| { + if stream.len() == 1 { + let tree = stream.iter().next().unwrap(); + if let TokenTree::Delimited(_, _, delim, inner) = tree + && let Delimiter::Invisible(InvisibleOrigin::MetaVar(mvk)) = delim + && mv_kind == *mvk + { + stream = inner.clone(); + } + } + + // Emit as a token stream within `Delimiter::Invisible` to maintain + // parsing priorities. + tscx.marker.mark_span(&mut sp); + with_metavar_spans(|mspans| mspans.insert(mk_span, sp)); + // Both the open delim and close delim get the same span, which covers the + // `$foo` in the decl macro RHS. + TokenTree::Delimited( + DelimSpan::from_single(sp), + DelimSpacing::new(Spacing::Alone, Spacing::Alone), + Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind)), + stream, + ) + }; + + let tt = match cur_matched { + MatchedSingle(ParseNtResult::Tt(tt)) => { + // `tt`s are emitted into the output stream directly as "raw tokens", + // without wrapping them into groups. Other variables are emitted into + // the output stream as groups with `Delimiter::Invisible` to maintain + // parsing priorities. + maybe_use_metavar_location(tscx.psess, &tscx.stack, sp, tt, &mut tscx.marker) + } + MatchedSingle(ParseNtResult::Ident(ident, is_raw)) => { + tscx.marker.mark_span(&mut sp); + with_metavar_spans(|mspans| mspans.insert(ident.span, sp)); + let kind = token::NtIdent(*ident, *is_raw); + TokenTree::token_alone(kind, sp) + } + MatchedSingle(ParseNtResult::Lifetime(ident, is_raw)) => { + tscx.marker.mark_span(&mut sp); + with_metavar_spans(|mspans| mspans.insert(ident.span, sp)); + let kind = token::NtLifetime(*ident, *is_raw); + TokenTree::token_alone(kind, sp) + } + MatchedSingle(ParseNtResult::Item(item)) => { + mk_delimited(item.span, MetaVarKind::Item, TokenStream::from_ast(item)) + } + MatchedSingle(ParseNtResult::Block(block)) => { + mk_delimited(block.span, MetaVarKind::Block, TokenStream::from_ast(block)) + } + MatchedSingle(ParseNtResult::Stmt(stmt)) => { + let stream = if let StmtKind::Empty = stmt.kind { + // FIXME: Properly collect tokens for empty statements. 
+ TokenStream::token_alone(token::Semi, stmt.span) + } else { + TokenStream::from_ast(stmt) + }; + mk_delimited(stmt.span, MetaVarKind::Stmt, stream) + } + MatchedSingle(ParseNtResult::Pat(pat, pat_kind)) => { + mk_delimited(pat.span, MetaVarKind::Pat(*pat_kind), TokenStream::from_ast(pat)) + } + MatchedSingle(ParseNtResult::Expr(expr, kind)) => { + let (can_begin_literal_maybe_minus, can_begin_string_literal) = match &expr.kind { + ExprKind::Lit(_) => (true, true), + ExprKind::Unary(UnOp::Neg, e) if matches!(&e.kind, ExprKind::Lit(_)) => { + (true, false) + } + _ => (false, false), + }; + mk_delimited( + expr.span, + MetaVarKind::Expr { + kind: *kind, + can_begin_literal_maybe_minus, + can_begin_string_literal, + }, + TokenStream::from_ast(expr), + ) + } + MatchedSingle(ParseNtResult::Literal(lit)) => { + mk_delimited(lit.span, MetaVarKind::Literal, TokenStream::from_ast(lit)) + } + MatchedSingle(ParseNtResult::Ty(ty)) => { + let is_path = matches!(&ty.kind, TyKind::Path(None, _path)); + mk_delimited(ty.span, MetaVarKind::Ty { is_path }, TokenStream::from_ast(ty)) + } + MatchedSingle(ParseNtResult::Meta(attr_item)) => { + let has_meta_form = attr_item.meta_kind().is_some(); + mk_delimited( + attr_item.span(), + MetaVarKind::Meta { has_meta_form }, + TokenStream::from_ast(attr_item), + ) + } + MatchedSingle(ParseNtResult::Path(path)) => { + mk_delimited(path.span, MetaVarKind::Path, TokenStream::from_ast(path)) + } + MatchedSingle(ParseNtResult::Vis(vis)) => { + mk_delimited(vis.span, MetaVarKind::Vis, TokenStream::from_ast(vis)) + } + MatchedSeq(..) => { + // We were unable to descend far enough. This is an error. + return Err(dcx.create_err(VarStillRepeating { span: sp, ident })); + } + }; + + tscx.result.push(tt); + Ok(()) +} + +/// Turn `${expr(...)}` metavariable expressionss into tokens. +fn transcribe_metavar_expr<'tx>( + tscx: &mut TranscrCtx<'tx, '_>, + dspan: DelimSpan, + expr: &MetaVarExpr, +) -> PResult<'tx, ()> { + let dcx = tscx.psess.dcx(); + let tt = match *expr { + MetaVarExpr::Concat(ref elements) => metavar_expr_concat(tscx, dspan, elements)?, + MetaVarExpr::Count(original_ident, depth) => { + let matched = matched_from_ident(dcx, original_ident, tscx.interp)?; + let count = count_repetitions(dcx, depth, matched, &tscx.repeats, &dspan)?; + TokenTree::token_alone( + TokenKind::lit(token::Integer, sym::integer(count), None), + tscx.visited_dspan(dspan), + ) + } + MetaVarExpr::Ignore(original_ident) => { + // Used to ensure that `original_ident` is present in the LHS + let _ = matched_from_ident(dcx, original_ident, tscx.interp)?; + return Ok(()); + } + MetaVarExpr::Index(depth) => match tscx.repeats.iter().nth_back(depth) { + Some((index, _)) => TokenTree::token_alone( + TokenKind::lit(token::Integer, sym::integer(*index), None), + tscx.visited_dspan(dspan), + ), + None => { + return Err(out_of_bounds_err(dcx, tscx.repeats.len(), dspan.entire(), "index")); + } + }, + MetaVarExpr::Len(depth) => match tscx.repeats.iter().nth_back(depth) { + Some((_, length)) => TokenTree::token_alone( + TokenKind::lit(token::Integer, sym::integer(*length), None), + tscx.visited_dspan(dspan), + ), + None => { + return Err(out_of_bounds_err(dcx, tscx.repeats.len(), dspan.entire(), "len")); + } + }, + }; + tscx.result.push(tt); + Ok(()) +} + +/// Handle the `${concat(...)}` metavariable expression. 
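+///
+/// A minimal sketch of the surface syntax this handles, assuming the unstable
+/// `macro_metavar_expr_concat` feature (elements may be bare idents, literals, or
+/// matched metavariables):
+///
+/// ```ignore (requires the nightly feature)
+/// macro_rules! make_getter {
+///     ($name:ident) => {
+///         fn ${concat(get_, $name)}() -> u32 { 0 }
+///     };
+/// }
+/// make_getter!(width); // defines `get_width`
+/// ```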
+fn metavar_expr_concat<'tx>( + tscx: &mut TranscrCtx<'tx, '_>, + dspan: DelimSpan, + elements: &[MetaVarExprConcatElem], +) -> PResult<'tx, TokenTree> { + let dcx = tscx.psess.dcx(); + let mut concatenated = String::new(); + for element in elements.into_iter() { + let symbol = match element { + MetaVarExprConcatElem::Ident(elem) => elem.name, + MetaVarExprConcatElem::Literal(elem) => *elem, + MetaVarExprConcatElem::Var(ident) => { + match matched_from_ident(dcx, *ident, tscx.interp)? { + NamedMatch::MatchedSeq(named_matches) => { + let Some((curr_idx, _)) = tscx.repeats.last() else { + return Err(dcx.struct_span_err(dspan.entire(), "invalid syntax")); + }; + match &named_matches[*curr_idx] { + // FIXME(c410-f3r) Nested repetitions are unimplemented + MatchedSeq(_) => unimplemented!(), + MatchedSingle(pnr) => extract_symbol_from_pnr(dcx, pnr, ident.span)?, + } + } + NamedMatch::MatchedSingle(pnr) => { + extract_symbol_from_pnr(dcx, pnr, ident.span)? + } + } + } + }; + concatenated.push_str(symbol.as_str()); + } + let symbol = nfc_normalize(&concatenated); + let concatenated_span = tscx.visited_dspan(dspan); + if !rustc_lexer::is_ident(symbol.as_str()) { + return Err(dcx.struct_span_err( + concatenated_span, + "`${concat(..)}` is not generating a valid identifier", + )); + } + tscx.psess.symbol_gallery.insert(symbol, concatenated_span); + + // The current implementation marks the span as coming from the macro regardless of + // contexts of the concatenated identifiers but this behavior may change in the + // future. + Ok(TokenTree::Token( + Token::from_ast_ident(Ident::new(symbol, concatenated_span)), + Spacing::Alone, + )) +} + /// Store the metavariable span for this original span into a side table. /// FIXME: Try to put the metavariable span into `SpanData` instead of a side table (#118517). /// An optimal encoding for inlined spans will need to be selected to minimize regressions. @@ -671,13 +808,13 @@ fn lockstep_iter_size( /// * `[ $( ${count(foo, 0)} ),* ]` will be the same as `[ $( ${count(foo)} ),* ]` /// * `[ $( ${count(foo, 1)} ),* ]` will return an error because `${count(foo, 1)}` is /// declared inside a single repetition and the index `1` implies two nested repetitions. -fn count_repetitions<'a>( - dcx: DiagCtxtHandle<'a>, +fn count_repetitions<'dx>( + dcx: DiagCtxtHandle<'dx>, depth_user: usize, mut matched: &NamedMatch, repeats: &[(usize, usize)], sp: &DelimSpan, -) -> PResult<'a, usize> { +) -> PResult<'dx, usize> { // Recursively count the number of matches in `matched` at given depth // (or at the top-level of `matched` if no depth is given). 
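    // A sketch of the user-facing behavior, assuming the unstable `macro_metavar_expr`
    // feature and the `${count(..)}` syntax shown in the examples above:
    //
    //     macro_rules! how_many {
    //         ( $( $x:ident ),* ) => { ${count(x)} };
    //     }
    //     assert_eq!(how_many!(a, b, c), 3);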
fn count<'a>(depth_curr: usize, depth_max: usize, matched: &NamedMatch) -> PResult<'a, usize> { @@ -762,102 +899,6 @@ fn out_of_bounds_err<'a>(dcx: DiagCtxtHandle<'a>, max: usize, span: Span, ty: &s dcx.struct_span_err(span, msg) } -fn transcribe_metavar_expr<'a>( - dcx: DiagCtxtHandle<'a>, - expr: &MetaVarExpr, - interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>, - marker: &mut Marker, - repeats: &[(usize, usize)], - result: &mut Vec<TokenTree>, - sp: &DelimSpan, - symbol_gallery: &SymbolGallery, -) -> PResult<'a, ()> { - let mut visited_span = || { - let mut span = sp.entire(); - marker.mark_span(&mut span); - span - }; - match *expr { - MetaVarExpr::Concat(ref elements) => { - let mut concatenated = String::new(); - for element in elements.into_iter() { - let symbol = match element { - MetaVarExprConcatElem::Ident(elem) => elem.name, - MetaVarExprConcatElem::Literal(elem) => *elem, - MetaVarExprConcatElem::Var(ident) => { - match matched_from_ident(dcx, *ident, interp)? { - NamedMatch::MatchedSeq(named_matches) => { - let Some((curr_idx, _)) = repeats.last() else { - return Err(dcx.struct_span_err(sp.entire(), "invalid syntax")); - }; - match &named_matches[*curr_idx] { - // FIXME(c410-f3r) Nested repetitions are unimplemented - MatchedSeq(_) => unimplemented!(), - MatchedSingle(pnr) => { - extract_symbol_from_pnr(dcx, pnr, ident.span)? - } - } - } - NamedMatch::MatchedSingle(pnr) => { - extract_symbol_from_pnr(dcx, pnr, ident.span)? - } - } - } - }; - concatenated.push_str(symbol.as_str()); - } - let symbol = nfc_normalize(&concatenated); - let concatenated_span = visited_span(); - if !rustc_lexer::is_ident(symbol.as_str()) { - return Err(dcx.struct_span_err( - concatenated_span, - "`${concat(..)}` is not generating a valid identifier", - )); - } - symbol_gallery.insert(symbol, concatenated_span); - // The current implementation marks the span as coming from the macro regardless of - // contexts of the concatenated identifiers but this behavior may change in the - // future. - result.push(TokenTree::Token( - Token::from_ast_ident(Ident::new(symbol, concatenated_span)), - Spacing::Alone, - )); - } - MetaVarExpr::Count(original_ident, depth) => { - let matched = matched_from_ident(dcx, original_ident, interp)?; - let count = count_repetitions(dcx, depth, matched, repeats, sp)?; - let tt = TokenTree::token_alone( - TokenKind::lit(token::Integer, sym::integer(count), None), - visited_span(), - ); - result.push(tt); - } - MetaVarExpr::Ignore(original_ident) => { - // Used to ensure that `original_ident` is present in the LHS - let _ = matched_from_ident(dcx, original_ident, interp)?; - } - MetaVarExpr::Index(depth) => match repeats.iter().nth_back(depth) { - Some((index, _)) => { - result.push(TokenTree::token_alone( - TokenKind::lit(token::Integer, sym::integer(*index), None), - visited_span(), - )); - } - None => return Err(out_of_bounds_err(dcx, repeats.len(), sp.entire(), "index")), - }, - MetaVarExpr::Len(depth) => match repeats.iter().nth_back(depth) { - Some((_, length)) => { - result.push(TokenTree::token_alone( - TokenKind::lit(token::Integer, sym::integer(*length), None), - visited_span(), - )); - } - None => return Err(out_of_bounds_err(dcx, repeats.len(), sp.entire(), "len")), - }, - } - Ok(()) -} - /// Extracts an metavariable symbol that can be an identifier, a token tree or a literal. 
fn extract_symbol_from_pnr<'a>( dcx: DiagCtxtHandle<'a>, diff --git a/compiler/rustc_expand/src/placeholders.rs b/compiler/rustc_expand/src/placeholders.rs index 2c486a02bdf..6e1c6df4bcb 100644 --- a/compiler/rustc_expand/src/placeholders.rs +++ b/compiler/rustc_expand/src/placeholders.rs @@ -339,9 +339,9 @@ impl MutVisitor for PlaceholderExpander { } } - fn visit_method_receiver_expr(&mut self, expr: &mut P<ast::Expr>) { + fn visit_method_receiver_expr(&mut self, expr: &mut ast::Expr) { match expr.kind { - ast::ExprKind::MacCall(_) => *expr = self.remove(expr.id).make_method_receiver_expr(), + ast::ExprKind::MacCall(_) => *expr = *self.remove(expr.id).make_method_receiver_expr(), _ => walk_expr(self, expr), } } diff --git a/compiler/rustc_feature/src/accepted.rs b/compiler/rustc_feature/src/accepted.rs index b1c185220f4..cfe0f4e5d6c 100644 --- a/compiler/rustc_feature/src/accepted.rs +++ b/compiler/rustc_feature/src/accepted.rs @@ -220,6 +220,8 @@ declare_features! ( (accepted, fn_must_use, "1.27.0", Some(43302)), /// Allows capturing variables in scope using format_args! (accepted, format_args_capture, "1.58.0", Some(67984)), + /// Infer generic args for both consts and types. + (accepted, generic_arg_infer, "CURRENT_RUSTC_VERSION", Some(85077)), /// Allows associated types to be generic, e.g., `type Foo<T>;` (RFC 1598). (accepted, generic_associated_types, "1.65.0", Some(44265)), /// Allows attributes on lifetime/type formal parameters in generics (RFC 1327). diff --git a/compiler/rustc_feature/src/builtin_attrs.rs b/compiler/rustc_feature/src/builtin_attrs.rs index 7d6e471e7e9..280b33f0723 100644 --- a/compiler/rustc_feature/src/builtin_attrs.rs +++ b/compiler/rustc_feature/src/builtin_attrs.rs @@ -111,6 +111,7 @@ pub enum AttributeGate { Ungated, } +// FIXME(jdonszelmann): move to rustc_attr_data_structures /// A template that the attribute input must match. /// Only top-level shape (`#[attr]` vs `#[attr(...)]` vs `#[attr = ...]`) is considered now. #[derive(Clone, Copy, Default)] @@ -127,6 +128,26 @@ pub struct AttributeTemplate { pub name_value_str: Option<&'static str>, } +impl AttributeTemplate { + pub fn suggestions(&self, inner: bool, name: impl std::fmt::Display) -> Vec<String> { + let mut suggestions = vec![]; + let inner = if inner { "!" } else { "" }; + if self.word { + suggestions.push(format!("#{inner}[{name}]")); + } + if let Some(descr) = self.list { + suggestions.push(format!("#{inner}[{name}({descr})]")); + } + suggestions.extend(self.one_of.iter().map(|&word| format!("#{inner}[{name}({word})]"))); + if let Some(descr) = self.name_value_str { + suggestions.push(format!("#{inner}[{name} = \"{descr}\"]")); + } + suggestions.sort(); + + suggestions + } +} + /// How to handle multiple duplicate attributes on the same item. #[derive(Clone, Copy, Default)] pub enum AttributeDuplicates { @@ -181,20 +202,21 @@ pub enum AttributeDuplicates { /// A convenience macro for constructing attribute templates. /// E.g., `template!(Word, List: "description")` means that the attribute /// supports forms `#[attr]` and `#[attr(description)]`. +#[macro_export] macro_rules! 
template { - (Word) => { template!(@ true, None, &[], None) }; - (List: $descr: expr) => { template!(@ false, Some($descr), &[], None) }; - (OneOf: $one_of: expr) => { template!(@ false, None, $one_of, None) }; - (NameValueStr: $descr: expr) => { template!(@ false, None, &[], Some($descr)) }; - (Word, List: $descr: expr) => { template!(@ true, Some($descr), &[], None) }; - (Word, NameValueStr: $descr: expr) => { template!(@ true, None, &[], Some($descr)) }; + (Word) => { $crate::template!(@ true, None, &[], None) }; + (List: $descr: expr) => { $crate::template!(@ false, Some($descr), &[], None) }; + (OneOf: $one_of: expr) => { $crate::template!(@ false, None, $one_of, None) }; + (NameValueStr: $descr: expr) => { $crate::template!(@ false, None, &[], Some($descr)) }; + (Word, List: $descr: expr) => { $crate::template!(@ true, Some($descr), &[], None) }; + (Word, NameValueStr: $descr: expr) => { $crate::template!(@ true, None, &[], Some($descr)) }; (List: $descr1: expr, NameValueStr: $descr2: expr) => { - template!(@ false, Some($descr1), &[], Some($descr2)) + $crate::template!(@ false, Some($descr1), &[], Some($descr2)) }; (Word, List: $descr1: expr, NameValueStr: $descr2: expr) => { - template!(@ true, Some($descr1), &[], Some($descr2)) + $crate::template!(@ true, Some($descr1), &[], Some($descr2)) }; - (@ $word: expr, $list: expr, $one_of: expr, $name_value_str: expr) => { AttributeTemplate { + (@ $word: expr, $list: expr, $one_of: expr, $name_value_str: expr) => { $crate::AttributeTemplate { word: $word, list: $list, one_of: $one_of, name_value_str: $name_value_str } }; } @@ -473,6 +495,7 @@ pub static BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ ), ungated!(no_link, Normal, template!(Word), WarnFollowing, EncodeCrossCrate::No), ungated!(repr, Normal, template!(List: "C"), DuplicatesOk, EncodeCrossCrate::No), + gated!(align, Normal, template!(List: "alignment"), DuplicatesOk, EncodeCrossCrate::No, fn_align, experimental!(align)), ungated!(unsafe(Edition2024) export_name, Normal, template!(NameValueStr: "name"), FutureWarnPreceding, EncodeCrossCrate::No), ungated!(unsafe(Edition2024) link_section, Normal, template!(NameValueStr: "name"), FutureWarnPreceding, EncodeCrossCrate::No), ungated!(unsafe(Edition2024) no_mangle, Normal, template!(Word), WarnFollowing, EncodeCrossCrate::No), @@ -687,7 +710,7 @@ pub static BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ ), rustc_attr!( rustc_pub_transparent, Normal, template!(Word), - WarnFollowing, EncodeCrossCrate::Yes, + ErrorFollowing, EncodeCrossCrate::Yes, "used internally to mark types with a `transparent` representation when it is guaranteed by the documentation", ), diff --git a/compiler/rustc_feature/src/unstable.rs b/compiler/rustc_feature/src/unstable.rs index 5e42b919f9d..91715851226 100644 --- a/compiler/rustc_feature/src/unstable.rs +++ b/compiler/rustc_feature/src/unstable.rs @@ -510,7 +510,7 @@ declare_features! ( (unstable, ffi_pure, "1.45.0", Some(58329)), /// Controlling the behavior of fmt::Debug (unstable, fmt_debug, "1.82.0", Some(129709)), - /// Allows using `#[repr(align(...))]` on function items + /// Allows using `#[align(...)]` on function items (unstable, fn_align, "1.53.0", Some(82232)), /// Support delegating implementation of functions to other already implemented functions. (incomplete, fn_delegation, "1.76.0", Some(118212)), @@ -520,8 +520,6 @@ declare_features! ( (unstable, frontmatter, "1.88.0", Some(136889)), /// Allows defining gen blocks and `gen fn`. 
(unstable, gen_blocks, "1.75.0", Some(117078)), - /// Infer generic args for both consts and types. - (unstable, generic_arg_infer, "1.55.0", Some(85077)), /// Allows non-trivial generic constants which have to have wfness manually propagated to callers (incomplete, generic_const_exprs, "1.56.0", Some(76560)), /// Allows generic parameters and where-clauses on free & associated const items. diff --git a/compiler/rustc_hir/src/hir.rs b/compiler/rustc_hir/src/hir.rs index 556f50a85af..679904c7cfe 100644 --- a/compiler/rustc_hir/src/hir.rs +++ b/compiler/rustc_hir/src/hir.rs @@ -1302,6 +1302,7 @@ impl AttributeExt for Attribute { // FIXME: should not be needed anymore when all attrs are parsed Attribute::Parsed(AttributeKind::Deprecation { span, .. }) => *span, Attribute::Parsed(AttributeKind::DocComment { span, .. }) => *span, + Attribute::Parsed(AttributeKind::MayDangle(span)) => *span, a => panic!("can't get the span of an arbitrary parsed attribute: {a:?}"), } } @@ -1345,12 +1346,13 @@ impl AttributeExt for Attribute { } } - #[inline] - fn style(&self) -> AttrStyle { - match &self { - Attribute::Unparsed(u) => u.style, - Attribute::Parsed(AttributeKind::DocComment { style, .. }) => *style, - _ => panic!(), + fn doc_resolution_scope(&self) -> Option<AttrStyle> { + match self { + Attribute::Parsed(AttributeKind::DocComment { style, .. }) => Some(*style), + Attribute::Unparsed(attr) if self.has_name(sym::doc) && self.value_str().is_some() => { + Some(attr.style) + } + _ => None, } } } @@ -1441,11 +1443,6 @@ impl Attribute { pub fn doc_str_and_comment_kind(&self) -> Option<(Symbol, CommentKind)> { AttributeExt::doc_str_and_comment_kind(self) } - - #[inline] - pub fn style(&self) -> AttrStyle { - AttributeExt::style(self) - } } /// Attributes owned by a HIR owner. @@ -2285,16 +2282,9 @@ pub struct Expr<'hir> { } impl Expr<'_> { - pub fn precedence( - &self, - for_each_attr: &dyn Fn(HirId, &mut dyn FnMut(&Attribute)), - ) -> ExprPrecedence { + pub fn precedence(&self, has_attr: &dyn Fn(HirId) -> bool) -> ExprPrecedence { let prefix_attrs_precedence = || -> ExprPrecedence { - let mut has_outer_attr = false; - for_each_attr(self.hir_id, &mut |attr: &Attribute| { - has_outer_attr |= matches!(attr.style(), AttrStyle::Outer) - }); - if has_outer_attr { ExprPrecedence::Prefix } else { ExprPrecedence::Unambiguous } + if has_attr(self.hir_id) { ExprPrecedence::Prefix } else { ExprPrecedence::Unambiguous } }; match &self.kind { @@ -2350,7 +2340,7 @@ impl Expr<'_> { | ExprKind::Use(..) | ExprKind::Err(_) => prefix_attrs_precedence(), - ExprKind::DropTemps(expr, ..) => expr.precedence(for_each_attr), + ExprKind::DropTemps(expr, ..) 
=> expr.precedence(has_attr), } } diff --git a/compiler/rustc_hir_analysis/messages.ftl b/compiler/rustc_hir_analysis/messages.ftl index f768bd157ab..bd2252c1bf8 100644 --- a/compiler/rustc_hir_analysis/messages.ftl +++ b/compiler/rustc_hir_analysis/messages.ftl @@ -46,6 +46,9 @@ hir_analysis_associated_type_trait_uninferred_generic_params = cannot use the {$ hir_analysis_associated_type_trait_uninferred_generic_params_multipart_suggestion = use a fully qualified path with explicit lifetimes +hir_analysis_async_drop_without_sync_drop = `AsyncDrop` impl without `Drop` impl + .help = type implementing `AsyncDrop` trait must also implement `Drop` trait to be used in sync context and unwinds + hir_analysis_auto_deref_reached_recursion_limit = reached the recursion limit while auto-dereferencing `{$ty}` .label = deref recursion limit reached .help = consider increasing the recursion limit by adding a `#![recursion_limit = "{$suggested_limit}"]` attribute to your crate (`{$crate_name}`) diff --git a/compiler/rustc_hir_analysis/src/check/check.rs b/compiler/rustc_hir_analysis/src/check/check.rs index 32fec0604c0..752cc2eff97 100644 --- a/compiler/rustc_hir_analysis/src/check/check.rs +++ b/compiler/rustc_hir_analysis/src/check/check.rs @@ -2,6 +2,7 @@ use std::cell::LazyCell; use std::ops::ControlFlow; use rustc_abi::FieldIdx; +use rustc_attr_data_structures::AttributeKind; use rustc_attr_data_structures::ReprAttr::ReprPacked; use rustc_data_structures::unord::{UnordMap, UnordSet}; use rustc_errors::codes::*; @@ -1384,7 +1385,11 @@ pub(super) fn check_transparent<'tcx>(tcx: TyCtxt<'tcx>, adt: ty::AdtDef<'tcx>) ty::Tuple(list) => list.iter().try_for_each(|t| check_non_exhaustive(tcx, t)), ty::Array(ty, _) => check_non_exhaustive(tcx, *ty), ty::Adt(def, args) => { - if !def.did().is_local() && !tcx.has_attr(def.did(), sym::rustc_pub_transparent) + if !def.did().is_local() + && !attrs::find_attr!( + tcx.get_all_attrs(def.did()), + AttributeKind::PubTransparent(_) + ) { let non_exhaustive = def.is_variant_list_non_exhaustive() || def diff --git a/compiler/rustc_hir_analysis/src/check/mod.rs b/compiler/rustc_hir_analysis/src/check/mod.rs index 5b8aa28102c..bf2d4f662ef 100644 --- a/compiler/rustc_hir_analysis/src/check/mod.rs +++ b/compiler/rustc_hir_analysis/src/check/mod.rs @@ -114,7 +114,15 @@ pub fn provide(providers: &mut Providers) { } fn adt_destructor(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Option<ty::Destructor> { - tcx.calculate_dtor(def_id, always_applicable::check_drop_impl) + let dtor = tcx.calculate_dtor(def_id, always_applicable::check_drop_impl); + if dtor.is_none() && tcx.features().async_drop() { + if let Some(async_dtor) = adt_async_destructor(tcx, def_id) { + // When type has AsyncDrop impl, but doesn't have Drop impl, generate error + let span = tcx.def_span(async_dtor.impl_did); + tcx.dcx().emit_err(errors::AsyncDropWithoutSyncDrop { span }); + } + } + dtor } fn adt_async_destructor(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Option<ty::AsyncDestructor> { @@ -303,9 +311,7 @@ fn default_body_is_unstable( reason: reason_str, }); - let inject_span = item_did - .as_local() - .and_then(|id| tcx.crate_level_attribute_injection_span(tcx.local_def_id_to_hir_id(id))); + let inject_span = item_did.is_local().then(|| tcx.crate_level_attribute_injection_span()); rustc_session::parse::add_feature_diagnostics_for_issue( &mut err, &tcx.sess, diff --git a/compiler/rustc_hir_analysis/src/check/wfcheck.rs b/compiler/rustc_hir_analysis/src/check/wfcheck.rs index 20d0e87b7a7..d05e381f8c8 100644 --- 
a/compiler/rustc_hir_analysis/src/check/wfcheck.rs +++ b/compiler/rustc_hir_analysis/src/check/wfcheck.rs @@ -382,8 +382,6 @@ fn check_trait_item<'tcx>( _ => (None, trait_item.span), }; - check_dyn_incompatible_self_trait_by_name(tcx, trait_item); - // Check that an item definition in a subtrait is shadowing a supertrait item. lint_item_shadowing_supertrait_item(tcx, def_id); @@ -832,70 +830,6 @@ impl<'tcx> TypeVisitor<TyCtxt<'tcx>> for GATArgsCollector<'tcx> { } } -fn could_be_self(trait_def_id: LocalDefId, ty: &hir::Ty<'_>) -> bool { - match ty.kind { - hir::TyKind::TraitObject([trait_ref], ..) => match trait_ref.trait_ref.path.segments { - [s] => s.res.opt_def_id() == Some(trait_def_id.to_def_id()), - _ => false, - }, - _ => false, - } -} - -/// Detect when a dyn-incompatible trait is referring to itself in one of its associated items. -/// -/// In such cases, suggest using `Self` instead. -fn check_dyn_incompatible_self_trait_by_name(tcx: TyCtxt<'_>, item: &hir::TraitItem<'_>) { - let (trait_ident, trait_def_id) = - match tcx.hir_node_by_def_id(tcx.hir_get_parent_item(item.hir_id()).def_id) { - hir::Node::Item(item) => match item.kind { - hir::ItemKind::Trait(_, _, ident, ..) => (ident, item.owner_id), - _ => return, - }, - _ => return, - }; - let mut trait_should_be_self = vec![]; - match &item.kind { - hir::TraitItemKind::Const(ty, _) | hir::TraitItemKind::Type(_, Some(ty)) - if could_be_self(trait_def_id.def_id, ty) => - { - trait_should_be_self.push(ty.span) - } - hir::TraitItemKind::Fn(sig, _) => { - for ty in sig.decl.inputs { - if could_be_self(trait_def_id.def_id, ty) { - trait_should_be_self.push(ty.span); - } - } - match sig.decl.output { - hir::FnRetTy::Return(ty) if could_be_self(trait_def_id.def_id, ty) => { - trait_should_be_self.push(ty.span); - } - _ => {} - } - } - _ => {} - } - if !trait_should_be_self.is_empty() { - if tcx.is_dyn_compatible(trait_def_id) { - return; - } - let sugg = trait_should_be_self.iter().map(|span| (*span, "Self".to_string())).collect(); - tcx.dcx() - .struct_span_err( - trait_should_be_self, - "associated item referring to unboxed trait object for its own trait", - ) - .with_span_label(trait_ident.span, "in this trait") - .with_multipart_suggestion( - "you might have meant to use `Self` to refer to the implementing type", - sugg, - Applicability::MachineApplicable, - ) - .emit(); - } -} - fn lint_item_shadowing_supertrait_item<'tcx>(tcx: TyCtxt<'tcx>, trait_item_def_id: LocalDefId) { let item_name = tcx.item_name(trait_item_def_id.to_def_id()); let trait_def_id = tcx.local_parent(trait_item_def_id); @@ -1064,7 +998,7 @@ fn check_param_wf(tcx: TyCtxt<'_>, param: &hir::GenericParam<'_>) -> Result<(), Ok(..) 
=> Some(vec![(adt_const_params_feature_string, sym::adt_const_params)]), }; if let Some(features) = may_suggest_feature { - tcx.disabled_nightly_features(&mut diag, Some(param.hir_id), features); + tcx.disabled_nightly_features(&mut diag, features); } Err(diag.emit()) diff --git a/compiler/rustc_hir_analysis/src/collect.rs b/compiler/rustc_hir_analysis/src/collect.rs index 6e22ac5a28a..176d955bf03 100644 --- a/compiler/rustc_hir_analysis/src/collect.rs +++ b/compiler/rustc_hir_analysis/src/collect.rs @@ -34,16 +34,22 @@ use rustc_infer::infer::{InferCtxt, TyCtxtInferExt}; use rustc_infer::traits::{DynCompatibilityViolation, ObligationCause}; use rustc_middle::query::Providers; use rustc_middle::ty::util::{Discr, IntTypeExt}; -use rustc_middle::ty::{self, AdtKind, Const, IsSuggestable, Ty, TyCtxt, TypingMode, fold_regions}; +use rustc_middle::ty::{ + self, AdtKind, Const, IsSuggestable, Ty, TyCtxt, TypeVisitableExt, TypingMode, fold_regions, +}; use rustc_middle::{bug, span_bug}; use rustc_span::{DUMMY_SP, Ident, Span, Symbol, kw, sym}; use rustc_trait_selection::error_reporting::traits::suggestions::NextTypeParamName; use rustc_trait_selection::infer::InferCtxtExt; -use rustc_trait_selection::traits::{ObligationCtxt, hir_ty_lowering_dyn_compatibility_violations}; +use rustc_trait_selection::traits::{ + FulfillmentError, ObligationCtxt, hir_ty_lowering_dyn_compatibility_violations, +}; use tracing::{debug, instrument}; use crate::errors; -use crate::hir_ty_lowering::{FeedConstTy, HirTyLowerer, RegionInferReason}; +use crate::hir_ty_lowering::{ + FeedConstTy, HirTyLowerer, InherentAssocCandidate, RegionInferReason, +}; pub(crate) mod dump; mod generics_of; @@ -364,6 +370,58 @@ impl<'tcx> HirTyLowerer<'tcx> for ItemCtxt<'tcx> { self.tcx.at(span).type_param_predicates((self.item_def_id, def_id, assoc_ident)) } + #[instrument(level = "debug", skip(self, _span), ret)] + fn select_inherent_assoc_candidates( + &self, + _span: Span, + self_ty: Ty<'tcx>, + candidates: Vec<InherentAssocCandidate>, + ) -> (Vec<InherentAssocCandidate>, Vec<FulfillmentError<'tcx>>) { + assert!(!self_ty.has_infer()); + + // We don't just call the normal normalization routine here as we can't provide the + // correct `ParamEnv` and it would be wrong to invoke arbitrary trait solving under + // the wrong `ParamEnv`. Expanding free aliases doesn't need a `ParamEnv` so we do + // this just to make resolution a little bit smarter. + let self_ty = self.tcx.expand_free_alias_tys(self_ty); + debug!("select_inherent_assoc_candidates: self_ty={:?}", self_ty); + + let candidates = candidates + .into_iter() + .filter(|&InherentAssocCandidate { impl_, .. }| { + let impl_ty = self.tcx().type_of(impl_).instantiate_identity(); + + // See comment on doing this operation for `self_ty` + let impl_ty = self.tcx.expand_free_alias_tys(impl_ty); + debug!("select_inherent_assoc_candidates: impl_ty={:?}", impl_ty); + + // We treat parameters in the self ty as rigid and parameters in the impl ty as infers + // because it allows `impl<T> Foo<T>` to unify with `Foo<u8>::IAT`, while also disallowing + // `Foo<T>::IAT` from unifying with `impl Foo<u8>`. + // + // We don't really care about a depth limit here because we're only working with user-written + // types and if they wrote a type that would take hours to walk then that's kind of on them. On + // the other hand the default depth limit is relatively low and could realistically be hit by + // users in normal cases. 
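+                // A concrete sketch of the asymmetry described above, assuming the unstable
+                // `inherent_associated_types` feature:
+                //
+                //     struct Foo<T>(T);
+                //     impl<T> Foo<T> {
+                //         type IAT = u8;
+                //     }
+                //
+                // `Foo::<u8>::IAT` resolves against this impl (the impl's `T` acts as an
+                // inference variable), while an `impl Foo<u8>` would not be a candidate
+                // for `Foo::<T>::IAT`, because the self type's `T` stays rigid.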
+ // + // `DeepRejectCtxt` leads to slightly worse IAT resolution than real type equality in cases + // where the `impl_ty` has repeated uses of generic parameters. E.g. `impl<T> Foo<T, T>` would + // be considered a valid candidate when resolving `Foo<u8, u16>::IAT`. + // + // Not replacing escaping bound vars in `self_ty` with placeholders also leads to slightly worse + // resolution, but it probably won't come up in practice and it would be backwards compatible + // to switch over to doing that. + ty::DeepRejectCtxt::relate_rigid_infer(self.tcx).types_may_unify_with_depth( + self_ty, + impl_ty, + usize::MAX, + ) + }) + .collect(); + + (candidates, vec![]) + } + fn lower_assoc_item_path( &self, span: Span, diff --git a/compiler/rustc_hir_analysis/src/collect/resolve_bound_vars.rs b/compiler/rustc_hir_analysis/src/collect/resolve_bound_vars.rs index d45f0475e99..95743f9a63e 100644 --- a/compiler/rustc_hir_analysis/src/collect/resolve_bound_vars.rs +++ b/compiler/rustc_hir_analysis/src/collect/resolve_bound_vars.rs @@ -2177,84 +2177,80 @@ impl<'a, 'tcx> BoundVarContext<'a, 'tcx> { /// Walk the generics of the item for a trait bound whose self type /// corresponds to the expected res, and return the trait def id. fn for_each_trait_bound_on_res(&self, expected_res: Res) -> impl Iterator<Item = DefId> { - std::iter::from_coroutine( - #[coroutine] - move || { - let mut scope = self.scope; - loop { - let hir_id = match *scope { - Scope::Binder { hir_id, .. } => Some(hir_id), - Scope::Root { opt_parent_item: Some(parent_def_id) } => { - Some(self.tcx.local_def_id_to_hir_id(parent_def_id)) - } - Scope::Body { .. } - | Scope::ObjectLifetimeDefault { .. } - | Scope::Supertrait { .. } - | Scope::TraitRefBoundary { .. } - | Scope::LateBoundary { .. } - | Scope::Opaque { .. } - | Scope::Root { opt_parent_item: None } => None, - }; + gen move { + let mut scope = self.scope; + loop { + let hir_id = match *scope { + Scope::Binder { hir_id, .. } => Some(hir_id), + Scope::Root { opt_parent_item: Some(parent_def_id) } => { + Some(self.tcx.local_def_id_to_hir_id(parent_def_id)) + } + Scope::Body { .. } + | Scope::ObjectLifetimeDefault { .. } + | Scope::Supertrait { .. } + | Scope::TraitRefBoundary { .. } + | Scope::LateBoundary { .. } + | Scope::Opaque { .. } + | Scope::Root { opt_parent_item: None } => None, + }; - if let Some(hir_id) = hir_id { - let node = self.tcx.hir_node(hir_id); - // If this is a `Self` bound in a trait, yield the trait itself. - // Specifically, we don't need to look at any supertraits since - // we already do that in `BoundVarContext::supertrait_hrtb_vars`. - if let Res::SelfTyParam { trait_: _ } = expected_res - && let hir::Node::Item(item) = node - && let hir::ItemKind::Trait(..) = item.kind - { - // Yield the trait's def id. Supertraits will be - // elaborated from that. - yield item.owner_id.def_id.to_def_id(); - } else if let Some(generics) = node.generics() { - for pred in generics.predicates { - let hir::WherePredicateKind::BoundPredicate(pred) = pred.kind - else { - continue; - }; - let hir::TyKind::Path(hir::QPath::Resolved(None, bounded_path)) = - pred.bounded_ty.kind - else { - continue; - }; - // Match the expected res. 
- if bounded_path.res != expected_res { - continue; - } - for pred in pred.bounds { - match pred { - hir::GenericBound::Trait(poly_trait_ref) => { - if let Some(def_id) = - poly_trait_ref.trait_ref.trait_def_id() - { - yield def_id; - } + if let Some(hir_id) = hir_id { + let node = self.tcx.hir_node(hir_id); + // If this is a `Self` bound in a trait, yield the trait itself. + // Specifically, we don't need to look at any supertraits since + // we already do that in `BoundVarContext::supertrait_hrtb_vars`. + if let Res::SelfTyParam { trait_: _ } = expected_res + && let hir::Node::Item(item) = node + && let hir::ItemKind::Trait(..) = item.kind + { + // Yield the trait's def id. Supertraits will be + // elaborated from that. + yield item.owner_id.def_id.to_def_id(); + } else if let Some(generics) = node.generics() { + for pred in generics.predicates { + let hir::WherePredicateKind::BoundPredicate(pred) = pred.kind else { + continue; + }; + let hir::TyKind::Path(hir::QPath::Resolved(None, bounded_path)) = + pred.bounded_ty.kind + else { + continue; + }; + // Match the expected res. + if bounded_path.res != expected_res { + continue; + } + for pred in pred.bounds { + match pred { + hir::GenericBound::Trait(poly_trait_ref) => { + if let Some(def_id) = + poly_trait_ref.trait_ref.trait_def_id() + { + yield def_id; } - hir::GenericBound::Outlives(_) - | hir::GenericBound::Use(_, _) => {} } + hir::GenericBound::Outlives(_) + | hir::GenericBound::Use(_, _) => {} } } } } + } - match *scope { - Scope::Binder { s, .. } - | Scope::Body { s, .. } - | Scope::ObjectLifetimeDefault { s, .. } - | Scope::Supertrait { s, .. } - | Scope::TraitRefBoundary { s } - | Scope::LateBoundary { s, .. } - | Scope::Opaque { s, .. } => { - scope = s; - } - Scope::Root { .. } => break, + match *scope { + Scope::Binder { s, .. } + | Scope::Body { s, .. } + | Scope::ObjectLifetimeDefault { s, .. } + | Scope::Supertrait { s, .. } + | Scope::TraitRefBoundary { s } + | Scope::LateBoundary { s, .. } + | Scope::Opaque { s, .. } => { + scope = s; } + Scope::Root { .. } => break, } - }, - ) + } + } } } diff --git a/compiler/rustc_hir_analysis/src/collect/type_of.rs b/compiler/rustc_hir_analysis/src/collect/type_of.rs index 141d96b57e5..902a2e15dff 100644 --- a/compiler/rustc_hir_analysis/src/collect/type_of.rs +++ b/compiler/rustc_hir_analysis/src/collect/type_of.rs @@ -452,13 +452,6 @@ fn infer_placeholder_type<'tcx>( if let Some(ty) = node.ty() { visitor.visit_ty_unambig(ty); } - // If we have just one span, let's try to steal a const `_` feature error. - let try_steal_span = if !tcx.features().generic_arg_infer() && visitor.spans.len() == 1 - { - visitor.spans.first().copied() - } else { - None - }; // If we didn't find any infer tys, then just fallback to `span`. 
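        // With `generic_arg_infer` accepted, `_` is allowed for array lengths and const
        // generic arguments, so there is no stashed "underscore for array lengths" error
        // left to steal here. A sketch of the now-accepted forms, assuming the feature has
        // reached stable as the accepted-features entry above indicates:
        //
        //     let bytes: [u8; _] = [1, 2, 3];                       // length inferred
        //     fn first<const N: usize>(xs: [u8; N]) -> u8 { xs[0] }
        //     let x = first::<_>([1, 2, 3]);                        // `N` inferred as 3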
if visitor.spans.is_empty() { visitor.spans.push(span); @@ -489,15 +482,7 @@ fn infer_placeholder_type<'tcx>( } } - if let Some(try_steal_span) = try_steal_span { - cx.dcx().try_steal_replace_and_emit_err( - try_steal_span, - StashKey::UnderscoreForArrayLengths, - diag, - ) - } else { - diag.emit() - } + diag.emit() }); Ty::new_error(tcx, guar) } diff --git a/compiler/rustc_hir_analysis/src/errors.rs b/compiler/rustc_hir_analysis/src/errors.rs index 8de2aec95a7..318aaab50f4 100644 --- a/compiler/rustc_hir_analysis/src/errors.rs +++ b/compiler/rustc_hir_analysis/src/errors.rs @@ -1712,3 +1712,11 @@ pub(crate) struct AbiCustomClothedFunction { )] pub naked_span: Span, } + +#[derive(Diagnostic)] +#[diag(hir_analysis_async_drop_without_sync_drop)] +#[help] +pub(crate) struct AsyncDropWithoutSyncDrop { + #[primary_span] + pub span: Span, +} diff --git a/compiler/rustc_hir_analysis/src/hir_ty_lowering/errors.rs b/compiler/rustc_hir_analysis/src/hir_ty_lowering/errors.rs index 1cda6dff21e..0e79a8918b0 100644 --- a/compiler/rustc_hir_analysis/src/hir_ty_lowering/errors.rs +++ b/compiler/rustc_hir_analysis/src/hir_ty_lowering/errors.rs @@ -26,6 +26,7 @@ use rustc_trait_selection::traits::{ use smallvec::SmallVec; use tracing::debug; +use super::InherentAssocCandidate; use crate::errors::{ self, AssocItemConstraintsNotAllowedHere, ManualImplementation, MissingTypeParams, ParenthesizedFnTraitExpansion, TraitObjectDeclaredWithNoTraits, @@ -793,7 +794,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { &self, name: Ident, self_ty: Ty<'tcx>, - candidates: Vec<(DefId, (DefId, DefId))>, + candidates: Vec<InherentAssocCandidate>, fulfillment_errors: Vec<FulfillmentError<'tcx>>, span: Span, assoc_tag: ty::AssocTag, @@ -827,8 +828,8 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { let type_candidates = candidates .iter() .take(limit) - .map(|&(impl_, _)| { - format!("- `{}`", tcx.at(span).type_of(impl_).instantiate_identity()) + .map(|cand| { + format!("- `{}`", tcx.at(span).type_of(cand.impl_).instantiate_identity()) }) .collect::<Vec<_>>() .join("\n"); diff --git a/compiler/rustc_hir_analysis/src/hir_ty_lowering/generics.rs b/compiler/rustc_hir_analysis/src/hir_ty_lowering/generics.rs index 3a26b8331f8..8c7c3750865 100644 --- a/compiler/rustc_hir_analysis/src/hir_ty_lowering/generics.rs +++ b/compiler/rustc_hir_analysis/src/hir_ty_lowering/generics.rs @@ -8,7 +8,7 @@ use rustc_middle::ty::{ self, GenericArgsRef, GenericParamDef, GenericParamDefKind, IsSuggestable, Ty, }; use rustc_session::lint::builtin::LATE_BOUND_LIFETIME_ARGUMENTS; -use rustc_span::{kw, sym}; +use rustc_span::kw; use smallvec::SmallVec; use tracing::{debug, instrument}; @@ -258,19 +258,6 @@ pub fn lower_generic_args<'tcx: 'a, 'a>( GenericParamDefKind::Const { .. }, _, ) => { - if let GenericParamDefKind::Const { .. 
} = param.kind - && let GenericArg::Infer(inf) = arg - && !tcx.features().generic_arg_infer() - { - rustc_session::parse::feature_err( - tcx.sess, - sym::generic_arg_infer, - inf.span, - "const arguments cannot yet be inferred with `_`", - ) - .emit(); - } - // We lower to an infer even when the feature gate is not enabled // as it is useful for diagnostics to be able to see a `ConstKind::Infer` args.push(ctx.provided_kind(&args, param, arg)); diff --git a/compiler/rustc_hir_analysis/src/hir_ty_lowering/mod.rs b/compiler/rustc_hir_analysis/src/hir_ty_lowering/mod.rs index bf407cbaccb..b99f7b44661 100644 --- a/compiler/rustc_hir_analysis/src/hir_ty_lowering/mod.rs +++ b/compiler/rustc_hir_analysis/src/hir_ty_lowering/mod.rs @@ -33,13 +33,14 @@ use rustc_hir::def::{CtorKind, CtorOf, DefKind, Res}; use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_hir::{self as hir, AnonConst, GenericArg, GenericArgs, HirId}; use rustc_infer::infer::{InferCtxt, TyCtxtInferExt}; -use rustc_infer::traits::{DynCompatibilityViolation, ObligationCause}; +use rustc_infer::traits::DynCompatibilityViolation; +use rustc_macros::{TypeFoldable, TypeVisitable}; use rustc_middle::middle::stability::AllowUnstable; use rustc_middle::mir::interpret::LitToConstInput; use rustc_middle::ty::print::PrintPolyTraitRefExt as _; use rustc_middle::ty::{ - self, Const, GenericArgKind, GenericArgsRef, GenericParamDefKind, ParamEnv, Ty, TyCtxt, - TypeVisitableExt, TypingMode, Upcast, fold_regions, + self, Const, GenericArgKind, GenericArgsRef, GenericParamDefKind, Ty, TyCtxt, TypeVisitableExt, + TypingMode, Upcast, fold_regions, }; use rustc_middle::{bug, span_bug}; use rustc_session::lint::builtin::AMBIGUOUS_ASSOCIATED_ITEMS; @@ -47,7 +48,7 @@ use rustc_session::parse::feature_err; use rustc_span::{DUMMY_SP, Ident, Span, kw, sym}; use rustc_trait_selection::infer::InferCtxtExt; use rustc_trait_selection::traits::wf::object_region_bounds; -use rustc_trait_selection::traits::{self, ObligationCtxt}; +use rustc_trait_selection::traits::{self, FulfillmentError}; use tracing::{debug, instrument}; use crate::check::check_abi_fn_ptr; @@ -99,6 +100,13 @@ pub enum RegionInferReason<'a> { OutlivesBound, } +#[derive(Copy, Clone, TypeFoldable, TypeVisitable, Debug)] +pub struct InherentAssocCandidate { + pub impl_: DefId, + pub assoc_item: DefId, + pub scope: DefId, +} + /// A context which can lower type-system entities from the [HIR][hir] to /// the [`rustc_middle::ty`] representation. /// @@ -148,6 +156,13 @@ pub trait HirTyLowerer<'tcx> { assoc_ident: Ident, ) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]>; + fn select_inherent_assoc_candidates( + &self, + span: Span, + self_ty: Ty<'tcx>, + candidates: Vec<InherentAssocCandidate>, + ) -> (Vec<InherentAssocCandidate>, Vec<FulfillmentError<'tcx>>); + /// Lower a path to an associated item (of a trait) to a projection. /// /// This method has to be defined by the concrete lowering context because @@ -1449,48 +1464,32 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { .filter_map(|&impl_| { let (item, scope) = self.probe_assoc_item_unchecked(name, assoc_tag, block, impl_)?; - Some((impl_, (item.def_id, scope))) + Some(InherentAssocCandidate { impl_, assoc_item: item.def_id, scope }) }) .collect(); - if candidates.is_empty() { - return Ok(None); - } - - // - // Select applicable inherent associated type candidates modulo regions. - // - - // In contexts that have no inference context, just make a new one. - // We do need a local variable to store it, though. 
- let infcx = match self.infcx() { - Some(infcx) => infcx, - None => { - assert!(!self_ty.has_infer()); - &tcx.infer_ctxt().ignoring_regions().build(TypingMode::non_body_analysis()) - } - }; + let (applicable_candidates, fulfillment_errors) = + self.select_inherent_assoc_candidates(span, self_ty, candidates.clone()); - // FIXME(inherent_associated_types): Acquiring the ParamEnv this early leads to cycle errors - // when inside of an ADT (#108491) or where clause. - let param_env = tcx.param_env(block.owner); + let InherentAssocCandidate { impl_, assoc_item, scope: def_scope } = + match &applicable_candidates[..] { + &[] => Err(self.report_unresolved_inherent_assoc_item( + name, + self_ty, + candidates, + fulfillment_errors, + span, + assoc_tag, + )), - let mut universes = if self_ty.has_escaping_bound_vars() { - vec![None; self_ty.outer_exclusive_binder().as_usize()] - } else { - vec![] - }; + &[applicable_candidate] => Ok(applicable_candidate), - let (impl_, (assoc_item, def_scope)) = crate::traits::with_replaced_escaping_bound_vars( - infcx, - &mut universes, - self_ty, - |self_ty| { - self.select_inherent_assoc_candidates( - infcx, name, span, self_ty, param_env, candidates, assoc_tag, - ) - }, - )?; + &[_, ..] => Err(self.report_ambiguous_inherent_assoc_item( + name, + candidates.into_iter().map(|cand| cand.assoc_item).collect(), + span, + )), + }?; self.check_assoc_item(assoc_item, name, def_scope, block, span); @@ -1507,78 +1506,6 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { Ok(Some((assoc_item, args))) } - fn select_inherent_assoc_candidates( - &self, - infcx: &InferCtxt<'tcx>, - name: Ident, - span: Span, - self_ty: Ty<'tcx>, - param_env: ParamEnv<'tcx>, - candidates: Vec<(DefId, (DefId, DefId))>, - assoc_tag: ty::AssocTag, - ) -> Result<(DefId, (DefId, DefId)), ErrorGuaranteed> { - let tcx = self.tcx(); - let mut fulfillment_errors = Vec::new(); - - let applicable_candidates: Vec<_> = candidates - .iter() - .copied() - .filter(|&(impl_, _)| { - infcx.probe(|_| { - let ocx = ObligationCtxt::new_with_diagnostics(infcx); - let self_ty = ocx.normalize(&ObligationCause::dummy(), param_env, self_ty); - - let impl_args = infcx.fresh_args_for_item(span, impl_); - let impl_ty = tcx.type_of(impl_).instantiate(tcx, impl_args); - let impl_ty = ocx.normalize(&ObligationCause::dummy(), param_env, impl_ty); - - // Check that the self types can be related. - if ocx.eq(&ObligationCause::dummy(), param_env, impl_ty, self_ty).is_err() { - return false; - } - - // Check whether the impl imposes obligations we have to worry about. - let impl_bounds = tcx.predicates_of(impl_).instantiate(tcx, impl_args); - let impl_bounds = - ocx.normalize(&ObligationCause::dummy(), param_env, impl_bounds); - let impl_obligations = traits::predicates_for_generics( - |_, _| ObligationCause::dummy(), - param_env, - impl_bounds, - ); - ocx.register_obligations(impl_obligations); - - let mut errors = ocx.select_where_possible(); - if !errors.is_empty() { - fulfillment_errors.append(&mut errors); - return false; - } - - true - }) - }) - .collect(); - - match &applicable_candidates[..] { - &[] => Err(self.report_unresolved_inherent_assoc_item( - name, - self_ty, - candidates, - fulfillment_errors, - span, - assoc_tag, - )), - - &[applicable_candidate] => Ok(applicable_candidate), - - &[_, ..] 
=> Err(self.report_ambiguous_inherent_assoc_item( - name, - applicable_candidates.into_iter().map(|(_, (candidate, _))| candidate).collect(), - span, - )), - } - } - /// Given name and kind search for the assoc item in the provided scope and check if it's accessible[^1]. /// /// [^1]: I.e., accessible in the provided scope wrt. visibility and stability. diff --git a/compiler/rustc_hir_analysis/src/hir_wf_check.rs b/compiler/rustc_hir_analysis/src/hir_wf_check.rs index 4633f3951a7..fef0dbf2ece 100644 --- a/compiler/rustc_hir_analysis/src/hir_wf_check.rs +++ b/compiler/rustc_hir_analysis/src/hir_wf_check.rs @@ -1,7 +1,8 @@ +use rustc_hir::def::DefKind; use rustc_hir::intravisit::{self, Visitor, VisitorExt}; use rustc_hir::{self as hir, AmbigArg, ForeignItem, ForeignItemKind}; use rustc_infer::infer::TyCtxtInferExt; -use rustc_infer::traits::{ObligationCause, WellFormedLoc}; +use rustc_infer::traits::{ObligationCause, ObligationCauseCode, WellFormedLoc}; use rustc_middle::bug; use rustc_middle::query::Providers; use rustc_middle::ty::{self, TyCtxt, TypeVisitableExt, TypingMode, fold_regions}; @@ -107,6 +108,17 @@ fn diagnostic_hir_wf_check<'tcx>( // over less-specific types (e.g. `Option<MyStruct<u8>>`) if self.depth >= self.cause_depth { self.cause = Some(error.obligation.cause); + if let hir::TyKind::TraitObject(..) = ty.kind { + if let DefKind::AssocTy | DefKind::AssocConst | DefKind::AssocFn = + self.tcx.def_kind(self.def_id) + { + self.cause = Some(ObligationCause::new( + ty.span, + self.def_id, + ObligationCauseCode::DynCompatible(ty.span), + )); + } + } self.cause_depth = self.depth } } diff --git a/compiler/rustc_hir_analysis/src/lib.rs b/compiler/rustc_hir_analysis/src/lib.rs index 7c8c9425a03..76ab2e57a1b 100644 --- a/compiler/rustc_hir_analysis/src/lib.rs +++ b/compiler/rustc_hir_analysis/src/lib.rs @@ -62,8 +62,8 @@ This API is completely unstable and subject to change. 
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] #![feature(assert_matches)] -#![feature(coroutines)] #![feature(debug_closure_helpers)] +#![feature(gen_blocks)] #![feature(if_let_guard)] #![feature(iter_from_coroutine)] #![feature(iter_intersperse)] diff --git a/compiler/rustc_hir_pretty/src/lib.rs b/compiler/rustc_hir_pretty/src/lib.rs index fc507285860..d3289e4cc6d 100644 --- a/compiler/rustc_hir_pretty/src/lib.rs +++ b/compiler/rustc_hir_pretty/src/lib.rs @@ -10,7 +10,7 @@ use std::vec; use rustc_abi::ExternAbi; use rustc_ast::util::parser::{self, ExprPrecedence, Fixity}; -use rustc_ast::{AttrStyle, DUMMY_NODE_ID, DelimArgs}; +use rustc_ast::{DUMMY_NODE_ID, DelimArgs}; use rustc_ast_pretty::pp::Breaks::{Consistent, Inconsistent}; use rustc_ast_pretty::pp::{self, BoxMarker, Breaks}; use rustc_ast_pretty::pprust::state::MacHeader; @@ -22,7 +22,7 @@ use rustc_hir::{ TyPatKind, }; use rustc_span::source_map::SourceMap; -use rustc_span::{FileName, Ident, Span, Symbol, kw}; +use rustc_span::{FileName, Ident, Span, Symbol, kw, sym}; use {rustc_ast as ast, rustc_hir as hir}; pub fn id_to_string(cx: &dyn rustc_hir::intravisit::HirTyCtxt<'_>, hir_id: HirId) -> String { @@ -81,32 +81,24 @@ impl<'a> State<'a> { } fn precedence(&self, expr: &hir::Expr<'_>) -> ExprPrecedence { - let for_each_attr = |id: HirId, callback: &mut dyn FnMut(&hir::Attribute)| { - self.attrs(id).iter().for_each(callback); - }; - expr.precedence(&for_each_attr) - } - - fn print_attrs_as_inner(&mut self, attrs: &[hir::Attribute]) { - self.print_either_attributes(attrs, ast::AttrStyle::Inner) - } - - fn print_attrs_as_outer(&mut self, attrs: &[hir::Attribute]) { - self.print_either_attributes(attrs, ast::AttrStyle::Outer) + let has_attr = |id: HirId| !self.attrs(id).is_empty(); + expr.precedence(&has_attr) } - fn print_either_attributes(&mut self, attrs: &[hir::Attribute], style: ast::AttrStyle) { + fn print_attrs(&mut self, attrs: &[hir::Attribute]) { if attrs.is_empty() { return; } for attr in attrs { - self.print_attribute_inline(attr, style); + self.print_attribute_as_style(attr, ast::AttrStyle::Outer); } self.hardbreak_if_not_bol(); } - fn print_attribute_inline(&mut self, attr: &hir::Attribute, style: AttrStyle) { + /// Print a single attribute as if it has style `style`, disregarding the + /// actual style of the attribute. + fn print_attribute_as_style(&mut self, attr: &hir::Attribute, style: ast::AttrStyle) { match &attr { hir::Attribute::Unparsed(unparsed) => { self.maybe_print_comment(unparsed.span.lo()); @@ -118,14 +110,17 @@ impl<'a> State<'a> { self.word("]"); self.hardbreak() } - hir::Attribute::Parsed(AttributeKind::DocComment { style, kind, comment, .. }) => { + hir::Attribute::Parsed(AttributeKind::DocComment { kind, comment, .. }) => { self.word(rustc_ast_pretty::pprust::state::doc_comment_to_string( - *kind, *style, *comment, + *kind, style, *comment, )); self.hardbreak() } hir::Attribute::Parsed(pa) => { - self.word("#[attr = "); + match style { + ast::AttrStyle::Inner => self.word("#![attr = "), + ast::AttrStyle::Outer => self.word("#[attr = "), + } pa.print_attribute(self); self.word("]"); self.hardbreak() @@ -281,10 +276,17 @@ pub fn print_crate<'a>( ann, }; + // Print all attributes, regardless of actual style, as inner attributes + // since this is the crate root with nothing above it to print outer + // attributes. 
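For reference, the two attribute styles the printer switches between here look like this in ordinary source (a minimal standalone sketch, not compiler code):

```rust
// Inner style: `#![...]` attaches to the enclosing item. At the crate root
// there is nothing above the attribute, which is why crate-level attributes
// are rendered this way regardless of how they were recorded.
#![allow(dead_code)]

// Outer style: `#[...]` attaches to the item that follows it.
#[inline]
fn fast() {}

fn main() {}
```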
+ for attr in s.attrs(hir::CRATE_HIR_ID) { + s.print_attribute_as_style(attr, ast::AttrStyle::Inner); + } + // When printing the AST, we sometimes need to inject `#[no_std]` here. // Since you can't compile the HIR, it's not necessary. - s.print_mod(krate, (*attrs)(hir::CRATE_HIR_ID)); + s.print_mod(krate); s.print_remaining_comments(); s.s.eof() } @@ -299,7 +301,7 @@ where } pub fn attribute_to_string(ann: &dyn PpAnn, attr: &hir::Attribute) -> String { - to_string(ann, |s| s.print_attribute_inline(attr, AttrStyle::Outer)) + to_string(ann, |s| s.print_attribute_as_style(attr, ast::AttrStyle::Outer)) } pub fn ty_to_string(ann: &dyn PpAnn, ty: &hir::Ty<'_>) -> String { @@ -361,8 +363,7 @@ impl<'a> State<'a> { self.commasep_cmnt(b, exprs, |s, e| s.print_expr(e), |e| e.span); } - fn print_mod(&mut self, _mod: &hir::Mod<'_>, attrs: &[hir::Attribute]) { - self.print_attrs_as_inner(attrs); + fn print_mod(&mut self, _mod: &hir::Mod<'_>) { for &item_id in _mod.item_ids { self.ann.nested(self, Nested::Item(item_id)); } @@ -479,7 +480,7 @@ impl<'a> State<'a> { fn print_foreign_item(&mut self, item: &hir::ForeignItem<'_>) { self.hardbreak_if_not_bol(); self.maybe_print_comment(item.span.lo()); - self.print_attrs_as_outer(self.attrs(item.hir_id())); + self.print_attrs(self.attrs(item.hir_id())); match item.kind { hir::ForeignItemKind::Fn(sig, arg_idents, generics) => { let (cb, ib) = self.head(""); @@ -565,7 +566,7 @@ impl<'a> State<'a> { self.hardbreak_if_not_bol(); self.maybe_print_comment(item.span.lo()); let attrs = self.attrs(item.hir_id()); - self.print_attrs_as_outer(attrs); + self.print_attrs(attrs); self.ann.pre(self, AnnNode::Item(item)); match item.kind { hir::ItemKind::ExternCrate(orig_name, ident) => { @@ -647,14 +648,13 @@ impl<'a> State<'a> { self.print_ident(ident); self.nbsp(); self.bopen(ib); - self.print_mod(mod_, attrs); + self.print_mod(mod_); self.bclose(item.span, cb); } hir::ItemKind::ForeignMod { abi, items } => { let (cb, ib) = self.head("extern"); self.word_nbsp(abi.to_string()); self.bopen(ib); - self.print_attrs_as_inner(self.attrs(item.hir_id())); for item in items { self.ann.nested(self, Nested::ForeignItem(item.id)); } @@ -731,7 +731,6 @@ impl<'a> State<'a> { self.space(); self.bopen(ib); - self.print_attrs_as_inner(attrs); for impl_item in items { self.ann.nested(self, Nested::ImplItem(impl_item.id)); } @@ -822,7 +821,7 @@ impl<'a> State<'a> { for v in variants { self.space_if_not_bol(); self.maybe_print_comment(v.span.lo()); - self.print_attrs_as_outer(self.attrs(v.hir_id)); + self.print_attrs(self.attrs(v.hir_id)); let ib = self.ibox(INDENT_UNIT); self.print_variant(v); self.word(","); @@ -857,7 +856,7 @@ impl<'a> State<'a> { self.popen(); self.commasep(Inconsistent, struct_def.fields(), |s, field| { s.maybe_print_comment(field.span.lo()); - s.print_attrs_as_outer(s.attrs(field.hir_id)); + s.print_attrs(s.attrs(field.hir_id)); s.print_type(field.ty); }); self.pclose(); @@ -878,7 +877,7 @@ impl<'a> State<'a> { for field in struct_def.fields() { self.hardbreak_if_not_bol(); self.maybe_print_comment(field.span.lo()); - self.print_attrs_as_outer(self.attrs(field.hir_id)); + self.print_attrs(self.attrs(field.hir_id)); self.print_ident(field.ident); self.word_nbsp(":"); self.print_type(field.ty); @@ -916,7 +915,7 @@ impl<'a> State<'a> { self.ann.pre(self, AnnNode::SubItem(ti.hir_id())); self.hardbreak_if_not_bol(); self.maybe_print_comment(ti.span.lo()); - self.print_attrs_as_outer(self.attrs(ti.hir_id())); + self.print_attrs(self.attrs(ti.hir_id())); match ti.kind { 
hir::TraitItemKind::Const(ty, default) => { self.print_associated_const(ti.ident, ti.generics, ty, default); @@ -944,7 +943,7 @@ impl<'a> State<'a> { self.ann.pre(self, AnnNode::SubItem(ii.hir_id())); self.hardbreak_if_not_bol(); self.maybe_print_comment(ii.span.lo()); - self.print_attrs_as_outer(self.attrs(ii.hir_id())); + self.print_attrs(self.attrs(ii.hir_id())); match ii.kind { hir::ImplItemKind::Const(ty, expr) => { @@ -1028,27 +1027,16 @@ impl<'a> State<'a> { } fn print_block(&mut self, blk: &hir::Block<'_>, cb: BoxMarker, ib: BoxMarker) { - self.print_block_with_attrs(blk, &[], cb, ib) + self.print_block_maybe_unclosed(blk, Some(cb), ib) } fn print_block_unclosed(&mut self, blk: &hir::Block<'_>, ib: BoxMarker) { - self.print_block_maybe_unclosed(blk, &[], None, ib) - } - - fn print_block_with_attrs( - &mut self, - blk: &hir::Block<'_>, - attrs: &[hir::Attribute], - cb: BoxMarker, - ib: BoxMarker, - ) { - self.print_block_maybe_unclosed(blk, attrs, Some(cb), ib) + self.print_block_maybe_unclosed(blk, None, ib) } fn print_block_maybe_unclosed( &mut self, blk: &hir::Block<'_>, - attrs: &[hir::Attribute], cb: Option<BoxMarker>, ib: BoxMarker, ) { @@ -1060,8 +1048,6 @@ impl<'a> State<'a> { self.ann.pre(self, AnnNode::Block(blk)); self.bopen(ib); - self.print_attrs_as_inner(attrs); - for st in blk.stmts { self.print_stmt(st); } @@ -1251,7 +1237,7 @@ impl<'a> State<'a> { fn print_expr_field(&mut self, field: &hir::ExprField<'_>) { let cb = self.cbox(INDENT_UNIT); - self.print_attrs_as_outer(self.attrs(field.hir_id)); + self.print_attrs(self.attrs(field.hir_id)); if !field.is_shorthand { self.print_ident(field.ident); self.word_space(":"); @@ -1451,7 +1437,7 @@ impl<'a> State<'a> { fn print_expr(&mut self, expr: &hir::Expr<'_>) { self.maybe_print_comment(expr.span.lo()); - self.print_attrs_as_outer(self.attrs(expr.hir_id)); + self.print_attrs(self.attrs(expr.hir_id)); let ib = self.ibox(INDENT_UNIT); self.ann.pre(self, AnnNode::Expr(expr)); match expr.kind { @@ -1517,7 +1503,7 @@ impl<'a> State<'a> { self.bopen(ib); // Print `let _t = $init;`: - let temp = Ident::from_str("_t"); + let temp = Ident::with_dummy_span(sym::_t); self.print_local(false, Some(init), None, |this| this.print_ident(temp)); self.word(";"); @@ -2076,7 +2062,7 @@ impl<'a> State<'a> { self.space(); } let cb = self.cbox(INDENT_UNIT); - self.print_attrs_as_outer(self.attrs(field.hir_id)); + self.print_attrs(self.attrs(field.hir_id)); if !field.is_shorthand { self.print_ident(field.ident); self.word_nbsp(":"); @@ -2086,7 +2072,7 @@ impl<'a> State<'a> { } fn print_param(&mut self, arg: &hir::Param<'_>) { - self.print_attrs_as_outer(self.attrs(arg.hir_id)); + self.print_attrs(self.attrs(arg.hir_id)); self.print_pat(arg.pat); } @@ -2121,7 +2107,7 @@ impl<'a> State<'a> { let cb = self.cbox(INDENT_UNIT); self.ann.pre(self, AnnNode::Arm(arm)); let ib = self.ibox(0); - self.print_attrs_as_outer(self.attrs(arm.hir_id)); + self.print_attrs(self.attrs(arm.hir_id)); self.print_pat(arm.pat); self.space(); if let Some(ref g) = arm.guard { @@ -2409,7 +2395,7 @@ impl<'a> State<'a> { } fn print_where_predicate(&mut self, predicate: &hir::WherePredicate<'_>) { - self.print_attrs_as_outer(self.attrs(predicate.hir_id)); + self.print_attrs(self.attrs(predicate.hir_id)); match *predicate.kind { hir::WherePredicateKind::BoundPredicate(hir::WhereBoundPredicate { bound_generic_params, diff --git a/compiler/rustc_hir_typeck/messages.ftl b/compiler/rustc_hir_typeck/messages.ftl index ac7ff65528d..258535f3742 100644 --- 
a/compiler/rustc_hir_typeck/messages.ftl +++ b/compiler/rustc_hir_typeck/messages.ftl @@ -1,6 +1,6 @@ -hir_typeck_abi_custom_call = - functions with the `"custom"` ABI cannot be called - .note = an `extern "custom"` function can only be called from within inline assembly +hir_typeck_abi_cannot_be_called = + functions with the {$abi} ABI cannot be called + .note = an `extern {$abi}` function can only be called using inline assembly hir_typeck_add_missing_parentheses_in_range = you must surround the range in parentheses to call its `{$func_name}` function diff --git a/compiler/rustc_hir_typeck/src/callee.rs b/compiler/rustc_hir_typeck/src/callee.rs index 80bff09d0a4..7a3647df0c4 100644 --- a/compiler/rustc_hir_typeck/src/callee.rs +++ b/compiler/rustc_hir_typeck/src/callee.rs @@ -1,6 +1,6 @@ use std::iter; -use rustc_abi::ExternAbi; +use rustc_abi::{CanonAbi, ExternAbi}; use rustc_ast::util::parser::ExprPrecedence; use rustc_errors::{Applicability, Diag, ErrorGuaranteed, StashKey}; use rustc_hir::def::{self, CtorKind, Namespace, Res}; @@ -16,6 +16,7 @@ use rustc_middle::ty::{self, GenericArgsRef, Ty, TyCtxt, TypeVisitableExt}; use rustc_middle::{bug, span_bug}; use rustc_span::def_id::LocalDefId; use rustc_span::{Span, sym}; +use rustc_target::spec::{AbiMap, AbiMapping}; use rustc_trait_selection::error_reporting::traits::DefIdOrName; use rustc_trait_selection::infer::InferCtxtExt as _; use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt as _; @@ -84,7 +85,18 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { while result.is_none() && autoderef.next().is_some() { result = self.try_overloaded_call_step(call_expr, callee_expr, arg_exprs, &autoderef); } - self.check_call_custom_abi(autoderef.final_ty(false), call_expr.span); + + match autoderef.final_ty(false).kind() { + ty::FnDef(def_id, _) => { + let abi = self.tcx.fn_sig(def_id).skip_binder().skip_binder().abi; + self.check_call_abi(abi, call_expr.span); + } + ty::FnPtr(_, header) => { + self.check_call_abi(header.abi, call_expr.span); + } + _ => { /* cannot have a non-rust abi */ } + } + self.register_predicates(autoderef.into_obligations()); let output = match result { @@ -137,19 +149,37 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { output } - /// Functions of type `extern "custom" fn(/* ... */)` cannot be called using `ExprKind::Call`. + /// Can a function with this ABI be called with a rust call expression? /// - /// These functions have a calling convention that is unknown to rust, hence it cannot generate - /// code for the call. The only way to execute such a function is via inline assembly. - fn check_call_custom_abi(&self, callee_ty: Ty<'tcx>, span: Span) { - let abi = match callee_ty.kind() { - ty::FnDef(def_id, _) => self.tcx.fn_sig(def_id).skip_binder().skip_binder().abi, - ty::FnPtr(_, header) => header.abi, - _ => return, + /// Some ABIs cannot be called from rust, either because rust does not know how to generate + /// code for the call, or because a call does not semantically make sense. + pub(crate) fn check_call_abi(&self, abi: ExternAbi, span: Span) { + let canon_abi = match AbiMap::from_target(&self.sess().target).canonize_abi(abi, false) { + AbiMapping::Direct(canon_abi) | AbiMapping::Deprecated(canon_abi) => canon_abi, + AbiMapping::Invalid => return, + }; + + let valid = match canon_abi { + // Rust doesn't know how to call functions with this ABI. + CanonAbi::Custom => false, + + // This is an entry point for the host, and cannot be called on the GPU.

+ CanonAbi::GpuKernel => false, + + // The interrupt ABIs should only be called by the CPU. They have complex + // pre- and postconditions, and can use non-standard instructions like `iret` on x86. + CanonAbi::Interrupt(_) => false, + + CanonAbi::C + | CanonAbi::Rust + | CanonAbi::RustCold + | CanonAbi::Arm(_) + | CanonAbi::X86(_) => true, }; - if let ExternAbi::Custom = abi { - self.tcx.dcx().emit_err(errors::AbiCustomCall { span }); + if !valid { + let err = crate::errors::AbiCannotBeCalled { span, abi }; + self.tcx.dcx().emit_err(err); } } diff --git a/compiler/rustc_hir_typeck/src/demand.rs b/compiler/rustc_hir_typeck/src/demand.rs index 5b55fbe9150..e5684f8cbe6 100644 --- a/compiler/rustc_hir_typeck/src/demand.rs +++ b/compiler/rustc_hir_typeck/src/demand.rs @@ -1110,27 +1110,26 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } - // Returns whether the given expression is a destruct assignment desugaring. - // For example, `(a, b) = (1, &2);` - // Here we try to find the pattern binding of the expression, - // `default_binding_modes` is false only for destruct assignment desugaring. + /// Returns whether the given expression is a destruct assignment desugaring. + /// For example, `(a, b) = (1, &2);` + /// Here we try to find the pattern binding of the expression, + /// `default_binding_modes` is false only for destruct assignment desugaring. pub(crate) fn is_destruct_assignment_desugaring(&self, expr: &hir::Expr<'_>) -> bool { if let hir::ExprKind::Path(hir::QPath::Resolved( _, hir::Path { res: hir::def::Res::Local(bind_hir_id), .. }, )) = expr.kind - { - let bind = self.tcx.hir_node(*bind_hir_id); - let parent = self.tcx.parent_hir_node(*bind_hir_id); - if let hir::Node::Pat(hir::Pat { + && let bind = self.tcx.hir_node(*bind_hir_id) + && let parent = self.tcx.parent_hir_node(*bind_hir_id) + && let hir::Node::Pat(hir::Pat { kind: hir::PatKind::Binding(_, _hir_id, _, _), .. }) = bind - && let hir::Node::Pat(hir::Pat { default_binding_modes: false, .. }) = parent - { - return true; - } + && let hir::Node::Pat(hir::Pat { default_binding_modes: false, .. 
}) = parent + { + true + } else { + false } - false } fn explain_self_literal( diff --git a/compiler/rustc_hir_typeck/src/errors.rs b/compiler/rustc_hir_typeck/src/errors.rs index abb8cdc1cdf..5fea0c62843 100644 --- a/compiler/rustc_hir_typeck/src/errors.rs +++ b/compiler/rustc_hir_typeck/src/errors.rs @@ -2,6 +2,7 @@ use std::borrow::Cow; +use rustc_abi::ExternAbi; use rustc_ast::Label; use rustc_errors::codes::*; use rustc_errors::{ @@ -30,8 +31,6 @@ pub(crate) struct BaseExpressionDoubleDot { )] pub default_field_values_suggestion: Option<Span>, #[subdiagnostic] - pub default_field_values_help: Option<BaseExpressionDoubleDotEnableDefaultFieldValues>, - #[subdiagnostic] pub add_expr: Option<BaseExpressionDoubleDotAddExpr>, #[subdiagnostic] pub remove_dots: Option<BaseExpressionDoubleDotRemove>, @@ -61,10 +60,6 @@ pub(crate) struct BaseExpressionDoubleDotAddExpr { pub span: Span, } -#[derive(Subdiagnostic)] -#[help(hir_typeck_base_expression_double_dot_enable_default_field_values)] -pub(crate) struct BaseExpressionDoubleDotEnableDefaultFieldValues; - #[derive(Diagnostic)] #[diag(hir_typeck_field_multiply_specified_in_initializer, code = E0062)] pub(crate) struct FieldMultiplySpecifiedInInitializer { @@ -1165,8 +1160,10 @@ pub(crate) struct NakedFunctionsMustNakedAsm { } #[derive(Diagnostic)] -#[diag(hir_typeck_abi_custom_call)] -pub(crate) struct AbiCustomCall { +#[diag(hir_typeck_abi_cannot_be_called)] +pub(crate) struct AbiCannotBeCalled { #[primary_span] + #[note] pub span: Span, + pub abi: ExternAbi, } diff --git a/compiler/rustc_hir_typeck/src/expr.rs b/compiler/rustc_hir_typeck/src/expr.rs index 55c39d960e7..bd3ca8317eb 100644 --- a/compiler/rustc_hir_typeck/src/expr.rs +++ b/compiler/rustc_hir_typeck/src/expr.rs @@ -5,7 +5,7 @@ //! //! See [`rustc_hir_analysis::check`] for more context on type checking in general. 
-use rustc_abi::{ExternAbi, FIRST_VARIANT, FieldIdx}; +use rustc_abi::{FIRST_VARIANT, FieldIdx}; use rustc_ast::util::parser::ExprPrecedence; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_data_structures::stack::ensure_sufficient_stack; @@ -18,7 +18,7 @@ use rustc_errors::{ use rustc_hir::def::{CtorKind, DefKind, Res}; use rustc_hir::def_id::DefId; use rustc_hir::lang_items::LangItem; -use rustc_hir::{Attribute, ExprKind, HirId, QPath}; +use rustc_hir::{ExprKind, HirId, QPath}; use rustc_hir_analysis::NoVariantNamed; use rustc_hir_analysis::hir_ty_lowering::{FeedConstTy, HirTyLowerer as _}; use rustc_infer::infer; @@ -43,10 +43,9 @@ use crate::Expectation::{self, ExpectCastableToType, ExpectHasType, NoExpectatio use crate::coercion::{CoerceMany, DynamicCoerceMany}; use crate::errors::{ AddressOfTemporaryTaken, BaseExpressionDoubleDot, BaseExpressionDoubleDotAddExpr, - BaseExpressionDoubleDotEnableDefaultFieldValues, BaseExpressionDoubleDotRemove, - CantDereference, FieldMultiplySpecifiedInInitializer, FunctionalRecordUpdateOnNonStruct, - HelpUseLatestEdition, NakedAsmOutsideNakedFn, NoFieldOnType, NoFieldOnVariant, - ReturnLikeStatementKind, ReturnStmtOutsideOfFnBody, StructExprNonExhaustive, + BaseExpressionDoubleDotRemove, CantDereference, FieldMultiplySpecifiedInInitializer, + FunctionalRecordUpdateOnNonStruct, HelpUseLatestEdition, NakedAsmOutsideNakedFn, NoFieldOnType, + NoFieldOnVariant, ReturnLikeStatementKind, ReturnStmtOutsideOfFnBody, StructExprNonExhaustive, TypeMismatchFruTypo, YieldExprOutsideOfCoroutine, }; use crate::{ @@ -56,7 +55,7 @@ use crate::{ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { pub(crate) fn precedence(&self, expr: &hir::Expr<'_>) -> ExprPrecedence { - let for_each_attr = |id: HirId, callback: &mut dyn FnMut(&Attribute)| { + let has_attr = |id: HirId| -> bool { for attr in self.tcx.hir_attrs(id) { // For the purpose of rendering suggestions, disregard attributes // that originate from desugaring of any kind. For example, `x?` @@ -72,11 +71,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // let y: u32 = (x?).try_into().unwrap(); // + +++++++++++++++++++++ if attr.span().desugaring_kind().is_none() { - callback(attr); + return true; } } + false }; - expr.precedence(&for_each_attr) + expr.precedence(&has_attr) } /// Check an expr with an expectation type, and also demand that the expr's @@ -1651,13 +1651,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { Some(method.def_id), ); - // Functions of type `extern "custom" fn(/* ... */)` cannot be called using - // `ExprKind::MethodCall`. These functions have a calling convention that is - // unknown to rust, hence it cannot generate code for the call. The only way - // to execute such a function is via inline assembly. 
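A minimal nightly-only sketch of the situation this check (now routed through `check_call_abi`) rejects; the `abi_custom` feature-gate name is an assumption, and the erroneous call is left commented out:

```rust
#![feature(abi_custom)] // assumed gate for `extern "custom"`

// The calling convention of this function pointer is unknown to Rust, so an
// ordinary call expression cannot be compiled for it; per the diagnostic
// above, the target can only be reached using inline assembly.
fn reach(f: unsafe extern "custom" fn()) {
    // unsafe { f() }; // error: functions with the "custom" ABI cannot be called
    let _ = f;
}

fn main() {}
```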
- if let ExternAbi::Custom = method.sig.abi { - self.tcx.dcx().emit_err(crate::errors::AbiCustomCall { span: expr.span }); - } + self.check_call_abi(method.sig.abi, expr.span); method.sig.output() } @@ -2158,7 +2152,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } if !self.tcx.features().default_field_values() { - let sugg = self.tcx.crate_level_attribute_injection_span(expr.hir_id); + let sugg = self.tcx.crate_level_attribute_injection_span(); self.dcx().emit_err(BaseExpressionDoubleDot { span: span.shrink_to_hi(), // We only mention enabling the feature if this is a nightly rustc *and* the @@ -2166,18 +2160,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { default_field_values_suggestion: if self.tcx.sess.is_nightly_build() && missing_mandatory_fields.is_empty() && !missing_optional_fields.is_empty() - && sugg.is_some() - { - sugg - } else { - None - }, - default_field_values_help: if self.tcx.sess.is_nightly_build() - && missing_mandatory_fields.is_empty() - && !missing_optional_fields.is_empty() - && sugg.is_none() { - Some(BaseExpressionDoubleDotEnableDefaultFieldValues) + Some(sugg) } else { None }, diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs index e979798a402..8c18642e54a 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs @@ -12,7 +12,9 @@ use hir::def_id::CRATE_DEF_ID; use rustc_errors::DiagCtxtHandle; use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_hir::{self as hir, HirId, ItemLocalMap}; -use rustc_hir_analysis::hir_ty_lowering::{HirTyLowerer, RegionInferReason}; +use rustc_hir_analysis::hir_ty_lowering::{ + HirTyLowerer, InherentAssocCandidate, RegionInferReason, +}; use rustc_infer::infer; use rustc_infer::traits::{DynCompatibilityViolation, Obligation}; use rustc_middle::ty::{self, Const, Ty, TyCtxt, TypeVisitableExt}; @@ -20,7 +22,9 @@ use rustc_session::Session; use rustc_span::{self, DUMMY_SP, ErrorGuaranteed, Ident, Span, sym}; use rustc_trait_selection::error_reporting::TypeErrCtxt; use rustc_trait_selection::error_reporting::infer::sub_relations::SubRelations; -use rustc_trait_selection::traits::{ObligationCause, ObligationCauseCode, ObligationCtxt}; +use rustc_trait_selection::traits::{ + self, FulfillmentError, ObligationCause, ObligationCauseCode, ObligationCtxt, +}; use crate::coercion::DynamicCoerceMany; use crate::fallback::DivergingFallbackBehavior; @@ -310,6 +314,67 @@ impl<'tcx> HirTyLowerer<'tcx> for FnCtxt<'_, 'tcx> { )) } + fn select_inherent_assoc_candidates( + &self, + span: Span, + self_ty: Ty<'tcx>, + candidates: Vec<InherentAssocCandidate>, + ) -> (Vec<InherentAssocCandidate>, Vec<FulfillmentError<'tcx>>) { + let tcx = self.tcx(); + let infcx = &self.infcx; + let mut fulfillment_errors = vec![]; + + let mut filter_iat_candidate = |self_ty, impl_| { + let ocx = ObligationCtxt::new_with_diagnostics(self); + let self_ty = ocx.normalize(&ObligationCause::dummy(), self.param_env, self_ty); + + let impl_args = infcx.fresh_args_for_item(span, impl_); + let impl_ty = tcx.type_of(impl_).instantiate(tcx, impl_args); + let impl_ty = ocx.normalize(&ObligationCause::dummy(), self.param_env, impl_ty); + + // Check that the self types can be related. + if ocx.eq(&ObligationCause::dummy(), self.param_env, impl_ty, self_ty).is_err() { + return false; + } + + // Check whether the impl imposes obligations we have to worry about. 
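A nightly-only sketch of the user-facing resolution this filtering serves (inherent associated types, still an incomplete feature): the chosen impl is the one whose self type unifies with the written self type and whose bounds can be satisfied.

```rust
#![feature(inherent_associated_types)]
#![allow(incomplete_features)]

struct Foo<T>(T);

impl Foo<u8> {
    type Out = u16;
}

impl Foo<i32> {
    type Out = i64;
}

// Resolving `<Foo<u8>>::Out` must pick exactly one of the impls above.
fn widen(x: Foo<u8>) -> <Foo<u8>>::Out {
    x.0 as u16
}

fn main() {
    assert_eq!(widen(Foo(3u8)), 3u16);
}
```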
+ let impl_bounds = tcx.predicates_of(impl_).instantiate(tcx, impl_args); + let impl_bounds = ocx.normalize(&ObligationCause::dummy(), self.param_env, impl_bounds); + let impl_obligations = traits::predicates_for_generics( + |_, _| ObligationCause::dummy(), + self.param_env, + impl_bounds, + ); + ocx.register_obligations(impl_obligations); + + let mut errors = ocx.select_where_possible(); + if !errors.is_empty() { + fulfillment_errors.append(&mut errors); + return false; + } + + true + }; + + let mut universes = if self_ty.has_escaping_bound_vars() { + vec![None; self_ty.outer_exclusive_binder().as_usize()] + } else { + vec![] + }; + + let candidates = + traits::with_replaced_escaping_bound_vars(infcx, &mut universes, self_ty, |self_ty| { + candidates + .into_iter() + .filter(|&InherentAssocCandidate { impl_, .. }| { + infcx.probe(|_| filter_iat_candidate(self_ty, impl_)) + }) + .collect() + }); + + (candidates, fulfillment_errors) + } + fn lower_assoc_item_path( &self, span: Span, diff --git a/compiler/rustc_hir_typeck/src/method/probe.rs b/compiler/rustc_hir_typeck/src/method/probe.rs index a3fdf200c8e..589dbb53116 100644 --- a/compiler/rustc_hir_typeck/src/method/probe.rs +++ b/compiler/rustc_hir_typeck/src/method/probe.rs @@ -1727,7 +1727,6 @@ impl<'tcx> Pick<'tcx> { } tcx.disabled_nightly_features( lint, - Some(scope_expr_id), self.unstable_candidates.iter().map(|(candidate, feature)| { (format!(" `{}`", tcx.def_path_str(candidate.item.def_id)), *feature) }), diff --git a/compiler/rustc_hir_typeck/src/op.rs b/compiler/rustc_hir_typeck/src/op.rs index 7f7921b66b5..b9d24506986 100644 --- a/compiler/rustc_hir_typeck/src/op.rs +++ b/compiler/rustc_hir_typeck/src/op.rs @@ -706,7 +706,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { .sess .source_map() .span_to_snippet(lhs_expr.span) - .unwrap_or("_".to_string()), + .unwrap_or_else(|_| "_".to_string()), }; if op.span().can_be_used_for_suggestions() { diff --git a/compiler/rustc_infer/src/infer/canonical/instantiate.rs b/compiler/rustc_infer/src/infer/canonical/instantiate.rs index 67f13192b52..2385c68ef6b 100644 --- a/compiler/rustc_infer/src/infer/canonical/instantiate.rs +++ b/compiler/rustc_infer/src/infer/canonical/instantiate.rs @@ -7,8 +7,11 @@ //! 
[c]: https://rust-lang.github.io/chalk/book/canonical_queries/canonicalization.html use rustc_macros::extension; -use rustc_middle::bug; -use rustc_middle::ty::{self, FnMutDelegate, GenericArgKind, TyCtxt, TypeFoldable}; +use rustc_middle::ty::{ + self, DelayedMap, Ty, TyCtxt, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeSuperVisitable, + TypeVisitableExt, TypeVisitor, +}; +use rustc_type_ir::TypeVisitable; use crate::infer::canonical::{Canonical, CanonicalVarValues}; @@ -58,23 +61,169 @@ where T: TypeFoldable<TyCtxt<'tcx>>, { if var_values.var_values.is_empty() { - value - } else { - let delegate = FnMutDelegate { - regions: &mut |br: ty::BoundRegion| match var_values[br.var].kind() { - GenericArgKind::Lifetime(l) => l, - r => bug!("{:?} is a region but value is {:?}", br, r), - }, - types: &mut |bound_ty: ty::BoundTy| match var_values[bound_ty.var].kind() { - GenericArgKind::Type(ty) => ty, - r => bug!("{:?} is a type but value is {:?}", bound_ty, r), - }, - consts: &mut |bound_ct: ty::BoundVar| match var_values[bound_ct].kind() { - GenericArgKind::Const(ct) => ct, - c => bug!("{:?} is a const but value is {:?}", bound_ct, c), - }, - }; - - tcx.replace_escaping_bound_vars_uncached(value, delegate) + return value; } + + value.fold_with(&mut CanonicalInstantiator { + tcx, + current_index: ty::INNERMOST, + var_values: var_values.var_values, + cache: Default::default(), + }) +} + +/// Replaces the bound vars in a canonical binder with var values. +struct CanonicalInstantiator<'tcx> { + tcx: TyCtxt<'tcx>, + + // The values that the bound vars are being instantiated with. + var_values: ty::GenericArgsRef<'tcx>, + +    /// As with `BoundVarReplacer`, represents the index of a binder *just outside* + /// the ones we have visited. + current_index: ty::DebruijnIndex, + + // Instantiation is a pure function of `DebruijnIndex` and `Ty`.
+ cache: DelayedMap<(ty::DebruijnIndex, Ty<'tcx>), Ty<'tcx>>, +} + +impl<'tcx> TypeFolder<TyCtxt<'tcx>> for CanonicalInstantiator<'tcx> { + fn cx(&self) -> TyCtxt<'tcx> { + self.tcx + } + + fn fold_binder<T: TypeFoldable<TyCtxt<'tcx>>>( + &mut self, + t: ty::Binder<'tcx, T>, + ) -> ty::Binder<'tcx, T> { + self.current_index.shift_in(1); + let t = t.super_fold_with(self); + self.current_index.shift_out(1); + t + } + + fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> { + match *t.kind() { + ty::Bound(debruijn, bound_ty) if debruijn == self.current_index => { + self.var_values[bound_ty.var.as_usize()].expect_ty() + } + _ => { + if !t.has_vars_bound_at_or_above(self.current_index) { + t + } else if let Some(&t) = self.cache.get(&(self.current_index, t)) { + t + } else { + let res = t.super_fold_with(self); + assert!(self.cache.insert((self.current_index, t), res)); + res + } + } + } + } + + fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> { + match r.kind() { + ty::ReBound(debruijn, br) if debruijn == self.current_index => { + self.var_values[br.var.as_usize()].expect_region() + } + _ => r, + } + } + + fn fold_const(&mut self, ct: ty::Const<'tcx>) -> ty::Const<'tcx> { + match ct.kind() { + ty::ConstKind::Bound(debruijn, bound_const) if debruijn == self.current_index => { + self.var_values[bound_const.as_usize()].expect_const() + } + _ => ct.super_fold_with(self), + } + } + + fn fold_predicate(&mut self, p: ty::Predicate<'tcx>) -> ty::Predicate<'tcx> { + if p.has_vars_bound_at_or_above(self.current_index) { p.super_fold_with(self) } else { p } + } + + fn fold_clauses(&mut self, c: ty::Clauses<'tcx>) -> ty::Clauses<'tcx> { + if !c.has_vars_bound_at_or_above(self.current_index) { + return c; + } + + // Since instantiation is a function of `DebruijnIndex`, we don't want + // to have to cache more copies of clauses when we're inside of binders. + // Since we currently expect to only have clauses in the outermost + // debruijn index, we just fold if we're inside of a binder. + if self.current_index > ty::INNERMOST { + return c.super_fold_with(self); + } + + // Our cache key is `(clauses, var_values)`, but we also don't care about + // var values that aren't named in the clauses, since they can change without + // affecting the output. Since `ParamEnv`s are cached first, we compute the + // last var value that is mentioned in the clauses, and cut off the list so + // that we have more hits in the cache. + + // We also cache the computation of "highest var named by clauses" since that + // is both expensive (depending on the size of the clauses) and a pure function. 
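The cache-key idea described in the comments above can be illustrated with a toy sketch (illustrative code, not the compiler's types): by keying only on the prefix of `var_values` up to the highest variable the clauses actually name, instantiations that differ only in unnamed variables share a cache entry.

```rust
// Toy model: the clauses name bound vars 0..=highest_var_in_clauses; anything
// past that cannot affect the instantiated result, so it is cut off the key.
fn cache_key(var_values: &[u32], highest_var_in_clauses: usize) -> &[u32] {
    &var_values[..=highest_var_in_clauses]
}

fn main() {
    let a = [10, 20, 30, 999];
    let b = [10, 20, 30, 7];
    // The clauses mention only vars 0..=2, so both instantiations hit the
    // same cache entry even though the trailing values differ.
    assert_eq!(cache_key(&a, 2), cache_key(&b, 2));
}
```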
+ let index = *self + .tcx + .highest_var_in_clauses_cache + .lock() + .entry(c) + .or_insert_with(|| highest_var_in_clauses(c)); + let c_args = &self.var_values[..=index]; + + if let Some(c) = self.tcx.clauses_cache.lock().get(&(c, c_args)) { + c + } else { + let folded = c.super_fold_with(self); + self.tcx.clauses_cache.lock().insert((c, c_args), folded); + folded + } + } +} + +fn highest_var_in_clauses<'tcx>(c: ty::Clauses<'tcx>) -> usize { + struct HighestVarInClauses { + max_var: usize, + current_index: ty::DebruijnIndex, + } + impl<'tcx> TypeVisitor<TyCtxt<'tcx>> for HighestVarInClauses { + fn visit_binder<T: TypeVisitable<TyCtxt<'tcx>>>( + &mut self, + t: &ty::Binder<'tcx, T>, + ) -> Self::Result { + self.current_index.shift_in(1); + let t = t.super_visit_with(self); + self.current_index.shift_out(1); + t + } + fn visit_ty(&mut self, t: Ty<'tcx>) { + if let ty::Bound(debruijn, bound_ty) = *t.kind() + && debruijn == self.current_index + { + self.max_var = self.max_var.max(bound_ty.var.as_usize()); + } else if t.has_vars_bound_at_or_above(self.current_index) { + t.super_visit_with(self); + } + } + fn visit_region(&mut self, r: ty::Region<'tcx>) { + if let ty::ReBound(debruijn, bound_region) = r.kind() + && debruijn == self.current_index + { + self.max_var = self.max_var.max(bound_region.var.as_usize()); + } + } + fn visit_const(&mut self, ct: ty::Const<'tcx>) { + if let ty::ConstKind::Bound(debruijn, bound_const) = ct.kind() + && debruijn == self.current_index + { + self.max_var = self.max_var.max(bound_const.as_usize()); + } else if ct.has_vars_bound_at_or_above(self.current_index) { + ct.super_visit_with(self); + } + } + } + let mut visitor = HighestVarInClauses { max_var: 0, current_index: ty::INNERMOST }; + c.visit_with(&mut visitor); + visitor.max_var } diff --git a/compiler/rustc_infer/src/lib.rs b/compiler/rustc_infer/src/lib.rs index 550707ed4bc..18cee03ba2e 100644 --- a/compiler/rustc_infer/src/lib.rs +++ b/compiler/rustc_infer/src/lib.rs @@ -16,6 +16,7 @@ #![allow(internal_features)] #![allow(rustc::diagnostic_outside_of_impl)] #![allow(rustc::untranslatable_diagnostic)] +#![cfg_attr(not(bootstrap), allow(rustc::direct_use_of_rustc_type_ir))] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] #![feature(assert_matches)] diff --git a/compiler/rustc_interface/Cargo.toml b/compiler/rustc_interface/Cargo.toml index ff28dbeaee6..a72a7958787 100644 --- a/compiler/rustc_interface/Cargo.toml +++ b/compiler/rustc_interface/Cargo.toml @@ -5,7 +5,6 @@ edition = "2024" [dependencies] # tidy-alphabetical-start -rustc-rayon-core = { version = "0.5.0" } rustc_ast = { path = "../rustc_ast" } rustc_ast_lowering = { path = "../rustc_ast_lowering" } rustc_ast_passes = { path = "../rustc_ast_passes" } @@ -43,6 +42,7 @@ rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } rustc_symbol_mangling = { path = "../rustc_symbol_mangling" } rustc_target = { path = "../rustc_target" } +rustc_thread_pool = { path = "../rustc_thread_pool" } rustc_trait_selection = { path = "../rustc_trait_selection" } rustc_traits = { path = "../rustc_traits" } rustc_ty_utils = { path = "../rustc_ty_utils" } diff --git a/compiler/rustc_interface/src/interface.rs b/compiler/rustc_interface/src/interface.rs index e824e9d4aa9..d62bf7f85e0 100644 --- a/compiler/rustc_interface/src/interface.rs +++ b/compiler/rustc_interface/src/interface.rs @@ -52,10 +52,9 @@ pub struct Compiler { pub(crate) fn parse_cfg(dcx: DiagCtxtHandle<'_>, cfgs: Vec<String>) -> 
Cfg { cfgs.into_iter() .map(|s| { - let psess = ParseSess::with_silent_emitter( + let psess = ParseSess::with_fatal_emitter( vec![crate::DEFAULT_LOCALE_RESOURCE, rustc_parse::DEFAULT_LOCALE_RESOURCE], format!("this error occurred on the command line: `--cfg={s}`"), - true, ); let filename = FileName::cfg_spec_source_code(&s); @@ -116,10 +115,9 @@ pub(crate) fn parse_check_cfg(dcx: DiagCtxtHandle<'_>, specs: Vec<String>) -> Ch let mut check_cfg = CheckCfg { exhaustive_names, exhaustive_values, ..CheckCfg::default() }; for s in specs { - let psess = ParseSess::with_silent_emitter( + let psess = ParseSess::with_fatal_emitter( vec![crate::DEFAULT_LOCALE_RESOURCE, rustc_parse::DEFAULT_LOCALE_RESOURCE], format!("this error occurred on the command line: `--check-cfg={s}`"), - true, ); let filename = FileName::cfg_spec_source_code(&s); diff --git a/compiler/rustc_interface/src/passes.rs b/compiler/rustc_interface/src/passes.rs index 02d1ebdb31a..201b7e2b940 100644 --- a/compiler/rustc_interface/src/passes.rs +++ b/compiler/rustc_interface/src/passes.rs @@ -192,7 +192,7 @@ fn configure_and_expand( // Create the config for macro expansion let recursion_limit = get_recursion_limit(pre_configured_attrs, sess); let cfg = rustc_expand::expand::ExpansionConfig { - crate_name: crate_name.to_string(), + crate_name, features, recursion_limit, trace_mac: sess.opts.unstable_opts.trace_macros, @@ -1011,8 +1011,8 @@ fn run_required_analyses(tcx: TyCtxt<'_>) { // Prefetch this to prevent multiple threads from blocking on it later. // This is needed since the `hir_id_validator::check_crate` call above is not guaranteed - // to use `hir_crate`. - tcx.ensure_done().hir_crate(()); + // to use `hir_crate_items`. + tcx.ensure_done().hir_crate_items(()); let sess = tcx.sess; sess.time("misc_checking_1", || { diff --git a/compiler/rustc_interface/src/queries.rs b/compiler/rustc_interface/src/queries.rs index 9a474b910f6..877440ec7d2 100644 --- a/compiler/rustc_interface/src/queries.rs +++ b/compiler/rustc_interface/src/queries.rs @@ -4,6 +4,7 @@ use std::sync::Arc; use rustc_codegen_ssa::CodegenResults; use rustc_codegen_ssa::traits::CodegenBackend; use rustc_data_structures::svh::Svh; +use rustc_errors::timings::TimingSection; use rustc_hir::def_id::LOCAL_CRATE; use rustc_metadata::EncodedMetadata; use rustc_middle::dep_graph::DepGraph; @@ -88,6 +89,7 @@ impl Linker { } let _timer = sess.prof.verbose_generic_activity("link_crate"); + let _timing = sess.timings.start_section(sess.dcx(), TimingSection::Linking); codegen_backend.link(sess, codegen_results, self.metadata, &self.output_filenames) } } diff --git a/compiler/rustc_interface/src/tests.rs b/compiler/rustc_interface/src/tests.rs index 82823581c12..a0012b04c4f 100644 --- a/compiler/rustc_interface/src/tests.rs +++ b/compiler/rustc_interface/src/tests.rs @@ -802,6 +802,7 @@ fn test_unstable_options_tracking_hash() { tracked!(force_unstable_if_unmarked, true); tracked!(function_return, FunctionReturn::ThunkExtern); tracked!(function_sections, Some(false)); + tracked!(hint_mostly_unused, true); tracked!(human_readable_cgu_names, true); tracked!(incremental_ignore_spans, true); tracked!(inline_mir, Some(true)); diff --git a/compiler/rustc_interface/src/util.rs b/compiler/rustc_interface/src/util.rs index 8bdc24d47d9..8a7d6117265 100644 --- a/compiler/rustc_interface/src/util.rs +++ b/compiler/rustc_interface/src/util.rs @@ -208,7 +208,7 @@ pub(crate) fn run_in_thread_pool_with_globals< let proxy_ = Arc::clone(&proxy); let proxy__ = Arc::clone(&proxy); - let builder = 
rayon_core::ThreadPoolBuilder::new() + let builder = rustc_thread_pool::ThreadPoolBuilder::new() .thread_name(|_| "rustc".to_string()) .acquire_thread_handler(move || proxy_.acquire_thread()) .release_thread_handler(move || proxy__.release_thread()) @@ -218,7 +218,7 @@ pub(crate) fn run_in_thread_pool_with_globals< // locals to it. The new thread runs the deadlock handler. let current_gcx2 = current_gcx2.clone(); - let registry = rayon_core::Registry::current(); + let registry = rustc_thread_pool::Registry::current(); let session_globals = rustc_span::with_session_globals(|session_globals| { session_globals as *const SessionGlobals as usize }); @@ -265,7 +265,7 @@ pub(crate) fn run_in_thread_pool_with_globals< builder .build_scoped( // Initialize each new worker thread when created. - move |thread: rayon_core::ThreadBuilder| { + move |thread: rustc_thread_pool::ThreadBuilder| { // Register the thread for use with the `WorkerLocal` type. registry.register(); @@ -274,7 +274,7 @@ pub(crate) fn run_in_thread_pool_with_globals< }) }, // Run `f` on the first thread in the thread pool. - move |pool: &rayon_core::ThreadPool| { + move |pool: &rustc_thread_pool::ThreadPool| { pool.install(|| f(current_gcx.into_inner(), proxy)) }, ) diff --git a/compiler/rustc_lint/messages.ftl b/compiler/rustc_lint/messages.ftl index f92f8307808..8d9f2385b71 100644 --- a/compiler/rustc_lint/messages.ftl +++ b/compiler/rustc_lint/messages.ftl @@ -812,6 +812,9 @@ lint_tykind = usage of `ty::TyKind` lint_tykind_kind = usage of `ty::TyKind::<kind>` .suggestion = try using `ty::<kind>` directly +lint_type_ir_direct_use = do not use `rustc_type_ir` unless you are implementing type system internals + .note = use `rustc_middle::ty` instead + lint_type_ir_inherent_usage = do not use `rustc_type_ir::inherent` unless you're inside of the trait solver .note = the method or struct you're looking for is likely defined somewhere else downstream in the compiler diff --git a/compiler/rustc_lint/src/builtin.rs b/compiler/rustc_lint/src/builtin.rs index dedea54f8e0..ac405277c4e 100644 --- a/compiler/rustc_lint/src/builtin.rs +++ b/compiler/rustc_lint/src/builtin.rs @@ -21,6 +21,7 @@ use rustc_ast::tokenstream::{TokenStream, TokenTree}; use rustc_ast::visit::{FnCtxt, FnKind}; use rustc_ast::{self as ast, *}; use rustc_ast_pretty::pprust::expr_to_string; +use rustc_attr_data_structures::{AttributeKind, find_attr}; use rustc_errors::{Applicability, LintDiagnostic}; use rustc_feature::GateIssue; use rustc_hir as hir; @@ -954,7 +955,7 @@ declare_lint_pass!(InvalidNoMangleItems => [NO_MANGLE_CONST_ITEMS, NO_MANGLE_GEN impl<'tcx> LateLintPass<'tcx> for InvalidNoMangleItems { fn check_item(&mut self, cx: &LateContext<'_>, it: &hir::Item<'_>) { let attrs = cx.tcx.hir_attrs(it.hir_id()); - let check_no_mangle_on_generic_fn = |attr: &hir::Attribute, + let check_no_mangle_on_generic_fn = |attr_span: Span, impl_generics: Option<&hir::Generics<'_>>, generics: &hir::Generics<'_>, span| { @@ -967,7 +968,7 @@ impl<'tcx> LateLintPass<'tcx> for InvalidNoMangleItems { cx.emit_span_lint( NO_MANGLE_GENERIC_ITEMS, span, - BuiltinNoMangleGeneric { suggestion: attr.span() }, + BuiltinNoMangleGeneric { suggestion: attr_span }, ); break; } @@ -976,14 +977,15 @@ impl<'tcx> LateLintPass<'tcx> for InvalidNoMangleItems { }; match it.kind { hir::ItemKind::Fn { generics, .. 
} => { - if let Some(attr) = attr::find_by_name(attrs, sym::export_name) - .or_else(|| attr::find_by_name(attrs, sym::no_mangle)) + if let Some(attr_span) = attr::find_by_name(attrs, sym::export_name) + .map(|at| at.span()) + .or_else(|| find_attr!(attrs, AttributeKind::NoMangle(span) => *span)) { - check_no_mangle_on_generic_fn(attr, None, generics, it.span); + check_no_mangle_on_generic_fn(attr_span, None, generics, it.span); } } hir::ItemKind::Const(..) => { - if attr::contains_name(attrs, sym::no_mangle) { + if find_attr!(attrs, AttributeKind::NoMangle(..)) { // account for "pub const" (#45562) let start = cx .tcx @@ -1008,11 +1010,12 @@ impl<'tcx> LateLintPass<'tcx> for InvalidNoMangleItems { for it in *items { if let hir::AssocItemKind::Fn { .. } = it.kind { let attrs = cx.tcx.hir_attrs(it.id.hir_id()); - if let Some(attr) = attr::find_by_name(attrs, sym::export_name) - .or_else(|| attr::find_by_name(attrs, sym::no_mangle)) + if let Some(attr_span) = attr::find_by_name(attrs, sym::export_name) + .map(|at| at.span()) + .or_else(|| find_attr!(attrs, AttributeKind::NoMangle(span) => *span)) { check_no_mangle_on_generic_fn( - attr, + attr_span, Some(generics), cx.tcx.hir_get_generics(it.id.owner_id.def_id).unwrap(), it.span, diff --git a/compiler/rustc_lint/src/context.rs b/compiler/rustc_lint/src/context.rs index b6bf45dfbcf..297b8ef7e76 100644 --- a/compiler/rustc_lint/src/context.rs +++ b/compiler/rustc_lint/src/context.rs @@ -711,6 +711,15 @@ impl<'tcx> LateContext<'tcx> { /// Gets the absolute path of `def_id` as a vector of `Symbol`. /// + /// Note that this is kinda expensive because it has to + /// travel the tree and pretty-print. Use sparingly. + /// + /// If you're trying to match for an item given by its path, use a + /// diagnostic item. If you're only interested in given sections, use more + /// specific functions, such as [`TyCtxt::crate_name`] + /// + /// FIXME: It would be great if this could be optimized. + /// /// # Examples /// /// ```rust,ignore (no context or def id available) @@ -855,14 +864,15 @@ impl<'tcx> LateContext<'tcx> { /// rendering diagnostic. This is not the same as the precedence that would /// be used for pretty-printing HIR by rustc_hir_pretty. pub fn precedence(&self, expr: &hir::Expr<'_>) -> ExprPrecedence { - let for_each_attr = |id: hir::HirId, callback: &mut dyn FnMut(&hir::Attribute)| { + let has_attr = |id: hir::HirId| -> bool { for attr in self.tcx.hir_attrs(id) { if attr.span().desugaring_kind().is_none() { - callback(attr); + return true; } } + false }; - expr.precedence(&for_each_attr) + expr.precedence(&has_attr) } /// If the given expression is a local binding, find the initializer expression. 
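The advice in the new `LateContext` doc comment can be shown with a small compiler-internal sketch. The `is_option` helper is illustrative, but `TyCtxt::is_diagnostic_item` and `LateContext::get_def_path` are the real APIs being contrasted; this only builds inside the compiler workspace.

```rust
use rustc_hir::def_id::DefId;
use rustc_lint::LateContext;
use rustc_span::sym;

// Cheap: a single lookup against the pre-registered diagnostic item.
fn is_option(cx: &LateContext<'_>, def_id: DefId) -> bool {
    cx.tcx.is_diagnostic_item(sym::Option, def_id)
    // Avoid building and comparing the whole path instead, e.g.
    // `cx.get_def_path(def_id)`: per the note above, that walks the def
    // tree and pretty-prints, which is much more expensive.
}
```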
diff --git a/compiler/rustc_lint/src/expect.rs b/compiler/rustc_lint/src/expect.rs index 4c2b82a9a23..481e116d06e 100644 --- a/compiler/rustc_lint/src/expect.rs +++ b/compiler/rustc_lint/src/expect.rs @@ -1,5 +1,4 @@ use rustc_data_structures::fx::FxHashSet; -use rustc_hir::CRATE_OWNER_ID; use rustc_middle::lint::LintExpectation; use rustc_middle::query::Providers; use rustc_middle::ty::TyCtxt; @@ -18,7 +17,7 @@ fn lint_expectations(tcx: TyCtxt<'_>, (): ()) -> Vec<(LintExpectationId, LintExp let mut expectations = Vec::new(); - for owner in std::iter::once(CRATE_OWNER_ID).chain(krate.owners()) { + for owner in krate.owners() { let lints = tcx.shallow_lint_levels_on(owner); expectations.extend_from_slice(&lints.expectations); } diff --git a/compiler/rustc_lint/src/internal.rs b/compiler/rustc_lint/src/internal.rs index 1805a674d68..d8fc46aa9ab 100644 --- a/compiler/rustc_lint/src/internal.rs +++ b/compiler/rustc_lint/src/internal.rs @@ -14,8 +14,8 @@ use {rustc_ast as ast, rustc_hir as hir}; use crate::lints::{ BadOptAccessDiag, DefaultHashTypesDiag, DiagOutOfImpl, LintPassByHand, NonGlobImportTypeIrInherent, QueryInstability, QueryUntracked, SpanUseEqCtxtDiag, - SymbolInternStringLiteralDiag, TyQualified, TykindDiag, TykindKind, TypeIrInherentUsage, - TypeIrTraitUsage, UntranslatableDiag, + SymbolInternStringLiteralDiag, TyQualified, TykindDiag, TykindKind, TypeIrDirectUse, + TypeIrInherentUsage, TypeIrTraitUsage, UntranslatableDiag, }; use crate::{EarlyContext, EarlyLintPass, LateContext, LateLintPass, LintContext}; @@ -301,8 +301,18 @@ declare_tool_lint! { "usage `rustc_type_ir`-specific abstraction traits outside of trait system", report_in_external_macro: true } +declare_tool_lint! { + /// The `direct_use_of_rustc_type_ir` lint detects usage of `rustc_type_ir`. + /// + /// This module should only be used within the trait solver and some desirable + /// crates like rustc_middle. + pub rustc::DIRECT_USE_OF_RUSTC_TYPE_IR, + Allow, + "usage `rustc_type_ir` abstraction outside of trait system", + report_in_external_macro: true +} -declare_lint_pass!(TypeIr => [NON_GLOB_IMPORT_OF_TYPE_IR_INHERENT, USAGE_OF_TYPE_IR_INHERENT, USAGE_OF_TYPE_IR_TRAITS]); +declare_lint_pass!(TypeIr => [DIRECT_USE_OF_RUSTC_TYPE_IR, NON_GLOB_IMPORT_OF_TYPE_IR_INHERENT, USAGE_OF_TYPE_IR_INHERENT, USAGE_OF_TYPE_IR_TRAITS]); impl<'tcx> LateLintPass<'tcx> for TypeIr { fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'tcx>) { @@ -372,6 +382,21 @@ impl<'tcx> LateLintPass<'tcx> for TypeIr { NonGlobImportTypeIrInherent { suggestion: lo.eq_ctxt(hi).then(|| lo.to(hi)), snippet }, ); } + + fn check_path( + &mut self, + cx: &LateContext<'tcx>, + path: &rustc_hir::Path<'tcx>, + _: rustc_hir::HirId, + ) { + if let Some(seg) = path.segments.iter().find(|seg| { + seg.res + .opt_def_id() + .is_some_and(|def_id| cx.tcx.is_diagnostic_item(sym::type_ir, def_id)) + }) { + cx.emit_span_lint(DIRECT_USE_OF_RUSTC_TYPE_IR, seg.ident.span, TypeIrDirectUse); + } + } } declare_tool_lint! 
{ diff --git a/compiler/rustc_lint/src/lib.rs b/compiler/rustc_lint/src/lib.rs index 9a1490d3eea..20568f35a47 100644 --- a/compiler/rustc_lint/src/lib.rs +++ b/compiler/rustc_lint/src/lib.rs @@ -668,6 +668,7 @@ fn register_internals(store: &mut LintStore) { LintId::of(USAGE_OF_TYPE_IR_TRAITS), LintId::of(BAD_OPT_ACCESS), LintId::of(SPAN_USE_EQ_CTXT), + LintId::of(DIRECT_USE_OF_RUSTC_TYPE_IR), ], ); } diff --git a/compiler/rustc_lint/src/lints.rs b/compiler/rustc_lint/src/lints.rs index 0b8c68404f1..abdf8e3853b 100644 --- a/compiler/rustc_lint/src/lints.rs +++ b/compiler/rustc_lint/src/lints.rs @@ -970,6 +970,11 @@ pub(crate) struct TypeIrInherentUsage; pub(crate) struct TypeIrTraitUsage; #[derive(LintDiagnostic)] +#[diag(lint_type_ir_direct_use)] +#[note] +pub(crate) struct TypeIrDirectUse; + +#[derive(LintDiagnostic)] #[diag(lint_non_glob_import_type_ir_inherent)] pub(crate) struct NonGlobImportTypeIrInherent { #[suggestion(code = "{snippet}", applicability = "maybe-incorrect")] @@ -2626,6 +2631,7 @@ pub(crate) struct UnusedCrateDependency { pub local_crate: Symbol, } +// FIXME(jdonszelmann): duplicated in rustc_attr_parsing, should be moved there completely. #[derive(LintDiagnostic)] #[diag(lint_ill_formed_attribute_input)] pub(crate) struct IllFormedAttributeInput { diff --git a/compiler/rustc_lint/src/nonstandard_style.rs b/compiler/rustc_lint/src/nonstandard_style.rs index 1b60466a589..f39e1506390 100644 --- a/compiler/rustc_lint/src/nonstandard_style.rs +++ b/compiler/rustc_lint/src/nonstandard_style.rs @@ -1,5 +1,5 @@ use rustc_abi::ExternAbi; -use rustc_attr_data_structures::{AttributeKind, ReprAttr}; +use rustc_attr_data_structures::{AttributeKind, ReprAttr, find_attr}; use rustc_attr_parsing::AttributeParser; use rustc_hir::def::{DefKind, Res}; use rustc_hir::intravisit::FnKind; @@ -396,7 +396,9 @@ impl<'tcx> LateLintPass<'tcx> for NonSnakeCase { match &fk { FnKind::Method(ident, sig, ..) => match method_context(cx, id) { MethodLateContext::PlainImpl => { - if sig.header.abi != ExternAbi::Rust && cx.tcx.has_attr(id, sym::no_mangle) { + if sig.header.abi != ExternAbi::Rust + && find_attr!(cx.tcx.get_all_attrs(id), AttributeKind::NoMangle(..)) + { return; } self.check_snake_case(cx, "method", ident); @@ -408,7 +410,9 @@ impl<'tcx> LateLintPass<'tcx> for NonSnakeCase { }, FnKind::ItemFn(ident, _, header) => { // Skip foreign-ABI #[no_mangle] functions (Issue #31924) - if header.abi != ExternAbi::Rust && cx.tcx.has_attr(id, sym::no_mangle) { + if header.abi != ExternAbi::Rust + && find_attr!(cx.tcx.get_all_attrs(id), AttributeKind::NoMangle(..)) + { return; } self.check_snake_case(cx, "function", ident); @@ -514,7 +518,7 @@ impl<'tcx> LateLintPass<'tcx> for NonUpperCaseGlobals { let attrs = cx.tcx.hir_attrs(it.hir_id()); match it.kind { hir::ItemKind::Static(_, ident, ..) 
- if !ast::attr::contains_name(attrs, sym::no_mangle) => + if !find_attr!(attrs, AttributeKind::NoMangle(..)) => { NonUpperCaseGlobals::check_upper_case(cx, "static variable", &ident); } diff --git a/compiler/rustc_lint/src/unused.rs b/compiler/rustc_lint/src/unused.rs index 1620f425794..a868c887493 100644 --- a/compiler/rustc_lint/src/unused.rs +++ b/compiler/rustc_lint/src/unused.rs @@ -2,6 +2,7 @@ use std::iter; use rustc_ast::util::{classify, parser}; use rustc_ast::{self as ast, ExprKind, HasAttrs as _, StmtKind}; +use rustc_attr_data_structures::{AttributeKind, find_attr}; use rustc_errors::{MultiSpan, pluralize}; use rustc_hir::def::{DefKind, Res}; use rustc_hir::def_id::DefId; @@ -368,10 +369,12 @@ impl<'tcx> LateLintPass<'tcx> for UnusedResults { } fn is_def_must_use(cx: &LateContext<'_>, def_id: DefId, span: Span) -> Option<MustUsePath> { - if let Some(attr) = cx.tcx.get_attr(def_id, sym::must_use) { + if let Some(reason) = find_attr!( + cx.tcx.get_all_attrs(def_id), + AttributeKind::MustUse { reason, .. } => reason + ) { // check for #[must_use = "..."] - let reason = attr.value_str(); - Some(MustUsePath::Def(span, def_id, reason)) + Some(MustUsePath::Def(span, def_id, *reason)) } else { None } diff --git a/compiler/rustc_metadata/src/lib.rs b/compiler/rustc_metadata/src/lib.rs index 389a4ab7466..23ffb1e487f 100644 --- a/compiler/rustc_metadata/src/lib.rs +++ b/compiler/rustc_metadata/src/lib.rs @@ -2,10 +2,10 @@ #![allow(internal_features)] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] -#![feature(coroutines)] #![feature(decl_macro)] #![feature(error_iter)] #![feature(file_buffered)] +#![feature(gen_blocks)] #![feature(if_let_guard)] #![feature(iter_from_coroutine)] #![feature(macro_metavar_expr)] diff --git a/compiler/rustc_metadata/src/locator.rs b/compiler/rustc_metadata/src/locator.rs index 79015aab5d3..259bcb1b96d 100644 --- a/compiler/rustc_metadata/src/locator.rs +++ b/compiler/rustc_metadata/src/locator.rs @@ -1196,7 +1196,7 @@ impl CrateError { .opts .crate_name .clone() - .unwrap_or("<unknown>".to_string()), + .unwrap_or_else(|| "<unknown>".to_string()), is_nightly_build: sess.is_nightly_build(), profiler_runtime: Symbol::intern(&sess.opts.unstable_opts.profiler_runtime), locator_triple: locator.triple, @@ -1217,7 +1217,11 @@ impl CrateError { crate_name, add_info: String::new(), missing_core, - current_crate: sess.opts.crate_name.clone().unwrap_or("<unknown>".to_string()), + current_crate: sess + .opts + .crate_name + .clone() + .unwrap_or_else(|| "<unknown>".to_string()), is_nightly_build: sess.is_nightly_build(), profiler_runtime: Symbol::intern(&sess.opts.unstable_opts.profiler_runtime), locator_triple: sess.opts.target_triple.clone(), diff --git a/compiler/rustc_metadata/src/native_libs.rs b/compiler/rustc_metadata/src/native_libs.rs index 5cdeb8935f7..f10d71f4c65 100644 --- a/compiler/rustc_metadata/src/native_libs.rs +++ b/compiler/rustc_metadata/src/native_libs.rs @@ -705,7 +705,7 @@ impl<'tcx> Collector<'tcx> { .map_or(import_name_type, |ord| Some(PeImportNameType::Ordinal(ord))); DllImport { - name: codegen_fn_attrs.link_name.unwrap_or(self.tcx.item_name(item)), + name: codegen_fn_attrs.link_name.unwrap_or_else(|| self.tcx.item_name(item)), import_name_type, calling_convention, span, diff --git a/compiler/rustc_metadata/src/rmeta/decoder.rs b/compiler/rustc_metadata/src/rmeta/decoder.rs index 1953eef8170..d886f25247f 100644 --- a/compiler/rustc_metadata/src/rmeta/decoder.rs +++ 
b/compiler/rustc_metadata/src/rmeta/decoder.rs @@ -1,9 +1,9 @@ // Decoding metadata from a single crate's metadata use std::iter::TrustedLen; -use std::path::Path; +use std::path::{Path, PathBuf}; use std::sync::{Arc, OnceLock}; -use std::{io, iter, mem}; +use std::{io, mem}; pub(super) use cstore_impl::provide; use rustc_ast as ast; @@ -1272,34 +1272,30 @@ impl<'a> CrateMetadataRef<'a> { id: DefIndex, sess: &'a Session, ) -> impl Iterator<Item = ModChild> { - iter::from_coroutine( - #[coroutine] - move || { - if let Some(data) = &self.root.proc_macro_data { - // If we are loading as a proc macro, we want to return - // the view of this crate as a proc macro crate. - if id == CRATE_DEF_INDEX { - for child_index in data.macros.decode(self) { - yield self.get_mod_child(child_index, sess); - } - } - } else { - // Iterate over all children. - let non_reexports = - self.root.tables.module_children_non_reexports.get(self, id); - for child_index in non_reexports.unwrap().decode(self) { + gen move { + if let Some(data) = &self.root.proc_macro_data { + // If we are loading as a proc macro, we want to return + // the view of this crate as a proc macro crate. + if id == CRATE_DEF_INDEX { + for child_index in data.macros.decode(self) { yield self.get_mod_child(child_index, sess); } + } + } else { + // Iterate over all children. + let non_reexports = self.root.tables.module_children_non_reexports.get(self, id); + for child_index in non_reexports.unwrap().decode(self) { + yield self.get_mod_child(child_index, sess); + } - let reexports = self.root.tables.module_children_reexports.get(self, id); - if !reexports.is_default() { - for reexport in reexports.decode((self, sess)) { - yield reexport; - } + let reexports = self.root.tables.module_children_reexports.get(self, id); + if !reexports.is_default() { + for reexport in reexports.decode((self, sess)) { + yield reexport; } } - }, - ) + } + } } fn is_ctfe_mir_available(self, id: DefIndex) -> bool { @@ -1610,10 +1606,14 @@ impl<'a> CrateMetadataRef<'a> { /// Proc macro crates don't currently export spans, so this function does not have /// to work for them. fn imported_source_file(self, source_file_index: u32, sess: &Session) -> ImportedSourceFile { - fn filter<'a>(sess: &Session, path: Option<&'a Path>) -> Option<&'a Path> { + fn filter<'a>( + sess: &Session, + real_source_base_dir: &Option<PathBuf>, + path: Option<&'a Path>, + ) -> Option<&'a Path> { path.filter(|_| { // Only spend time on further checks if we have what to translate *to*. - sess.opts.real_rust_source_base_dir.is_some() + real_source_base_dir.is_some() // Some tests need the translation to be always skipped. && sess.opts.unstable_opts.translate_remapped_path_to_local_path }) @@ -1625,57 +1625,92 @@ impl<'a> CrateMetadataRef<'a> { }) } - let try_to_translate_virtual_to_real = |name: &mut rustc_span::FileName| { - // Translate the virtual `/rustc/$hash` prefix back to a real directory - // that should hold actual sources, where possible. - // - // NOTE: if you update this, you might need to also update bootstrap's code for generating - // the `rust-src` component in `Src::run` in `src/bootstrap/dist.rs`. 
- let virtual_rust_source_base_dir = [ - filter(sess, option_env!("CFG_VIRTUAL_RUST_SOURCE_BASE_DIR").map(Path::new)), - filter(sess, sess.opts.unstable_opts.simulate_remapped_rust_src_base.as_deref()), - ]; + let try_to_translate_virtual_to_real = + |virtual_source_base_dir: Option<&str>, + real_source_base_dir: &Option<PathBuf>, + name: &mut rustc_span::FileName| { + let virtual_source_base_dir = [ + filter(sess, real_source_base_dir, virtual_source_base_dir.map(Path::new)), + filter( + sess, + real_source_base_dir, + sess.opts.unstable_opts.simulate_remapped_rust_src_base.as_deref(), + ), + ]; - debug!( - "try_to_translate_virtual_to_real(name={:?}): \ - virtual_rust_source_base_dir={:?}, real_rust_source_base_dir={:?}", - name, virtual_rust_source_base_dir, sess.opts.real_rust_source_base_dir, - ); + debug!( + "try_to_translate_virtual_to_real(name={:?}): \ + virtual_source_base_dir={:?}, real_source_base_dir={:?}", + name, virtual_source_base_dir, real_source_base_dir, + ); + + for virtual_dir in virtual_source_base_dir.iter().flatten() { + if let Some(real_dir) = &real_source_base_dir + && let rustc_span::FileName::Real(old_name) = name + && let rustc_span::RealFileName::Remapped { local_path: _, virtual_name } = + old_name + && let Ok(rest) = virtual_name.strip_prefix(virtual_dir) + { + let new_path = real_dir.join(rest); + + debug!( + "try_to_translate_virtual_to_real: `{}` -> `{}`", + virtual_name.display(), + new_path.display(), + ); + + // Check if the translated real path is affected by any user-requested + // remaps via --remap-path-prefix. Apply them if so. + // Note that this is a special case for imported rust-src paths specified by + // https://rust-lang.github.io/rfcs/3127-trim-paths.html#handling-sysroot-paths. + // Other imported paths are not currently remapped (see #66251). + let (user_remapped, applied) = + sess.source_map().path_mapping().map_prefix(&new_path); + let new_name = if applied { + rustc_span::RealFileName::Remapped { + local_path: Some(new_path.clone()), + virtual_name: user_remapped.to_path_buf(), + } + } else { + rustc_span::RealFileName::LocalPath(new_path) + }; + *old_name = new_name; + } + } + }; - for virtual_dir in virtual_rust_source_base_dir.iter().flatten() { - if let Some(real_dir) = &sess.opts.real_rust_source_base_dir + let try_to_translate_real_to_virtual = + |virtual_source_base_dir: Option<&str>, + real_source_base_dir: &Option<PathBuf>, + subdir: &str, + name: &mut rustc_span::FileName| { + if let Some(virtual_dir) = &sess.opts.unstable_opts.simulate_remapped_rust_src_base + && let Some(real_dir) = real_source_base_dir && let rustc_span::FileName::Real(old_name) = name - && let rustc_span::RealFileName::Remapped { local_path: _, virtual_name } = - old_name - && let Ok(rest) = virtual_name.strip_prefix(virtual_dir) { - let new_path = real_dir.join(rest); - - debug!( - "try_to_translate_virtual_to_real: `{}` -> `{}`", - virtual_name.display(), - new_path.display(), - ); - - // Check if the translated real path is affected by any user-requested - // remaps via --remap-path-prefix. Apply them if so. - // Note that this is a special case for imported rust-src paths specified by - // https://rust-lang.github.io/rfcs/3127-trim-paths.html#handling-sysroot-paths. - // Other imported paths are not currently remapped (see #66251). 
- let (user_remapped, applied) = - sess.source_map().path_mapping().map_prefix(&new_path); - let new_name = if applied { - rustc_span::RealFileName::Remapped { - local_path: Some(new_path.clone()), - virtual_name: user_remapped.to_path_buf(), + let relative_path = match old_name { + rustc_span::RealFileName::LocalPath(local) => { + local.strip_prefix(real_dir).ok() + } + rustc_span::RealFileName::Remapped { virtual_name, .. } => { + virtual_source_base_dir + .and_then(|virtual_dir| virtual_name.strip_prefix(virtual_dir).ok()) } - } else { - rustc_span::RealFileName::LocalPath(new_path) }; - *old_name = new_name; + debug!( + ?relative_path, + ?virtual_dir, + ?subdir, + "simulate_remapped_rust_src_base" + ); + if let Some(rest) = relative_path.and_then(|p| p.strip_prefix(subdir).ok()) { + *old_name = rustc_span::RealFileName::Remapped { + local_path: None, + virtual_name: virtual_dir.join(subdir).join(rest), + }; + } } - } - }; + }; let mut import_info = self.cdata.source_map_import_info.lock(); for _ in import_info.len()..=(source_file_index as usize) { @@ -1713,36 +1748,45 @@ impl<'a> CrateMetadataRef<'a> { // This is useful for testing so that tests about the effects of // `try_to_translate_virtual_to_real` don't have to worry about how the // compiler is bootstrapped. - if let Some(virtual_dir) = &sess.opts.unstable_opts.simulate_remapped_rust_src_base - && let Some(real_dir) = &sess.opts.real_rust_source_base_dir - && let rustc_span::FileName::Real(ref mut old_name) = name - { - let relative_path = match old_name { - rustc_span::RealFileName::LocalPath(local) => { - local.strip_prefix(real_dir).ok() - } - rustc_span::RealFileName::Remapped { virtual_name, .. } => { - option_env!("CFG_VIRTUAL_RUST_SOURCE_BASE_DIR") - .and_then(|virtual_dir| virtual_name.strip_prefix(virtual_dir).ok()) - } - }; - debug!(?relative_path, ?virtual_dir, "simulate_remapped_rust_src_base"); - for subdir in ["library", "compiler"] { - if let Some(rest) = relative_path.and_then(|p| p.strip_prefix(subdir).ok()) - { - *old_name = rustc_span::RealFileName::Remapped { - local_path: None, // FIXME: maybe we should preserve this? - virtual_name: virtual_dir.join(subdir).join(rest), - }; - break; - } - } - } + try_to_translate_real_to_virtual( + option_env!("CFG_VIRTUAL_RUST_SOURCE_BASE_DIR"), + &sess.opts.real_rust_source_base_dir, + "library", + &mut name, + ); + + // If this file is under $sysroot/lib/rustlib/rustc-src/ + // and the user wish to simulate remapping with -Z simulate-remapped-rust-src-base, + // then we change `name` to a similar state as if the rust was bootstrapped + // with `remap-debuginfo = true`. + try_to_translate_real_to_virtual( + option_env!("CFG_VIRTUAL_RUSTC_DEV_SOURCE_BASE_DIR"), + &sess.opts.real_rustc_dev_source_base_dir, + "compiler", + &mut name, + ); // If this file's path has been remapped to `/rustc/$hash`, - // we might be able to reverse that (also see comments above, - // on `try_to_translate_virtual_to_real`). - try_to_translate_virtual_to_real(&mut name); + // we might be able to reverse that. + // + // NOTE: if you update this, you might need to also update bootstrap's code for generating + // the `rust-src` component in `Src::run` in `src/bootstrap/dist.rs`. + try_to_translate_virtual_to_real( + option_env!("CFG_VIRTUAL_RUST_SOURCE_BASE_DIR"), + &sess.opts.real_rust_source_base_dir, + &mut name, + ); + + // If this file's path has been remapped to `/rustc-dev/$hash`, + // we might be able to reverse that. 
+ // + // NOTE: if you update this, you might need to also update bootstrap's code for generating + // the `rustc-dev` component in `Src::run` in `src/bootstrap/dist.rs`. + try_to_translate_virtual_to_real( + option_env!("CFG_VIRTUAL_RUSTC_DEV_SOURCE_BASE_DIR"), + &sess.opts.real_rustc_dev_source_base_dir, + &mut name, + ); let local_version = sess.source_map().new_imported_source_file( name, diff --git a/compiler/rustc_middle/Cargo.toml b/compiler/rustc_middle/Cargo.toml index 43c1af642dd..edd0af6e4f5 100644 --- a/compiler/rustc_middle/Cargo.toml +++ b/compiler/rustc_middle/Cargo.toml @@ -9,7 +9,6 @@ bitflags = "2.4.1" either = "1.5.0" gsgdt = "0.1.2" polonius-engine = "0.13.0" -rustc-rayon-core = { version = "0.5.0" } rustc_abi = { path = "../rustc_abi" } rustc_apfloat = "0.2.0" rustc_arena = { path = "../rustc_arena" } @@ -33,6 +32,7 @@ rustc_serialize = { path = "../rustc_serialize" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } +rustc_thread_pool = { path = "../rustc_thread_pool" } rustc_type_ir = { path = "../rustc_type_ir" } smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } thin-vec = "0.2.12" diff --git a/compiler/rustc_middle/src/hir/map.rs b/compiler/rustc_middle/src/hir/map.rs index e5e1ae508ed..03bb97095a4 100644 --- a/compiler/rustc_middle/src/hir/map.rs +++ b/compiler/rustc_middle/src/hir/map.rs @@ -328,8 +328,7 @@ impl<'tcx> TyCtxt<'tcx> { } /// Returns an iterator of the `DefId`s for all body-owners in this - /// crate. If you would prefer to iterate over the bodies - /// themselves, you can do `self.hir_crate(()).body_ids.iter()`. + /// crate. #[inline] pub fn hir_body_owners(self) -> impl Iterator<Item = LocalDefId> { self.hir_crate_items(()).body_owners.iter().copied() @@ -396,12 +395,11 @@ impl<'tcx> TyCtxt<'tcx> { where V: Visitor<'tcx>, { - let krate = self.hir_crate(()); - for info in krate.owners.iter() { - if let MaybeOwner::Owner(info) = info { - for attrs in info.attrs.map.values() { - walk_list!(visitor, visit_attribute, *attrs); - } + let krate = self.hir_crate_items(()); + for owner in krate.owners() { + let attrs = self.hir_attr_map(owner); + for attrs in attrs.map.values() { + walk_list!(visitor, visit_attribute, *attrs); } } V::Result::output() @@ -1225,6 +1223,7 @@ pub(super) fn hir_module_items(tcx: TyCtxt<'_>, module_id: LocalModDefId) -> Mod .. } = collector; ModuleItems { + add_root: false, submodules: submodules.into_boxed_slice(), free_items: items.into_boxed_slice(), trait_items: trait_items.into_boxed_slice(), @@ -1260,6 +1259,7 @@ pub(crate) fn hir_crate_items(tcx: TyCtxt<'_>, _: ()) -> ModuleItems { } = collector; ModuleItems { + add_root: true, submodules: submodules.into_boxed_slice(), free_items: items.into_boxed_slice(), trait_items: trait_items.into_boxed_slice(), diff --git a/compiler/rustc_middle/src/hir/mod.rs b/compiler/rustc_middle/src/hir/mod.rs index 9f79ed4b5a5..d7a8dce0536 100644 --- a/compiler/rustc_middle/src/hir/mod.rs +++ b/compiler/rustc_middle/src/hir/mod.rs @@ -24,6 +24,9 @@ use crate::ty::{EarlyBinder, ImplSubject, TyCtxt}; /// bodies. The Ids are in visitor order. This is used to partition a pass between modules. #[derive(Debug, HashStable, Encodable, Decodable)] pub struct ModuleItems { + /// Whether this represents the whole crate, in which case we need to add `CRATE_OWNER_ID` to + /// the iterators if we want to account for the crate root. 
+ add_root: bool, submodules: Box<[OwnerId]>, free_items: Box<[ItemId]>, trait_items: Box<[TraitItemId]>, @@ -66,9 +69,10 @@ impl ModuleItems { } pub fn owners(&self) -> impl Iterator<Item = OwnerId> { - self.free_items - .iter() - .map(|id| id.owner_id) + self.add_root + .then_some(CRATE_OWNER_ID) + .into_iter() + .chain(self.free_items.iter().map(|id| id.owner_id)) .chain(self.trait_items.iter().map(|id| id.owner_id)) .chain(self.impl_items.iter().map(|id| id.owner_id)) .chain(self.foreign_items.iter().map(|id| id.owner_id)) diff --git a/compiler/rustc_middle/src/lib.rs b/compiler/rustc_middle/src/lib.rs index 6cb1d8c5fc4..ce2cb33c173 100644 --- a/compiler/rustc_middle/src/lib.rs +++ b/compiler/rustc_middle/src/lib.rs @@ -28,6 +28,7 @@ #![allow(internal_features)] #![allow(rustc::diagnostic_outside_of_impl)] #![allow(rustc::untranslatable_diagnostic)] +#![cfg_attr(not(bootstrap), allow(rustc::direct_use_of_rustc_type_ir))] #![cfg_attr(not(bootstrap), feature(sized_hierarchy))] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] @@ -39,12 +40,12 @@ #![feature(box_patterns)] #![feature(closure_track_caller)] #![feature(core_intrinsics)] -#![feature(coroutines)] #![feature(debug_closure_helpers)] #![feature(decl_macro)] #![feature(discriminant_kind)] #![feature(extern_types)] #![feature(file_buffered)] +#![feature(gen_blocks)] #![feature(if_let_guard)] #![feature(intra_doc_pointers)] #![feature(iter_from_coroutine)] diff --git a/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs b/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs index f21cf5fa45e..2f16d385efb 100644 --- a/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs +++ b/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs @@ -47,8 +47,7 @@ pub struct CodegenFnAttrs { /// be generated against a specific instruction set. Only usable on architectures which allow /// switching between multiple instruction sets. pub instruction_set: Option<InstructionSetAttr>, - /// The `#[repr(align(...))]` attribute. Indicates the value of which the function should be - /// aligned to. + /// The `#[align(...)]` attribute. Determines the alignment of the function body. pub alignment: Option<Align>, /// The `#[patchable_function_entry(...)]` attribute. Indicates how many nops should be around /// the function entry. diff --git a/compiler/rustc_middle/src/query/mod.rs b/compiler/rustc_middle/src/query/mod.rs index 930d9fba433..3668f4e12f5 100644 --- a/compiler/rustc_middle/src/query/mod.rs +++ b/compiler/rustc_middle/src/query/mod.rs @@ -1576,7 +1576,7 @@ rustc_queries! { query vtable_allocation(key: (Ty<'tcx>, Option<ty::ExistentialTraitRef<'tcx>>)) -> mir::interpret::AllocId { desc { |tcx| "vtable const allocation for <{} as {}>", key.0, - key.1.map(|trait_ref| format!("{trait_ref}")).unwrap_or("_".to_owned()) + key.1.map(|trait_ref| format!("{trait_ref}")).unwrap_or_else(|| "_".to_owned()) } } diff --git a/compiler/rustc_middle/src/traits/mod.rs b/compiler/rustc_middle/src/traits/mod.rs index 27079af06fc..d877bd5c626 100644 --- a/compiler/rustc_middle/src/traits/mod.rs +++ b/compiler/rustc_middle/src/traits/mod.rs @@ -397,6 +397,8 @@ pub enum ObligationCauseCode<'tcx> { RustCall, + DynCompatible(Span), + /// Obligations to prove that a `Drop` or negative auto trait impl is not stronger than /// the ADT it's being implemented for. 
AlwaysApplicableImpl, diff --git a/compiler/rustc_middle/src/ty/closure.rs b/compiler/rustc_middle/src/ty/closure.rs index df67bb505a6..b8c7d6cf3b1 100644 --- a/compiler/rustc_middle/src/ty/closure.rs +++ b/compiler/rustc_middle/src/ty/closure.rs @@ -422,53 +422,49 @@ pub fn analyze_coroutine_closure_captures<'a, 'tcx: 'a, T>( child_captures: impl IntoIterator<Item = &'a CapturedPlace<'tcx>>, mut for_each: impl FnMut((usize, &'a CapturedPlace<'tcx>), (usize, &'a CapturedPlace<'tcx>)) -> T, ) -> impl Iterator<Item = T> { - std::iter::from_coroutine( - #[coroutine] - move || { - let mut child_captures = child_captures.into_iter().enumerate().peekable(); - - // One parent capture may correspond to several child captures if we end up - // refining the set of captures via edition-2021 precise captures. We want to - // match up any number of child captures with one parent capture, so we keep - // peeking off this `Peekable` until the child doesn't match anymore. - for (parent_field_idx, parent_capture) in parent_captures.into_iter().enumerate() { - // Make sure we use every field at least once, b/c why are we capturing something - // if it's not used in the inner coroutine. - let mut field_used_at_least_once = false; - - // A parent matches a child if they share the same prefix of projections. - // The child may have more, if it is capturing sub-fields out of - // something that is captured by-move in the parent closure. - while child_captures.peek().is_some_and(|(_, child_capture)| { - child_prefix_matches_parent_projections(parent_capture, child_capture) - }) { - let (child_field_idx, child_capture) = child_captures.next().unwrap(); - // This analysis only makes sense if the parent capture is a - // prefix of the child capture. - assert!( - child_capture.place.projections.len() - >= parent_capture.place.projections.len(), - "parent capture ({parent_capture:#?}) expected to be prefix of \ + gen move { + let mut child_captures = child_captures.into_iter().enumerate().peekable(); + + // One parent capture may correspond to several child captures if we end up + // refining the set of captures via edition-2021 precise captures. We want to + // match up any number of child captures with one parent capture, so we keep + // peeking off this `Peekable` until the child doesn't match anymore. + for (parent_field_idx, parent_capture) in parent_captures.into_iter().enumerate() { + // Make sure we use every field at least once, b/c why are we capturing something + // if it's not used in the inner coroutine. + let mut field_used_at_least_once = false; + + // A parent matches a child if they share the same prefix of projections. + // The child may have more, if it is capturing sub-fields out of + // something that is captured by-move in the parent closure. + while child_captures.peek().is_some_and(|(_, child_capture)| { + child_prefix_matches_parent_projections(parent_capture, child_capture) + }) { + let (child_field_idx, child_capture) = child_captures.next().unwrap(); + // This analysis only makes sense if the parent capture is a + // prefix of the child capture. + assert!( + child_capture.place.projections.len() >= parent_capture.place.projections.len(), + "parent capture ({parent_capture:#?}) expected to be prefix of \ child capture ({child_capture:#?})" - ); - - yield for_each( - (parent_field_idx, parent_capture), - (child_field_idx, child_capture), - ); - - field_used_at_least_once = true; - } + ); - // Make sure the field was used at least once. 
- assert!( - field_used_at_least_once, - "we captured {parent_capture:#?} but it was not used in the child coroutine?" + yield for_each( + (parent_field_idx, parent_capture), + (child_field_idx, child_capture), ); + + field_used_at_least_once = true; } - assert_eq!(child_captures.next(), None, "leftover child captures?"); - }, - ) + + // Make sure the field was used at least once. + assert!( + field_used_at_least_once, + "we captured {parent_capture:#?} but it was not used in the child coroutine?" + ); + } + assert_eq!(child_captures.next(), None, "leftover child captures?"); + } } fn child_prefix_matches_parent_projections( diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs index f1395c242f2..c5f4b95cbbe 100644 --- a/compiler/rustc_middle/src/ty/context.rs +++ b/compiler/rustc_middle/src/ty/context.rs @@ -1479,6 +1479,12 @@ pub struct GlobalCtxt<'tcx> { pub canonical_param_env_cache: CanonicalParamEnvCache<'tcx>, + /// Caches the index of the highest bound var in clauses in a canonical binder. + pub highest_var_in_clauses_cache: Lock<FxHashMap<ty::Clauses<'tcx>, usize>>, + /// Caches the instantiation of a canonical binder given a set of args. + pub clauses_cache: + Lock<FxHashMap<(ty::Clauses<'tcx>, &'tcx [ty::GenericArg<'tcx>]), ty::Clauses<'tcx>>>, + /// Data layout specification for the current target. pub data_layout: TargetDataLayout, @@ -1727,6 +1733,8 @@ impl<'tcx> TyCtxt<'tcx> { new_solver_evaluation_cache: Default::default(), new_solver_canonical_param_env_cache: Default::default(), canonical_param_env_cache: Default::default(), + highest_var_in_clauses_cache: Default::default(), + clauses_cache: Default::default(), data_layout, alloc_map: interpret::AllocMap::new(), current_gcx, @@ -2079,23 +2087,20 @@ impl<'tcx> TyCtxt<'tcx> { self.dep_graph.read_index(DepNodeIndex::FOREVER_RED_NODE); let definitions = &self.untracked.definitions; - std::iter::from_coroutine( - #[coroutine] - || { - let mut i = 0; - - // Recompute the number of definitions each time, because our caller may be creating - // new ones. - while i < { definitions.read().num_definitions() } { - let local_def_index = rustc_span::def_id::DefIndex::from_usize(i); - yield LocalDefId { local_def_index }; - i += 1; - } + gen { + let mut i = 0; + + // Recompute the number of definitions each time, because our caller may be creating + // new ones. + while i < { definitions.read().num_definitions() } { + let local_def_index = rustc_span::def_id::DefIndex::from_usize(i); + yield LocalDefId { local_def_index }; + i += 1; + } - // Freeze definitions once we finish iterating on them, to prevent adding new ones. - definitions.freeze(); - }, - ) + // Freeze definitions once we finish iterating on them, to prevent adding new ones. + definitions.freeze(); + } } pub fn def_path_table(self) -> &'tcx rustc_hir::definitions::DefPathTable { @@ -2113,7 +2118,7 @@ impl<'tcx> TyCtxt<'tcx> { ) -> &'tcx rustc_hir::def_path_hash_map::DefPathHashMap { // Create a dependency to the crate to be sure we re-execute this when the amount of // definitions change. - self.ensure_ok().hir_crate(()); + self.ensure_ok().hir_crate_items(()); // Freeze definitions once we start iterating on them, to prevent adding new ones // while iterating. If some query needs to add definitions, it should be `ensure`d above. 
self.untracked.definitions.freeze().def_path_hash_to_def_index_map() @@ -3160,42 +3165,33 @@ impl<'tcx> TyCtxt<'tcx> { lint_level(self.sess, lint, level, Some(span.into()), decorate); } - /// Find the crate root and the appropriate span where `use` and outer attributes can be - /// inserted at. - pub fn crate_level_attribute_injection_span(self, hir_id: HirId) -> Option<Span> { - for (_hir_id, node) in self.hir_parent_iter(hir_id) { - if let hir::Node::Crate(m) = node { - return Some(m.spans.inject_use_span.shrink_to_lo()); - } - } - None + /// Find the appropriate span where `use` and outer attributes can be inserted at. + pub fn crate_level_attribute_injection_span(self) -> Span { + let node = self.hir_node(hir::CRATE_HIR_ID); + let hir::Node::Crate(m) = node else { bug!() }; + m.spans.inject_use_span.shrink_to_lo() } pub fn disabled_nightly_features<E: rustc_errors::EmissionGuarantee>( self, diag: &mut Diag<'_, E>, - hir_id: Option<HirId>, features: impl IntoIterator<Item = (String, Symbol)>, ) { if !self.sess.is_nightly_build() { return; } - let span = hir_id.and_then(|id| self.crate_level_attribute_injection_span(id)); + let span = self.crate_level_attribute_injection_span(); for (desc, feature) in features { // FIXME: make this string translatable let msg = format!("add `#![feature({feature})]` to the crate attributes to enable{desc}"); - if let Some(span) = span { - diag.span_suggestion_verbose( - span, - msg, - format!("#![feature({feature})]\n"), - Applicability::MaybeIncorrect, - ); - } else { - diag.help(msg); - } + diag.span_suggestion_verbose( + span, + msg, + format!("#![feature({feature})]\n"), + Applicability::MaybeIncorrect, + ); } } diff --git a/compiler/rustc_middle/src/ty/context/tls.rs b/compiler/rustc_middle/src/ty/context/tls.rs index 5fc80bc7936..fa9995898ac 100644 --- a/compiler/rustc_middle/src/ty/context/tls.rs +++ b/compiler/rustc_middle/src/ty/context/tls.rs @@ -36,7 +36,7 @@ impl<'a, 'tcx> ImplicitCtxt<'a, 'tcx> { } // Import the thread-local variable from Rayon, which is preserved for Rayon jobs. -use rayon_core::tlv::TLV; +use rustc_thread_pool::tlv::TLV; #[inline] fn erase(context: &ImplicitCtxt<'_, '_>) -> *const () { diff --git a/compiler/rustc_middle/src/ty/inhabitedness/mod.rs b/compiler/rustc_middle/src/ty/inhabitedness/mod.rs index d8bab58545f..2a336cc21f4 100644 --- a/compiler/rustc_middle/src/ty/inhabitedness/mod.rs +++ b/compiler/rustc_middle/src/ty/inhabitedness/mod.rs @@ -127,7 +127,9 @@ impl<'tcx> Ty<'tcx> { InhabitedPredicate::True } Never => InhabitedPredicate::False, - Param(_) | Alias(ty::Projection | ty::Free, _) => InhabitedPredicate::GenericType(self), + Param(_) | Alias(ty::Inherent | ty::Projection | ty::Free, _) => { + InhabitedPredicate::GenericType(self) + } Alias(ty::Opaque, alias_ty) => { match alias_ty.def_id.as_local() { // Foreign opaque is considered inhabited. @@ -139,12 +141,6 @@ impl<'tcx> Ty<'tcx> { } } } - // FIXME(inherent_associated_types): Most likely we can just map to `GenericType` like above. - // However it's unclear if the args passed to `InhabitedPredicate::instantiate` are of the correct - // format, i.e. don't contain parent args. If you hit this case, please verify this beforehand. - Alias(ty::Inherent, _) => { - bug!("unimplemented: inhabitedness checking for inherent projections") - } Tuple(tys) if tys.is_empty() => InhabitedPredicate::True, // use a query for more complex cases Adt(..) | Array(..) 
| Tuple(_) => tcx.inhabited_predicate_type(self), diff --git a/compiler/rustc_mir_transform/src/copy_prop.rs b/compiler/rustc_mir_transform/src/copy_prop.rs index 27af5818982..fe78a104fa0 100644 --- a/compiler/rustc_mir_transform/src/copy_prop.rs +++ b/compiler/rustc_mir_transform/src/copy_prop.rs @@ -30,6 +30,8 @@ impl<'tcx> crate::MirPass<'tcx> for CopyProp { let typing_env = body.typing_env(tcx); let ssa = SsaLocals::new(tcx, body, typing_env); + debug!(borrowed_locals = ?ssa.borrowed_locals()); + debug!(copy_classes = ?ssa.copy_classes()); let fully_moved = fully_moved_locals(&ssa, body); debug!(?fully_moved); @@ -43,14 +45,8 @@ impl<'tcx> crate::MirPass<'tcx> for CopyProp { let any_replacement = ssa.copy_classes().iter_enumerated().any(|(l, &h)| l != h); - Replacer { - tcx, - copy_classes: ssa.copy_classes(), - fully_moved, - borrowed_locals: ssa.borrowed_locals(), - storage_to_remove, - } - .visit_body_preserves_cfg(body); + Replacer { tcx, copy_classes: ssa.copy_classes(), fully_moved, storage_to_remove } + .visit_body_preserves_cfg(body); if any_replacement { crate::simplify::remove_unused_definitions(body); @@ -102,7 +98,6 @@ struct Replacer<'a, 'tcx> { tcx: TyCtxt<'tcx>, fully_moved: DenseBitSet<Local>, storage_to_remove: DenseBitSet<Local>, - borrowed_locals: &'a DenseBitSet<Local>, copy_classes: &'a IndexSlice<Local, Local>, } @@ -111,34 +106,18 @@ impl<'tcx> MutVisitor<'tcx> for Replacer<'_, 'tcx> { self.tcx } + #[tracing::instrument(level = "trace", skip(self))] fn visit_local(&mut self, local: &mut Local, ctxt: PlaceContext, _: Location) { let new_local = self.copy_classes[*local]; - // We must not unify two locals that are borrowed. But this is fine if one is borrowed and - // the other is not. We chose to check the original local, and not the target. That way, if - // the original local is borrowed and the target is not, we do not pessimize the whole class. - if self.borrowed_locals.contains(*local) { - return; - } match ctxt { // Do not modify the local in storage statements. PlaceContext::NonUse(NonUseContext::StorageLive | NonUseContext::StorageDead) => {} - // The local should have been marked as non-SSA. - PlaceContext::MutatingUse(_) => assert_eq!(*local, new_local), // We access the value. _ => *local = new_local, } } - fn visit_place(&mut self, place: &mut Place<'tcx>, _: PlaceContext, loc: Location) { - if let Some(new_projection) = self.process_projection(place.projection, loc) { - place.projection = self.tcx().mk_place_elems(&new_projection); - } - - // Any non-mutating use context is ok. - let ctxt = PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy); - self.visit_local(&mut place.local, ctxt, loc) - } - + #[tracing::instrument(level = "trace", skip(self))] fn visit_operand(&mut self, operand: &mut Operand<'tcx>, loc: Location) { if let Operand::Move(place) = *operand // A move out of a projection of a copy is equivalent to a copy of the original @@ -151,6 +130,7 @@ impl<'tcx> MutVisitor<'tcx> for Replacer<'_, 'tcx> { self.super_operand(operand, loc); } + #[tracing::instrument(level = "trace", skip(self))] fn visit_statement(&mut self, stmt: &mut Statement<'tcx>, loc: Location) { // When removing storage statements, we need to remove both (#107511). 
if let StatementKind::StorageLive(l) | StatementKind::StorageDead(l) = stmt.kind diff --git a/compiler/rustc_mir_transform/src/cross_crate_inline.rs b/compiler/rustc_mir_transform/src/cross_crate_inline.rs index 727d4a126d2..6d7b7e10ef6 100644 --- a/compiler/rustc_mir_transform/src/cross_crate_inline.rs +++ b/compiler/rustc_mir_transform/src/cross_crate_inline.rs @@ -50,6 +50,13 @@ fn cross_crate_inlinable(tcx: TyCtxt<'_>, def_id: LocalDefId) -> bool { _ => {} } + // If the crate is likely to be mostly unused, use cross-crate inlining to defer codegen until + // the function is referenced, in order to skip codegen for unused functions. This is + // intentionally after the check for `inline(never)`, so that `inline(never)` wins. + if tcx.sess.opts.unstable_opts.hint_mostly_unused { + return true; + } + let sig = tcx.fn_sig(def_id).instantiate_identity(); for ty in sig.inputs().skip_binder().iter().chain(std::iter::once(&sig.output().skip_binder())) { diff --git a/compiler/rustc_mir_transform/src/jump_threading.rs b/compiler/rustc_mir_transform/src/jump_threading.rs index 48db536c122..b45bff2af44 100644 --- a/compiler/rustc_mir_transform/src/jump_threading.rs +++ b/compiler/rustc_mir_transform/src/jump_threading.rs @@ -89,7 +89,7 @@ impl<'tcx> crate::MirPass<'tcx> for JumpThreading { opportunities: Vec::new(), }; - for bb in body.basic_blocks.indices() { + for (bb, _) in traversal::preorder(body) { finder.start_from_switch(bb); } diff --git a/compiler/rustc_mir_transform/src/simplify.rs b/compiler/rustc_mir_transform/src/simplify.rs index a54e548ad70..db933da6413 100644 --- a/compiler/rustc_mir_transform/src/simplify.rs +++ b/compiler/rustc_mir_transform/src/simplify.rs @@ -82,7 +82,7 @@ pub(super) fn simplify_cfg<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { remove_dead_blocks(body); // FIXME: Should probably be moved into some kind of pass manager - body.basic_blocks_mut().raw.shrink_to_fit(); + body.basic_blocks.as_mut_preserves_cfg().shrink_to_fit(); } impl<'tcx> crate::MirPass<'tcx> for SimplifyCfg { diff --git a/compiler/rustc_mir_transform/src/ssa.rs b/compiler/rustc_mir_transform/src/ssa.rs index edd0cabca49..03b6f9b7ff3 100644 --- a/compiler/rustc_mir_transform/src/ssa.rs +++ b/compiler/rustc_mir_transform/src/ssa.rs @@ -293,6 +293,10 @@ impl<'tcx> Visitor<'tcx> for SsaVisitor<'_, 'tcx> { fn compute_copy_classes(ssa: &mut SsaLocals, body: &Body<'_>) { let mut direct_uses = std::mem::take(&mut ssa.direct_uses); let mut copies = IndexVec::from_fn_n(|l| l, body.local_decls.len()); + // We must not unify two locals that are borrowed. But this is fine if one is borrowed and + // the other is not. This bitset is keyed by *class head* and contains whether any member of + // the class is borrowed. + let mut borrowed_classes = ssa.borrowed_locals().clone(); for (local, rvalue, _) in ssa.assignments(body) { let (Rvalue::Use(Operand::Copy(place) | Operand::Move(place)) @@ -318,6 +322,11 @@ fn compute_copy_classes(ssa: &mut SsaLocals, body: &Body<'_>) { // visited before `local`, and we just have to copy the representing local. let head = copies[rhs]; + // Do not unify two borrowed locals. + if borrowed_classes.contains(local) && borrowed_classes.contains(head) { + continue; + } + if local == RETURN_PLACE { // `_0` is special, we cannot rename it. Instead, rename the class of `rhs` to // `RETURN_PLACE`. 
This is only possible if the class head is a temporary, not an @@ -330,14 +339,21 @@ fn compute_copy_classes(ssa: &mut SsaLocals, body: &Body<'_>) { *h = RETURN_PLACE; } } + if borrowed_classes.contains(head) { + borrowed_classes.insert(RETURN_PLACE); + } } else { copies[local] = head; + if borrowed_classes.contains(local) { + borrowed_classes.insert(head); + } } direct_uses[rhs] -= 1; } debug!(?copies); debug!(?direct_uses); + debug!(?borrowed_classes); // Invariant: `copies` must point to the head of an equivalence class. #[cfg(debug_assertions)] @@ -346,6 +362,13 @@ fn compute_copy_classes(ssa: &mut SsaLocals, body: &Body<'_>) { } debug_assert_eq!(copies[RETURN_PLACE], RETURN_PLACE); + // Invariant: `borrowed_classes` must be true if any member of the class is borrowed. + #[cfg(debug_assertions)] + for &head in copies.iter() { + let any_borrowed = ssa.borrowed_locals.iter().any(|l| copies[l] == head); + assert_eq!(borrowed_classes.contains(head), any_borrowed); + } + ssa.direct_uses = direct_uses; ssa.copy_classes = copies; } diff --git a/compiler/rustc_monomorphize/src/collector.rs b/compiler/rustc_monomorphize/src/collector.rs index 173030e0326..e90e32ebebb 100644 --- a/compiler/rustc_monomorphize/src/collector.rs +++ b/compiler/rustc_monomorphize/src/collector.rs @@ -949,6 +949,9 @@ fn visit_instance_use<'tcx>( } ty::InstanceKind::DropGlue(_, None) => { // Don't need to emit noop drop glue if we are calling directly. + // + // Note that we also optimize away the call to visit_instance_use in vtable construction + // (see create_mono_items_for_vtable_methods). if !is_direct_call { output.push(create_fn_mono_item(tcx, instance, source)); } @@ -1177,8 +1180,13 @@ fn create_mono_items_for_vtable_methods<'tcx>( output.extend(methods); } - // Also add the destructor. - visit_drop_use(tcx, impl_ty, false, source, output); + // Also add the destructor, if it's necessary. + // + // This matches the check in vtable_allocation_provider in middle/ty/vtable.rs, + // if we don't need drop we're not adding an actual pointer to the vtable. + if impl_ty.needs_drop(tcx, ty::TypingEnv::fully_monomorphized()) { + visit_drop_use(tcx, impl_ty, false, source, output); + } } /// Scans the CTFE alloc in order to find function pointers and statics that must be monomorphized. 
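Aside on the copy_prop.rs and ssa.rs hunks above: the "never unify two borrowed locals" rule moves out of the per-use Replacer and into compute_copy_classes, which now keeps a bitset keyed by class head recording whether any member of the class is borrowed. The snippet below is a minimal, self-contained sketch of that bookkeeping only, with plain usize locals standing in for MIR Locals and the RETURN_PLACE special case omitted; it is an illustration, not the compiler's actual code.

// Illustrative sketch of the per-class "borrowed" flag threaded through the
// union step. Locals are plain usizes here; the real pass uses Local indices
// and DenseBitSets.
fn main() {
    // _1 and _3 are borrowed somewhere in the body.
    let mut borrowed_class = vec![false, true, false, true];
    // Every local starts out as its own class head.
    let mut copies: Vec<usize> = (0..borrowed_class.len()).collect();

    // Pretend the body contains `_2 = copy _1;` and `_3 = copy _1;`.
    for (local, rhs) in [(2usize, 1usize), (3, 1)] {
        let head = copies[rhs];
        // Do not unify two borrowed locals (the new check).
        if borrowed_class[local] && borrowed_class[head] {
            continue;
        }
        copies[local] = head;
        // Record on the class head that some member is borrowed.
        if borrowed_class[local] {
            borrowed_class[head] = true;
        }
    }

    // _2 joined _1's class; _3 kept its own class because both were borrowed.
    assert_eq!(copies, [0, 1, 1, 3]);
}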
diff --git a/compiler/rustc_next_trait_solver/src/lib.rs b/compiler/rustc_next_trait_solver/src/lib.rs index 77f098e6f26..e3f42c181fa 100644 --- a/compiler/rustc_next_trait_solver/src/lib.rs +++ b/compiler/rustc_next_trait_solver/src/lib.rs @@ -7,6 +7,7 @@ // tidy-alphabetical-start #![allow(rustc::usage_of_type_ir_inherent)] #![allow(rustc::usage_of_type_ir_traits)] +#![cfg_attr(not(bootstrap), allow(rustc::direct_use_of_rustc_type_ir))] // tidy-alphabetical-end pub mod canonicalizer; diff --git a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs index 7ead0a6d6b7..00fd3ba8046 100644 --- a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs +++ b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs @@ -430,7 +430,7 @@ where canonical_input, step_kind_from_parent, &mut canonical_goal_evaluation, - |search_graph, canonical_goal_evaluation| { + |search_graph, cx, canonical_input, canonical_goal_evaluation| { EvalCtxt::enter_canonical( cx, search_graph, diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs index 658ed4bd41c..5088caa80f8 100644 --- a/compiler/rustc_parse/src/parser/item.rs +++ b/compiler/rustc_parse/src/parser/item.rs @@ -1781,7 +1781,7 @@ impl<'a> Parser<'a> { let mut recovered = Recovered::No; if self.eat(exp!(OpenBrace)) { while self.token != token::CloseBrace { - match self.parse_field_def(adt_ty) { + match self.parse_field_def(adt_ty, ident_span) { Ok(field) => { fields.push(field); } @@ -1894,7 +1894,7 @@ impl<'a> Parser<'a> { } /// Parses an element of a struct declaration. - fn parse_field_def(&mut self, adt_ty: &str) -> PResult<'a, FieldDef> { + fn parse_field_def(&mut self, adt_ty: &str, ident_span: Span) -> PResult<'a, FieldDef> { self.recover_vcs_conflict_marker(); let attrs = self.parse_outer_attributes()?; self.recover_vcs_conflict_marker(); @@ -1902,7 +1902,7 @@ impl<'a> Parser<'a> { let lo = this.token.span; let vis = this.parse_visibility(FollowedByType::No)?; let safety = this.parse_unsafe_field(); - this.parse_single_struct_field(adt_ty, lo, vis, safety, attrs) + this.parse_single_struct_field(adt_ty, lo, vis, safety, attrs, ident_span) .map(|field| (field, Trailing::No, UsePreAttrPos::No)) }) } @@ -1915,28 +1915,27 @@ impl<'a> Parser<'a> { vis: Visibility, safety: Safety, attrs: AttrVec, + ident_span: Span, ) -> PResult<'a, FieldDef> { - let mut seen_comma: bool = false; let a_var = self.parse_name_and_ty(adt_ty, lo, vis, safety, attrs)?; - if self.token == token::Comma { - seen_comma = true; - } - if self.eat(exp!(Semi)) { - let sp = self.prev_token.span; - let mut err = - self.dcx().struct_span_err(sp, format!("{adt_ty} fields are separated by `,`")); - err.span_suggestion_short( - sp, - "replace `;` with `,`", - ",", - Applicability::MachineApplicable, - ); - return Err(err); - } match self.token.kind { token::Comma => { self.bump(); } + token::Semi => { + self.bump(); + let sp = self.prev_token.span; + let mut err = + self.dcx().struct_span_err(sp, format!("{adt_ty} fields are separated by `,`")); + err.span_suggestion_short( + sp, + "replace `;` with `,`", + ",", + Applicability::MachineApplicable, + ); + err.span_label(ident_span, format!("while parsing this {adt_ty}")); + err.emit(); + } token::CloseBrace => {} token::DocComment(..) 
=> { let previous_span = self.prev_token.span; @@ -1945,19 +1944,11 @@ impl<'a> Parser<'a> { missing_comma: None, }; self.bump(); // consume the doc comment - let comma_after_doc_seen = self.eat(exp!(Comma)); - // `seen_comma` is always false, because we are inside doc block - // condition is here to make code more readable - if !seen_comma && comma_after_doc_seen { - seen_comma = true; - } - if comma_after_doc_seen || self.token == token::CloseBrace { + if self.eat(exp!(Comma)) || self.token == token::CloseBrace { self.dcx().emit_err(err); } else { - if !seen_comma { - let sp = previous_span.shrink_to_hi(); - err.missing_comma = Some(sp); - } + let sp = previous_span.shrink_to_hi(); + err.missing_comma = Some(sp); return Err(self.dcx().create_err(err)); } } diff --git a/compiler/rustc_parse/src/parser/tests.rs b/compiler/rustc_parse/src/parser/tests.rs index 2a44c90abc1..15679d23bc5 100644 --- a/compiler/rustc_parse/src/parser/tests.rs +++ b/compiler/rustc_parse/src/parser/tests.rs @@ -14,6 +14,7 @@ use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, Toke use rustc_ast::{self as ast, PatKind, visit}; use rustc_ast_pretty::pprust::item_to_string; use rustc_errors::emitter::{HumanEmitter, OutputTheme}; +use rustc_errors::translation::Translator; use rustc_errors::{DiagCtxt, MultiSpan, PResult}; use rustc_session::parse::ParseSess; use rustc_span::source_map::{FilePathMapping, SourceMap}; @@ -41,9 +42,8 @@ fn string_to_parser(psess: &ParseSess, source_str: String) -> Parser<'_> { fn create_test_handler(theme: OutputTheme) -> (DiagCtxt, Arc<SourceMap>, Arc<Mutex<Vec<u8>>>) { let output = Arc::new(Mutex::new(Vec::new())); let source_map = Arc::new(SourceMap::new(FilePathMapping::empty())); - let fallback_bundle = - rustc_errors::fallback_fluent_bundle(vec![crate::DEFAULT_LOCALE_RESOURCE], false); - let mut emitter = HumanEmitter::new(Box::new(Shared { data: output.clone() }), fallback_bundle) + let translator = Translator::with_fallback_bundle(vec![crate::DEFAULT_LOCALE_RESOURCE], false); + let mut emitter = HumanEmitter::new(Box::new(Shared { data: output.clone() }), translator) .sm(Some(source_map.clone())) .diagnostic_width(Some(140)); emitter = emitter.theme(theme); diff --git a/compiler/rustc_parse/src/validate_attr.rs b/compiler/rustc_parse/src/validate_attr.rs index 555ab3cdb2b..8e6442353c3 100644 --- a/compiler/rustc_parse/src/validate_attr.rs +++ b/compiler/rustc_parse/src/validate_attr.rs @@ -282,11 +282,26 @@ fn emit_malformed_attribute( name: Symbol, template: AttributeTemplate, ) { + // attrs with new parsers are locally validated so excluded here + if matches!( + name, + sym::inline + | sym::rustc_force_inline + | sym::rustc_confusables + | sym::repr + | sym::align + | sym::deprecated + | sym::optimize + | sym::cold + | sym::must_use + ) { + return; + } + // Some of previously accepted forms were used in practice, // report them as warnings for now. 
- let should_warn = |name| { - matches!(name, sym::doc | sym::ignore | sym::inline | sym::link | sym::test | sym::bench) - }; + let should_warn = + |name| matches!(name, sym::doc | sym::ignore | sym::link | sym::test | sym::bench); let error_msg = format!("malformed `{name}` attribute input"); let mut suggestions = vec![]; diff --git a/compiler/rustc_passes/messages.ftl b/compiler/rustc_passes/messages.ftl index 7c237d708c0..c1a2b3b2973 100644 --- a/compiler/rustc_passes/messages.ftl +++ b/compiler/rustc_passes/messages.ftl @@ -13,6 +13,10 @@ passes_abi_ne = passes_abi_of = fn_abi_of({$fn_name}) = {$fn_abi} +passes_align_should_be_repr_align = + `#[align(...)]` is not supported on {$item} items + .suggestion = use `#[repr(align(...))]` instead + passes_allow_incoherent_impl = `rustc_allow_incoherent_impl` attribute should be applied to impl items .label = the only currently supported targets are inherent methods @@ -29,10 +33,6 @@ passes_attr_application_struct = attribute should be applied to a struct .label = not a struct -passes_attr_application_struct_enum_function_method_union = - attribute should be applied to a struct, enum, function, associated function, or union - .label = not a struct, enum, function, associated function, or union - passes_attr_application_struct_enum_union = attribute should be applied to a struct, enum, or union .label = not a struct, enum, or union @@ -583,13 +583,14 @@ passes_remove_fields = *[other] fields } -passes_repr_align_function = - `repr(align)` attributes on functions are unstable - passes_repr_align_greater_than_target_max = alignment must not be greater than `isize::MAX` bytes .note = `isize::MAX` is {$size} for the current target +passes_repr_align_should_be_align = + `#[repr(align(...))]` is not supported on {$item} items + .help = use `#[align(...)]` instead + passes_repr_conflicting = conflicting representation hints diff --git a/compiler/rustc_passes/src/check_attr.rs b/compiler/rustc_passes/src/check_attr.rs index dddbf65db72..ad1a2a04273 100644 --- a/compiler/rustc_passes/src/check_attr.rs +++ b/compiler/rustc_passes/src/check_attr.rs @@ -10,7 +10,7 @@ use std::collections::hash_map::Entry; use rustc_abi::{Align, ExternAbi, Size}; use rustc_ast::{AttrStyle, LitKind, MetaItemInner, MetaItemKind, MetaItemLit, ast}; -use rustc_attr_data_structures::{AttributeKind, ReprAttr, find_attr}; +use rustc_attr_data_structures::{AttributeKind, InlineAttr, ReprAttr, find_attr}; use rustc_data_structures::fx::FxHashMap; use rustc_errors::{Applicability, DiagCtxtHandle, IntoDiagArg, MultiSpan, StashKey}; use rustc_feature::{AttributeDuplicates, AttributeType, BUILTIN_ATTRIBUTE_MAP, BuiltinAttribute}; @@ -35,7 +35,7 @@ use rustc_session::lint::builtin::{ UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES, UNUSED_ATTRIBUTES, }; use rustc_session::parse::feature_err; -use rustc_span::{BytePos, DUMMY_SP, Span, Symbol, edition, sym}; +use rustc_span::{BytePos, DUMMY_SP, Span, Symbol, edition, kw, sym}; use rustc_trait_selection::error_reporting::InferCtxtErrorExt; use rustc_trait_selection::infer::{TyCtxtInferExt, ValuePairs}; use rustc_trait_selection::traits::ObligationCtxt; @@ -116,6 +116,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { let mut seen = FxHashMap::default(); let attrs = self.tcx.hir_attrs(hir_id); for attr in attrs { + let mut style = None; match attr { Attribute::Parsed(AttributeKind::Confusables { first_span, .. }) => { self.check_confusables(*first_span, target); @@ -124,6 +125,13 @@ impl<'tcx> CheckAttrVisitor<'tcx> { AttributeKind::Stability { span, .. 
} | AttributeKind::ConstStability { span, .. }, ) => self.check_stability_promotable(*span, target), + Attribute::Parsed(AttributeKind::Inline(InlineAttr::Force { .. }, ..)) => {} // handled separately below + Attribute::Parsed(AttributeKind::Inline(kind, attr_span)) => { + self.check_inline(hir_id, *attr_span, span, kind, target) + } + Attribute::Parsed(AttributeKind::Optimize(_, attr_span)) => { + self.check_optimize(hir_id, *attr_span, span, target) + } Attribute::Parsed(AttributeKind::AllowInternalUnstable(syms)) => self .check_allow_internal_unstable( hir_id, @@ -142,6 +150,16 @@ impl<'tcx> CheckAttrVisitor<'tcx> { } Attribute::Parsed(AttributeKind::Repr(_)) => { /* handled below this loop and elsewhere */ } + + &Attribute::Parsed(AttributeKind::PubTransparent(attr_span)) => { + self.check_rustc_pub_transparent(attr_span, span, attrs) + } + Attribute::Parsed(AttributeKind::Cold(attr_span)) => { + self.check_cold(hir_id, *attr_span, span, target) + } + Attribute::Parsed(AttributeKind::Align { align, span: repr_span }) => { + self.check_align(span, target, *align, *repr_span) + } Attribute::Parsed( AttributeKind::BodyStability { .. } | AttributeKind::ConstStabilityIndirect @@ -150,7 +168,17 @@ impl<'tcx> CheckAttrVisitor<'tcx> { Attribute::Parsed(AttributeKind::AsPtr(attr_span)) => { self.check_applied_to_fn_or_method(hir_id, *attr_span, span, target) } - Attribute::Unparsed(_) => { + Attribute::Parsed(AttributeKind::MayDangle(attr_span)) => { + self.check_may_dangle(hir_id, *attr_span) + } + Attribute::Parsed(AttributeKind::MustUse { span, .. }) => { + self.check_must_use(hir_id, *span, target) + } + Attribute::Parsed(AttributeKind::NoMangle(attr_span)) => { + self.check_no_mangle(hir_id, *attr_span, span, target) + } + Attribute::Unparsed(attr_item) => { + style = Some(attr_item.style); match attr.path().as_slice() { [sym::diagnostic, sym::do_not_recommend, ..] => { self.check_do_not_recommend(attr.span(), hir_id, target, attr, item) @@ -158,9 +186,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { [sym::diagnostic, sym::on_unimplemented, ..] => { self.check_diagnostic_on_unimplemented(attr.span(), hir_id, target) } - [sym::inline, ..] => self.check_inline(hir_id, attr, span, target), [sym::coverage, ..] => self.check_coverage(attr, span, target), - [sym::optimize, ..] => self.check_optimize(hir_id, attr, span, target), [sym::no_sanitize, ..] => { self.check_no_sanitize(attr, span, target) } @@ -175,6 +201,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { } [sym::doc, ..] => self.check_doc_attrs( attr, + attr_item.style, hir_id, target, &mut specified_inline, @@ -224,8 +251,6 @@ impl<'tcx> CheckAttrVisitor<'tcx> { | [sym::const_trait, ..] => self.check_must_be_applied_to_trait(attr, span, target), [sym::collapse_debuginfo, ..] => self.check_collapse_debuginfo(attr, span, target), [sym::must_not_suspend, ..] => self.check_must_not_suspend(attr, span, target), - [sym::must_use, ..] => self.check_must_use(hir_id, attr, target), - [sym::may_dangle, ..] => self.check_may_dangle(hir_id, attr), [sym::rustc_pass_by_value, ..] => self.check_pass_by_value(attr, span, target), [sym::rustc_allow_incoherent_impl, ..] => { self.check_allow_incoherent_impl(attr, span, target) @@ -236,11 +261,9 @@ impl<'tcx> CheckAttrVisitor<'tcx> { [sym::ffi_pure, ..] => self.check_ffi_pure(attr.span(), attrs, target), [sym::ffi_const, ..] => self.check_ffi_const(attr.span(), target), [sym::link_ordinal, ..] => self.check_link_ordinal(attr, span, target), - [sym::cold, ..] 
=> self.check_cold(hir_id, attr, span, target), [sym::link, ..] => self.check_link(hir_id, attr, span, target), [sym::link_name, ..] => self.check_link_name(hir_id, attr, span, target), [sym::link_section, ..] => self.check_link_section(hir_id, attr, span, target), - [sym::no_mangle, ..] => self.check_no_mangle(hir_id, attr, span, target), [sym::macro_use, ..] | [sym::macro_escape, ..] => { self.check_macro_use(hir_id, attr, target) } @@ -276,7 +299,6 @@ impl<'tcx> CheckAttrVisitor<'tcx> { self.check_type_const(hir_id,attr, target); } [sym::linkage, ..] => self.check_linkage(attr, span, target), - [sym::rustc_pub_transparent, ..] => self.check_rustc_pub_transparent(attr.span(), span, attrs), [ // ok sym::allow @@ -338,14 +360,14 @@ impl<'tcx> CheckAttrVisitor<'tcx> { if let Some(BuiltinAttribute { type_: AttributeType::CrateLevel, .. }) = attr.ident().and_then(|ident| BUILTIN_ATTRIBUTE_MAP.get(&ident.name)) { - match attr.style() { - ast::AttrStyle::Outer => self.tcx.emit_node_span_lint( + match style { + Some(ast::AttrStyle::Outer) => self.tcx.emit_node_span_lint( UNUSED_ATTRIBUTES, hir_id, attr.span(), errors::OuterCrateLevelAttr, ), - ast::AttrStyle::Inner => self.tcx.emit_node_span_lint( + Some(ast::AttrStyle::Inner) | None => self.tcx.emit_node_span_lint( UNUSED_ATTRIBUTES, hir_id, attr.span(), @@ -359,7 +381,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { check_duplicates(self.tcx, attr, hir_id, *duplicates, &mut seen); } - self.check_unused_attribute(hir_id, attr) + self.check_unused_attribute(hir_id, attr, style) } self.check_repr(attrs, span, target, item, hir_id); @@ -367,11 +389,11 @@ impl<'tcx> CheckAttrVisitor<'tcx> { self.check_rustc_force_inline(hir_id, attrs, span, target); } - fn inline_attr_str_error_with_macro_def(&self, hir_id: HirId, attr: &Attribute, sym: &str) { + fn inline_attr_str_error_with_macro_def(&self, hir_id: HirId, attr_span: Span, sym: &str) { self.tcx.emit_node_span_lint( UNUSED_ATTRIBUTES, hir_id, - attr.span(), + attr_span, errors::IgnoredAttrWithMacro { sym }, ); } @@ -431,7 +453,14 @@ impl<'tcx> CheckAttrVisitor<'tcx> { } /// Checks if an `#[inline]` is applied to a function or a closure. - fn check_inline(&self, hir_id: HirId, attr: &Attribute, span: Span, target: Target) { + fn check_inline( + &self, + hir_id: HirId, + attr_span: Span, + defn_span: Span, + kind: &InlineAttr, + target: Target, + ) { match target { Target::Fn | Target::Closure @@ -440,7 +469,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { self.tcx.emit_node_span_lint( UNUSED_ATTRIBUTES, hir_id, - attr.span(), + attr_span, errors::IgnoredInlineAttrFnProto, ) } @@ -451,25 +480,22 @@ impl<'tcx> CheckAttrVisitor<'tcx> { Target::AssocConst => self.tcx.emit_node_span_lint( UNUSED_ATTRIBUTES, hir_id, - attr.span(), + attr_span, errors::IgnoredInlineAttrConstants, ), // FIXME(#80564): Same for fields, arms, and macro defs Target::Field | Target::Arm | Target::MacroDef => { - self.inline_attr_str_error_with_macro_def(hir_id, attr, "inline") + self.inline_attr_str_error_with_macro_def(hir_id, attr_span, "inline") } _ => { - self.dcx().emit_err(errors::InlineNotFnOrClosure { - attr_span: attr.span(), - defn_span: span, - }); + self.dcx().emit_err(errors::InlineNotFnOrClosure { attr_span, defn_span }); } } // `#[inline]` is ignored if the symbol must be codegened upstream because it's exported. 
if let Some(did) = hir_id.as_owner() && self.tcx.def_kind(did).has_codegen_attrs() - && !matches!(attr.meta_item_list().as_deref(), Some([item]) if item.has_name(sym::never)) + && kind != &InlineAttr::Never { let attrs = self.tcx.codegen_fn_attrs(did); // Not checking naked as `#[inline]` is forbidden for naked functions anyways. @@ -477,7 +503,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { self.tcx.emit_node_span_lint( UNUSED_ATTRIBUTES, hir_id, - attr.span(), + attr_span, errors::InlineIgnoredForExported {}, ); } @@ -518,7 +544,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { /// Checks that `#[optimize(..)]` is applied to a function/closure/method, /// or to an impl block or module. - fn check_optimize(&self, hir_id: HirId, attr: &Attribute, span: Span, target: Target) { + fn check_optimize(&self, hir_id: HirId, attr_span: Span, span: Span, target: Target) { let is_valid = matches!( target, Target::Fn @@ -527,7 +553,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { ); if !is_valid { self.dcx().emit_err(errors::OptimizeInvalidTarget { - attr_span: attr.span(), + attr_span, defn_span: span, on_crate: hir_id == CRATE_HIR_ID, }); @@ -636,9 +662,8 @@ impl<'tcx> CheckAttrVisitor<'tcx> { sym::naked, sym::instruction_set, sym::repr, + sym::align, sym::rustc_std_internal_symbol, - // code generation - sym::cold, // documentation sym::doc, ]; @@ -672,10 +697,25 @@ impl<'tcx> CheckAttrVisitor<'tcx> { // this check can be part of the parser and be removed here match other_attr { Attribute::Parsed( - AttributeKind::Deprecation { .. } | AttributeKind::Repr { .. }, + AttributeKind::Deprecation { .. } + | AttributeKind::Repr { .. } + | AttributeKind::Align { .. } + | AttributeKind::NoMangle(..) + | AttributeKind::Cold(..) + | AttributeKind::MustUse { .. }, ) => { continue; } + Attribute::Parsed(AttributeKind::Inline(.., span)) => { + self.dcx().emit_err(errors::NakedFunctionIncompatibleAttribute { + span: *span, + naked_span: attr.span(), + attr: sym::inline.to_string(), + }); + + return; + } + // FIXME(jdonszelmann): make exhaustive _ => {} } @@ -698,7 +738,10 @@ impl<'tcx> CheckAttrVisitor<'tcx> { && !matches!(other_attr.path().as_slice(), [sym::rustfmt, ..]) { let path = other_attr.path(); - let path: Vec<_> = path.iter().map(|s| s.as_str()).collect(); + let path: Vec<_> = path + .iter() + .map(|s| if *s == kw::PathRoot { "" } else { s.as_str() }) + .collect(); let other_attr_name = path.join("::"); self.dcx().emit_err(errors::NakedFunctionIncompatibleAttribute { @@ -787,7 +830,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { // with crates depending on them, we can't throw an error here. Target::Field | Target::Arm | Target::MacroDef => { for attr in attrs { - self.inline_attr_str_error_with_macro_def(hir_id, attr, "track_caller"); + self.inline_attr_str_error_with_macro_def(hir_id, attr.span(), "track_caller"); } } _ => { @@ -830,7 +873,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { // erroneously allowed it and some crates used it accidentally, to be compatible // with crates depending on them, we can't throw an error here. Target::Field | Target::Arm | Target::MacroDef => { - self.inline_attr_str_error_with_macro_def(hir_id, attr, "non_exhaustive"); + self.inline_attr_str_error_with_macro_def(hir_id, attr.span(), "non_exhaustive"); } _ => { self.dcx().emit_err(errors::NonExhaustiveWrongLocation { @@ -850,7 +893,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { // erroneously allowed it and some crates used it accidentally, to be compatible // with crates depending on them, we can't throw an error here. 
Target::Field | Target::Arm | Target::MacroDef => { - self.inline_attr_str_error_with_macro_def(hir_id, attr, "marker"); + self.inline_attr_str_error_with_macro_def(hir_id, attr.span(), "marker"); } _ => { self.dcx().emit_err(errors::AttrShouldBeAppliedToTrait { @@ -904,7 +947,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { // erroneously allowed it and some crates used it accidentally, to be compatible // with crates depending on them, we can't throw an error here. Target::Field | Target::Arm | Target::MacroDef => { - self.inline_attr_str_error_with_macro_def(hir_id, attr, "target_feature"); + self.inline_attr_str_error_with_macro_def(hir_id, attr.span(), "target_feature"); } _ => { self.dcx().emit_err(errors::AttrShouldBeAppliedToFn { @@ -1163,7 +1206,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { /// the first `inline`/`no_inline` attribute. fn check_doc_inline( &self, - attr: &Attribute, + style: AttrStyle, meta: &MetaItemInner, hir_id: HirId, target: Target, @@ -1193,8 +1236,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { meta.span(), errors::DocInlineOnlyUse { attr_span: meta.span(), - item_span: (attr.style() == AttrStyle::Outer) - .then(|| self.tcx.hir_span(hir_id)), + item_span: (style == AttrStyle::Outer).then(|| self.tcx.hir_span(hir_id)), }, ); } @@ -1203,7 +1245,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { fn check_doc_masked( &self, - attr: &Attribute, + style: AttrStyle, meta: &MetaItemInner, hir_id: HirId, target: Target, @@ -1215,8 +1257,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { meta.span(), errors::DocMaskedOnlyExternCrate { attr_span: meta.span(), - item_span: (attr.style() == AttrStyle::Outer) - .then(|| self.tcx.hir_span(hir_id)), + item_span: (style == AttrStyle::Outer).then(|| self.tcx.hir_span(hir_id)), }, ); return; @@ -1229,8 +1270,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { meta.span(), errors::DocMaskedNotExternCrateSelf { attr_span: meta.span(), - item_span: (attr.style() == AttrStyle::Outer) - .then(|| self.tcx.hir_span(hir_id)), + item_span: (style == AttrStyle::Outer).then(|| self.tcx.hir_span(hir_id)), }, ); } @@ -1254,13 +1294,14 @@ impl<'tcx> CheckAttrVisitor<'tcx> { fn check_attr_crate_level( &self, attr: &Attribute, + style: AttrStyle, meta: &MetaItemInner, hir_id: HirId, ) -> bool { if hir_id != CRATE_HIR_ID { // insert a bang between `#` and `[...` let bang_span = attr.span().lo() + BytePos(1); - let sugg = (attr.style() == AttrStyle::Outer + let sugg = (style == AttrStyle::Outer && self.tcx.hir_get_parent_item(hir_id) == CRATE_OWNER_ID) .then_some(errors::AttrCrateLevelOnlySugg { attr: attr.span().with_lo(bang_span).with_hi(bang_span), @@ -1277,7 +1318,13 @@ impl<'tcx> CheckAttrVisitor<'tcx> { } /// Checks that `doc(test(...))` attribute contains only valid attributes and are at the right place. 
- fn check_test_attr(&self, attr: &Attribute, meta: &MetaItemInner, hir_id: HirId) { + fn check_test_attr( + &self, + attr: &Attribute, + style: AttrStyle, + meta: &MetaItemInner, + hir_id: HirId, + ) { if let Some(metas) = meta.meta_item_list() { for i_meta in metas { match (i_meta.name(), i_meta.meta_item()) { @@ -1285,7 +1332,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { // Allowed everywhere like `#[doc]` } (Some(sym::no_crate_inject), _) => { - self.check_attr_crate_level(attr, meta, hir_id); + self.check_attr_crate_level(attr, style, meta, hir_id); } (_, Some(m)) => { self.tcx.emit_node_span_lint( @@ -1339,6 +1386,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { fn check_doc_attrs( &self, attr: &Attribute, + style: AttrStyle, hir_id: HirId, target: Target, specified_inline: &mut Option<(bool, Span)>, @@ -1373,7 +1421,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { } Some(sym::test) => { - self.check_test_attr(attr, meta, hir_id); + self.check_test_attr(attr, style, meta, hir_id); } Some( @@ -1384,25 +1432,25 @@ impl<'tcx> CheckAttrVisitor<'tcx> { | sym::html_root_url | sym::html_no_source, ) => { - self.check_attr_crate_level(attr, meta, hir_id); + self.check_attr_crate_level(attr, style, meta, hir_id); } Some(sym::cfg_hide) => { - if self.check_attr_crate_level(attr, meta, hir_id) { + if self.check_attr_crate_level(attr, style, meta, hir_id) { self.check_doc_cfg_hide(meta, hir_id); } } Some(sym::inline | sym::no_inline) => { - self.check_doc_inline(attr, meta, hir_id, target, specified_inline) + self.check_doc_inline(style, meta, hir_id, target, specified_inline) } - Some(sym::masked) => self.check_doc_masked(attr, meta, hir_id, target), + Some(sym::masked) => self.check_doc_masked(style, meta, hir_id, target), Some(sym::cfg | sym::hidden | sym::notable_trait) => {} Some(sym::rust_logo) => { - if self.check_attr_crate_level(attr, meta, hir_id) + if self.check_attr_crate_level(attr, style, meta, hir_id) && !self.tcx.features().rustdoc_internals() { feature_err( @@ -1441,7 +1489,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { errors::DocTestUnknownInclude { path, value: value.to_string(), - inner: match attr.style() { + inner: match style { AttrStyle::Inner => "!", AttrStyle::Outer => "", }, @@ -1534,7 +1582,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { } /// Warns against some misuses of `#[must_use]` - fn check_must_use(&self, hir_id: HirId, attr: &Attribute, target: Target) { + fn check_must_use(&self, hir_id: HirId, attr_span: Span, target: Target) { if matches!( target, Target::Fn @@ -1574,7 +1622,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { self.tcx.emit_node_span_lint( UNUSED_ATTRIBUTES, hir_id, - attr.span(), + attr_span, errors::MustUseNoEffect { article, target }, ); } @@ -1590,7 +1638,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { } /// Checks if `#[may_dangle]` is applied to a lifetime or type generic parameter in `Drop` impl. - fn check_may_dangle(&self, hir_id: HirId, attr: &Attribute) { + fn check_may_dangle(&self, hir_id: HirId, attr_span: Span) { if let hir::Node::GenericParam(param) = self.tcx.hir_node(hir_id) && matches!( param.kind, @@ -1607,11 +1655,11 @@ impl<'tcx> CheckAttrVisitor<'tcx> { return; } - self.dcx().emit_err(errors::InvalidMayDangle { attr_span: attr.span() }); + self.dcx().emit_err(errors::InvalidMayDangle { attr_span }); } /// Checks if `#[cold]` is applied to a non-function. - fn check_cold(&self, hir_id: HirId, attr: &Attribute, span: Span, target: Target) { + fn check_cold(&self, hir_id: HirId, attr_span: Span, span: Span, target: Target) { match target { Target::Fn | Target::Method(..) 
| Target::ForeignFn | Target::Closure => {} // FIXME(#80564): We permit struct fields, match arms and macro defs to have an @@ -1619,7 +1667,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { // erroneously allowed it and some crates used it accidentally, to be compatible // with crates depending on them, we can't throw an error here. Target::Field | Target::Arm | Target::MacroDef => { - self.inline_attr_str_error_with_macro_def(hir_id, attr, "cold"); + self.inline_attr_str_error_with_macro_def(hir_id, attr_span, "cold"); } _ => { // FIXME: #[cold] was previously allowed on non-functions and some crates used @@ -1627,7 +1675,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { self.tcx.emit_node_span_lint( UNUSED_ATTRIBUTES, hir_id, - attr.span(), + attr_span, errors::Cold { span, on_crate: hir_id == CRATE_HIR_ID }, ); } @@ -1661,7 +1709,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { // erroneously allowed it and some crates used it accidentally, to be compatible // with crates depending on them, we can't throw an error here. Target::Field | Target::Arm | Target::MacroDef => { - self.inline_attr_str_error_with_macro_def(hir_id, attr, "link_name"); + self.inline_attr_str_error_with_macro_def(hir_id, attr.span(), "link_name"); } _ => { // FIXME: #[cold] was previously allowed on non-functions/statics and some crates @@ -1695,7 +1743,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { // erroneously allowed it and some crates used it accidentally, to be compatible // with crates depending on them, we can't throw an error here. Target::Field | Target::Arm | Target::MacroDef => { - self.inline_attr_str_error_with_macro_def(hir_id, attr, "no_link"); + self.inline_attr_str_error_with_macro_def(hir_id, attr.span(), "no_link"); } _ => { self.dcx().emit_err(errors::NoLink { attr_span: attr.span(), span }); @@ -1717,7 +1765,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { // erroneously allowed it and some crates used it accidentally, to be compatible // with crates depending on them, we can't throw an error here. Target::Field | Target::Arm | Target::MacroDef => { - self.inline_attr_str_error_with_macro_def(hir_id, attr, "export_name"); + self.inline_attr_str_error_with_macro_def(hir_id, attr.span(), "export_name"); } _ => { self.dcx().emit_err(errors::ExportName { attr_span: attr.span(), span }); @@ -1891,7 +1939,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { // erroneously allowed it and some crates used it accidentally, to be compatible // with crates depending on them, we can't throw an error here. Target::Field | Target::Arm | Target::MacroDef => { - self.inline_attr_str_error_with_macro_def(hir_id, attr, "link_section"); + self.inline_attr_str_error_with_macro_def(hir_id, attr.span(), "link_section"); } _ => { // FIXME: #[link_section] was previously allowed on non-functions/statics and some @@ -1907,7 +1955,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { } /// Checks if `#[no_mangle]` is applied to a function or static. - fn check_no_mangle(&self, hir_id: HirId, attr: &Attribute, span: Span, target: Target) { + fn check_no_mangle(&self, hir_id: HirId, attr_span: Span, span: Span, target: Target) { match target { Target::Static | Target::Fn => {} Target::Method(..) if self.is_impl_item(hir_id) => {} @@ -1916,7 +1964,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { // erroneously allowed it and some crates used it accidentally, to be compatible // with crates depending on them, we can't throw an error here. 
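// Illustrative example (not part of the patch): `#[must_use]` and `#[cold]` on a
// function, the target accepted by `check_must_use` and `check_cold` above; on
// fields, arms, or macro defs they would only raise the `unused_attributes` lint.
#[must_use = "the fallback value should be inspected"]
#[cold]
fn load_fallback_config() -> Option<String> {
    // hypothetical path, for illustration only
    std::fs::read_to_string("/etc/example/fallback.conf").ok()
}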
Target::Field | Target::Arm | Target::MacroDef => { - self.inline_attr_str_error_with_macro_def(hir_id, attr, "no_mangle"); + self.inline_attr_str_error_with_macro_def(hir_id, attr_span, "no_mangle"); } // FIXME: #[no_mangle] was previously allowed on non-functions/statics, this should be an error // The error should specify that the item that is wrong is specifically a *foreign* fn/static @@ -1930,8 +1978,8 @@ impl<'tcx> CheckAttrVisitor<'tcx> { self.tcx.emit_node_span_lint( UNUSED_ATTRIBUTES, hir_id, - attr.span(), - errors::NoMangleForeign { span, attr_span: attr.span(), foreign_item_kind }, + attr_span, + errors::NoMangleForeign { span, attr_span, foreign_item_kind }, ); } _ => { @@ -1940,13 +1988,35 @@ impl<'tcx> CheckAttrVisitor<'tcx> { self.tcx.emit_node_span_lint( UNUSED_ATTRIBUTES, hir_id, - attr.span(), + attr_span, errors::NoMangle { span }, ); } } } + /// Checks if the `#[align]` attributes on `item` are valid. + fn check_align(&self, span: Span, target: Target, align: Align, repr_span: Span) { + match target { + Target::Fn | Target::Method(_) => {} + Target::Struct | Target::Union | Target::Enum => { + self.dcx().emit_err(errors::AlignShouldBeReprAlign { + span: repr_span, + item: target.name(), + align_bytes: align.bytes(), + }); + } + _ => { + self.dcx().emit_err(errors::AttrApplication::StructEnumUnion { + hint_span: repr_span, + span, + }); + } + } + + self.check_align_value(align, repr_span); + } + /// Checks if the `#[repr]` attributes on `item` are valid. fn check_repr( &self, @@ -1999,23 +2069,16 @@ impl<'tcx> CheckAttrVisitor<'tcx> { match target { Target::Struct | Target::Union | Target::Enum => {} Target::Fn | Target::Method(_) => { - if !self.tcx.features().fn_align() { - feature_err( - &self.tcx.sess, - sym::fn_align, - *repr_span, - fluent::passes_repr_align_function, - ) - .emit(); - } + self.dcx().emit_err(errors::ReprAlignShouldBeAlign { + span: *repr_span, + item: target.name(), + }); } _ => { - self.dcx().emit_err( - errors::AttrApplication::StructEnumFunctionMethodUnion { - hint_span: *repr_span, - span, - }, - ); + self.dcx().emit_err(errors::AttrApplication::StructEnumUnion { + hint_span: *repr_span, + span, + }); } } @@ -2073,21 +2136,16 @@ impl<'tcx> CheckAttrVisitor<'tcx> { match target { Target::Struct | Target::Union | Target::Enum => continue, Target::Fn | Target::Method(_) => { - feature_err( - &self.tcx.sess, - sym::fn_align, - *repr_span, - fluent::passes_repr_align_function, - ) - .emit(); + self.dcx().emit_err(errors::ReprAlignShouldBeAlign { + span: *repr_span, + item: target.name(), + }); } _ => { - self.dcx().emit_err( - errors::AttrApplication::StructEnumFunctionMethodUnion { - hint_span: *repr_span, - span, - }, - ); + self.dcx().emit_err(errors::AttrApplication::StructEnumUnion { + hint_span: *repr_span, + span, + }); } } } @@ -2263,9 +2321,12 @@ impl<'tcx> CheckAttrVisitor<'tcx> { // `#[allow_internal_unstable]` attribute with just a lint, because we previously // erroneously allowed it and some crates used it accidentally, to be compatible // with crates depending on them, we can't throw an error here. 
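// Illustrative example (not part of the patch): `#[repr(align(N))]` on an ADT is the
// form the new `AlignShouldBeReprAlign` suggestion above points to, while function
// alignment is handled by the separate `#[align]`-style attribute that `check_align`
// validates.
#[repr(align(64))]
struct CacheLineAligned {
    bytes: [u8; 64],
}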
- Target::Field | Target::Arm | Target::MacroDef => { - self.inline_attr_str_error_with_macro_def(hir_id, attr, "allow_internal_unstable") - } + Target::Field | Target::Arm | Target::MacroDef => self + .inline_attr_str_error_with_macro_def( + hir_id, + attr.span(), + "allow_internal_unstable", + ), _ => { self.tcx .dcx() @@ -2382,7 +2443,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { } } - fn check_unused_attribute(&self, hir_id: HirId, attr: &Attribute) { + fn check_unused_attribute(&self, hir_id: HirId, attr: &Attribute, style: Option<AttrStyle>) { // FIXME(jdonszelmann): deduplicate these checks after more attrs are parsed. This is very // ugly now but can 100% be removed later. if let Attribute::Parsed(p) = attr { @@ -2435,14 +2496,14 @@ impl<'tcx> CheckAttrVisitor<'tcx> { }) { if hir_id != CRATE_HIR_ID { - match attr.style() { - ast::AttrStyle::Outer => self.tcx.emit_node_span_lint( + match style { + Some(ast::AttrStyle::Outer) => self.tcx.emit_node_span_lint( UNUSED_ATTRIBUTES, hir_id, attr.span(), errors::OuterCrateLevelAttr, ), - ast::AttrStyle::Inner => self.tcx.emit_node_span_lint( + Some(ast::AttrStyle::Inner) | None => self.tcx.emit_node_span_lint( UNUSED_ATTRIBUTES, hir_id, attr.span(), @@ -2638,8 +2699,10 @@ impl<'tcx> CheckAttrVisitor<'tcx> { span: Span, target: Target, ) { - let force_inline_attr = attrs.iter().find(|attr| attr.has_name(sym::rustc_force_inline)); - match (target, force_inline_attr) { + match ( + target, + find_attr!(attrs, AttributeKind::Inline(InlineAttr::Force { attr_span, .. }, _) => *attr_span), + ) { (Target::Closure, None) => { let is_coro = matches!( self.tcx.hir_expect_expr(hir_id).kind, @@ -2651,20 +2714,19 @@ impl<'tcx> CheckAttrVisitor<'tcx> { ); let parent_did = self.tcx.hir_get_parent_item(hir_id).to_def_id(); let parent_span = self.tcx.def_span(parent_did); - let parent_force_inline_attr = - self.tcx.get_attr(parent_did, sym::rustc_force_inline); - if let Some(attr) = parent_force_inline_attr - && is_coro + + if let Some(attr_span) = find_attr!( + self.tcx.get_all_attrs(parent_did), + AttributeKind::Inline(InlineAttr::Force { attr_span, .. 
}, _) => *attr_span + ) && is_coro { - self.dcx().emit_err(errors::RustcForceInlineCoro { - attr_span: attr.span(), - span: parent_span, - }); + self.dcx() + .emit_err(errors::RustcForceInlineCoro { attr_span, span: parent_span }); } } (Target::Fn, _) => (), - (_, Some(attr)) => { - self.dcx().emit_err(errors::RustcForceInline { attr_span: attr.span(), span }); + (_, Some(attr_span)) => { + self.dcx().emit_err(errors::RustcForceInline { attr_span, span }); } (_, None) => (), } @@ -2885,10 +2947,9 @@ fn check_invalid_crate_level_attr(tcx: TyCtxt<'_>, attrs: &[Attribute]) { fn check_non_exported_macro_for_invalid_attrs(tcx: TyCtxt<'_>, item: &Item<'_>) { let attrs = tcx.hir_attrs(item.hir_id()); - for attr in attrs { - if attr.has_name(sym::inline) { - tcx.dcx().emit_err(errors::NonExportedMacroInvalidAttrs { attr_span: attr.span() }); - } + if let Some(attr_span) = find_attr!(attrs, AttributeKind::Inline(i, span) if !matches!(i, InlineAttr::Force{..}) => *span) + { + tcx.dcx().emit_err(errors::NonExportedMacroInvalidAttrs { attr_span }); } } @@ -2908,6 +2969,7 @@ pub(crate) fn provide(providers: &mut Providers) { *providers = Providers { check_mod_attrs, ..*providers }; } +// FIXME(jdonszelmann): remove, check during parsing fn check_duplicates( tcx: TyCtxt<'_>, attr: &Attribute, diff --git a/compiler/rustc_passes/src/check_export.rs b/compiler/rustc_passes/src/check_export.rs index b9a3849f32f..f8f489d7d06 100644 --- a/compiler/rustc_passes/src/check_export.rs +++ b/compiler/rustc_passes/src/check_export.rs @@ -53,11 +53,11 @@ impl<'tcx> ExportableItemCollector<'tcx> { let is_pub = visibilities.is_directly_public(def_id); if has_attr && !is_pub { - let vis = visibilities.effective_vis(def_id).cloned().unwrap_or( + let vis = visibilities.effective_vis(def_id).cloned().unwrap_or_else(|| { EffectiveVisibility::from_vis(Visibility::Restricted( self.tcx.parent_module_from_def_id(def_id).to_local_def_id(), - )), - ); + )) + }); let vis = vis.at_level(Level::Direct); let span = self.tcx.def_span(def_id); diff --git a/compiler/rustc_passes/src/dead.rs b/compiler/rustc_passes/src/dead.rs index 4257d8e8d16..4738036318d 100644 --- a/compiler/rustc_passes/src/dead.rs +++ b/compiler/rustc_passes/src/dead.rs @@ -234,7 +234,14 @@ impl<'tcx> MarkSymbolVisitor<'tcx> { pats: &[hir::PatField<'_>], ) { let variant = match self.typeck_results().node_type(lhs.hir_id).kind() { - ty::Adt(adt, _) => adt.variant_of_res(res), + ty::Adt(adt, _) => { + // Marks the ADT live if its variant appears as the pattern, + // considering cases like `let T(x) = foo()` with `fn foo<T>() -> T;`, + // where we would lose the liveness info of `T` because we cannot mark it live when visiting `foo`.
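// Illustrative sketch (not part of the patch) of the situation described in the
// comment above and in the issue linked just below: `Wrapper` is only named through
// the generic return type of `make`, so unless the ADT is marked live at the pattern,
// the dead-code pass would report it as unused.
struct Wrapper(u32);

fn make<T>(value: T) -> T {
    value
}

fn main() {
    let Wrapper(x) = make(Wrapper(7));
    println!("{x}");
}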
+ // Related issue: https://github.com/rust-lang/rust/issues/120770 + self.check_def_id(adt.did()); + adt.variant_of_res(res) + } _ => span_bug!(lhs.span, "non-ADT in struct pattern"), }; for pat in pats { @@ -254,7 +261,11 @@ impl<'tcx> MarkSymbolVisitor<'tcx> { dotdot: hir::DotDotPos, ) { let variant = match self.typeck_results().node_type(lhs.hir_id).kind() { - ty::Adt(adt, _) => adt.variant_of_res(res), + ty::Adt(adt, _) => { + // Marks the ADT live if its variant appears as the pattern + self.check_def_id(adt.did()); + adt.variant_of_res(res) + } _ => { self.tcx.dcx().span_delayed_bug(lhs.span, "non-ADT in tuple struct pattern"); return; @@ -359,31 +370,6 @@ impl<'tcx> MarkSymbolVisitor<'tcx> { return false; } - // don't ignore impls for Enums and pub Structs whose methods don't have self receiver, - // cause external crate may call such methods to construct values of these types - if let Some(local_impl_of) = impl_of.as_local() - && let Some(local_def_id) = def_id.as_local() - && let Some(fn_sig) = - self.tcx.hir_fn_sig_by_hir_id(self.tcx.local_def_id_to_hir_id(local_def_id)) - && matches!(fn_sig.decl.implicit_self, hir::ImplicitSelfKind::None) - && let TyKind::Path(QPath::Resolved(_, path)) = - self.tcx.hir_expect_item(local_impl_of).expect_impl().self_ty.kind - && let Res::Def(def_kind, did) = path.res - { - match def_kind { - // for example, #[derive(Default)] pub struct T(i32); - // external crate can call T::default() to construct T, - // so that don't ignore impl Default for pub Enum and Structs - DefKind::Struct | DefKind::Union if self.tcx.visibility(did).is_public() => { - return false; - } - // don't ignore impl Default for Enums, - // cause we don't know which variant is constructed - DefKind::Enum => return false, - _ => (), - }; - } - if let Some(trait_of) = self.tcx.trait_id_of_impl(impl_of) && self.tcx.has_attr(trait_of, sym::rustc_trivial_field_reads) { @@ -494,38 +480,25 @@ impl<'tcx> MarkSymbolVisitor<'tcx> { impl_id: hir::ItemId, local_def_id: LocalDefId, ) -> bool { - if self.should_ignore_item(local_def_id.to_def_id()) { - return false; - } - let trait_def_id = match self.tcx.def_kind(local_def_id) { // assoc impl items of traits are live if the corresponding trait items are live - DefKind::AssocFn => self.tcx.associated_item(local_def_id).trait_item_def_id, + DefKind::AssocFn => self + .tcx + .associated_item(local_def_id) + .trait_item_def_id + .and_then(|def_id| def_id.as_local()), // impl items are live if the corresponding traits are live DefKind::Impl { of_trait: true } => self .tcx .impl_trait_ref(impl_id.owner_id.def_id) - .and_then(|trait_ref| Some(trait_ref.skip_binder().def_id)), + .and_then(|trait_ref| trait_ref.skip_binder().def_id.as_local()), _ => None, }; - if let Some(trait_def_id) = trait_def_id { - if let Some(trait_def_id) = trait_def_id.as_local() - && !self.live_symbols.contains(&trait_def_id) - { - return false; - } - - // FIXME: legacy logic to check whether the function may construct `Self`, - // this can be removed after supporting marking ADTs appearing in patterns - // as live, then we can check private impls of public traits directly - if let Some(fn_sig) = - self.tcx.hir_fn_sig_by_hir_id(self.tcx.local_def_id_to_hir_id(local_def_id)) - && matches!(fn_sig.decl.implicit_self, hir::ImplicitSelfKind::None) - && self.tcx.visibility(trait_def_id).is_public() - { - return true; - } + if let Some(trait_def_id) = trait_def_id + && !self.live_symbols.contains(&trait_def_id) + { + return false; } // The impl or impl item is used if the 
corresponding trait or trait item is used and the ty is used. @@ -635,6 +608,11 @@ impl<'tcx> Visitor<'tcx> for MarkSymbolVisitor<'tcx> { fn visit_pat_expr(&mut self, expr: &'tcx rustc_hir::PatExpr<'tcx>) { match &expr.kind { rustc_hir::PatExprKind::Path(qpath) => { + // mark the type of variant live when meeting E::V in expr + if let ty::Adt(adt, _) = self.typeck_results().node_type(expr.hir_id).kind() { + self.check_def_id(adt.did()); + } + let res = self.typeck_results().qpath_res(qpath, expr.hir_id); self.handle_res(res); } diff --git a/compiler/rustc_passes/src/diagnostic_items.rs b/compiler/rustc_passes/src/diagnostic_items.rs index 17a729f422a..8f572af02c2 100644 --- a/compiler/rustc_passes/src/diagnostic_items.rs +++ b/compiler/rustc_passes/src/diagnostic_items.rs @@ -10,7 +10,7 @@ //! * Compiler internal types like `Ty` and `TyCtxt` use rustc_hir::diagnostic_items::DiagnosticItems; -use rustc_hir::{Attribute, OwnerId}; +use rustc_hir::{Attribute, CRATE_OWNER_ID, OwnerId}; use rustc_middle::query::{LocalCrate, Providers}; use rustc_middle::ty::TyCtxt; use rustc_span::def_id::{DefId, LOCAL_CRATE}; @@ -67,7 +67,7 @@ fn diagnostic_items(tcx: TyCtxt<'_>, _: LocalCrate) -> DiagnosticItems { // Collect diagnostic items in this crate. let crate_items = tcx.hir_crate_items(()); - for id in crate_items.owners() { + for id in crate_items.owners().chain(std::iter::once(CRATE_OWNER_ID)) { observe_item(tcx, &mut diagnostic_items, id); } diff --git a/compiler/rustc_passes/src/errors.rs b/compiler/rustc_passes/src/errors.rs index f0d4b610f63..587d9170f06 100644 --- a/compiler/rustc_passes/src/errors.rs +++ b/compiler/rustc_passes/src/errors.rs @@ -1308,13 +1308,6 @@ pub(crate) enum AttrApplication { #[label] span: Span, }, - #[diag(passes_attr_application_struct_enum_function_method_union, code = E0517)] - StructEnumFunctionMethodUnion { - #[primary_span] - hint_span: Span, - #[label] - span: Span, - }, } #[derive(Diagnostic)] @@ -1816,3 +1809,26 @@ pub(crate) enum UnexportableItem<'a> { field_name: &'a str, }, } + +#[derive(Diagnostic)] +#[diag(passes_repr_align_should_be_align)] +pub(crate) struct ReprAlignShouldBeAlign { + #[primary_span] + #[help] + pub span: Span, + pub item: &'static str, +} + +#[derive(Diagnostic)] +#[diag(passes_align_should_be_repr_align)] +pub(crate) struct AlignShouldBeReprAlign { + #[primary_span] + #[suggestion( + style = "verbose", + applicability = "machine-applicable", + code = "#[repr(align({align_bytes}))]" + )] + pub span: Span, + pub item: &'static str, + pub align_bytes: u64, +} diff --git a/compiler/rustc_query_system/Cargo.toml b/compiler/rustc_query_system/Cargo.toml index 7db06953aeb..3d2d879a764 100644 --- a/compiler/rustc_query_system/Cargo.toml +++ b/compiler/rustc_query_system/Cargo.toml @@ -6,7 +6,6 @@ edition = "2024" [dependencies] # tidy-alphabetical-start parking_lot = "0.12" -rustc-rayon-core = { version = "0.5.0" } rustc_abi = { path = "../rustc_abi" } rustc_ast = { path = "../rustc_ast" } rustc_attr_data_structures = { path = "../rustc_attr_data_structures" } @@ -21,6 +20,7 @@ rustc_macros = { path = "../rustc_macros" } rustc_serialize = { path = "../rustc_serialize" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } +rustc_thread_pool = { path = "../rustc_thread_pool" } smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } tracing = "0.1" # tidy-alphabetical-end diff --git a/compiler/rustc_query_system/src/query/job.rs b/compiler/rustc_query_system/src/query/job.rs index 
1e79bd461d2..7e61f5026da 100644 --- a/compiler/rustc_query_system/src/query/job.rs +++ b/compiler/rustc_query_system/src/query/job.rs @@ -236,7 +236,7 @@ impl<I> QueryLatch<I> { // If this detects a deadlock and the deadlock handler wants to resume this thread // we have to be in the `wait` call. This is ensured by the deadlock handler // getting the self.info lock. - rayon_core::mark_blocked(); + rustc_thread_pool::mark_blocked(); let proxy = qcx.jobserver_proxy(); proxy.release_thread(); waiter.condvar.wait(&mut info); @@ -251,9 +251,9 @@ impl<I> QueryLatch<I> { let mut info = self.info.lock(); debug_assert!(!info.complete); info.complete = true; - let registry = rayon_core::Registry::current(); + let registry = rustc_thread_pool::Registry::current(); for waiter in info.waiters.drain(..) { - rayon_core::mark_unblocked(&registry); + rustc_thread_pool::mark_unblocked(&registry); waiter.condvar.notify_one(); } } @@ -507,7 +507,7 @@ fn remove_cycle<I: Clone>( /// all active queries for cycles before finally resuming all the waiters at once. pub fn break_query_cycles<I: Clone + Debug>( query_map: QueryMap<I>, - registry: &rayon_core::Registry, + registry: &rustc_thread_pool::Registry, ) { let mut wakelist = Vec::new(); // It is OK per the comments: @@ -543,7 +543,7 @@ pub fn break_query_cycles<I: Clone + Debug>( // we wake the threads up as otherwise Rayon could detect a deadlock if a thread we // resumed fell asleep and this thread had yet to mark the remaining threads as unblocked. for _ in 0..wakelist.len() { - rayon_core::mark_unblocked(registry); + rustc_thread_pool::mark_unblocked(registry); } for waiter in wakelist.into_iter() { diff --git a/compiler/rustc_resolve/src/def_collector.rs b/compiler/rustc_resolve/src/def_collector.rs index f8e0a6936a0..16852d1661e 100644 --- a/compiler/rustc_resolve/src/def_collector.rs +++ b/compiler/rustc_resolve/src/def_collector.rs @@ -3,7 +3,7 @@ use std::mem; use rustc_ast::visit::FnKind; use rustc_ast::*; use rustc_ast_pretty::pprust; -use rustc_attr_parsing::{AttributeParser, OmitDoc}; +use rustc_attr_parsing::{AttributeParser, Early, OmitDoc}; use rustc_expand::expand::AstFragment; use rustc_hir as hir; use rustc_hir::def::{CtorKind, CtorOf, DefKind}; @@ -128,7 +128,7 @@ impl<'a, 'ra, 'tcx> visit::Visitor<'a> for DefCollector<'a, 'ra, 'tcx> { // FIXME(jdonszelmann) make one of these in the resolver? // FIXME(jdonszelmann) don't care about tools here maybe? Just parse what we can. // Does that prevent errors from happening?
maybe - let mut parser = AttributeParser::new_early( + let mut parser = AttributeParser::<'_, Early>::new( &self.resolver.tcx.sess, self.resolver.tcx.features(), Vec::new(), diff --git a/compiler/rustc_resolve/src/late.rs b/compiler/rustc_resolve/src/late.rs index 338d9edcd22..ac7bdda4195 100644 --- a/compiler/rustc_resolve/src/late.rs +++ b/compiler/rustc_resolve/src/late.rs @@ -869,11 +869,9 @@ impl<'ast, 'ra, 'tcx> Visitor<'ast> for LateResolutionVisitor<'_, 'ast, 'ra, 'tc self.with_generic_param_rib( &[], RibKind::Normal, - LifetimeRibKind::Generics { - binder: ty.id, - kind: LifetimeBinderKind::PolyTrait, - span, - }, + ty.id, + LifetimeBinderKind::PolyTrait, + span, |this| this.visit_path(path), ); } else { @@ -907,11 +905,9 @@ impl<'ast, 'ra, 'tcx> Visitor<'ast> for LateResolutionVisitor<'_, 'ast, 'ra, 'tc self.with_generic_param_rib( &bare_fn.generic_params, RibKind::Normal, - LifetimeRibKind::Generics { - binder: ty.id, - kind: LifetimeBinderKind::BareFnType, - span, - }, + ty.id, + LifetimeBinderKind::BareFnType, + span, |this| { this.visit_generic_params(&bare_fn.generic_params, false); this.with_lifetime_rib( @@ -942,11 +938,9 @@ impl<'ast, 'ra, 'tcx> Visitor<'ast> for LateResolutionVisitor<'_, 'ast, 'ra, 'tc self.with_generic_param_rib( &unsafe_binder.generic_params, RibKind::Normal, - LifetimeRibKind::Generics { - binder: ty.id, - kind: LifetimeBinderKind::BareFnType, - span, - }, + ty.id, + LifetimeBinderKind::BareFnType, + span, |this| { this.visit_generic_params(&unsafe_binder.generic_params, false); this.with_lifetime_rib( @@ -995,11 +989,9 @@ impl<'ast, 'ra, 'tcx> Visitor<'ast> for LateResolutionVisitor<'_, 'ast, 'ra, 'tc self.with_generic_param_rib( &tref.bound_generic_params, RibKind::Normal, - LifetimeRibKind::Generics { - binder: tref.trait_ref.ref_id, - kind: LifetimeBinderKind::PolyTrait, - span, - }, + tref.trait_ref.ref_id, + LifetimeBinderKind::PolyTrait, + span, |this| { this.visit_generic_params(&tref.bound_generic_params, false); this.smart_resolve_path( @@ -1020,11 +1012,9 @@ impl<'ast, 'ra, 'tcx> Visitor<'ast> for LateResolutionVisitor<'_, 'ast, 'ra, 'tc self.with_generic_param_rib( &generics.params, RibKind::Item(HasGenericParams::Yes(generics.span), def_kind), - LifetimeRibKind::Generics { - binder: foreign_item.id, - kind: LifetimeBinderKind::Item, - span: generics.span, - }, + foreign_item.id, + LifetimeBinderKind::Item, + generics.span, |this| visit::walk_item(this, foreign_item), ); } @@ -1032,11 +1022,9 @@ impl<'ast, 'ra, 'tcx> Visitor<'ast> for LateResolutionVisitor<'_, 'ast, 'ra, 'tc self.with_generic_param_rib( &generics.params, RibKind::Item(HasGenericParams::Yes(generics.span), def_kind), - LifetimeRibKind::Generics { - binder: foreign_item.id, - kind: LifetimeBinderKind::Function, - span: generics.span, - }, + foreign_item.id, + LifetimeBinderKind::Function, + generics.span, |this| visit::walk_item(this, foreign_item), ); } @@ -1374,11 +1362,9 @@ impl<'ast, 'ra, 'tcx> Visitor<'ast> for LateResolutionVisitor<'_, 'ast, 'ra, 'tc this.with_generic_param_rib( bound_generic_params, RibKind::Normal, - LifetimeRibKind::Generics { - binder: bounded_ty.id, - kind: LifetimeBinderKind::WhereBound, - span, - }, + bounded_ty.id, + LifetimeBinderKind::WhereBound, + span, |this| { this.visit_generic_params(bound_generic_params, false); this.visit_ty(bounded_ty); @@ -1432,11 +1418,14 @@ impl<'ast, 'ra, 'tcx> Visitor<'ast> for LateResolutionVisitor<'_, 'ast, 'ra, 'tc fn visit_variant(&mut self, v: &'ast Variant) { self.resolve_doc_links(&v.attrs, 
MaybeExported::Ok(v.id)); - visit::walk_variant(self, v) - } - - fn visit_variant_discr(&mut self, discr: &'ast AnonConst) { - self.resolve_anon_const(discr, AnonConstKind::EnumDiscriminant); + self.visit_id(v.id); + walk_list!(self, visit_attribute, &v.attrs); + self.visit_vis(&v.vis); + self.visit_ident(&v.ident); + self.visit_variant_data(&v.data); + if let Some(discr) = &v.disr_expr { + self.resolve_anon_const(discr, AnonConstKind::EnumDiscriminant); + } } fn visit_field_def(&mut self, f: &'ast FieldDef) { @@ -2555,11 +2544,9 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> { this.with_generic_param_rib( &generics.params, RibKind::Item(HasGenericParams::Yes(generics.span), kind), - LifetimeRibKind::Generics { - binder: item.id, - kind: LifetimeBinderKind::Item, - span: generics.span, - }, + item.id, + LifetimeBinderKind::Item, + generics.span, |this| { let item_def_id = this.r.local_def_id(item.id).to_def_id(); this.with_self_rib( @@ -2632,11 +2619,9 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> { self.with_generic_param_rib( &generics.params, RibKind::Item(HasGenericParams::Yes(generics.span), def_kind), - LifetimeRibKind::Generics { - binder: item.id, - kind: LifetimeBinderKind::Item, - span: generics.span, - }, + item.id, + LifetimeBinderKind::Item, + generics.span, |this| visit::walk_item(this, item), ); } @@ -2645,11 +2630,9 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> { self.with_generic_param_rib( &generics.params, RibKind::Item(HasGenericParams::Yes(generics.span), def_kind), - LifetimeRibKind::Generics { - binder: item.id, - kind: LifetimeBinderKind::Function, - span: generics.span, - }, + item.id, + LifetimeBinderKind::Function, + generics.span, |this| visit::walk_item(this, item), ); self.resolve_define_opaques(define_opaque); @@ -2685,11 +2668,9 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> { self.with_generic_param_rib( &generics.params, RibKind::Item(HasGenericParams::Yes(generics.span), def_kind), - LifetimeRibKind::Generics { - binder: item.id, - kind: LifetimeBinderKind::Item, - span: generics.span, - }, + item.id, + LifetimeBinderKind::Item, + generics.span, |this| { let local_def_id = this.r.local_def_id(item.id).to_def_id(); this.with_self_rib(Res::SelfTyParam { trait_: local_def_id }, |this| { @@ -2706,11 +2687,9 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> { self.with_generic_param_rib( &generics.params, RibKind::Item(HasGenericParams::Yes(generics.span), def_kind), - LifetimeRibKind::Generics { - binder: item.id, - kind: LifetimeBinderKind::Item, - span: generics.span, - }, + item.id, + LifetimeBinderKind::Item, + generics.span, |this| { let local_def_id = this.r.local_def_id(item.id).to_def_id(); this.with_self_rib(Res::SelfTyParam { trait_: local_def_id }, |this| { @@ -2776,11 +2755,9 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> { }, def_kind, ), - LifetimeRibKind::Generics { - binder: item.id, - kind: LifetimeBinderKind::ConstItem, - span: generics.span, - }, + item.id, + LifetimeBinderKind::ConstItem, + generics.span, |this| { this.visit_generics(generics); @@ -2825,11 +2802,9 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> { self.with_generic_param_rib( &[], RibKind::Item(HasGenericParams::Yes(span), def_kind), - LifetimeRibKind::Generics { - binder: item.id, - kind: LifetimeBinderKind::Function, - span, - }, + item.id, + LifetimeBinderKind::Function, + span, |this| 
this.resolve_delegation(delegation), ); } @@ -2846,17 +2821,16 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> { &'c mut self, params: &'c [GenericParam], kind: RibKind<'ra>, - lifetime_kind: LifetimeRibKind, + binder: NodeId, + generics_kind: LifetimeBinderKind, + generics_span: Span, f: F, ) where F: FnOnce(&mut Self), { debug!("with_generic_param_rib"); - let LifetimeRibKind::Generics { binder, span: generics_span, kind: generics_kind, .. } = - lifetime_kind - else { - panic!() - }; + let lifetime_kind = + LifetimeRibKind::Generics { binder, span: generics_span, kind: generics_kind }; let mut function_type_rib = Rib::new(kind); let mut function_value_rib = Rib::new(kind); @@ -3086,7 +3060,9 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> { this.with_generic_param_rib( &generics.params, RibKind::AssocItem, - LifetimeRibKind::Generics { binder: item.id, span: generics.span, kind }, + item.id, + kind, + generics.span, |this| visit::walk_assoc_item(this, item, AssocCtxt::Trait), ); }; @@ -3104,11 +3080,9 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> { self.with_generic_param_rib( &generics.params, RibKind::AssocItem, - LifetimeRibKind::Generics { - binder: item.id, - span: generics.span, - kind: LifetimeBinderKind::ConstItem, - }, + item.id, + LifetimeBinderKind::ConstItem, + generics.span, |this| { this.with_lifetime_rib( LifetimeRibKind::StaticIfNoLifetimeInScope { @@ -3145,11 +3119,9 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> { self.with_generic_param_rib( &[], RibKind::AssocItem, - LifetimeRibKind::Generics { - binder: item.id, - kind: LifetimeBinderKind::Function, - span: delegation.path.segments.last().unwrap().ident.span, - }, + item.id, + LifetimeBinderKind::Function, + delegation.path.segments.last().unwrap().ident.span, |this| this.resolve_delegation(delegation), ); } @@ -3227,11 +3199,9 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> { self.with_generic_param_rib( &generics.params, RibKind::Item(HasGenericParams::Yes(generics.span), self.r.local_def_kind(item_id)), - LifetimeRibKind::Generics { - span: generics.span, - binder: item_id, - kind: LifetimeBinderKind::ImplBlock, - }, + item_id, + LifetimeBinderKind::ImplBlock, + generics.span, |this| { // Dummy self type for better errors if `Self` is used in the trait path. this.with_self_rib(Res::SelfTyParam { trait_: LOCAL_CRATE.as_def_id() }, |this| { @@ -3316,15 +3286,14 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> { self.with_generic_param_rib( &generics.params, RibKind::AssocItem, - LifetimeRibKind::Generics { - binder: item.id, - span: generics.span, - kind: LifetimeBinderKind::ConstItem, - }, + item.id, + LifetimeBinderKind::ConstItem, + generics.span, |this| { this.with_lifetime_rib( - // Until these are a hard error, we need to create them within the correct binder, - // Otherwise the lifetimes of this assoc const think they are lifetimes of the trait. + // Until these are a hard error, we need to create them within the + // correct binder, Otherwise the lifetimes of this assoc const think + // they are lifetimes of the trait. 
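// Illustrative example (not part of the patch): the surface syntax behind some of the
// `LifetimeBinderKind`s now passed directly to `with_generic_param_rib` above: a bare
// function type (`BareFnType`) and a higher-ranked where-bound (`WhereBound`), each
// introducing its own lifetime binder.
type Callback = for<'a> fn(&'a str) -> usize;

fn call_with_str<F>(f: F)
where
    for<'a> F: Fn(&'a str),
{
    f("hello");
}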
LifetimeRibKind::AnonymousCreateParameter { binder: item.id, report_in_path: true, @@ -3373,11 +3342,9 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> { self.with_generic_param_rib( &generics.params, RibKind::AssocItem, - LifetimeRibKind::Generics { - binder: item.id, - span: generics.span, - kind: LifetimeBinderKind::Function, - }, + item.id, + LifetimeBinderKind::Function, + generics.span, |this| { // If this is a trait impl, ensure the method // exists in trait @@ -3404,11 +3371,9 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> { self.with_generic_param_rib( &generics.params, RibKind::AssocItem, - LifetimeRibKind::Generics { - binder: item.id, - span: generics.span, - kind: LifetimeBinderKind::Item, - }, + item.id, + LifetimeBinderKind::Item, + generics.span, |this| { this.with_lifetime_rib(LifetimeRibKind::AnonymousReportError, |this| { // If this is a trait impl, ensure the type @@ -3434,11 +3399,9 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> { self.with_generic_param_rib( &[], RibKind::AssocItem, - LifetimeRibKind::Generics { - binder: item.id, - kind: LifetimeBinderKind::Function, - span: delegation.path.segments.last().unwrap().ident.span, - }, + item.id, + LifetimeBinderKind::Function, + delegation.path.segments.last().unwrap().ident.span, |this| { this.check_trait_item( item.id, @@ -4951,11 +4914,9 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> { self.with_generic_param_rib( generic_params, RibKind::Normal, - LifetimeRibKind::Generics { - binder: expr.id, - kind: LifetimeBinderKind::Closure, - span, - }, + expr.id, + LifetimeBinderKind::Closure, + span, |this| visit::walk_expr(this, expr), ); } diff --git a/compiler/rustc_resolve/src/rustdoc.rs b/compiler/rustc_resolve/src/rustdoc.rs index fa839d2748d..931c6241bf2 100644 --- a/compiler/rustc_resolve/src/rustdoc.rs +++ b/compiler/rustc_resolve/src/rustdoc.rs @@ -356,7 +356,12 @@ pub fn strip_generics_from_path(path_str: &str) -> Result<Box<str>, MalformedGen /// If there are no doc-comments, return true. /// FIXME(#78591): Support both inner and outer attributes on the same item. pub fn inner_docs(attrs: &[impl AttributeExt]) -> bool { - attrs.iter().find(|a| a.doc_str().is_some()).is_none_or(|a| a.style() == ast::AttrStyle::Inner) + for attr in attrs { + if let Some(attr_style) = attr.doc_resolution_scope() { + return attr_style == ast::AttrStyle::Inner; + } + } + true } /// Has `#[rustc_doc_primitive]` or `#[doc(keyword)]`. diff --git a/compiler/rustc_serialize/src/int_overflow.rs b/compiler/rustc_serialize/src/int_overflow.rs index f2aac2ef711..6782fbc33da 100644 --- a/compiler/rustc_serialize/src/int_overflow.rs +++ b/compiler/rustc_serialize/src/int_overflow.rs @@ -20,6 +20,7 @@ macro_rules! impl_debug_strict_add { ($( $ty:ty )*) => { $( impl DebugStrictAdd for $ty { + #[inline] fn debug_strict_add(self, other: Self) -> Self { if cfg!(debug_assertions) { self + other @@ -42,6 +43,7 @@ macro_rules! 
impl_debug_strict_sub { ($( $ty:ty )*) => { $( impl DebugStrictSub for $ty { + #[inline] fn debug_strict_sub(self, other: Self) -> Self { if cfg!(debug_assertions) { self - other diff --git a/compiler/rustc_serialize/src/opaque.rs b/compiler/rustc_serialize/src/opaque.rs index 00bad8e70cf..4242642c664 100644 --- a/compiler/rustc_serialize/src/opaque.rs +++ b/compiler/rustc_serialize/src/opaque.rs @@ -89,10 +89,12 @@ impl FileEncoder { self.buffered = 0; } + #[inline] pub fn file(&self) -> &File { &self.file } + #[inline] pub fn path(&self) -> &Path { &self.path } diff --git a/compiler/rustc_session/messages.ftl b/compiler/rustc_session/messages.ftl index 61953614c77..528c52eace7 100644 --- a/compiler/rustc_session/messages.ftl +++ b/compiler/rustc_session/messages.ftl @@ -40,11 +40,6 @@ session_file_is_not_writeable = output file {$file} is not writeable -- check it session_file_write_fail = failed to write `{$path}` due to error `{$err}` -session_forbidden_ctarget_feature = - target feature `{$feature}` cannot be {$enabled} with `-Ctarget-feature`: {$reason} - .note = this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! -session_forbidden_ctarget_feature_issue = for more information, see issue #116344 <https://github.com/rust-lang/rust/issues/116344> - session_function_return_requires_x86_or_x86_64 = `-Zfunction-return` (except `keep`) is only supported on x86 and x86_64 session_function_return_thunk_extern_requires_non_large_code_model = `-Zfunction-return=thunk-extern` is only supported on non-large code models @@ -137,9 +132,6 @@ session_target_stack_protector_not_supported = `-Z stack-protector={$stack_prote session_unleashed_feature_help_named = skipping check for `{$gate}` feature session_unleashed_feature_help_unnamed = skipping check that does not even have a feature gate -session_unstable_ctarget_feature = - unstable feature specified for `-Ctarget-feature`: `{$feature}` - .note = this feature is not stably supported; its behavior can change in the future session_unstable_virtual_function_elimination = `-Zvirtual-function-elimination` requires `-Clto` session_unsupported_crate_type_for_target = diff --git a/compiler/rustc_session/src/config.rs b/compiler/rustc_session/src/config.rs index 04ca0b75c31..87e4b0a17aa 100644 --- a/compiler/rustc_session/src/config.rs +++ b/compiler/rustc_session/src/config.rs @@ -1364,8 +1364,10 @@ impl Default for Options { cli_forced_local_thinlto_off: false, remap_path_prefix: Vec::new(), real_rust_source_base_dir: None, + real_rustc_dev_source_base_dir: None, edition: DEFAULT_EDITION, json_artifact_notifications: false, + json_timings: false, json_unused_externs: JsonUnusedExterns::No, json_future_incompat: false, pretty: None, @@ -1880,6 +1882,9 @@ pub struct JsonConfig { pub json_rendered: HumanReadableErrorType, pub json_color: ColorConfig, json_artifact_notifications: bool, + /// Output start and end timestamps of several high-level compilation sections + /// (frontend, backend, linker). 
+ json_timings: bool, pub json_unused_externs: JsonUnusedExterns, json_future_incompat: bool, } @@ -1921,6 +1926,7 @@ pub fn parse_json(early_dcx: &EarlyDiagCtxt, matches: &getopts::Matches) -> Json let mut json_artifact_notifications = false; let mut json_unused_externs = JsonUnusedExterns::No; let mut json_future_incompat = false; + let mut json_timings = false; for option in matches.opt_strs("json") { // For now conservatively forbid `--color` with `--json` since `--json` // won't actually be emitting any colors and anything colorized is @@ -1937,6 +1943,7 @@ pub fn parse_json(early_dcx: &EarlyDiagCtxt, matches: &getopts::Matches) -> Json } "diagnostic-rendered-ansi" => json_color = ColorConfig::Always, "artifacts" => json_artifact_notifications = true, + "timings" => json_timings = true, "unused-externs" => json_unused_externs = JsonUnusedExterns::Loud, "unused-externs-silent" => json_unused_externs = JsonUnusedExterns::Silent, "future-incompat" => json_future_incompat = true, @@ -1949,6 +1956,7 @@ pub fn parse_json(early_dcx: &EarlyDiagCtxt, matches: &getopts::Matches) -> Json json_rendered, json_color, json_artifact_notifications, + json_timings, json_unused_externs, json_future_incompat, } @@ -2476,6 +2484,7 @@ pub fn build_session_options(early_dcx: &mut EarlyDiagCtxt, matches: &getopts::M json_rendered, json_color, json_artifact_notifications, + json_timings, json_unused_externs, json_future_incompat, } = parse_json(early_dcx, matches); @@ -2497,6 +2506,10 @@ pub fn build_session_options(early_dcx: &mut EarlyDiagCtxt, matches: &getopts::M let mut unstable_opts = UnstableOptions::build(early_dcx, matches, &mut target_modifiers); let (lint_opts, describe_lints, lint_cap) = get_cmd_lint_options(early_dcx, matches); + if !unstable_opts.unstable_options && json_timings { + early_dcx.early_fatal("--json=timings is unstable and requires using `-Zunstable-options`"); + } + check_error_format_stability(early_dcx, &unstable_opts, error_format); let output_types = parse_output_types(early_dcx, &unstable_opts, matches); @@ -2701,9 +2714,8 @@ pub fn build_session_options(early_dcx: &mut EarlyDiagCtxt, matches: &getopts::M let sysroot = filesearch::materialize_sysroot(sysroot_opt); - let real_rust_source_base_dir = { - // This is the location used by the `rust-src` `rustup` component. - let mut candidate = sysroot.join("lib/rustlib/src/rust"); + let real_source_base_dir = |suffix: &str, confirm: &str| { + let mut candidate = sysroot.join(suffix); if let Ok(metadata) = candidate.symlink_metadata() { // Replace the symlink bootstrap creates, with its destination. // We could try to use `fs::canonicalize` instead, but that might @@ -2716,9 +2728,17 @@ pub fn build_session_options(early_dcx: &mut EarlyDiagCtxt, matches: &getopts::M } // Only use this directory if it has a file we can expect to always find. - candidate.join("library/std/src/lib.rs").is_file().then_some(candidate) + candidate.join(confirm).is_file().then_some(candidate) }; + let real_rust_source_base_dir = + // This is the location used by the `rust-src` `rustup` component. + real_source_base_dir("lib/rustlib/src/rust", "library/std/src/lib.rs"); + + let real_rustc_dev_source_base_dir = + // This is the location used by the `rustc-dev` `rustup` component. 
+ real_source_base_dir("lib/rustlib/rustc-src/rust", "compiler/rustc/src/main.rs"); + let mut search_paths = vec![]; for s in &matches.opt_strs("L") { search_paths.push(SearchPath::from_cli_opt( @@ -2772,8 +2792,10 @@ pub fn build_session_options(early_dcx: &mut EarlyDiagCtxt, matches: &getopts::M cli_forced_local_thinlto_off: disable_local_thinlto, remap_path_prefix, real_rust_source_base_dir, + real_rustc_dev_source_base_dir, edition, json_artifact_notifications, + json_timings, json_unused_externs, json_future_incompat, pretty, diff --git a/compiler/rustc_session/src/errors.rs b/compiler/rustc_session/src/errors.rs index 9c591dcf619..bf95014843d 100644 --- a/compiler/rustc_session/src/errors.rs +++ b/compiler/rustc_session/src/errors.rs @@ -501,20 +501,3 @@ pub(crate) struct SoftFloatIgnored; #[note] #[note(session_soft_float_deprecated_issue)] pub(crate) struct SoftFloatDeprecated; - -#[derive(Diagnostic)] -#[diag(session_forbidden_ctarget_feature)] -#[note] -#[note(session_forbidden_ctarget_feature_issue)] -pub(crate) struct ForbiddenCTargetFeature<'a> { - pub feature: &'a str, - pub enabled: &'a str, - pub reason: &'a str, -} - -#[derive(Diagnostic)] -#[diag(session_unstable_ctarget_feature)] -#[note] -pub(crate) struct UnstableCTargetFeature<'a> { - pub feature: &'a str, -} diff --git a/compiler/rustc_session/src/features.rs b/compiler/rustc_session/src/features.rs deleted file mode 100644 index 70a088a236f..00000000000 --- a/compiler/rustc_session/src/features.rs +++ /dev/null @@ -1,59 +0,0 @@ -use rustc_target::target_features::Stability; - -use crate::Session; -use crate::errors::{ForbiddenCTargetFeature, UnstableCTargetFeature}; - -pub trait StabilityExt { - /// Returns whether the feature may be toggled via `#[target_feature]` or `-Ctarget-feature`. - /// Otherwise, some features also may only be enabled by flag (target modifier). - /// (It might still be nightly-only even if this returns `true`, so make sure to also check - /// `requires_nightly`.) - fn is_toggle_permitted(&self, sess: &Session) -> Result<(), &'static str>; - - /// Check that feature is correctly enabled/disabled by command line flag (emits warnings) - fn verify_feature_enabled_by_flag(&self, sess: &Session, enable: bool, feature: &str); -} - -impl StabilityExt for Stability { - fn is_toggle_permitted(&self, sess: &Session) -> Result<(), &'static str> { - match self { - Stability::Forbidden { reason } => Err(reason), - Stability::TargetModifierOnly { reason, flag } => { - if !sess.opts.target_feature_flag_enabled(*flag) { Err(reason) } else { Ok(()) } - } - _ => Ok(()), - } - } - fn verify_feature_enabled_by_flag(&self, sess: &Session, enable: bool, feature: &str) { - if let Err(reason) = self.is_toggle_permitted(sess) { - sess.dcx().emit_warn(ForbiddenCTargetFeature { - feature, - enabled: if enable { "enabled" } else { "disabled" }, - reason, - }); - } else if self.requires_nightly().is_some() { - // An unstable feature. Warn about using it. It makes little sense - // to hard-error here since we just warn about fully unknown - // features above. 
- sess.dcx().emit_warn(UnstableCTargetFeature { feature }); - } - } -} - -pub fn retpoline_features_by_flags(sess: &Session, features: &mut Vec<&str>) { - // -Zretpoline without -Zretpoline-external-thunk enables - // retpoline-indirect-branches and retpoline-indirect-calls target features - let unstable_opts = &sess.opts.unstable_opts; - if unstable_opts.retpoline && !unstable_opts.retpoline_external_thunk { - features.push("+retpoline-indirect-branches"); - features.push("+retpoline-indirect-calls"); - } - // -Zretpoline-external-thunk (maybe, with -Zretpoline too) enables - // retpoline-external-thunk, retpoline-indirect-branches and - // retpoline-indirect-calls target features - if unstable_opts.retpoline_external_thunk { - features.push("+retpoline-external-thunk"); - features.push("+retpoline-indirect-branches"); - features.push("+retpoline-indirect-calls"); - } -} diff --git a/compiler/rustc_session/src/filesearch.rs b/compiler/rustc_session/src/filesearch.rs index def2cc97f06..4f8c3926207 100644 --- a/compiler/rustc_session/src/filesearch.rs +++ b/compiler/rustc_session/src/filesearch.rs @@ -209,10 +209,9 @@ pub fn get_or_default_sysroot() -> PathBuf { // // use `parent` twice to chop off the file name and then also the // directory containing the dll - let dir = dll.parent().and_then(|p| p.parent()).ok_or(format!( - "Could not move 2 levels upper using `parent()` on {}", - dll.display() - ))?; + let dir = dll.parent().and_then(|p| p.parent()).ok_or_else(|| { + format!("Could not move 2 levels upper using `parent()` on {}", dll.display()) + })?; // if `dir` points to target's dir, move up to the sysroot let mut sysroot_dir = if dir.ends_with(crate::config::host_tuple()) { @@ -265,5 +264,6 @@ pub fn get_or_default_sysroot() -> PathBuf { rustlib_path.exists().then_some(p) } - from_env_args_next().unwrap_or(default_from_rustc_driver_dll().expect("Failed finding sysroot")) + from_env_args_next() + .unwrap_or_else(|| default_from_rustc_driver_dll().expect("Failed finding sysroot")) } diff --git a/compiler/rustc_session/src/lib.rs b/compiler/rustc_session/src/lib.rs index 4added19e56..5e5872ee068 100644 --- a/compiler/rustc_session/src/lib.rs +++ b/compiler/rustc_session/src/lib.rs @@ -29,7 +29,6 @@ pub use session::*; pub mod output; pub use getopts; -pub mod features; rustc_fluent_macro::fluent_messages! { "../messages.ftl" } diff --git a/compiler/rustc_session/src/options.rs b/compiler/rustc_session/src/options.rs index 9ca405333f4..7fef942525b 100644 --- a/compiler/rustc_session/src/options.rs +++ b/compiler/rustc_session/src/options.rs @@ -290,14 +290,6 @@ macro_rules! top_level_options { mods.sort_by(|a, b| a.opt.cmp(&b.opt)); mods } - - pub fn target_feature_flag_enabled(&self, flag: &str) -> bool { - match flag { - "retpoline" => self.unstable_opts.retpoline, - "retpoline-external-thunk" => self.unstable_opts.retpoline_external_thunk, - _ => false, - } - } } ); } @@ -395,21 +387,35 @@ top_level_options!( /// Remap source path prefixes in all output (messages, object files, debug, etc.). remap_path_prefix: Vec<(PathBuf, PathBuf)> [TRACKED_NO_CRATE_HASH], - /// Base directory containing the `src/` for the Rust standard library, and - /// potentially `rustc` as well, if we can find it. Right now it's always - /// `$sysroot/lib/rustlib/src/rust` (i.e. the `rustup` `rust-src` component). + + /// Base directory containing the `library/` directory for the Rust standard library. + /// Right now it's always `$sysroot/lib/rustlib/src/rust` + /// (i.e. the `rustup` `rust-src` component). 
/// /// This directory is what the virtual `/rustc/$hash` is translated back to, /// if Rust was built with path remapping to `/rustc/$hash` enabled /// (the `rust.remap-debuginfo` option in `bootstrap.toml`). real_rust_source_base_dir: Option<PathBuf> [TRACKED_NO_CRATE_HASH], + /// Base directory containing the `compiler/` directory for the rustc sources. + /// Right now it's always `$sysroot/lib/rustlib/rustc-src/rust` + /// (i.e. the `rustup` `rustc-dev` component). + /// + /// This directory is what the virtual `/rustc-dev/$hash` is translated back to, + /// if Rust was built with path remapping to `/rustc/$hash` enabled + /// (the `rust.remap-debuginfo` option in `bootstrap.toml`). + real_rustc_dev_source_base_dir: Option<PathBuf> [TRACKED_NO_CRATE_HASH], + edition: Edition [TRACKED], /// `true` if we're emitting JSON blobs about each artifact produced /// by the compiler. json_artifact_notifications: bool [TRACKED], + /// `true` if we're emitting JSON timings with the start and end of + /// high-level compilation sections + json_timings: bool [UNTRACKED], + /// `true` if we're emitting a JSON blob containing the unused externs json_unused_externs: JsonUnusedExterns [UNTRACKED], @@ -2235,6 +2241,8 @@ options! { environment variable `RUSTC_GRAPHVIZ_FONT` (default: `Courier, monospace`)"), has_thread_local: Option<bool> = (None, parse_opt_bool, [TRACKED], "explicitly enable the `cfg(target_thread_local)` directive"), + hint_mostly_unused: bool = (false, parse_bool, [TRACKED], + "hint that most of this crate will go unused, to minimize work for uncalled functions"), human_readable_cgu_names: bool = (false, parse_bool, [TRACKED], "generate human-readable, predictable names for codegen units (default: no)"), identify_regions: bool = (false, parse_bool, [UNTRACKED], diff --git a/compiler/rustc_session/src/parse.rs b/compiler/rustc_session/src/parse.rs index 87c848cf857..0118cdb1fc2 100644 --- a/compiler/rustc_session/src/parse.rs +++ b/compiler/rustc_session/src/parse.rs @@ -8,10 +8,11 @@ use rustc_ast::attr::AttrIdGenerator; use rustc_ast::node_id::NodeId; use rustc_data_structures::fx::{FxHashMap, FxIndexMap, FxIndexSet}; use rustc_data_structures::sync::{AppendOnlyVec, Lock}; -use rustc_errors::emitter::{HumanEmitter, SilentEmitter, stderr_destination}; +use rustc_errors::emitter::{FatalOnlyEmitter, HumanEmitter, stderr_destination}; +use rustc_errors::translation::Translator; use rustc_errors::{ ColorConfig, Diag, DiagCtxt, DiagCtxtHandle, DiagMessage, EmissionGuarantee, MultiSpan, - StashKey, fallback_fluent_bundle, + StashKey, }; use rustc_feature::{GateIssue, UnstableFeatures, find_feature_issue}; use rustc_span::edition::Edition; @@ -242,10 +243,10 @@ pub struct ParseSess { impl ParseSess { /// Used for testing. 
pub fn new(locale_resources: Vec<&'static str>) -> Self { - let fallback_bundle = fallback_fluent_bundle(locale_resources, false); + let translator = Translator::with_fallback_bundle(locale_resources, false); let sm = Arc::new(SourceMap::new(FilePathMapping::empty())); let emitter = Box::new( - HumanEmitter::new(stderr_destination(ColorConfig::Auto), fallback_bundle) + HumanEmitter::new(stderr_destination(ColorConfig::Auto), translator) .sm(Some(Arc::clone(&sm))), ); let dcx = DiagCtxt::new(emitter); @@ -274,19 +275,14 @@ impl ParseSess { } } - pub fn with_silent_emitter( - locale_resources: Vec<&'static str>, - fatal_note: String, - emit_fatal_diagnostic: bool, - ) -> Self { - let fallback_bundle = fallback_fluent_bundle(locale_resources, false); + pub fn with_fatal_emitter(locale_resources: Vec<&'static str>, fatal_note: String) -> Self { + let translator = Translator::with_fallback_bundle(locale_resources, false); let sm = Arc::new(SourceMap::new(FilePathMapping::empty())); let fatal_emitter = - Box::new(HumanEmitter::new(stderr_destination(ColorConfig::Auto), fallback_bundle)); - let dcx = DiagCtxt::new(Box::new(SilentEmitter { + Box::new(HumanEmitter::new(stderr_destination(ColorConfig::Auto), translator)); + let dcx = DiagCtxt::new(Box::new(FatalOnlyEmitter { fatal_emitter, fatal_note: Some(fatal_note), - emit_fatal_diagnostic, })) .disable_warnings(); ParseSess::with_dcx(dcx, sm) diff --git a/compiler/rustc_session/src/session.rs b/compiler/rustc_session/src/session.rs index b8b4518b14e..ad58c3c8f7d 100644 --- a/compiler/rustc_session/src/session.rs +++ b/compiler/rustc_session/src/session.rs @@ -18,9 +18,11 @@ use rustc_errors::emitter::{ DynEmitter, HumanEmitter, HumanReadableErrorType, OutputTheme, stderr_destination, }; use rustc_errors::json::JsonEmitter; +use rustc_errors::timings::TimingSectionHandler; +use rustc_errors::translation::Translator; use rustc_errors::{ Diag, DiagCtxt, DiagCtxtHandle, DiagMessage, Diagnostic, ErrorGuaranteed, FatalAbort, - FluentBundle, LazyFallbackBundle, TerminalUrl, fallback_fluent_bundle, + TerminalUrl, fallback_fluent_bundle, }; use rustc_macros::HashStable_Generic; pub use rustc_span::def_id::StableCrateId; @@ -156,6 +158,9 @@ pub struct Session { /// Used by `-Z self-profile`. pub prof: SelfProfilerRef, + /// Used to emit section timings events (enabled by `--json=timings`). + pub timings: TimingSectionHandler, + /// Data about code being compiled, gathered during compilation. 
pub code_stats: CodeStats, @@ -944,8 +949,7 @@ impl Session { fn default_emitter( sopts: &config::Options, source_map: Arc<SourceMap>, - bundle: Option<Arc<FluentBundle>>, - fallback_bundle: LazyFallbackBundle, + translator: Translator, ) -> Box<DynEmitter> { let macro_backtrace = sopts.unstable_opts.macro_backtrace; let track_diagnostics = sopts.unstable_opts.track_diagnostics; @@ -970,17 +974,11 @@ fn default_emitter( let short = kind.short(); if let HumanReadableErrorType::AnnotateSnippet = kind { - let emitter = AnnotateSnippetEmitter::new( - source_map, - bundle, - fallback_bundle, - short, - macro_backtrace, - ); + let emitter = + AnnotateSnippetEmitter::new(source_map, translator, short, macro_backtrace); Box::new(emitter.ui_testing(sopts.unstable_opts.ui_testing)) } else { - let emitter = HumanEmitter::new(stderr_destination(color_config), fallback_bundle) - .fluent_bundle(bundle) + let emitter = HumanEmitter::new(stderr_destination(color_config), translator) .sm(source_map) .short_message(short) .diagnostic_width(sopts.diagnostic_width) @@ -1002,12 +1000,11 @@ fn default_emitter( JsonEmitter::new( Box::new(io::BufWriter::new(io::stderr())), source_map, - fallback_bundle, + translator, pretty, json_rendered, color_config, ) - .fluent_bundle(bundle) .ui_testing(sopts.unstable_opts.ui_testing) .ignored_directories_in_source_blocks( sopts.unstable_opts.ignore_directory_in_diagnostics_source_blocks.clone(), @@ -1026,7 +1023,7 @@ fn default_emitter( pub fn build_session( sopts: config::Options, io: CompilerIO, - bundle: Option<Arc<rustc_errors::FluentBundle>>, + fluent_bundle: Option<Arc<rustc_errors::FluentBundle>>, registry: rustc_errors::registry::Registry, fluent_resources: Vec<&'static str>, driver_lint_caps: FxHashMap<lint::LintId, lint::Level>, @@ -1048,12 +1045,15 @@ pub fn build_session( let cap_lints_allow = sopts.lint_cap.is_some_and(|cap| cap == lint::Allow); let can_emit_warnings = !(warnings_allow || cap_lints_allow); - let fallback_bundle = fallback_fluent_bundle( - fluent_resources, - sopts.unstable_opts.translate_directionality_markers, - ); + let translator = Translator { + fluent_bundle, + fallback_fluent_bundle: fallback_fluent_bundle( + fluent_resources, + sopts.unstable_opts.translate_directionality_markers, + ), + }; let source_map = rustc_span::source_map::get_source_map().unwrap(); - let emitter = default_emitter(&sopts, Arc::clone(&source_map), bundle, fallback_bundle); + let emitter = default_emitter(&sopts, Arc::clone(&source_map), translator); let mut dcx = DiagCtxt::new(emitter) .with_flags(sopts.unstable_opts.dcx_flags(can_emit_warnings)) @@ -1126,6 +1126,8 @@ pub fn build_session( .as_ref() .map(|_| rng().next_u32().to_base_fixed_len(CASE_INSENSITIVE).to_string()); + let timings = TimingSectionHandler::new(sopts.json_timings); + let sess = Session { target, host, @@ -1136,6 +1138,7 @@ pub fn build_session( io, incr_comp_session: RwLock::new(IncrCompSession::NotInitialized), prof, + timings, code_stats: Default::default(), lint_store: None, driver_lint_caps, @@ -1493,13 +1496,13 @@ impl EarlyDiagCtxt { fn mk_emitter(output: ErrorOutputType) -> Box<DynEmitter> { // FIXME(#100717): early errors aren't translated at the moment, so this is fine, but it will // need to reference every crate that might emit an early error for translation to work. 
- let fallback_bundle = - fallback_fluent_bundle(vec![rustc_errors::DEFAULT_LOCALE_RESOURCE], false); + let translator = + Translator::with_fallback_bundle(vec![rustc_errors::DEFAULT_LOCALE_RESOURCE], false); let emitter: Box<DynEmitter> = match output { config::ErrorOutputType::HumanReadable { kind, color_config } => { let short = kind.short(); Box::new( - HumanEmitter::new(stderr_destination(color_config), fallback_bundle) + HumanEmitter::new(stderr_destination(color_config), translator) .theme(if let HumanReadableErrorType::Unicode = kind { OutputTheme::Unicode } else { @@ -1512,7 +1515,7 @@ fn mk_emitter(output: ErrorOutputType) -> Box<DynEmitter> { Box::new(JsonEmitter::new( Box::new(io::BufWriter::new(io::stderr())), Some(Arc::new(SourceMap::new(FilePathMapping::empty()))), - fallback_bundle, + translator, pretty, json_rendered, color_config, diff --git a/compiler/rustc_span/src/hygiene.rs b/compiler/rustc_span/src/hygiene.rs index 315dedec107..29be3b73ee9 100644 --- a/compiler/rustc_span/src/hygiene.rs +++ b/compiler/rustc_span/src/hygiene.rs @@ -1213,6 +1213,17 @@ pub enum DesugaringKind { Contract, /// A pattern type range start/end PatTyRange, + /// A format literal. + FormatLiteral { + /// Was this format literal written in the source? + /// - `format!("boo")` => Yes, + /// - `format!(concat!("b", "o", "o"))` => No, + /// - `format!(include_str!("boo.txt"))` => No, + /// + /// If it wasn't written in the source then we have to be careful with suggestions about + /// rewriting it. + source: bool, + }, } impl DesugaringKind { @@ -1231,6 +1242,10 @@ impl DesugaringKind { DesugaringKind::BoundModifier => "trait bound modifier", DesugaringKind::Contract => "contract check", DesugaringKind::PatTyRange => "pattern type", + DesugaringKind::FormatLiteral { source: true } => "format string literal", + DesugaringKind::FormatLiteral { source: false } => { + "expression that expanded into a format string literal" + } } } @@ -1250,6 +1265,7 @@ impl DesugaringKind { DesugaringKind::BoundModifier => value == "BoundModifier", DesugaringKind::Contract => value == "Contract", DesugaringKind::PatTyRange => value == "PatTyRange", + DesugaringKind::FormatLiteral { .. } => value == "FormatLiteral", } } } diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs index baadff16120..684b1781b44 100644 --- a/compiler/rustc_span/src/symbol.rs +++ b/compiler/rustc_span/src/symbol.rs @@ -396,8 +396,7 @@ symbols! { __S, __awaitee, __try_var, - _d, - _e, + _t, _task_context, a32, aarch64_target_feature, @@ -642,6 +641,7 @@ symbols! { cfi_encoding, char, char_is_ascii, + char_to_digit, child_id, child_kill, client, @@ -1080,7 +1080,6 @@ symbols! { fs_create_dir, fsub_algebraic, fsub_fast, - fsxr, full, fundamental, fused_iterator, @@ -1088,6 +1087,7 @@ symbols! { future_drop_poll, future_output, future_trait, + fxsr, gdb_script_file, ge, gen_blocks, @@ -1217,6 +1217,8 @@ symbols! { intrinsics, intrinsics_unaligned_volatile_load, intrinsics_unaligned_volatile_store, + io_error_new, + io_errorkind, io_stderr, io_stdout, irrefutable_let_patterns, @@ -1306,6 +1308,7 @@ symbols! { m68k_target_feature, macro_at_most_once_rep, macro_attributes_in_derive_output, + macro_concat, macro_escape, macro_export, macro_lifetime_matcher, @@ -1340,6 +1343,7 @@ symbols! { maybe_uninit, maybe_uninit_uninit, maybe_uninit_zeroed, + mem_align_of, mem_discriminant, mem_drop, mem_forget, @@ -1707,6 +1711,7 @@ symbols! 
{ question_mark, quote, range_inclusive_new, + range_step, raw_dylib, raw_dylib_elf, raw_eq, @@ -2023,6 +2028,7 @@ symbols! { slice, slice_from_raw_parts, slice_from_raw_parts_mut, + slice_from_ref, slice_get_unchecked, slice_into_vec, slice_iter, @@ -2052,6 +2058,7 @@ symbols! { static_recursion, staticlib, std, + std_lib_injection, std_panic, std_panic_2015_macro, std_panic_macro, @@ -2183,6 +2190,7 @@ symbols! { type_changing_struct_update, type_const, type_id, + type_ir, type_ir_infer_ctxt_like, type_ir_inherent, type_ir_interner, diff --git a/compiler/rustc_symbol_mangling/src/v0.rs b/compiler/rustc_symbol_mangling/src/v0.rs index 49a5e20d7cf..1db8ad72b32 100644 --- a/compiler/rustc_symbol_mangling/src/v0.rs +++ b/compiler/rustc_symbol_mangling/src/v0.rs @@ -85,10 +85,6 @@ pub fn mangle_internal_symbol<'tcx>(tcx: TyCtxt<'tcx>, item_name: &str) -> Strin if item_name == "rust_eh_personality" { // rust_eh_personality must not be renamed as LLVM hard-codes the name return "rust_eh_personality".to_owned(); - } else if item_name == "__rust_no_alloc_shim_is_unstable" { - // Temporary back compat hack to give people the chance to migrate to - // include #[rustc_std_internal_symbol]. - return "__rust_no_alloc_shim_is_unstable".to_owned(); } let prefix = "_R"; diff --git a/compiler/rustc_target/src/spec/abi_map.rs b/compiler/rustc_target/src/spec/abi_map.rs index 4659bbdb890..42ec10a8e15 100644 --- a/compiler/rustc_target/src/spec/abi_map.rs +++ b/compiler/rustc_target/src/spec/abi_map.rs @@ -12,16 +12,19 @@ pub struct AbiMap { os: OsKind, } +/// result from trying to map an ABI #[derive(Copy, Clone, Debug)] pub enum AbiMapping { /// this ABI is exactly mapped for this platform Direct(CanonAbi), /// we don't yet warn on this, but we will Deprecated(CanonAbi), + /// ABI we do not map for this platform: it must not reach codegen Invalid, } impl AbiMapping { + /// optionally get a [CanonAbi], even if Deprecated pub fn into_option(self) -> Option<CanonAbi> { match self { Self::Direct(abi) | Self::Deprecated(abi) => Some(abi), @@ -29,6 +32,7 @@ impl AbiMapping { } } + /// get a [CanonAbi] even if Deprecated, panicking if Invalid #[track_caller] pub fn unwrap(self) -> CanonAbi { self.into_option().unwrap() @@ -40,6 +44,7 @@ impl AbiMapping { } impl AbiMap { + /// create an AbiMap according to arbitrary fields on the [Target] pub fn from_target(target: &Target) -> Self { // the purpose of this little exercise is to force listing what affects these mappings let arch = match &*target.arch { @@ -59,6 +64,7 @@ impl AbiMap { AbiMap { arch, os } } + /// lower an [ExternAbi] to a [CanonAbi] if this AbiMap allows pub fn canonize_abi(&self, extern_abi: ExternAbi, has_c_varargs: bool) -> AbiMapping { let AbiMap { os, arch } = *self; diff --git a/compiler/rustc_target/src/target_features.rs b/compiler/rustc_target/src/target_features.rs index a1eac1fba25..3eea1e070a6 100644 --- a/compiler/rustc_target/src/target_features.rs +++ b/compiler/rustc_target/src/target_features.rs @@ -11,11 +11,6 @@ use crate::spec::{FloatAbi, RustcAbi, Target}; /// These exist globally and are not in the target-specific lists below. pub const RUSTC_SPECIFIC_FEATURES: &[&str] = &["crt-static"]; -/// Features that require special handling when passing to LLVM: -/// these are target-specific (i.e., must also be listed in the target-specific list below) -/// but do not correspond to an LLVM target feature. -pub const RUSTC_SPECIAL_FEATURES: &[&str] = &["backchain"]; - /// Stability information for target features. 
#[derive(Debug, Copy, Clone)] pub enum Stability { @@ -34,9 +29,6 @@ pub enum Stability { /// particular for features are actually ABI configuration flags (not all targets are as nice as /// RISC-V and have an explicit way to set the ABI separate from target features). Forbidden { reason: &'static str }, - /// This feature can not be set via `-Ctarget-feature` or `#[target_feature]`, it can only be set - /// by target modifier flag. Target modifier flags are tracked to be consistent in linked modules. - TargetModifierOnly { reason: &'static str, flag: &'static str }, } use Stability::*; @@ -52,7 +44,6 @@ impl<CTX> HashStable<CTX> for Stability { Stability::Forbidden { reason } => { reason.hash_stable(hcx, hasher); } - Stability::TargetModifierOnly { .. } => {} } } } @@ -62,7 +53,7 @@ impl Stability { /// (It might still be nightly-only even if this returns `true`, so make sure to also check /// `requires_nightly`.) pub fn in_cfg(&self) -> bool { - !matches!(self, Stability::Forbidden { .. }) + matches!(self, Stability::Stable | Stability::Unstable { .. }) } /// Returns the nightly feature that is required to toggle this target feature via @@ -78,7 +69,16 @@ impl Stability { Stability::Unstable(nightly_feature) => Some(nightly_feature), Stability::Stable { .. } => None, Stability::Forbidden { .. } => panic!("forbidden features should not reach this far"), - Stability::TargetModifierOnly { .. } => None, + } + } + + /// Returns whether the feature may be toggled via `#[target_feature]` or `-Ctarget-feature`. + /// (It might still be nightly-only even if this returns `true`, so make sure to also check + /// `requires_nightly`.) + pub fn toggle_allowed(&self) -> Result<(), &'static str> { + match self { + Stability::Unstable(_) | Stability::Stable { .. } => Ok(()), + Stability::Forbidden { reason } => Err(reason), } } } @@ -270,12 +270,7 @@ static AARCH64_FEATURES: &[(&str, Stability, ImpliedFeatures)] = &[ ("rcpc3", Unstable(sym::aarch64_unstable_target_feature), &["rcpc2"]), // FEAT_RDM ("rdm", Stable, &["neon"]), - // This is needed for inline assembly, but shouldn't be stabilized as-is - // since it should be enabled globally using -Zfixed-x18, not - // #[target_feature]. - // Note that cfg(target_feature = "reserve-x18") is currently not set for - // targets that reserve x18 by default. 
- ("reserve-x18", Unstable(sym::aarch64_unstable_target_feature), &[]), + ("reserve-x18", Forbidden { reason: "use `-Zfixed-x18` compiler flag instead" }, &[]), // FEAT_SB ("sb", Stable, &[]), // FEAT_SHA1 & FEAT_SHA256 @@ -450,26 +445,17 @@ static X86_FEATURES: &[(&str, Stability, ImpliedFeatures)] = &[ ("rdseed", Stable, &[]), ( "retpoline-external-thunk", - Stability::TargetModifierOnly { - reason: "use `retpoline-external-thunk` target modifier flag instead", - flag: "retpoline-external-thunk", - }, + Stability::Forbidden { reason: "use `-Zretpoline-external-thunk` compiler flag instead" }, &[], ), ( "retpoline-indirect-branches", - Stability::TargetModifierOnly { - reason: "use `retpoline` target modifier flag instead", - flag: "retpoline", - }, + Stability::Forbidden { reason: "use `-Zretpoline` compiler flag instead" }, &[], ), ( "retpoline-indirect-calls", - Stability::TargetModifierOnly { - reason: "use `retpoline` target modifier flag instead", - flag: "retpoline", - }, + Stability::Forbidden { reason: "use `-Zretpoline` compiler flag instead" }, &[], ), ("rtm", Unstable(sym::rtm_target_feature), &[]), @@ -732,6 +718,7 @@ static LOONGARCH_FEATURES: &[(&str, Stability, ImpliedFeatures)] = &[ #[rustfmt::skip] const IBMZ_FEATURES: &[(&str, Stability, ImpliedFeatures)] = &[ // tidy-alphabetical-start + // For "backchain", https://github.com/rust-lang/rust/issues/142412 is a stabilization blocker ("backchain", Unstable(sym::s390x_target_feature), &[]), ("concurrent-functions", Unstable(sym::s390x_target_feature), &[]), ("deflate-conversion", Unstable(sym::s390x_target_feature), &[]), diff --git a/compiler/rustc_thread_pool/Cargo.toml b/compiler/rustc_thread_pool/Cargo.toml new file mode 100644 index 00000000000..d0bd065c457 --- /dev/null +++ b/compiler/rustc_thread_pool/Cargo.toml @@ -0,0 +1,50 @@ +[package] +name = "rustc_thread_pool" +version = "0.0.0" +authors = ["Niko Matsakis <niko@alum.mit.edu>", + "Josh Stone <cuviper@gmail.com>"] +description = "Core APIs for Rayon - fork for rustc" +license = "MIT OR Apache-2.0" +rust-version = "1.63" +edition = "2021" +readme = "README.md" +keywords = ["parallel", "thread", "concurrency", "join", "performance"] +categories = ["concurrency"] + +[dependencies] +crossbeam-deque = "0.8" +crossbeam-utils = "0.8" + +[dev-dependencies] +rand = "0.9" +rand_xorshift = "0.4" +scoped-tls = "1.0" + +[target.'cfg(unix)'.dev-dependencies] +libc = "0.2" + +[[test]] +name = "stack_overflow_crash" +path = "tests/stack_overflow_crash.rs" + +# NB: having one [[test]] manually defined means we need to declare them all + +[[test]] +name = "double_init_fail" +path = "tests/double_init_fail.rs" + +[[test]] +name = "init_zero_threads" +path = "tests/init_zero_threads.rs" + +[[test]] +name = "scope_join" +path = "tests/scope_join.rs" + +[[test]] +name = "simple_panic" +path = "tests/simple_panic.rs" + +[[test]] +name = "scoped_threadpool" +path = "tests/scoped_threadpool.rs" diff --git a/compiler/rustc_thread_pool/README.md b/compiler/rustc_thread_pool/README.md new file mode 100644 index 00000000000..a50cc1165b8 --- /dev/null +++ b/compiler/rustc_thread_pool/README.md @@ -0,0 +1,10 @@ +Note: This is an unstable fork made for use in rustc + +Rayon-core represents the "core, stable" APIs of Rayon: join, scope, and so forth, as well as the ability to create custom thread-pools with ThreadPool. + +Maybe worth mentioning: users are not necessarily intended to directly access rustc_thread_pool; all its APIs are mirrored in the rayon crate. 
To that end, the examples in the docs use rayon::join and so forth rather than rayon_core::join. + + +Please see [Rayon Docs] for details about using Rayon. + +[Rayon Docs]: https://docs.rs/rayon/ diff --git a/compiler/rustc_thread_pool/src/broadcast/mod.rs b/compiler/rustc_thread_pool/src/broadcast/mod.rs new file mode 100644 index 00000000000..9545c4b15d8 --- /dev/null +++ b/compiler/rustc_thread_pool/src/broadcast/mod.rs @@ -0,0 +1,148 @@ +use std::fmt; +use std::marker::PhantomData; +use std::sync::Arc; + +use crate::job::{ArcJob, StackJob}; +use crate::latch::{CountLatch, LatchRef}; +use crate::registry::{Registry, WorkerThread}; + +mod tests; + +/// Executes `op` within every thread in the current threadpool. If this is +/// called from a non-Rayon thread, it will execute in the global threadpool. +/// Any attempts to use `join`, `scope`, or parallel iterators will then operate +/// within that threadpool. When the call has completed on each thread, returns +/// a vector containing all of their return values. +/// +/// For more information, see the [`ThreadPool::broadcast()`][m] method. +/// +/// [m]: struct.ThreadPool.html#method.broadcast +pub fn broadcast<OP, R>(op: OP) -> Vec<R> +where + OP: Fn(BroadcastContext<'_>) -> R + Sync, + R: Send, +{ + // We assert that current registry has not terminated. + unsafe { broadcast_in(op, &Registry::current()) } +} + +/// Spawns an asynchronous task on every thread in this thread-pool. This task +/// will run in the implicit, global scope, which means that it may outlast the +/// current stack frame -- therefore, it cannot capture any references onto the +/// stack (you will likely need a `move` closure). +/// +/// For more information, see the [`ThreadPool::spawn_broadcast()`][m] method. +/// +/// [m]: struct.ThreadPool.html#method.spawn_broadcast +pub fn spawn_broadcast<OP>(op: OP) +where + OP: Fn(BroadcastContext<'_>) + Send + Sync + 'static, +{ + // We assert that current registry has not terminated. + unsafe { spawn_broadcast_in(op, &Registry::current()) } +} + +/// Provides context to a closure called by `broadcast`. +pub struct BroadcastContext<'a> { + worker: &'a WorkerThread, + + /// Make sure to prevent auto-traits like `Send` and `Sync`. + _marker: PhantomData<&'a mut dyn Fn()>, +} + +impl<'a> BroadcastContext<'a> { + pub(super) fn with<R>(f: impl FnOnce(BroadcastContext<'_>) -> R) -> R { + let worker_thread = WorkerThread::current(); + assert!(!worker_thread.is_null()); + f(BroadcastContext { worker: unsafe { &*worker_thread }, _marker: PhantomData }) + } + + /// Our index amongst the broadcast threads (ranges from `0..self.num_threads()`). + #[inline] + pub fn index(&self) -> usize { + self.worker.index() + } + + /// The number of threads receiving the broadcast in the thread pool. + /// + /// # Future compatibility note + /// + /// Future versions of Rayon might vary the number of threads over time, but + /// this method will always return the number of threads which are actually + /// receiving your particular `broadcast` call. + #[inline] + pub fn num_threads(&self) -> usize { + self.worker.registry().num_threads() + } +} + +impl<'a> fmt::Debug for BroadcastContext<'a> { + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt.debug_struct("BroadcastContext") + .field("index", &self.index()) + .field("num_threads", &self.num_threads()) + .field("pool_id", &self.worker.registry().id()) + .finish() + } +} + +/// Execute `op` on every thread in the pool. 
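A short usage sketch of the `broadcast`/`spawn_broadcast` entry points documented above; it mirrors the `broadcast_global`, `broadcast_pool`, and `spawn_broadcast_global` tests further down in this diff, and it assumes the crate-root re-exports (`broadcast`, `spawn_broadcast`, `ThreadPoolBuilder`) match upstream rayon-core.

```rust
// Illustrative only; shapes taken from the tests later in this diff.
use std::sync::mpsc::channel;

// Blocking form: every worker runs the closure once and the per-thread
// return values come back in thread-index order.
fn thread_indices() -> Vec<usize> {
    rustc_thread_pool::broadcast(|ctx| ctx.index())
}

// The same operation confined to a dedicated pool.
fn thread_indices_in_small_pool() -> Vec<usize> {
    let pool = rustc_thread_pool::ThreadPoolBuilder::new()
        .num_threads(4)
        .build()
        .unwrap();
    pool.broadcast(|ctx| ctx.index())
}

// Fire-and-forget form: the task is queued on every worker and may outlive
// the caller's stack frame, hence the `move` closure and the channel used
// to observe completion.
fn spawn_everywhere() -> usize {
    let (tx, rx) = channel();
    rustc_thread_pool::spawn_broadcast(move |ctx| tx.send(ctx.index()).unwrap());
    // The iterator ends once every worker has run the task and the sender drops.
    rx.into_iter().count()
}
```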
It will be executed on each +/// thread when they have nothing else to do locally, before they try to +/// steal work from other threads. This function will not return until all +/// threads have completed the `op`. +/// +/// Unsafe because `registry` must not yet have terminated. +pub(super) unsafe fn broadcast_in<OP, R>(op: OP, registry: &Arc<Registry>) -> Vec<R> +where + OP: Fn(BroadcastContext<'_>) -> R + Sync, + R: Send, +{ + let f = move |injected: bool| { + debug_assert!(injected); + BroadcastContext::with(&op) + }; + + let n_threads = registry.num_threads(); + let current_thread = unsafe { WorkerThread::current().as_ref() }; + let tlv = crate::tlv::get(); + let latch = CountLatch::with_count(n_threads, current_thread); + let jobs: Vec<_> = + (0..n_threads).map(|_| StackJob::new(tlv, &f, LatchRef::new(&latch))).collect(); + let job_refs = jobs.iter().map(|job| unsafe { job.as_job_ref() }); + + registry.inject_broadcast(job_refs); + + // Wait for all jobs to complete, then collect the results, maybe propagating a panic. + latch.wait(current_thread); + jobs.into_iter().map(|job| unsafe { job.into_result() }).collect() +} + +/// Execute `op` on every thread in the pool. It will be executed on each +/// thread when they have nothing else to do locally, before they try to +/// steal work from other threads. This function returns immediately after +/// injecting the jobs. +/// +/// Unsafe because `registry` must not yet have terminated. +pub(super) unsafe fn spawn_broadcast_in<OP>(op: OP, registry: &Arc<Registry>) +where + OP: Fn(BroadcastContext<'_>) + Send + Sync + 'static, +{ + let job = ArcJob::new({ + let registry = Arc::clone(registry); + move || { + registry.catch_unwind(|| BroadcastContext::with(&op)); + registry.terminate(); // (*) permit registry to terminate now + } + }); + + let n_threads = registry.num_threads(); + let job_refs = (0..n_threads).map(|_| { + // Ensure that registry cannot terminate until this job has executed + // on each thread. This ref is decremented at the (*) above. 
+ registry.increment_terminate_count(); + + ArcJob::as_static_job_ref(&job) + }); + + registry.inject_broadcast(job_refs); +} diff --git a/compiler/rustc_thread_pool/src/broadcast/tests.rs b/compiler/rustc_thread_pool/src/broadcast/tests.rs new file mode 100644 index 00000000000..fac8b8ad466 --- /dev/null +++ b/compiler/rustc_thread_pool/src/broadcast/tests.rs @@ -0,0 +1,264 @@ +#![cfg(test)] + +use std::sync::Arc; +use std::sync::atomic::{AtomicUsize, Ordering}; +use std::sync::mpsc::channel; +use std::{thread, time}; + +use crate::ThreadPoolBuilder; + +#[test] +fn broadcast_global() { + let v = crate::broadcast(|ctx| ctx.index()); + assert!(v.into_iter().eq(0..crate::current_num_threads())); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn spawn_broadcast_global() { + let (tx, rx) = channel(); + crate::spawn_broadcast(move |ctx| tx.send(ctx.index()).unwrap()); + + let mut v: Vec<_> = rx.into_iter().collect(); + v.sort_unstable(); + assert!(v.into_iter().eq(0..crate::current_num_threads())); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn broadcast_pool() { + let pool = ThreadPoolBuilder::new().num_threads(7).build().unwrap(); + let v = pool.broadcast(|ctx| ctx.index()); + assert!(v.into_iter().eq(0..7)); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn spawn_broadcast_pool() { + let (tx, rx) = channel(); + let pool = ThreadPoolBuilder::new().num_threads(7).build().unwrap(); + pool.spawn_broadcast(move |ctx| tx.send(ctx.index()).unwrap()); + + let mut v: Vec<_> = rx.into_iter().collect(); + v.sort_unstable(); + assert!(v.into_iter().eq(0..7)); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn broadcast_self() { + let pool = ThreadPoolBuilder::new().num_threads(7).build().unwrap(); + let v = pool.install(|| crate::broadcast(|ctx| ctx.index())); + assert!(v.into_iter().eq(0..7)); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn spawn_broadcast_self() { + let (tx, rx) = channel(); + let pool = ThreadPoolBuilder::new().num_threads(7).build().unwrap(); + pool.spawn(|| crate::spawn_broadcast(move |ctx| tx.send(ctx.index()).unwrap())); + + let mut v: Vec<_> = rx.into_iter().collect(); + v.sort_unstable(); + assert!(v.into_iter().eq(0..7)); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn broadcast_mutual() { + let count = AtomicUsize::new(0); + let pool1 = ThreadPoolBuilder::new().num_threads(3).build().unwrap(); + let pool2 = ThreadPoolBuilder::new().num_threads(7).build().unwrap(); + pool1.install(|| { + pool2.broadcast(|_| { + pool1.broadcast(|_| { + count.fetch_add(1, Ordering::Relaxed); + }) + }) + }); + assert_eq!(count.into_inner(), 3 * 7); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn spawn_broadcast_mutual() { + let (tx, rx) = channel(); + let pool1 = Arc::new(ThreadPoolBuilder::new().num_threads(3).build().unwrap()); + let pool2 = ThreadPoolBuilder::new().num_threads(7).build().unwrap(); + pool1.spawn({ + let pool1 = Arc::clone(&pool1); + move || { + pool2.spawn_broadcast(move |_| { + let tx = tx.clone(); + pool1.spawn_broadcast(move |_| tx.send(()).unwrap()) + }) + } + }); + assert_eq!(rx.into_iter().count(), 3 * 7); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn broadcast_mutual_sleepy() { + let count = AtomicUsize::new(0); + 
let pool1 = ThreadPoolBuilder::new().num_threads(3).build().unwrap(); + let pool2 = ThreadPoolBuilder::new().num_threads(7).build().unwrap(); + pool1.install(|| { + thread::sleep(time::Duration::from_secs(1)); + pool2.broadcast(|_| { + thread::sleep(time::Duration::from_secs(1)); + pool1.broadcast(|_| { + thread::sleep(time::Duration::from_millis(100)); + count.fetch_add(1, Ordering::Relaxed); + }) + }) + }); + assert_eq!(count.into_inner(), 3 * 7); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn spawn_broadcast_mutual_sleepy() { + let (tx, rx) = channel(); + let pool1 = Arc::new(ThreadPoolBuilder::new().num_threads(3).build().unwrap()); + let pool2 = ThreadPoolBuilder::new().num_threads(7).build().unwrap(); + pool1.spawn({ + let pool1 = Arc::clone(&pool1); + move || { + thread::sleep(time::Duration::from_secs(1)); + pool2.spawn_broadcast(move |_| { + let tx = tx.clone(); + thread::sleep(time::Duration::from_secs(1)); + pool1.spawn_broadcast(move |_| { + thread::sleep(time::Duration::from_millis(100)); + tx.send(()).unwrap(); + }) + }) + } + }); + assert_eq!(rx.into_iter().count(), 3 * 7); +} + +#[test] +#[cfg_attr(not(panic = "unwind"), ignore)] +fn broadcast_panic_one() { + let count = AtomicUsize::new(0); + let pool = ThreadPoolBuilder::new().num_threads(7).build().unwrap(); + let result = crate::unwind::halt_unwinding(|| { + pool.broadcast(|ctx| { + count.fetch_add(1, Ordering::Relaxed); + if ctx.index() == 3 { + panic!("Hello, world!"); + } + }) + }); + assert_eq!(count.into_inner(), 7); + assert!(result.is_err(), "broadcast panic should propagate!"); +} + +#[test] +#[cfg_attr(not(panic = "unwind"), ignore)] +fn spawn_broadcast_panic_one() { + let (tx, rx) = channel(); + let (panic_tx, panic_rx) = channel(); + let pool = ThreadPoolBuilder::new() + .num_threads(7) + .panic_handler(move |e| panic_tx.send(e).unwrap()) + .build() + .unwrap(); + pool.spawn_broadcast(move |ctx| { + tx.send(()).unwrap(); + if ctx.index() == 3 { + panic!("Hello, world!"); + } + }); + drop(pool); // including panic_tx + assert_eq!(rx.into_iter().count(), 7); + assert_eq!(panic_rx.into_iter().count(), 1); +} + +#[test] +#[cfg_attr(not(panic = "unwind"), ignore)] +fn broadcast_panic_many() { + let count = AtomicUsize::new(0); + let pool = ThreadPoolBuilder::new().num_threads(7).build().unwrap(); + let result = crate::unwind::halt_unwinding(|| { + pool.broadcast(|ctx| { + count.fetch_add(1, Ordering::Relaxed); + if ctx.index() % 2 == 0 { + panic!("Hello, world!"); + } + }) + }); + assert_eq!(count.into_inner(), 7); + assert!(result.is_err(), "broadcast panic should propagate!"); +} + +#[test] +#[cfg_attr(not(panic = "unwind"), ignore)] +fn spawn_broadcast_panic_many() { + let (tx, rx) = channel(); + let (panic_tx, panic_rx) = channel(); + let pool = ThreadPoolBuilder::new() + .num_threads(7) + .panic_handler(move |e| panic_tx.send(e).unwrap()) + .build() + .unwrap(); + pool.spawn_broadcast(move |ctx| { + tx.send(()).unwrap(); + if ctx.index() % 2 == 0 { + panic!("Hello, world!"); + } + }); + drop(pool); // including panic_tx + assert_eq!(rx.into_iter().count(), 7); + assert_eq!(panic_rx.into_iter().count(), 4); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn broadcast_sleep_race() { + let test_duration = time::Duration::from_secs(1); + let pool = ThreadPoolBuilder::new().num_threads(7).build().unwrap(); + let start = time::Instant::now(); + while start.elapsed() < test_duration { + pool.broadcast(|ctx| { + // A slight 
spread of sleep duration increases the chance that one + // of the threads will race in the pool's idle sleep afterward. + thread::sleep(time::Duration::from_micros(ctx.index() as u64)); + }); + } +} + +#[test] +fn broadcast_after_spawn_broadcast() { + let (tx, rx) = channel(); + + // Queue a non-blocking spawn_broadcast. + crate::spawn_broadcast(move |ctx| tx.send(ctx.index()).unwrap()); + + // This blocking broadcast runs after all prior broadcasts. + crate::broadcast(|_| {}); + + // The spawn_broadcast **must** have run by now on all threads. + let mut v: Vec<_> = rx.try_iter().collect(); + v.sort_unstable(); + assert!(v.into_iter().eq(0..crate::current_num_threads())); +} + +#[test] +fn broadcast_after_spawn() { + let (tx, rx) = channel(); + + // Queue a regular spawn on a thread-local deque. + crate::registry::in_worker(move |_, _| { + crate::spawn(move || tx.send(22).unwrap()); + }); + + // Broadcast runs after the local deque is empty. + crate::broadcast(|_| {}); + + // The spawn **must** have run by now. + assert_eq!(22, rx.try_recv().unwrap()); +} diff --git a/compiler/rustc_thread_pool/src/compile_fail/mod.rs b/compiler/rustc_thread_pool/src/compile_fail/mod.rs new file mode 100644 index 00000000000..f2ec646a4d3 --- /dev/null +++ b/compiler/rustc_thread_pool/src/compile_fail/mod.rs @@ -0,0 +1,7 @@ +// These modules contain `compile_fail` doc tests. +mod quicksort_race1; +mod quicksort_race2; +mod quicksort_race3; +mod rc_return; +mod rc_upvar; +mod scope_join_bad; diff --git a/compiler/rustc_thread_pool/src/compile_fail/quicksort_race1.rs b/compiler/rustc_thread_pool/src/compile_fail/quicksort_race1.rs new file mode 100644 index 00000000000..f6dbc769699 --- /dev/null +++ b/compiler/rustc_thread_pool/src/compile_fail/quicksort_race1.rs @@ -0,0 +1,28 @@ +/*! ```compile_fail,E0524 + +fn quick_sort<T:PartialOrd+Send>(v: &mut [T]) { + if v.len() <= 1 { + return; + } + + let mid = partition(v); + let (lo, _hi) = v.split_at_mut(mid); + rustc_thread_pool::join(|| quick_sort(lo), || quick_sort(lo)); //~ ERROR +} + +fn partition<T:PartialOrd+Send>(v: &mut [T]) -> usize { + let pivot = v.len() - 1; + let mut i = 0; + for j in 0..pivot { + if v[j] <= v[pivot] { + v.swap(i, j); + i += 1; + } + } + v.swap(i, pivot); + i +} + +fn main() { } + +``` */ diff --git a/compiler/rustc_thread_pool/src/compile_fail/quicksort_race2.rs b/compiler/rustc_thread_pool/src/compile_fail/quicksort_race2.rs new file mode 100644 index 00000000000..ccd737a700d --- /dev/null +++ b/compiler/rustc_thread_pool/src/compile_fail/quicksort_race2.rs @@ -0,0 +1,28 @@ +/*! ```compile_fail,E0500 + +fn quick_sort<T:PartialOrd+Send>(v: &mut [T]) { + if v.len() <= 1 { + return; + } + + let mid = partition(v); + let (lo, _hi) = v.split_at_mut(mid); + rustc_thread_pool::join(|| quick_sort(lo), || quick_sort(v)); //~ ERROR +} + +fn partition<T:PartialOrd+Send>(v: &mut [T]) -> usize { + let pivot = v.len() - 1; + let mut i = 0; + for j in 0..pivot { + if v[j] <= v[pivot] { + v.swap(i, j); + i += 1; + } + } + v.swap(i, pivot); + i +} + +fn main() { } + +``` */ diff --git a/compiler/rustc_thread_pool/src/compile_fail/quicksort_race3.rs b/compiler/rustc_thread_pool/src/compile_fail/quicksort_race3.rs new file mode 100644 index 00000000000..6acdf084433 --- /dev/null +++ b/compiler/rustc_thread_pool/src/compile_fail/quicksort_race3.rs @@ -0,0 +1,28 @@ +/*! 
```compile_fail,E0524 + +fn quick_sort<T:PartialOrd+Send>(v: &mut [T]) { + if v.len() <= 1 { + return; + } + + let mid = partition(v); + let (_lo, hi) = v.split_at_mut(mid); + rustc_thread_pool::join(|| quick_sort(hi), || quick_sort(hi)); //~ ERROR +} + +fn partition<T:PartialOrd+Send>(v: &mut [T]) -> usize { + let pivot = v.len() - 1; + let mut i = 0; + for j in 0..pivot { + if v[j] <= v[pivot] { + v.swap(i, j); + i += 1; + } + } + v.swap(i, pivot); + i +} + +fn main() { } + +``` */ diff --git a/compiler/rustc_thread_pool/src/compile_fail/rc_return.rs b/compiler/rustc_thread_pool/src/compile_fail/rc_return.rs new file mode 100644 index 00000000000..165c685aba1 --- /dev/null +++ b/compiler/rustc_thread_pool/src/compile_fail/rc_return.rs @@ -0,0 +1,17 @@ +/** ```compile_fail,E0277 + +use std::rc::Rc; + +rustc_thread_pool::join(|| Rc::new(22), || ()); //~ ERROR + +``` */ +mod left {} + +/** ```compile_fail,E0277 + +use std::rc::Rc; + +rustc_thread_pool::join(|| (), || Rc::new(23)); //~ ERROR + +``` */ +mod right {} diff --git a/compiler/rustc_thread_pool/src/compile_fail/rc_upvar.rs b/compiler/rustc_thread_pool/src/compile_fail/rc_upvar.rs new file mode 100644 index 00000000000..6dc9ead48a0 --- /dev/null +++ b/compiler/rustc_thread_pool/src/compile_fail/rc_upvar.rs @@ -0,0 +1,9 @@ +/*! ```compile_fail,E0277 + +use std::rc::Rc; + +let r = Rc::new(22); +rustc_thread_pool::join(|| r.clone(), || r.clone()); +//~^ ERROR + +``` */ diff --git a/compiler/rustc_thread_pool/src/compile_fail/scope_join_bad.rs b/compiler/rustc_thread_pool/src/compile_fail/scope_join_bad.rs new file mode 100644 index 00000000000..e65abfc3c1e --- /dev/null +++ b/compiler/rustc_thread_pool/src/compile_fail/scope_join_bad.rs @@ -0,0 +1,24 @@ +/*! ```compile_fail,E0373 + +fn bad_scope<F>(f: F) + where F: FnOnce(&i32) + Send, +{ + rustc_thread_pool::scope(|s| { + let x = 22; + s.spawn(|_| f(&x)); //~ ERROR `x` does not live long enough + }); +} + +fn good_scope<F>(f: F) + where F: FnOnce(&i32) + Send, +{ + let x = 22; + rustc_thread_pool::scope(|s| { + s.spawn(|_| f(&x)); + }); +} + +fn main() { +} + +``` */ diff --git a/compiler/rustc_thread_pool/src/job.rs b/compiler/rustc_thread_pool/src/job.rs new file mode 100644 index 00000000000..e6e84ac2320 --- /dev/null +++ b/compiler/rustc_thread_pool/src/job.rs @@ -0,0 +1,277 @@ +use std::any::Any; +use std::cell::UnsafeCell; +use std::mem; +use std::sync::Arc; + +use crossbeam_deque::{Injector, Steal}; + +use crate::latch::Latch; +use crate::tlv::Tlv; +use crate::{tlv, unwind}; + +pub(super) enum JobResult<T> { + None, + Ok(T), + Panic(Box<dyn Any + Send>), +} + +/// A `Job` is used to advertise work for other threads that they may +/// want to steal. In accordance with time honored tradition, jobs are +/// arranged in a deque, so that thieves can take from the top of the +/// deque while the main worker manages the bottom of the deque. This +/// deque is managed by the `thread_pool` module. +pub(super) trait Job { + /// Unsafe: this may be called from a different thread than the one + /// which scheduled the job, so the implementer must ensure the + /// appropriate traits are met, whether `Send`, `Sync`, or both. + unsafe fn execute(this: *const ()); +} + +/// Effectively a Job trait object. Each JobRef **must** be executed +/// exactly once, or else data may leak. +/// +/// Internally, we store the job's data in a `*const ()` pointer. The +/// true type is something like `*const StackJob<...>`, but we hide +/// it. We also carry the "execute fn" from the `Job` trait. 
+pub(super) struct JobRef { + pointer: *const (), + execute_fn: unsafe fn(*const ()), +} + +unsafe impl Send for JobRef {} +unsafe impl Sync for JobRef {} + +impl JobRef { + /// Unsafe: caller asserts that `data` will remain valid until the + /// job is executed. + pub(super) unsafe fn new<T>(data: *const T) -> JobRef + where + T: Job, + { + // erase types: + JobRef { pointer: data as *const (), execute_fn: <T as Job>::execute } + } + + /// Returns an opaque handle that can be saved and compared, + /// without making `JobRef` itself `Copy + Eq`. + #[inline] + pub(super) fn id(&self) -> impl Eq { + (self.pointer, self.execute_fn) + } + + #[inline] + pub(super) unsafe fn execute(self) { + unsafe { (self.execute_fn)(self.pointer) } + } +} + +/// A job that will be owned by a stack slot. This means that when it +/// executes it need not free any heap data, the cleanup occurs when +/// the stack frame is later popped. The function parameter indicates +/// `true` if the job was stolen -- executed on a different thread. +pub(super) struct StackJob<L, F, R> +where + L: Latch + Sync, + F: FnOnce(bool) -> R + Send, + R: Send, +{ + pub(super) latch: L, + func: UnsafeCell<Option<F>>, + result: UnsafeCell<JobResult<R>>, + tlv: Tlv, +} + +impl<L, F, R> StackJob<L, F, R> +where + L: Latch + Sync, + F: FnOnce(bool) -> R + Send, + R: Send, +{ + pub(super) fn new(tlv: Tlv, func: F, latch: L) -> StackJob<L, F, R> { + StackJob { + latch, + func: UnsafeCell::new(Some(func)), + result: UnsafeCell::new(JobResult::None), + tlv, + } + } + + pub(super) unsafe fn as_job_ref(&self) -> JobRef { + unsafe { JobRef::new(self) } + } + + pub(super) unsafe fn run_inline(self, stolen: bool) -> R { + self.func.into_inner().unwrap()(stolen) + } + + pub(super) unsafe fn into_result(self) -> R { + self.result.into_inner().into_return_value() + } +} + +impl<L, F, R> Job for StackJob<L, F, R> +where + L: Latch + Sync, + F: FnOnce(bool) -> R + Send, + R: Send, +{ + unsafe fn execute(this: *const ()) { + let this = unsafe { &*(this as *const Self) }; + tlv::set(this.tlv); + let abort = unwind::AbortIfPanic; + let func = unsafe { (*this.func.get()).take().unwrap() }; + unsafe { + (*this.result.get()) = JobResult::call(func); + } + unsafe { + Latch::set(&this.latch); + } + mem::forget(abort); + } +} + +/// Represents a job stored in the heap. Used to implement +/// `scope`. Unlike `StackJob`, when executed, `HeapJob` simply +/// invokes a closure, which then triggers the appropriate logic to +/// signal that the job executed. +/// +/// (Probably `StackJob` should be refactored in a similar fashion.) +pub(super) struct HeapJob<BODY> +where + BODY: FnOnce() + Send, +{ + job: BODY, + tlv: Tlv, +} + +impl<BODY> HeapJob<BODY> +where + BODY: FnOnce() + Send, +{ + pub(super) fn new(tlv: Tlv, job: BODY) -> Box<Self> { + Box::new(HeapJob { job, tlv }) + } + + /// Creates a `JobRef` from this job -- note that this hides all + /// lifetimes, so it is up to you to ensure that this JobRef + /// doesn't outlive any data that it closes over. + pub(super) unsafe fn into_job_ref(self: Box<Self>) -> JobRef { + unsafe { JobRef::new(Box::into_raw(self)) } + } + + /// Creates a static `JobRef` from this job. 
+ pub(super) fn into_static_job_ref(self: Box<Self>) -> JobRef + where + BODY: 'static, + { + unsafe { self.into_job_ref() } + } +} + +impl<BODY> Job for HeapJob<BODY> +where + BODY: FnOnce() + Send, +{ + unsafe fn execute(this: *const ()) { + let this = unsafe { Box::from_raw(this as *mut Self) }; + tlv::set(this.tlv); + (this.job)(); + } +} + +/// Represents a job stored in an `Arc` -- like `HeapJob`, but may +/// be turned into multiple `JobRef`s and called multiple times. +pub(super) struct ArcJob<BODY> +where + BODY: Fn() + Send + Sync, +{ + job: BODY, +} + +impl<BODY> ArcJob<BODY> +where + BODY: Fn() + Send + Sync, +{ + pub(super) fn new(job: BODY) -> Arc<Self> { + Arc::new(ArcJob { job }) + } + + /// Creates a `JobRef` from this job -- note that this hides all + /// lifetimes, so it is up to you to ensure that this JobRef + /// doesn't outlive any data that it closes over. + pub(super) unsafe fn as_job_ref(this: &Arc<Self>) -> JobRef { + unsafe { JobRef::new(Arc::into_raw(Arc::clone(this))) } + } + + /// Creates a static `JobRef` from this job. + pub(super) fn as_static_job_ref(this: &Arc<Self>) -> JobRef + where + BODY: 'static, + { + unsafe { Self::as_job_ref(this) } + } +} + +impl<BODY> Job for ArcJob<BODY> +where + BODY: Fn() + Send + Sync, +{ + unsafe fn execute(this: *const ()) { + let this = unsafe { Arc::from_raw(this as *mut Self) }; + (this.job)(); + } +} + +impl<T> JobResult<T> { + fn call(func: impl FnOnce(bool) -> T) -> Self { + match unwind::halt_unwinding(|| func(true)) { + Ok(x) => JobResult::Ok(x), + Err(x) => JobResult::Panic(x), + } + } + + /// Convert the `JobResult` for a job that has finished (and hence + /// its JobResult is populated) into its return value. + /// + /// NB. This will panic if the job panicked. + pub(super) fn into_return_value(self) -> T { + match self { + JobResult::None => unreachable!(), + JobResult::Ok(x) => x, + JobResult::Panic(x) => unwind::resume_unwinding(x), + } + } +} + +/// Indirect queue to provide FIFO job priority. +pub(super) struct JobFifo { + inner: Injector<JobRef>, +} + +impl JobFifo { + pub(super) fn new() -> Self { + JobFifo { inner: Injector::new() } + } + + pub(super) unsafe fn push(&self, job_ref: JobRef) -> JobRef { + // A little indirection ensures that spawns are always prioritized in FIFO order. The + // jobs in a thread's deque may be popped from the back (LIFO) or stolen from the front + // (FIFO), but either way they will end up popping from the front of this queue. + self.inner.push(job_ref); + unsafe { JobRef::new(self) } + } +} + +impl Job for JobFifo { + unsafe fn execute(this: *const ()) { + // We "execute" a queue by executing its first job, FIFO. + let this = unsafe { &*(this as *const Self) }; + loop { + match this.inner.steal() { + Steal::Success(job_ref) => break unsafe { job_ref.execute() }, + Steal::Empty => panic!("FIFO is empty"), + Steal::Retry => {} + } + } + } +} diff --git a/compiler/rustc_thread_pool/src/join/mod.rs b/compiler/rustc_thread_pool/src/join/mod.rs new file mode 100644 index 00000000000..f285362c19b --- /dev/null +++ b/compiler/rustc_thread_pool/src/join/mod.rs @@ -0,0 +1,201 @@ +use std::any::Any; + +use crate::job::StackJob; +use crate::latch::SpinLatch; +use crate::registry::{self, WorkerThread}; +use crate::tlv::{self, Tlv}; +use crate::{FnContext, unwind}; + +#[cfg(test)] +mod tests; + +/// Takes two closures and *potentially* runs them in parallel. It +/// returns a pair of the results from those closures. 
+/// +/// Conceptually, calling `join()` is similar to spawning two threads, +/// one executing each of the two closures. However, the +/// implementation is quite different and incurs very low +/// overhead. The underlying technique is called "work stealing": the +/// Rayon runtime uses a fixed pool of worker threads and attempts to +/// only execute code in parallel when there are idle CPUs to handle +/// it. +/// +/// When `join` is called from outside the thread pool, the calling +/// thread will block while the closures execute in the pool. When +/// `join` is called within the pool, the calling thread still actively +/// participates in the thread pool. It will begin by executing closure +/// A (on the current thread). While it is doing that, it will advertise +/// closure B as being available for other threads to execute. Once closure A +/// has completed, the current thread will try to execute closure B; +/// if however closure B has been stolen, then it will look for other work +/// while waiting for the thief to fully execute closure B. (This is the +/// typical work-stealing strategy). +/// +/// # Examples +/// +/// This example uses join to perform a quick-sort (note this is not a +/// particularly optimized implementation: if you **actually** want to +/// sort for real, you should prefer [the `par_sort` method] offered +/// by Rayon). +/// +/// [the `par_sort` method]: ../rayon/slice/trait.ParallelSliceMut.html#method.par_sort +/// +/// ```rust +/// # use rustc_thread_pool as rayon; +/// let mut v = vec![5, 1, 8, 22, 0, 44]; +/// quick_sort(&mut v); +/// assert_eq!(v, vec![0, 1, 5, 8, 22, 44]); +/// +/// fn quick_sort<T:PartialOrd+Send>(v: &mut [T]) { +/// if v.len() > 1 { +/// let mid = partition(v); +/// let (lo, hi) = v.split_at_mut(mid); +/// rayon::join(|| quick_sort(lo), +/// || quick_sort(hi)); +/// } +/// } +/// +/// // Partition rearranges all items `<=` to the pivot +/// // item (arbitrary selected to be the last item in the slice) +/// // to the first half of the slice. It then returns the +/// // "dividing point" where the pivot is placed. +/// fn partition<T:PartialOrd+Send>(v: &mut [T]) -> usize { +/// let pivot = v.len() - 1; +/// let mut i = 0; +/// for j in 0..pivot { +/// if v[j] <= v[pivot] { +/// v.swap(i, j); +/// i += 1; +/// } +/// } +/// v.swap(i, pivot); +/// i +/// } +/// ``` +/// +/// # Warning about blocking I/O +/// +/// The assumption is that the closures given to `join()` are +/// CPU-bound tasks that do not perform I/O or other blocking +/// operations. If you do perform I/O, and that I/O should block +/// (e.g., waiting for a network request), the overall performance may +/// be poor. Moreover, if you cause one closure to be blocked waiting +/// on another (for example, using a channel), that could lead to a +/// deadlock. +/// +/// # Panics +/// +/// No matter what happens, both closures will always be executed. If +/// a single closure panics, whether it be the first or second +/// closure, that panic will be propagated and hence `join()` will +/// panic with the same panic value. If both closures panic, `join()` +/// will panic with the panic value from the first closure. 
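The panic contract in the last paragraph above is easy to misread, so here is a tiny sketch of what it guarantees: closure B always runs to completion even when closure A panics, and A's payload is the one that propagates. It mirrors the `panic_b_still_executes` test later in this diff, substituting std's `catch_unwind` for the crate-internal unwind helpers; illustrative only.

```rust
use std::panic::{self, AssertUnwindSafe};

fn demo() {
    let mut b_ran = false;
    let result = panic::catch_unwind(AssertUnwindSafe(|| {
        // A panics, but B is still executed before `join` unwinds.
        rustc_thread_pool::join(|| panic!("boom"), || b_ran = true)
    }));
    assert!(result.is_err());
    assert!(b_ran, "closure B always executes");
}
```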
+pub fn join<A, B, RA, RB>(oper_a: A, oper_b: B) -> (RA, RB) +where + A: FnOnce() -> RA + Send, + B: FnOnce() -> RB + Send, + RA: Send, + RB: Send, +{ + #[inline] + fn call<R>(f: impl FnOnce() -> R) -> impl FnOnce(FnContext) -> R { + move |_| f() + } + + join_context(call(oper_a), call(oper_b)) +} + +/// Identical to `join`, except that the closures have a parameter +/// that provides context for the way the closure has been called, +/// especially indicating whether they're executing on a different +/// thread than where `join_context` was called. This will occur if +/// the second job is stolen by a different thread, or if +/// `join_context` was called from outside the thread pool to begin +/// with. +pub fn join_context<A, B, RA, RB>(oper_a: A, oper_b: B) -> (RA, RB) +where + A: FnOnce(FnContext) -> RA + Send, + B: FnOnce(FnContext) -> RB + Send, + RA: Send, + RB: Send, +{ + #[inline] + fn call_a<R>(f: impl FnOnce(FnContext) -> R, injected: bool) -> impl FnOnce() -> R { + move || f(FnContext::new(injected)) + } + + #[inline] + fn call_b<R>(f: impl FnOnce(FnContext) -> R) -> impl FnOnce(bool) -> R { + move |migrated| f(FnContext::new(migrated)) + } + + registry::in_worker(|worker_thread, injected| unsafe { + let tlv = tlv::get(); + // Create virtual wrapper for task b; this all has to be + // done here so that the stack frame can keep it all live + // long enough. + let job_b = StackJob::new(tlv, call_b(oper_b), SpinLatch::new(worker_thread)); + let job_b_ref = job_b.as_job_ref(); + let job_b_id = job_b_ref.id(); + worker_thread.push(job_b_ref); + + // Execute task a; hopefully b gets stolen in the meantime. + let status_a = unwind::halt_unwinding(call_a(oper_a, injected)); + let result_a = match status_a { + Ok(v) => v, + Err(err) => join_recover_from_panic(worker_thread, &job_b.latch, err, tlv), + }; + + // Now that task A has finished, try to pop job B from the + // local stack. It may already have been popped by job A; it + // may also have been stolen. There may also be some tasks + // pushed on top of it in the stack, and we will have to pop + // those off to get to it. + while !job_b.latch.probe() { + if let Some(job) = worker_thread.take_local_job() { + if job_b_id == job.id() { + // Found it! Let's run it. + // + // Note that this could panic, but it's ok if we unwind here. + + // Restore the TLV since we might have run some jobs overwriting it when waiting for job b. + tlv::set(tlv); + + let result_b = job_b.run_inline(injected); + return (result_a, result_b); + } else { + worker_thread.execute(job); + } + } else { + // Local deque is empty. Time to steal from other + // threads. + worker_thread.wait_until(&job_b.latch); + debug_assert!(job_b.latch.probe()); + break; + } + } + + // Restore the TLV since we might have run some jobs overwriting it when waiting for job b. + tlv::set(tlv); + + (result_a, job_b.into_result()) + }) +} + +/// If job A panics, we still cannot return until we are sure that job +/// B is complete. This is because it may contain references into the +/// enclosing stack frame(s). +#[cold] // cold path +unsafe fn join_recover_from_panic( + worker_thread: &WorkerThread, + job_b_latch: &SpinLatch<'_>, + err: Box<dyn Any + Send>, + tlv: Tlv, +) -> ! { + unsafe { worker_thread.wait_until(job_b_latch) }; + + // Restore the TLV since we might have run some jobs overwriting it when waiting for job b. 
+ tlv::set(tlv); + + unwind::resume_unwinding(err) +} diff --git a/compiler/rustc_thread_pool/src/join/tests.rs b/compiler/rustc_thread_pool/src/join/tests.rs new file mode 100644 index 00000000000..9df99072c3a --- /dev/null +++ b/compiler/rustc_thread_pool/src/join/tests.rs @@ -0,0 +1,151 @@ +//! Tests for the join code. + +use rand::distr::StandardUniform; +use rand::{Rng, SeedableRng}; +use rand_xorshift::XorShiftRng; + +use super::*; +use crate::ThreadPoolBuilder; + +fn quick_sort<T: PartialOrd + Send>(v: &mut [T]) { + if v.len() <= 1 { + return; + } + + let mid = partition(v); + let (lo, hi) = v.split_at_mut(mid); + join(|| quick_sort(lo), || quick_sort(hi)); +} + +fn partition<T: PartialOrd + Send>(v: &mut [T]) -> usize { + let pivot = v.len() - 1; + let mut i = 0; + for j in 0..pivot { + if v[j] <= v[pivot] { + v.swap(i, j); + i += 1; + } + } + v.swap(i, pivot); + i +} + +fn seeded_rng() -> XorShiftRng { + let mut seed = <XorShiftRng as SeedableRng>::Seed::default(); + (0..).zip(seed.as_mut()).for_each(|(i, x)| *x = i); + XorShiftRng::from_seed(seed) +} + +#[test] +fn sort() { + let rng = seeded_rng(); + let mut data: Vec<u32> = rng.sample_iter(&StandardUniform).take(6 * 1024).collect(); + let mut sorted_data = data.clone(); + sorted_data.sort(); + quick_sort(&mut data); + assert_eq!(data, sorted_data); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn sort_in_pool() { + let rng = seeded_rng(); + let mut data: Vec<u32> = rng.sample_iter(&StandardUniform).take(12 * 1024).collect(); + + let pool = ThreadPoolBuilder::new().build().unwrap(); + let mut sorted_data = data.clone(); + sorted_data.sort(); + pool.install(|| quick_sort(&mut data)); + assert_eq!(data, sorted_data); +} + +#[test] +#[should_panic(expected = "Hello, world!")] +fn panic_propagate_a() { + join(|| panic!("Hello, world!"), || ()); +} + +#[test] +#[should_panic(expected = "Hello, world!")] +fn panic_propagate_b() { + join(|| (), || panic!("Hello, world!")); +} + +#[test] +#[should_panic(expected = "Hello, world!")] +fn panic_propagate_both() { + join(|| panic!("Hello, world!"), || panic!("Goodbye, world!")); +} + +#[test] +#[cfg_attr(not(panic = "unwind"), ignore)] +fn panic_b_still_executes() { + let mut x = false; + match unwind::halt_unwinding(|| join(|| panic!("Hello, world!"), || x = true)) { + Ok(_) => panic!("failed to propagate panic from closure A,"), + Err(_) => assert!(x, "closure b failed to execute"), + } +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn join_context_both() { + // If we're not in a pool, both should be marked stolen as they're injected. + let (a_migrated, b_migrated) = join_context(|a| a.migrated(), |b| b.migrated()); + assert!(a_migrated); + assert!(b_migrated); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn join_context_neither() { + // If we're already in a 1-thread pool, neither job should be stolen. + let pool = ThreadPoolBuilder::new().num_threads(1).build().unwrap(); + let (a_migrated, b_migrated) = + pool.install(|| join_context(|a| a.migrated(), |b| b.migrated())); + assert!(!a_migrated); + assert!(!b_migrated); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn join_context_second() { + use std::sync::Barrier; + + // If we're already in a 2-thread pool, the second job should be stolen. 
+ let barrier = Barrier::new(2); + let pool = ThreadPoolBuilder::new().num_threads(2).build().unwrap(); + let (a_migrated, b_migrated) = pool.install(|| { + join_context( + |a| { + barrier.wait(); + a.migrated() + }, + |b| { + barrier.wait(); + b.migrated() + }, + ) + }); + assert!(!a_migrated); + assert!(b_migrated); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn join_counter_overflow() { + const MAX: u32 = 500_000; + + let mut i = 0; + let mut j = 0; + let pool = ThreadPoolBuilder::new().num_threads(2).build().unwrap(); + + // Hammer on join a bunch of times -- used to hit overflow debug-assertions + // in JEC on 32-bit targets: https://github.com/rayon-rs/rayon/issues/797 + for _ in 0..MAX { + pool.join(|| i += 1, || j += 1); + } + + assert_eq!(i, MAX); + assert_eq!(j, MAX); +} diff --git a/compiler/rustc_thread_pool/src/latch.rs b/compiler/rustc_thread_pool/src/latch.rs new file mode 100644 index 00000000000..49ba62d3bea --- /dev/null +++ b/compiler/rustc_thread_pool/src/latch.rs @@ -0,0 +1,431 @@ +use std::marker::PhantomData; +use std::ops::Deref; +use std::sync::atomic::{AtomicUsize, Ordering}; +use std::sync::{Arc, Condvar, Mutex}; + +use crate::registry::{Registry, WorkerThread}; + +/// We define various kinds of latches, which are all a primitive signaling +/// mechanism. A latch starts as false. Eventually someone calls `set()` and +/// it becomes true. You can test if it has been set by calling `probe()`. +/// +/// Some kinds of latches, but not all, support a `wait()` operation +/// that will wait until the latch is set, blocking efficiently. That +/// is not part of the trait since it is not possibly to do with all +/// latches. +/// +/// The intention is that `set()` is called once, but `probe()` may be +/// called any number of times. Once `probe()` returns true, the memory +/// effects that occurred before `set()` become visible. +/// +/// It'd probably be better to refactor the API into two paired types, +/// but that's a bit of work, and this is not a public API. +/// +/// ## Memory ordering +/// +/// Latches need to guarantee two things: +/// +/// - Once `probe()` returns true, all memory effects from the `set()` +/// are visible (in other words, the set should synchronize-with +/// the probe). +/// - Once `set()` occurs, the next `probe()` *will* observe it. This +/// typically requires a seq-cst ordering. See [the "tickle-then-get-sleepy" scenario in the sleep +/// README](/src/sleep/README.md#tickle-then-get-sleepy) for details. +pub(super) trait Latch { + /// Set the latch, signalling others. + /// + /// # WARNING + /// + /// Setting a latch triggers other threads to wake up and (in some + /// cases) complete. This may, in turn, cause memory to be + /// deallocated and so forth. One must be very careful about this, + /// and it's typically better to read all the fields you will need + /// to access *before* a latch is set! + /// + /// This function operates on `*const Self` instead of `&self` to allow it + /// to become dangling during this call. The caller must ensure that the + /// pointer is valid upon entry, and not invalidated during the call by any + /// actions other than `set` itself. + unsafe fn set(this: *const Self); +} + +pub(super) trait AsCoreLatch { + fn as_core_latch(&self) -> &CoreLatch; +} + +/// Latch is not set, owning thread is awake +const UNSET: usize = 0; + +/// Latch is not set, owning thread is going to sleep on this latch +/// (but has not yet fallen asleep). 
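Aside on the `Latch` contract spelled out above: a stripped-down latch makes the two guarantees concrete. This toy version models only the set/probe visibility pair (Release store, Acquire load); the real `CoreLatch` below additionally encodes the UNSET/SLEEPY/SLEEPING/SET sleep states and relies on the stronger orderings discussed in the sleep README. It is not part of the crate's API.

```rust
use std::sync::atomic::{AtomicBool, Ordering};

// Toy latch: once `probe` returns true, every write made before `set`
// is guaranteed to be visible to the prober.
struct ToyLatch {
    flag: AtomicBool,
}

impl ToyLatch {
    fn new() -> Self {
        ToyLatch { flag: AtomicBool::new(false) }
    }

    fn set(&self) {
        // Publish: pairs with the Acquire load in `probe`.
        self.flag.store(true, Ordering::Release);
    }

    fn probe(&self) -> bool {
        self.flag.load(Ordering::Acquire)
    }
}
```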
+const SLEEPY: usize = 1; + +/// Latch is not set, owning thread is asleep on this latch and +/// must be awoken. +const SLEEPING: usize = 2; + +/// Latch is set. +const SET: usize = 3; + +/// Spin latches are the simplest, most efficient kind, but they do +/// not support a `wait()` operation. They just have a boolean flag +/// that becomes true when `set()` is called. +#[derive(Debug)] +pub(super) struct CoreLatch { + state: AtomicUsize, +} + +impl CoreLatch { + #[inline] + fn new() -> Self { + Self { state: AtomicUsize::new(0) } + } + + /// Invoked by owning thread as it prepares to sleep. Returns true + /// if the owning thread may proceed to fall asleep, false if the + /// latch was set in the meantime. + #[inline] + pub(super) fn get_sleepy(&self) -> bool { + self.state.compare_exchange(UNSET, SLEEPY, Ordering::SeqCst, Ordering::Relaxed).is_ok() + } + + /// Invoked by owning thread as it falls asleep sleep. Returns + /// true if the owning thread should block, or false if the latch + /// was set in the meantime. + #[inline] + pub(super) fn fall_asleep(&self) -> bool { + self.state.compare_exchange(SLEEPY, SLEEPING, Ordering::SeqCst, Ordering::Relaxed).is_ok() + } + + /// Invoked by owning thread as it falls asleep sleep. Returns + /// true if the owning thread should block, or false if the latch + /// was set in the meantime. + #[inline] + pub(super) fn wake_up(&self) { + if !self.probe() { + let _ = + self.state.compare_exchange(SLEEPING, UNSET, Ordering::SeqCst, Ordering::Relaxed); + } + } + + /// Set the latch. If this returns true, the owning thread was sleeping + /// and must be awoken. + /// + /// This is private because, typically, setting a latch involves + /// doing some wakeups; those are encapsulated in the surrounding + /// latch code. + #[inline] + unsafe fn set(this: *const Self) -> bool { + let old_state = unsafe { (*this).state.swap(SET, Ordering::AcqRel) }; + old_state == SLEEPING + } + + /// Test if this latch has been set. + #[inline] + pub(super) fn probe(&self) -> bool { + self.state.load(Ordering::Acquire) == SET + } +} + +impl AsCoreLatch for CoreLatch { + #[inline] + fn as_core_latch(&self) -> &CoreLatch { + self + } +} + +/// Spin latches are the simplest, most efficient kind, but they do +/// not support a `wait()` operation. They just have a boolean flag +/// that becomes true when `set()` is called. +pub(super) struct SpinLatch<'r> { + core_latch: CoreLatch, + registry: &'r Arc<Registry>, + target_worker_index: usize, + cross: bool, +} + +impl<'r> SpinLatch<'r> { + /// Creates a new spin latch that is owned by `thread`. This means + /// that `thread` is the only thread that should be blocking on + /// this latch -- it also means that when the latch is set, we + /// will wake `thread` if it is sleeping. + #[inline] + pub(super) fn new(thread: &'r WorkerThread) -> SpinLatch<'r> { + SpinLatch { + core_latch: CoreLatch::new(), + registry: thread.registry(), + target_worker_index: thread.index(), + cross: false, + } + } + + /// Creates a new spin latch for cross-threadpool blocking. Notably, we + /// need to make sure the registry is kept alive after setting, so we can + /// safely call the notification. 
+ #[inline] + pub(super) fn cross(thread: &'r WorkerThread) -> SpinLatch<'r> { + SpinLatch { cross: true, ..SpinLatch::new(thread) } + } + + #[inline] + pub(super) fn probe(&self) -> bool { + self.core_latch.probe() + } +} + +impl<'r> AsCoreLatch for SpinLatch<'r> { + #[inline] + fn as_core_latch(&self) -> &CoreLatch { + &self.core_latch + } +} + +impl<'r> Latch for SpinLatch<'r> { + #[inline] + unsafe fn set(this: *const Self) { + let cross_registry; + + let registry: &Registry = if unsafe { (*this).cross } { + // Ensure the registry stays alive while we notify it. + // Otherwise, it would be possible that we set the spin + // latch and the other thread sees it and exits, causing + // the registry to be deallocated, all before we get a + // chance to invoke `registry.notify_worker_latch_is_set`. + cross_registry = Arc::clone(unsafe { (*this).registry }); + &cross_registry + } else { + // If this is not a "cross-registry" spin-latch, then the + // thread which is performing `set` is itself ensuring + // that the registry stays alive. However, that doesn't + // include this *particular* `Arc` handle if the waiting + // thread then exits, so we must completely dereference it. + unsafe { (*this).registry } + }; + let target_worker_index = unsafe { (*this).target_worker_index }; + + // NOTE: Once we `set`, the target may proceed and invalidate `this`! + if unsafe { CoreLatch::set(&(*this).core_latch) } { + // Subtle: at this point, we can no longer read from + // `self`, because the thread owning this spin latch may + // have awoken and deallocated the latch. Therefore, we + // only use fields whose values we already read. + registry.notify_worker_latch_is_set(target_worker_index); + } + } +} + +/// A Latch starts as false and eventually becomes true. You can block +/// until it becomes true. +#[derive(Debug)] +pub(super) struct LockLatch { + m: Mutex<bool>, + v: Condvar, +} + +impl LockLatch { + #[inline] + pub(super) fn new() -> LockLatch { + LockLatch { m: Mutex::new(false), v: Condvar::new() } + } + + /// Block until latch is set, then resets this lock latch so it can be reused again. + pub(super) fn wait_and_reset(&self) { + let mut guard = self.m.lock().unwrap(); + while !*guard { + guard = self.v.wait(guard).unwrap(); + } + *guard = false; + } + + /// Block until latch is set. + pub(super) fn wait(&self) { + let mut guard = self.m.lock().unwrap(); + while !*guard { + guard = self.v.wait(guard).unwrap(); + } + } +} + +impl Latch for LockLatch { + #[inline] + unsafe fn set(this: *const Self) { + let mut guard = unsafe { (*this).m.lock().unwrap() }; + *guard = true; + unsafe { (*this).v.notify_all() }; + } +} + +/// Once latches are used to implement one-time blocking, primarily +/// for the termination flag of the threads in the pool. +/// +/// Note: like a `SpinLatch`, once-latches are always associated with +/// some registry that is probing them, which must be tickled when +/// they are set. *Unlike* a `SpinLatch`, they don't themselves hold a +/// reference to that registry. This is because in some cases the +/// registry owns the once-latch, and that would create a cycle. So a +/// `OnceLatch` must be given a reference to its owning registry when +/// it is set. For this reason, it does not implement the `Latch` +/// trait (but it doesn't have to, as it is not used in those generic +/// contexts). 
+#[derive(Debug)] +pub(super) struct OnceLatch { + core_latch: CoreLatch, +} + +impl OnceLatch { + #[inline] + pub(super) fn new() -> OnceLatch { + Self { core_latch: CoreLatch::new() } + } + + /// Set the latch, then tickle the specific worker thread, + /// which should be the one that owns this latch. + #[inline] + pub(super) unsafe fn set_and_tickle_one( + this: *const Self, + registry: &Registry, + target_worker_index: usize, + ) { + if unsafe { CoreLatch::set(&(*this).core_latch) } { + registry.notify_worker_latch_is_set(target_worker_index); + } + } +} + +impl AsCoreLatch for OnceLatch { + #[inline] + fn as_core_latch(&self) -> &CoreLatch { + &self.core_latch + } +} + +/// Counting latches are used to implement scopes. They track a +/// counter. Unlike other latches, calling `set()` does not +/// necessarily make the latch be considered `set()`; instead, it just +/// decrements the counter. The latch is only "set" (in the sense that +/// `probe()` returns true) once the counter reaches zero. +#[derive(Debug)] +pub(super) struct CountLatch { + counter: AtomicUsize, + kind: CountLatchKind, +} + +enum CountLatchKind { + /// A latch for scopes created on a rayon thread which will participate in work- + /// stealing while it waits for completion. This thread is not necessarily part + /// of the same registry as the scope itself! + Stealing { + latch: CoreLatch, + /// If a worker thread in registry A calls `in_place_scope` on a ThreadPool + /// with registry B, when a job completes in a thread of registry B, we may + /// need to call `notify_worker_latch_is_set()` to wake the thread in registry A. + /// That means we need a reference to registry A (since at that point we will + /// only have a reference to registry B), so we stash it here. + registry: Arc<Registry>, + /// The index of the worker to wake in `registry` + worker_index: usize, + }, + + /// A latch for scopes created on a non-rayon thread which will block to wait. + Blocking { latch: LockLatch }, +} + +impl std::fmt::Debug for CountLatchKind { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + CountLatchKind::Stealing { latch, .. } => { + f.debug_tuple("Stealing").field(latch).finish() + } + CountLatchKind::Blocking { latch, .. 
} => { + f.debug_tuple("Blocking").field(latch).finish() + } + } + } +} + +impl CountLatch { + pub(super) fn new(owner: Option<&WorkerThread>) -> Self { + Self::with_count(1, owner) + } + + pub(super) fn with_count(count: usize, owner: Option<&WorkerThread>) -> Self { + Self { + counter: AtomicUsize::new(count), + kind: match owner { + Some(owner) => CountLatchKind::Stealing { + latch: CoreLatch::new(), + registry: Arc::clone(owner.registry()), + worker_index: owner.index(), + }, + None => CountLatchKind::Blocking { latch: LockLatch::new() }, + }, + } + } + + #[inline] + pub(super) fn increment(&self) { + let old_counter = self.counter.fetch_add(1, Ordering::Relaxed); + debug_assert!(old_counter != 0); + } + + pub(super) fn wait(&self, owner: Option<&WorkerThread>) { + match &self.kind { + CountLatchKind::Stealing { latch, registry, worker_index } => unsafe { + let owner = owner.expect("owner thread"); + debug_assert_eq!(registry.id(), owner.registry().id()); + debug_assert_eq!(*worker_index, owner.index()); + owner.wait_until(latch); + }, + CountLatchKind::Blocking { latch } => latch.wait(), + } + } +} + +impl Latch for CountLatch { + #[inline] + unsafe fn set(this: *const Self) { + if unsafe { (*this).counter.fetch_sub(1, Ordering::SeqCst) == 1 } { + // NOTE: Once we call `set` on the internal `latch`, + // the target may proceed and invalidate `this`! + match unsafe { &(*this).kind } { + CountLatchKind::Stealing { latch, registry, worker_index } => { + let registry = Arc::clone(registry); + if unsafe { CoreLatch::set(latch) } { + registry.notify_worker_latch_is_set(*worker_index); + } + } + CountLatchKind::Blocking { latch } => unsafe { LockLatch::set(latch) }, + } + } + } +} + +/// `&L` without any implication of `dereferenceable` for `Latch::set` +pub(super) struct LatchRef<'a, L> { + inner: *const L, + marker: PhantomData<&'a L>, +} + +impl<L> LatchRef<'_, L> { + pub(super) fn new(inner: &L) -> LatchRef<'_, L> { + LatchRef { inner, marker: PhantomData } + } +} + +unsafe impl<L: Sync> Sync for LatchRef<'_, L> {} + +impl<L> Deref for LatchRef<'_, L> { + type Target = L; + + fn deref(&self) -> &L { + // SAFETY: if we have &self, the inner latch is still alive + unsafe { &*self.inner } + } +} + +impl<L: Latch> Latch for LatchRef<'_, L> { + #[inline] + unsafe fn set(this: *const Self) { + unsafe { L::set((*this).inner) }; + } +} diff --git a/compiler/rustc_thread_pool/src/lib.rs b/compiler/rustc_thread_pool/src/lib.rs new file mode 100644 index 00000000000..34252d919e3 --- /dev/null +++ b/compiler/rustc_thread_pool/src/lib.rs @@ -0,0 +1,903 @@ +//! Rayon-core houses the core stable APIs of Rayon. +//! +//! These APIs have been mirrored in the Rayon crate and it is recommended to use these from there. +//! +//! [`join`] is used to take two closures and potentially run them in parallel. +//! - It will run in parallel if task B gets stolen before task A can finish. +//! - It will run sequentially if task A finishes before task B is stolen and can continue on task B. +//! +//! [`scope`] creates a scope in which you can run any number of parallel tasks. +//! These tasks can spawn nested tasks and scopes, but given the nature of work stealing, the order of execution can not be guaranteed. +//! The scope will exist until all tasks spawned within the scope have been completed. +//! +//! [`spawn`] add a task into the 'static' or 'global' scope, or a local scope created by the [`scope()`] function. +//! +//! 
[`ThreadPool`] can be used to create your own thread pools (using [`ThreadPoolBuilder`]) or to customize the global one.
+//! Tasks spawned within the pool (using [`install()`], [`join()`], etc.) will be added to a deque,
+//! where they become available for work stealing from other threads in the local threadpool.
+//!
+//! [`join`]: fn.join.html
+//! [`scope`]: fn.scope.html
+//! [`scope()`]: fn.scope.html
+//! [`spawn`]: fn.spawn.html
+//! [`ThreadPool`]: struct.ThreadPool.html
+//! [`install()`]: struct.ThreadPool.html#method.install
+//! [`spawn()`]: struct.ThreadPool.html#method.spawn
+//! [`join()`]: struct.ThreadPool.html#method.join
+//! [`ThreadPoolBuilder`]: struct.ThreadPoolBuilder.html
+//!
+//! # Global fallback when threading is unsupported
+//!
+//! Rayon uses `std` APIs for threading, but some targets have incomplete implementations that
+//! always return `Unsupported` errors. The WebAssembly `wasm32-unknown-unknown` and `wasm32-wasi`
+//! targets are notable examples of this. Rather than panicking on the unsupported error when
+//! creating the implicit global threadpool, Rayon configures a fallback mode instead.
+//!
+//! This fallback mode mostly functions as if it were using a single-threaded "pool", like setting
+//! `RAYON_NUM_THREADS=1`. For example, `join` will execute its two closures sequentially, since
+//! there is no other thread to share the work. However, since the pool is not running independently
+//! of the main thread, non-blocking calls like `spawn` may not execute at all, unless a lower-
+//! priority call like `broadcast` gives them an opening. The fallback mode does not try to emulate
+//! anything like thread preemption or `async` task switching, but `yield_now` or `yield_local`
+//! can also volunteer execution time.
+//!
+//! Explicit `ThreadPoolBuilder` methods always report their error without any fallback.
+//!
+//! # Restricting multiple versions
+//!
+//! In order to ensure proper coordination between threadpools, and especially
+//! to make sure there's only one global threadpool, `rayon-core` is actively
+//! restricted from building multiple versions of itself into a single target.
+//! You may see a build error like this in violation:
+//!
+//! ```text
+//! error: native library `rayon-core` is being linked to by more
+//! than one package, and can only be linked to by one package
+//! ```
+//!
+//! While we strive to keep `rayon-core` semver-compatible, it's still
+//! possible to arrive at this situation if different crates have overly
+//! restrictive tilde or inequality requirements for `rayon-core`. The
+//! conflicting requirements will need to be resolved before the build will
+//! succeed.
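To make the crate-level overview above concrete, here is a minimal, self-contained usage sketch. It is illustrative only: it assumes the crate is pulled in under its `rustc_thread_pool` name (exactly as the doc-tests in this file do) and exercises just the APIs named above (`join`, `scope`, `ThreadPoolBuilder`, `install`).

```rust
// Illustrative sketch, not taken from the crate itself: basic use of `join`,
// `scope`, and an explicitly built pool.
use rustc_thread_pool as rayon;

fn main() -> Result<(), rayon::ThreadPoolBuildError> {
    // `join` potentially runs its two closures in parallel and returns both results.
    let (a, b) = rayon::join(|| 1 + 1, || 2 + 2);
    assert_eq!((a, b), (2, 4));

    // `scope` blocks until every task spawned inside it has completed.
    let mut results = vec![0, 0];
    let (left, right) = results.split_at_mut(1);
    rayon::scope(|s| {
        s.spawn(|_| left[0] = 1);
        s.spawn(|_| right[0] = 2);
    });
    assert_eq!(results, vec![1, 2]);

    // A dedicated pool built via `ThreadPoolBuilder`; `install` runs the
    // closure on that pool's worker threads.
    let pool = rayon::ThreadPoolBuilder::new().num_threads(2).build()?;
    let pair = pool.install(|| rayon::join(|| 3, || 4));
    assert_eq!(pair, (3, 4));

    Ok(())
}
```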
+ +#![cfg_attr(test, allow(unused_crate_dependencies))] +#![warn(rust_2018_idioms)] + +use std::any::Any; +use std::error::Error; +use std::marker::PhantomData; +use std::str::FromStr; +use std::{env, fmt, io, thread}; + +#[macro_use] +mod private; + +mod broadcast; +mod job; +mod join; +mod latch; +mod registry; +mod scope; +mod sleep; +mod spawn; +mod thread_pool; +mod unwind; +mod worker_local; + +mod compile_fail; +mod tests; + +pub mod tlv; + +pub use worker_local::WorkerLocal; + +pub use self::broadcast::{BroadcastContext, broadcast, spawn_broadcast}; +pub use self::join::{join, join_context}; +use self::registry::{CustomSpawn, DefaultSpawn, ThreadSpawn}; +pub use self::registry::{Registry, ThreadBuilder, mark_blocked, mark_unblocked}; +pub use self::scope::{Scope, ScopeFifo, in_place_scope, in_place_scope_fifo, scope, scope_fifo}; +pub use self::spawn::{spawn, spawn_fifo}; +pub use self::thread_pool::{ + ThreadPool, Yield, current_thread_has_pending_tasks, current_thread_index, yield_local, + yield_now, +}; + +/// Returns the maximum number of threads that Rayon supports in a single thread-pool. +/// +/// If a higher thread count is requested by calling `ThreadPoolBuilder::num_threads` or by setting +/// the `RAYON_NUM_THREADS` environment variable, then it will be reduced to this maximum. +/// +/// The value may vary between different targets, and is subject to change in new Rayon versions. +pub fn max_num_threads() -> usize { + // We are limited by the bits available in the sleep counter's `AtomicUsize`. + crate::sleep::THREADS_MAX +} + +/// Returns the number of threads in the current registry. If this +/// code is executing within a Rayon thread-pool, then this will be +/// the number of threads for the thread-pool of the current +/// thread. Otherwise, it will be the number of threads for the global +/// thread-pool. +/// +/// This can be useful when trying to judge how many times to split +/// parallel work (the parallel iterator traits use this value +/// internally for this purpose). +/// +/// # Future compatibility note +/// +/// Note that unless this thread-pool was created with a +/// builder that specifies the number of threads, then this +/// number may vary over time in future versions (see [the +/// `num_threads()` method for details][snt]). +/// +/// [snt]: struct.ThreadPoolBuilder.html#method.num_threads +pub fn current_num_threads() -> usize { + crate::registry::Registry::current_num_threads() +} + +/// Error when initializing a thread pool. +#[derive(Debug)] +pub struct ThreadPoolBuildError { + kind: ErrorKind, +} + +#[derive(Debug)] +enum ErrorKind { + GlobalPoolAlreadyInitialized, + IOError(io::Error), +} + +/// Used to create a new [`ThreadPool`] or to configure the global rayon thread pool. +/// ## Creating a ThreadPool +/// The following creates a thread pool with 22 threads. +/// +/// ```rust +/// # use rustc_thread_pool as rayon; +/// let pool = rayon::ThreadPoolBuilder::new().num_threads(22).build().unwrap(); +/// ``` +/// +/// To instead configure the global thread pool, use [`build_global()`]: +/// +/// ```rust +/// # use rustc_thread_pool as rayon; +/// rayon::ThreadPoolBuilder::new().num_threads(22).build_global().unwrap(); +/// ``` +/// +/// [`ThreadPool`]: struct.ThreadPool.html +/// [`build_global()`]: struct.ThreadPoolBuilder.html#method.build_global +pub struct ThreadPoolBuilder<S = DefaultSpawn> { + /// The number of threads in the rayon thread pool. + /// If zero will use the RAYON_NUM_THREADS environment variable. 
+ /// If RAYON_NUM_THREADS is invalid or zero will use the default. + num_threads: usize, + + /// Custom closure, if any, to handle a panic that we cannot propagate + /// anywhere else. + panic_handler: Option<Box<PanicHandler>>, + + /// Closure to compute the name of a thread. + get_thread_name: Option<Box<dyn FnMut(usize) -> String>>, + + /// The stack size for the created worker threads + stack_size: Option<usize>, + + /// Closure invoked on deadlock. + deadlock_handler: Option<Box<DeadlockHandler>>, + + /// Closure invoked on worker thread start. + start_handler: Option<Box<StartHandler>>, + + /// Closure invoked on worker thread exit. + exit_handler: Option<Box<ExitHandler>>, + + /// Closure invoked to spawn threads. + spawn_handler: S, + + /// Closure invoked when starting computations in a thread. + acquire_thread_handler: Option<Box<AcquireThreadHandler>>, + + /// Closure invoked when blocking in a thread. + release_thread_handler: Option<Box<ReleaseThreadHandler>>, + + /// If false, worker threads will execute spawned jobs in a + /// "depth-first" fashion. If true, they will do a "breadth-first" + /// fashion. Depth-first is the default. + breadth_first: bool, +} + +/// Contains the rayon thread pool configuration. Use [`ThreadPoolBuilder`] instead. +/// +/// [`ThreadPoolBuilder`]: struct.ThreadPoolBuilder.html +#[deprecated(note = "Use `ThreadPoolBuilder`")] +#[derive(Default)] +pub struct Configuration { + builder: ThreadPoolBuilder, +} + +/// The type for a panic handling closure. Note that this same closure +/// may be invoked multiple times in parallel. +type PanicHandler = dyn Fn(Box<dyn Any + Send>) + Send + Sync; + +/// The type for a closure that gets invoked when the Rayon thread pool deadlocks +type DeadlockHandler = dyn Fn() + Send + Sync; + +/// The type for a closure that gets invoked when a thread starts. The +/// closure is passed the index of the thread on which it is invoked. +/// Note that this same closure may be invoked multiple times in parallel. +type StartHandler = dyn Fn(usize) + Send + Sync; + +/// The type for a closure that gets invoked when a thread exits. The +/// closure is passed the index of the thread on which it is invoked. +/// Note that this same closure may be invoked multiple times in parallel. +type ExitHandler = dyn Fn(usize) + Send + Sync; + +// NB: We can't `#[derive(Default)]` because `S` is left ambiguous. +impl Default for ThreadPoolBuilder { + fn default() -> Self { + ThreadPoolBuilder { + num_threads: 0, + panic_handler: None, + get_thread_name: None, + stack_size: None, + start_handler: None, + exit_handler: None, + deadlock_handler: None, + acquire_thread_handler: None, + release_thread_handler: None, + spawn_handler: DefaultSpawn, + breadth_first: false, + } + } +} + +/// The type for a closure that gets invoked before starting computations in a thread. +/// Note that this same closure may be invoked multiple times in parallel. +type AcquireThreadHandler = dyn Fn() + Send + Sync; + +/// The type for a closure that gets invoked before blocking in a thread. +/// Note that this same closure may be invoked multiple times in parallel. +type ReleaseThreadHandler = dyn Fn() + Send + Sync; + +impl ThreadPoolBuilder { + /// Creates and returns a valid rayon thread pool builder, but does not initialize it. + pub fn new() -> Self { + Self::default() + } +} + +/// Note: the `S: ThreadSpawn` constraint is an internal implementation detail for the +/// default spawn and those set by [`spawn_handler`](#method.spawn_handler). 
+impl<S> ThreadPoolBuilder<S> +where + S: ThreadSpawn, +{ + /// Creates a new `ThreadPool` initialized using this configuration. + pub fn build(self) -> Result<ThreadPool, ThreadPoolBuildError> { + ThreadPool::build(self) + } + + /// Initializes the global thread pool. This initialization is + /// **optional**. If you do not call this function, the thread pool + /// will be automatically initialized with the default + /// configuration. Calling `build_global` is not recommended, except + /// in two scenarios: + /// + /// - You wish to change the default configuration. + /// - You are running a benchmark, in which case initializing may + /// yield slightly more consistent results, since the worker threads + /// will already be ready to go even in the first iteration. But + /// this cost is minimal. + /// + /// Initialization of the global thread pool happens exactly + /// once. Once started, the configuration cannot be + /// changed. Therefore, if you call `build_global` a second time, it + /// will return an error. An `Ok` result indicates that this + /// is the first initialization of the thread pool. + pub fn build_global(self) -> Result<(), ThreadPoolBuildError> { + let registry = registry::init_global_registry(self)?; + registry.wait_until_primed(); + Ok(()) + } +} + +impl ThreadPoolBuilder { + /// Creates a scoped `ThreadPool` initialized using this configuration. + /// + /// This is a convenience function for building a pool using [`std::thread::scope`] + /// to spawn threads in a [`spawn_handler`](#method.spawn_handler). + /// The threads in this pool will start by calling `wrapper`, which should + /// do initialization and continue by calling `ThreadBuilder::run()`. + /// + /// [`std::thread::scope`]: https://doc.rust-lang.org/std/thread/fn.scope.html + /// + /// # Examples + /// + /// A scoped pool may be useful in combination with scoped thread-local variables. + /// + /// ``` + /// # use rustc_thread_pool as rayon; + /// + /// scoped_tls::scoped_thread_local!(static POOL_DATA: Vec<i32>); + /// + /// fn main() -> Result<(), rayon::ThreadPoolBuildError> { + /// let pool_data = vec![1, 2, 3]; + /// + /// // We haven't assigned any TLS data yet. + /// assert!(!POOL_DATA.is_set()); + /// + /// rayon::ThreadPoolBuilder::new() + /// .build_scoped( + /// // Borrow `pool_data` in TLS for each thread. + /// |thread| POOL_DATA.set(&pool_data, || thread.run()), + /// // Do some work that needs the TLS data. + /// |pool| pool.install(|| assert!(POOL_DATA.is_set())), + /// )?; + /// + /// // Once we've returned, `pool_data` is no longer borrowed. + /// drop(pool_data); + /// Ok(()) + /// } + /// ``` + pub fn build_scoped<W, F, R>(self, wrapper: W, with_pool: F) -> Result<R, ThreadPoolBuildError> + where + W: Fn(ThreadBuilder) + Sync, // expected to call `run()` + F: FnOnce(&ThreadPool) -> R, + { + std::thread::scope(|scope| { + let pool = self + .spawn_handler(|thread| { + let mut builder = std::thread::Builder::new(); + if let Some(name) = thread.name() { + builder = builder.name(name.to_string()); + } + if let Some(size) = thread.stack_size() { + builder = builder.stack_size(size); + } + builder.spawn_scoped(scope, || wrapper(thread))?; + Ok(()) + }) + .build()?; + let result = unwind::halt_unwinding(|| with_pool(&pool)); + pool.wait_until_stopped(); + match result { + Ok(result) => Ok(result), + Err(err) => unwind::resume_unwinding(err), + } + }) + } +} + +impl<S> ThreadPoolBuilder<S> { + /// Sets a custom function for spawning threads. 
+ /// + /// Note that the threads will not exit until after the pool is dropped. It + /// is up to the caller to wait for thread termination if that is important + /// for any invariants. For instance, threads created in [`std::thread::scope`] + /// will be joined before that scope returns, and this will block indefinitely + /// if the pool is leaked. Furthermore, the global thread pool doesn't terminate + /// until the entire process exits! + /// + /// # Examples + /// + /// A minimal spawn handler just needs to call `run()` from an independent thread. + /// + /// ``` + /// # use rustc_thread_pool as rayon; + /// fn main() -> Result<(), rayon::ThreadPoolBuildError> { + /// let pool = rayon::ThreadPoolBuilder::new() + /// .spawn_handler(|thread| { + /// std::thread::spawn(|| thread.run()); + /// Ok(()) + /// }) + /// .build()?; + /// + /// pool.install(|| println!("Hello from my custom thread!")); + /// Ok(()) + /// } + /// ``` + /// + /// The default spawn handler sets the name and stack size if given, and propagates + /// any errors from the thread builder. + /// + /// ``` + /// # use rustc_thread_pool as rayon; + /// fn main() -> Result<(), rayon::ThreadPoolBuildError> { + /// let pool = rayon::ThreadPoolBuilder::new() + /// .spawn_handler(|thread| { + /// let mut b = std::thread::Builder::new(); + /// if let Some(name) = thread.name() { + /// b = b.name(name.to_owned()); + /// } + /// if let Some(stack_size) = thread.stack_size() { + /// b = b.stack_size(stack_size); + /// } + /// b.spawn(|| thread.run())?; + /// Ok(()) + /// }) + /// .build()?; + /// + /// pool.install(|| println!("Hello from my fully custom thread!")); + /// Ok(()) + /// } + /// ``` + /// + /// This can also be used for a pool of scoped threads like [`crossbeam::scope`], + /// or [`std::thread::scope`] introduced in Rust 1.63, which is encapsulated in + /// [`build_scoped`](#method.build_scoped). + /// + /// [`crossbeam::scope`]: https://docs.rs/crossbeam/0.8/crossbeam/fn.scope.html + /// [`std::thread::scope`]: https://doc.rust-lang.org/std/thread/fn.scope.html + /// + /// ``` + /// # use rustc_thread_pool as rayon; + /// fn main() -> Result<(), rayon::ThreadPoolBuildError> { + /// std::thread::scope(|scope| { + /// let pool = rayon::ThreadPoolBuilder::new() + /// .spawn_handler(|thread| { + /// let mut builder = std::thread::Builder::new(); + /// if let Some(name) = thread.name() { + /// builder = builder.name(name.to_string()); + /// } + /// if let Some(size) = thread.stack_size() { + /// builder = builder.stack_size(size); + /// } + /// builder.spawn_scoped(scope, || { + /// // Add any scoped initialization here, then run! + /// thread.run() + /// })?; + /// Ok(()) + /// }) + /// .build()?; + /// + /// pool.install(|| println!("Hello from my custom scoped thread!")); + /// Ok(()) + /// }) + /// } + /// ``` + pub fn spawn_handler<F>(self, spawn: F) -> ThreadPoolBuilder<CustomSpawn<F>> + where + F: FnMut(ThreadBuilder) -> io::Result<()>, + { + ThreadPoolBuilder { + spawn_handler: CustomSpawn::new(spawn), + // ..self + num_threads: self.num_threads, + panic_handler: self.panic_handler, + get_thread_name: self.get_thread_name, + stack_size: self.stack_size, + start_handler: self.start_handler, + exit_handler: self.exit_handler, + deadlock_handler: self.deadlock_handler, + acquire_thread_handler: self.acquire_thread_handler, + release_thread_handler: self.release_thread_handler, + breadth_first: self.breadth_first, + } + } + + /// Returns a reference to the current spawn handler. 
+    fn get_spawn_handler(&mut self) -> &mut S {
+        &mut self.spawn_handler
+    }
+
+    /// Get the number of threads that will be used for the thread
+    /// pool. See `num_threads()` for more information.
+    fn get_num_threads(&self) -> usize {
+        if self.num_threads > 0 {
+            self.num_threads
+        } else {
+            let default = || thread::available_parallelism().map(|n| n.get()).unwrap_or(1);
+
+            match env::var("RAYON_NUM_THREADS").ok().and_then(|s| usize::from_str(&s).ok()) {
+                Some(x @ 1..) => return x,
+                Some(0) => return default(),
+                _ => {}
+            }
+
+            // Support for deprecated `RAYON_RS_NUM_CPUS`.
+            match env::var("RAYON_RS_NUM_CPUS").ok().and_then(|s| usize::from_str(&s).ok()) {
+                Some(x @ 1..) => x,
+                _ => default(),
+            }
+        }
+    }
+
+    /// Get the thread name for the thread with the given index.
+    fn get_thread_name(&mut self, index: usize) -> Option<String> {
+        let f = self.get_thread_name.as_mut()?;
+        Some(f(index))
+    }
+
+    /// Sets a closure which takes a thread index and returns
+    /// the thread's name.
+    pub fn thread_name<F>(mut self, closure: F) -> Self
+    where
+        F: FnMut(usize) -> String + 'static,
+    {
+        self.get_thread_name = Some(Box::new(closure));
+        self
+    }
+
+    /// Sets the number of threads to be used in the rayon threadpool.
+    ///
+    /// If you specify a non-zero number of threads using this
+    /// function, then the resulting thread-pools are guaranteed to
+    /// start at most this number of threads.
+    ///
+    /// If `num_threads` is 0, or you do not call this function, then
+    /// the Rayon runtime will select the number of threads
+    /// automatically. At present, this is based on the
+    /// `RAYON_NUM_THREADS` environment variable (if set),
+    /// or the number of logical CPUs (otherwise).
+    /// In the future, however, the default behavior may
+    /// change to dynamically add or remove threads as needed.
+    ///
+    /// **Future compatibility warning:** Given the default behavior
+    /// may change in the future, if you wish to rely on a fixed
+    /// number of threads, you should use this function to specify
+    /// that number. To reproduce the current default behavior, you
+    /// may wish to use [`std::thread::available_parallelism`]
+    /// to query the number of CPUs dynamically.
+    ///
+    /// **Old environment variable:** `RAYON_NUM_THREADS` is a one-to-one
+    /// replacement of the now deprecated `RAYON_RS_NUM_CPUS` environment
+    /// variable. If both variables are specified, `RAYON_NUM_THREADS` will
+    /// be preferred.
+    pub fn num_threads(mut self, num_threads: usize) -> Self {
+        self.num_threads = num_threads;
+        self
+    }
+
+    /// Takes the current panic handler, leaving `None`.
+    fn take_panic_handler(&mut self) -> Option<Box<PanicHandler>> {
+        self.panic_handler.take()
+    }
+
+    /// Normally, whenever Rayon catches a panic, it tries to
+    /// propagate it to someplace sensible, to try and reflect the
+    /// semantics of sequential execution. But in some cases,
+    /// particularly with the `spawn()` APIs, there is no
+    /// obvious place where we should propagate the panic to.
+    /// In that case, this panic handler is invoked.
+    ///
+    /// If no panic handler is set, the default is to abort the
+    /// process, under the principle that panics should not go
+    /// unobserved.
+    ///
+    /// If the panic handler itself panics, this will abort the
+    /// process. To prevent this, wrap the body of your panic handler
+    /// in a call to `std::panic::catch_unwind()`.
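For illustration, a minimal sketch of installing such a handler, assuming the crate is used under its `rustc_thread_pool` name as in the doc-tests above; the logging choice is arbitrary, and `spawn` is used only because, as noted, its panics have no caller to propagate to.

```rust
use std::any::Any;

use rustc_thread_pool as rayon;

fn main() -> Result<(), rayon::ThreadPoolBuildError> {
    let pool = rayon::ThreadPoolBuilder::new()
        .panic_handler(|payload: Box<dyn Any + Send>| {
            // The payload is whatever was passed to `panic!`; try the common
            // string forms before falling back to a generic message.
            let msg = payload
                .downcast_ref::<&str>()
                .copied()
                .or_else(|| payload.downcast_ref::<String>().map(String::as_str))
                .unwrap_or("<non-string panic payload>");
            eprintln!("rayon worker panicked: {msg}");
        })
        .build()?;

    // A panic inside `spawn` is routed to the handler instead of aborting.
    // (The process may exit before this detached task runs, so the message
    // is not guaranteed to appear.)
    pool.spawn(|| panic!("boom"));
    Ok(())
}
```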
+    pub fn panic_handler<H>(mut self, panic_handler: H) -> Self
+    where
+        H: Fn(Box<dyn Any + Send>) + Send + Sync + 'static,
+    {
+        self.panic_handler = Some(Box::new(panic_handler));
+        self
+    }
+
+    /// Get the stack size of the worker threads
+    fn get_stack_size(&self) -> Option<usize> {
+        self.stack_size
+    }
+
+    /// Sets the stack size of the worker threads
+    pub fn stack_size(mut self, stack_size: usize) -> Self {
+        self.stack_size = Some(stack_size);
+        self
+    }
+
+    /// **(DEPRECATED)** Suggest to worker threads that they execute
+    /// spawned jobs in a "breadth-first" fashion.
+    ///
+    /// Typically, when a worker thread is idle or blocked, it will
+    /// attempt to execute the job from the *top* of its local deque of
+    /// work (i.e., the job most recently spawned). If this flag is set
+    /// to true, however, workers will prefer to execute in a
+    /// *breadth-first* fashion -- that is, they will search for jobs at
+    /// the *bottom* of their local deque. (At present, workers *always*
+    /// steal from the bottom of other workers' deques, regardless of
+    /// the setting of this flag.)
+    ///
+    /// If you think of the tasks as a tree, where a parent task
+    /// spawns its children in the tree, then this flag loosely
+    /// corresponds to doing a breadth-first traversal of the tree,
+    /// whereas the default would be to do a depth-first traversal.
+    ///
+    /// **Note that this is an "execution hint".** Rayon's task
+    /// execution is highly dynamic and the precise order in which
+    /// independent tasks are executed is not intended to be
+    /// guaranteed.
+    ///
+    /// This `breadth_first()` method is now deprecated per [RFC #1],
+    /// and in the future its effect may be removed. Consider using
+    /// [`scope_fifo()`] for a similar effect.
+    ///
+    /// [RFC #1]: https://github.com/rayon-rs/rfcs/blob/master/accepted/rfc0001-scope-scheduling.md
+    /// [`scope_fifo()`]: fn.scope_fifo.html
+    #[deprecated(note = "use `scope_fifo` and `spawn_fifo` for similar effect")]
+    pub fn breadth_first(mut self) -> Self {
+        self.breadth_first = true;
+        self
+    }
+
+    fn get_breadth_first(&self) -> bool {
+        self.breadth_first
+    }
+
+    /// Takes the current acquire thread callback, leaving `None`.
+    fn take_acquire_thread_handler(&mut self) -> Option<Box<AcquireThreadHandler>> {
+        self.acquire_thread_handler.take()
+    }
+
+    /// Set a callback to be invoked when starting computations in a thread.
+    pub fn acquire_thread_handler<H>(mut self, acquire_thread_handler: H) -> Self
+    where
+        H: Fn() + Send + Sync + 'static,
+    {
+        self.acquire_thread_handler = Some(Box::new(acquire_thread_handler));
+        self
+    }
+
+    /// Takes the current release thread callback, leaving `None`.
+    fn take_release_thread_handler(&mut self) -> Option<Box<ReleaseThreadHandler>> {
+        self.release_thread_handler.take()
+    }
+
+    /// Set a callback to be invoked when blocking in a thread.
+    pub fn release_thread_handler<H>(mut self, release_thread_handler: H) -> Self
+    where
+        H: Fn() + Send + Sync + 'static,
+    {
+        self.release_thread_handler = Some(Box::new(release_thread_handler));
+        self
+    }
+
+    /// Takes the current deadlock callback, leaving `None`.
+    fn take_deadlock_handler(&mut self) -> Option<Box<DeadlockHandler>> {
+        self.deadlock_handler.take()
+    }
+
+    /// Set a callback to be invoked when the thread pool deadlocks.
+    pub fn deadlock_handler<H>(mut self, deadlock_handler: H) -> Self
+    where
+        H: Fn() + Send + Sync + 'static,
+    {
+        self.deadlock_handler = Some(Box::new(deadlock_handler));
+        self
+    }
+
+    /// Takes the current thread start callback, leaving `None`.
+    fn take_start_handler(&mut self) -> Option<Box<StartHandler>> {
+        self.start_handler.take()
+    }
+
+    /// Sets a callback to be invoked on thread start.
+    ///
+    /// The closure is passed the index of the thread on which it is invoked.
+    /// Note that this same closure may be invoked multiple times in parallel.
+    /// If this closure panics, the panic will be passed to the panic handler.
+    /// If that handler returns, then startup will continue normally.
+    pub fn start_handler<H>(mut self, start_handler: H) -> Self
+    where
+        H: Fn(usize) + Send + Sync + 'static,
+    {
+        self.start_handler = Some(Box::new(start_handler));
+        self
+    }
+
+    /// Takes the current thread exit callback, leaving `None`.
+    fn take_exit_handler(&mut self) -> Option<Box<ExitHandler>> {
+        self.exit_handler.take()
+    }
+
+    /// Sets a callback to be invoked on thread exit.
+    ///
+    /// The closure is passed the index of the thread on which it is invoked.
+    /// Note that this same closure may be invoked multiple times in parallel.
+    /// If this closure panics, the panic will be passed to the panic handler.
+    /// If that handler returns, then the thread will exit normally.
+    pub fn exit_handler<H>(mut self, exit_handler: H) -> Self
+    where
+        H: Fn(usize) + Send + Sync + 'static,
+    {
+        self.exit_handler = Some(Box::new(exit_handler));
+        self
+    }
+}
+
+#[allow(deprecated)]
+impl Configuration {
+    /// Creates and returns a valid rayon thread pool configuration, but does not initialize it.
+    pub fn new() -> Configuration {
+        Configuration { builder: ThreadPoolBuilder::new() }
+    }
+
+    /// Deprecated in favor of `ThreadPoolBuilder::build`.
+    pub fn build(self) -> Result<ThreadPool, Box<dyn Error + 'static>> {
+        self.builder.build().map_err(Box::from)
+    }
+
+    /// Deprecated in favor of `ThreadPoolBuilder::thread_name`.
+    pub fn thread_name<F>(mut self, closure: F) -> Self
+    where
+        F: FnMut(usize) -> String + 'static,
+    {
+        self.builder = self.builder.thread_name(closure);
+        self
+    }
+
+    /// Deprecated in favor of `ThreadPoolBuilder::num_threads`.
+    pub fn num_threads(mut self, num_threads: usize) -> Configuration {
+        self.builder = self.builder.num_threads(num_threads);
+        self
+    }
+
+    /// Deprecated in favor of `ThreadPoolBuilder::panic_handler`.
+    pub fn panic_handler<H>(mut self, panic_handler: H) -> Configuration
+    where
+        H: Fn(Box<dyn Any + Send>) + Send + Sync + 'static,
+    {
+        self.builder = self.builder.panic_handler(panic_handler);
+        self
+    }
+
+    /// Deprecated in favor of `ThreadPoolBuilder::stack_size`.
+    pub fn stack_size(mut self, stack_size: usize) -> Self {
+        self.builder = self.builder.stack_size(stack_size);
+        self
+    }
+
+    /// Deprecated in favor of `ThreadPoolBuilder::breadth_first`.
+    pub fn breadth_first(mut self) -> Self {
+        self.builder = self.builder.breadth_first();
+        self
+    }
+
+    /// Deprecated in favor of `ThreadPoolBuilder::start_handler`.
+    pub fn start_handler<H>(mut self, start_handler: H) -> Configuration
+    where
+        H: Fn(usize) + Send + Sync + 'static,
+    {
+        self.builder = self.builder.start_handler(start_handler);
+        self
+    }
+
+    /// Deprecated in favor of `ThreadPoolBuilder::exit_handler`.
+    pub fn exit_handler<H>(mut self, exit_handler: H) -> Configuration
+    where
+        H: Fn(usize) + Send + Sync + 'static,
+    {
+        self.builder = self.builder.exit_handler(exit_handler);
+        self
+    }
+
+    /// Returns a ThreadPoolBuilder with identical parameters.
+ fn into_builder(self) -> ThreadPoolBuilder { + self.builder + } +} + +impl ThreadPoolBuildError { + fn new(kind: ErrorKind) -> ThreadPoolBuildError { + ThreadPoolBuildError { kind } + } + + fn is_unsupported(&self) -> bool { + matches!(&self.kind, ErrorKind::IOError(e) if e.kind() == io::ErrorKind::Unsupported) + } +} + +const GLOBAL_POOL_ALREADY_INITIALIZED: &str = + "The global thread pool has already been initialized."; + +impl Error for ThreadPoolBuildError { + #[allow(deprecated)] + fn description(&self) -> &str { + match self.kind { + ErrorKind::GlobalPoolAlreadyInitialized => GLOBAL_POOL_ALREADY_INITIALIZED, + ErrorKind::IOError(ref e) => e.description(), + } + } + + fn source(&self) -> Option<&(dyn Error + 'static)> { + match &self.kind { + ErrorKind::GlobalPoolAlreadyInitialized => None, + ErrorKind::IOError(e) => Some(e), + } + } +} + +impl fmt::Display for ThreadPoolBuildError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match &self.kind { + ErrorKind::GlobalPoolAlreadyInitialized => GLOBAL_POOL_ALREADY_INITIALIZED.fmt(f), + ErrorKind::IOError(e) => e.fmt(f), + } + } +} + +/// Deprecated in favor of `ThreadPoolBuilder::build_global`. +#[deprecated(note = "use `ThreadPoolBuilder::build_global`")] +#[allow(deprecated)] +pub fn initialize(config: Configuration) -> Result<(), Box<dyn Error>> { + config.into_builder().build_global().map_err(Box::from) +} + +impl<S> fmt::Debug for ThreadPoolBuilder<S> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let ThreadPoolBuilder { + ref num_threads, + ref get_thread_name, + ref panic_handler, + ref stack_size, + ref deadlock_handler, + ref start_handler, + ref exit_handler, + ref acquire_thread_handler, + ref release_thread_handler, + spawn_handler: _, + ref breadth_first, + } = *self; + + // Just print `Some(<closure>)` or `None` to the debug + // output. + struct ClosurePlaceholder; + impl fmt::Debug for ClosurePlaceholder { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str("<closure>") + } + } + let get_thread_name = get_thread_name.as_ref().map(|_| ClosurePlaceholder); + let panic_handler = panic_handler.as_ref().map(|_| ClosurePlaceholder); + let deadlock_handler = deadlock_handler.as_ref().map(|_| ClosurePlaceholder); + let start_handler = start_handler.as_ref().map(|_| ClosurePlaceholder); + let exit_handler = exit_handler.as_ref().map(|_| ClosurePlaceholder); + let acquire_thread_handler = acquire_thread_handler.as_ref().map(|_| ClosurePlaceholder); + let release_thread_handler = release_thread_handler.as_ref().map(|_| ClosurePlaceholder); + + f.debug_struct("ThreadPoolBuilder") + .field("num_threads", num_threads) + .field("get_thread_name", &get_thread_name) + .field("panic_handler", &panic_handler) + .field("stack_size", &stack_size) + .field("deadlock_handler", &deadlock_handler) + .field("start_handler", &start_handler) + .field("exit_handler", &exit_handler) + .field("acquire_thread_handler", &acquire_thread_handler) + .field("release_thread_handler", &release_thread_handler) + .field("breadth_first", &breadth_first) + .finish() + } +} + +#[allow(deprecated)] +impl fmt::Debug for Configuration { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.builder.fmt(f) + } +} + +/// Provides the calling context to a closure called by `join_context`. +#[derive(Debug)] +pub struct FnContext { + migrated: bool, + + /// disable `Send` and `Sync`, just for a little future-proofing. 
+ _marker: PhantomData<*mut ()>, +} + +impl FnContext { + #[inline] + fn new(migrated: bool) -> Self { + FnContext { migrated, _marker: PhantomData } + } +} + +impl FnContext { + /// Returns `true` if the closure was called from a different thread + /// than it was provided from. + #[inline] + pub fn migrated(&self) -> bool { + self.migrated + } +} diff --git a/compiler/rustc_thread_pool/src/private.rs b/compiler/rustc_thread_pool/src/private.rs new file mode 100644 index 00000000000..5d4f4a8c2ca --- /dev/null +++ b/compiler/rustc_thread_pool/src/private.rs @@ -0,0 +1,26 @@ +//! The public parts of this private module are used to create traits +//! that cannot be implemented outside of our own crate. This way we +//! can feel free to extend those traits without worrying about it +//! being a breaking change for other implementations. + +/// If this type is pub but not publicly reachable, third parties +/// can't name it and can't implement traits using it. +#[allow(missing_debug_implementations)] +pub struct PrivateMarker; + +macro_rules! private_decl { + () => { + /// This trait is private; this method exists to make it + /// impossible to implement outside the crate. + #[doc(hidden)] + fn __rayon_private__(&self) -> crate::private::PrivateMarker; + }; +} + +macro_rules! private_impl { + () => { + fn __rayon_private__(&self) -> crate::private::PrivateMarker { + crate::private::PrivateMarker + } + }; +} diff --git a/compiler/rustc_thread_pool/src/registry.rs b/compiler/rustc_thread_pool/src/registry.rs new file mode 100644 index 00000000000..03a01aa29d2 --- /dev/null +++ b/compiler/rustc_thread_pool/src/registry.rs @@ -0,0 +1,1025 @@ +use std::cell::Cell; +use std::collections::hash_map::DefaultHasher; +use std::hash::Hasher; +use std::sync::atomic::{AtomicUsize, Ordering}; +use std::sync::{Arc, Mutex, Once}; +use std::{fmt, io, mem, ptr, thread}; + +use crossbeam_deque::{Injector, Steal, Stealer, Worker}; + +use crate::job::{JobFifo, JobRef, StackJob}; +use crate::latch::{AsCoreLatch, CoreLatch, Latch, LatchRef, LockLatch, OnceLatch, SpinLatch}; +use crate::sleep::Sleep; +use crate::tlv::Tlv; +use crate::{ + AcquireThreadHandler, DeadlockHandler, ErrorKind, ExitHandler, PanicHandler, + ReleaseThreadHandler, StartHandler, ThreadPoolBuildError, ThreadPoolBuilder, Yield, unwind, +}; + +/// Thread builder used for customization via +/// [`ThreadPoolBuilder::spawn_handler`](struct.ThreadPoolBuilder.html#method.spawn_handler). +pub struct ThreadBuilder { + name: Option<String>, + stack_size: Option<usize>, + worker: Worker<JobRef>, + stealer: Stealer<JobRef>, + registry: Arc<Registry>, + index: usize, +} + +impl ThreadBuilder { + /// Gets the index of this thread in the pool, within `0..num_threads`. + pub fn index(&self) -> usize { + self.index + } + + /// Gets the string that was specified by `ThreadPoolBuilder::name()`. + pub fn name(&self) -> Option<&str> { + self.name.as_deref() + } + + /// Gets the value that was specified by `ThreadPoolBuilder::stack_size()`. + pub fn stack_size(&self) -> Option<usize> { + self.stack_size + } + + /// Executes the main loop for this thread. This will not return until the + /// thread pool is dropped. 
+ pub fn run(self) { + unsafe { main_loop(self) } + } +} + +impl fmt::Debug for ThreadBuilder { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("ThreadBuilder") + .field("pool", &self.registry.id()) + .field("index", &self.index) + .field("name", &self.name) + .field("stack_size", &self.stack_size) + .finish() + } +} + +/// Generalized trait for spawning a thread in the `Registry`. +/// +/// This trait is pub-in-private -- E0445 forces us to make it public, +/// but we don't actually want to expose these details in the API. +pub trait ThreadSpawn { + private_decl! {} + + /// Spawn a thread with the `ThreadBuilder` parameters, and then + /// call `ThreadBuilder::run()`. + fn spawn(&mut self, thread: ThreadBuilder) -> io::Result<()>; +} + +/// Spawns a thread in the "normal" way with `std::thread::Builder`. +/// +/// This type is pub-in-private -- E0445 forces us to make it public, +/// but we don't actually want to expose these details in the API. +#[derive(Debug, Default)] +pub struct DefaultSpawn; + +impl ThreadSpawn for DefaultSpawn { + private_impl! {} + + fn spawn(&mut self, thread: ThreadBuilder) -> io::Result<()> { + let mut b = thread::Builder::new(); + if let Some(name) = thread.name() { + b = b.name(name.to_owned()); + } + if let Some(stack_size) = thread.stack_size() { + b = b.stack_size(stack_size); + } + b.spawn(|| thread.run())?; + Ok(()) + } +} + +/// Spawns a thread with a user's custom callback. +/// +/// This type is pub-in-private -- E0445 forces us to make it public, +/// but we don't actually want to expose these details in the API. +#[derive(Debug)] +pub struct CustomSpawn<F>(F); + +impl<F> CustomSpawn<F> +where + F: FnMut(ThreadBuilder) -> io::Result<()>, +{ + pub(super) fn new(spawn: F) -> Self { + CustomSpawn(spawn) + } +} + +impl<F> ThreadSpawn for CustomSpawn<F> +where + F: FnMut(ThreadBuilder) -> io::Result<()>, +{ + private_impl! {} + + #[inline] + fn spawn(&mut self, thread: ThreadBuilder) -> io::Result<()> { + (self.0)(thread) + } +} + +pub struct Registry { + thread_infos: Vec<ThreadInfo>, + sleep: Sleep, + injected_jobs: Injector<JobRef>, + broadcasts: Mutex<Vec<Worker<JobRef>>>, + panic_handler: Option<Box<PanicHandler>>, + pub(crate) deadlock_handler: Option<Box<DeadlockHandler>>, + start_handler: Option<Box<StartHandler>>, + exit_handler: Option<Box<ExitHandler>>, + pub(crate) acquire_thread_handler: Option<Box<AcquireThreadHandler>>, + pub(crate) release_thread_handler: Option<Box<ReleaseThreadHandler>>, + + // When this latch reaches 0, it means that all work on this + // registry must be complete. This is ensured in the following ways: + // + // - if this is the global registry, there is a ref-count that never + // gets released. + // - if this is a user-created thread-pool, then so long as the thread-pool + // exists, it holds a reference. + // - when we inject a "blocking job" into the registry with `ThreadPool::install()`, + // no adjustment is needed; the `ThreadPool` holds the reference, and since we won't + // return until the blocking job is complete, that ref will continue to be held. + // - when `join()` or `scope()` is invoked, similarly, no adjustments are needed. + // These are always owned by some other job (e.g., one injected by `ThreadPool::install()`) + // and that job will keep the pool alive. 
+ terminate_count: AtomicUsize, +} + +/// //////////////////////////////////////////////////////////////////////// +/// Initialization + +static mut THE_REGISTRY: Option<Arc<Registry>> = None; +static THE_REGISTRY_SET: Once = Once::new(); + +/// Starts the worker threads (if that has not already happened). If +/// initialization has not already occurred, use the default +/// configuration. +pub(super) fn global_registry() -> &'static Arc<Registry> { + set_global_registry(default_global_registry) + .or_else(|err| { + // SAFETY: we only create a shared reference to `THE_REGISTRY` after the `call_once` + // that initializes it, and there will be no more mutable accesses at all. + debug_assert!(THE_REGISTRY_SET.is_completed()); + let the_registry = unsafe { &*ptr::addr_of!(THE_REGISTRY) }; + the_registry.as_ref().ok_or(err) + }) + .expect("The global thread pool has not been initialized.") +} + +/// Starts the worker threads (if that has not already happened) with +/// the given builder. +pub(super) fn init_global_registry<S>( + builder: ThreadPoolBuilder<S>, +) -> Result<&'static Arc<Registry>, ThreadPoolBuildError> +where + S: ThreadSpawn, +{ + set_global_registry(|| Registry::new(builder)) +} + +/// Starts the worker threads (if that has not already happened) +/// by creating a registry with the given callback. +fn set_global_registry<F>(registry: F) -> Result<&'static Arc<Registry>, ThreadPoolBuildError> +where + F: FnOnce() -> Result<Arc<Registry>, ThreadPoolBuildError>, +{ + let mut result = Err(ThreadPoolBuildError::new(ErrorKind::GlobalPoolAlreadyInitialized)); + + THE_REGISTRY_SET.call_once(|| { + result = registry().map(|registry: Arc<Registry>| { + // SAFETY: this is the only mutable access to `THE_REGISTRY`, thanks to `Once`, and + // `global_registry()` only takes a shared reference **after** this `call_once`. + unsafe { + ptr::addr_of_mut!(THE_REGISTRY).write(Some(registry)); + (*ptr::addr_of!(THE_REGISTRY)).as_ref().unwrap_unchecked() + } + }) + }); + + result +} + +fn default_global_registry() -> Result<Arc<Registry>, ThreadPoolBuildError> { + let result = Registry::new(ThreadPoolBuilder::new()); + + // If we're running in an environment that doesn't support threads at all, we can fall back to + // using the current thread alone. This is crude, and probably won't work for non-blocking + // calls like `spawn` or `broadcast_spawn`, but a lot of stuff does work fine. + // + // Notably, this allows current WebAssembly targets to work even though their threading support + // is stubbed out, and we won't have to change anything if they do add real threading. + let unsupported = matches!(&result, Err(e) if e.is_unsupported()); + if unsupported && WorkerThread::current().is_null() { + let builder = ThreadPoolBuilder::new().num_threads(1).spawn_handler(|thread| { + // Rather than starting a new thread, we're just taking over the current thread + // *without* running the main loop, so we can still return from here. + // The WorkerThread is leaked, but we never shutdown the global pool anyway. 
+            let worker_thread = Box::leak(Box::new(WorkerThread::from(thread)));
+            let registry = &*worker_thread.registry;
+            let index = worker_thread.index;
+
+            unsafe {
+                WorkerThread::set_current(worker_thread);
+
+                // let registry know we are ready to do work
+                Latch::set(&registry.thread_infos[index].primed);
+            }
+
+            Ok(())
+        });
+
+        let fallback_result = Registry::new(builder);
+        if fallback_result.is_ok() {
+            return fallback_result;
+        }
+    }
+
+    result
+}
+
+struct Terminator<'a>(&'a Arc<Registry>);
+
+impl<'a> Drop for Terminator<'a> {
+    fn drop(&mut self) {
+        self.0.terminate()
+    }
+}
+
+impl Registry {
+    pub(super) fn new<S>(
+        mut builder: ThreadPoolBuilder<S>,
+    ) -> Result<Arc<Self>, ThreadPoolBuildError>
+    where
+        S: ThreadSpawn,
+    {
+        // Soft-limit the number of threads that we can actually support.
+        let n_threads = Ord::min(builder.get_num_threads(), crate::max_num_threads());
+
+        let breadth_first = builder.get_breadth_first();
+
+        let (workers, stealers): (Vec<_>, Vec<_>) = (0..n_threads)
+            .map(|_| {
+                let worker = if breadth_first { Worker::new_fifo() } else { Worker::new_lifo() };
+
+                let stealer = worker.stealer();
+                (worker, stealer)
+            })
+            .unzip();
+
+        let (broadcasts, broadcast_stealers): (Vec<_>, Vec<_>) = (0..n_threads)
+            .map(|_| {
+                let worker = Worker::new_fifo();
+                let stealer = worker.stealer();
+                (worker, stealer)
+            })
+            .unzip();
+
+        let registry = Arc::new(Registry {
+            thread_infos: stealers.into_iter().map(ThreadInfo::new).collect(),
+            sleep: Sleep::new(n_threads),
+            injected_jobs: Injector::new(),
+            broadcasts: Mutex::new(broadcasts),
+            terminate_count: AtomicUsize::new(1),
+            panic_handler: builder.take_panic_handler(),
+            deadlock_handler: builder.take_deadlock_handler(),
+            start_handler: builder.take_start_handler(),
+            exit_handler: builder.take_exit_handler(),
+            acquire_thread_handler: builder.take_acquire_thread_handler(),
+            release_thread_handler: builder.take_release_thread_handler(),
+        });
+
+        // If we return early or panic, make sure to terminate existing threads.
+        let t1000 = Terminator(&registry);
+
+        for (index, (worker, stealer)) in workers.into_iter().zip(broadcast_stealers).enumerate() {
+            let thread = ThreadBuilder {
+                name: builder.get_thread_name(index),
+                stack_size: builder.get_stack_size(),
+                registry: Arc::clone(&registry),
+                worker,
+                stealer,
+                index,
+            };
+            if let Err(e) = builder.get_spawn_handler().spawn(thread) {
+                return Err(ThreadPoolBuildError::new(ErrorKind::IOError(e)));
+            }
+        }
+
+        // Returning normally now, without termination.
+        mem::forget(t1000);
+
+        Ok(registry)
+    }
+
+    pub fn current() -> Arc<Registry> {
+        unsafe {
+            let worker_thread = WorkerThread::current();
+            let registry = if worker_thread.is_null() {
+                global_registry()
+            } else {
+                &(*worker_thread).registry
+            };
+            Arc::clone(registry)
+        }
+    }
+
+    /// Returns the number of threads in the current registry. This
+    /// is better than `Registry::current().num_threads()` because it
+    /// avoids incrementing the `Arc`.
+    pub(super) fn current_num_threads() -> usize {
+        unsafe {
+            let worker_thread = WorkerThread::current();
+            if worker_thread.is_null() {
+                global_registry().num_threads()
+            } else {
+                (*worker_thread).registry.num_threads()
+            }
+        }
+    }
+
+    /// Returns the current `WorkerThread` if it's part of this `Registry`.
+ pub(super) fn current_thread(&self) -> Option<&WorkerThread> { + unsafe { + let worker = WorkerThread::current().as_ref()?; + if worker.registry().id() == self.id() { Some(worker) } else { None } + } + } + + /// Returns an opaque identifier for this registry. + pub(super) fn id(&self) -> RegistryId { + // We can rely on `self` not to change since we only ever create + // registries that are boxed up in an `Arc` (see `new()` above). + RegistryId { addr: self as *const Self as usize } + } + + pub(super) fn num_threads(&self) -> usize { + self.thread_infos.len() + } + + pub(super) fn catch_unwind(&self, f: impl FnOnce()) { + if let Err(err) = unwind::halt_unwinding(f) { + // If there is no handler, or if that handler itself panics, then we abort. + let abort_guard = unwind::AbortIfPanic; + if let Some(ref handler) = self.panic_handler { + handler(err); + mem::forget(abort_guard); + } + } + } + + /// Waits for the worker threads to get up and running. This is + /// meant to be used for benchmarking purposes, primarily, so that + /// you can get more consistent numbers by having everything + /// "ready to go". + pub(super) fn wait_until_primed(&self) { + for info in &self.thread_infos { + info.primed.wait(); + } + } + + /// Waits for the worker threads to stop. This is used for testing + /// -- so we can check that termination actually works. + pub(super) fn wait_until_stopped(&self) { + self.release_thread(); + for info in &self.thread_infos { + info.stopped.wait(); + } + self.acquire_thread(); + } + + pub(crate) fn acquire_thread(&self) { + if let Some(ref acquire_thread_handler) = self.acquire_thread_handler { + acquire_thread_handler(); + } + } + + pub(crate) fn release_thread(&self) { + if let Some(ref release_thread_handler) = self.release_thread_handler { + release_thread_handler(); + } + } + + /// //////////////////////////////////////////////////////////////////////// + /// MAIN LOOP + /// + /// So long as all of the worker threads are hanging out in their + /// top-level loop, there is no work to be done. + + /// Push a job into the given `registry`. If we are running on a + /// worker thread for the registry, this will push onto the + /// deque. Else, it will inject from the outside (which is slower). + pub(super) fn inject_or_push(&self, job_ref: JobRef) { + let worker_thread = WorkerThread::current(); + unsafe { + if !worker_thread.is_null() && (*worker_thread).registry().id() == self.id() { + (*worker_thread).push(job_ref); + } else { + self.inject(job_ref); + } + } + } + + /// Push a job into the "external jobs" queue; it will be taken by + /// whatever worker has nothing to do. Use this if you know that + /// you are not on a worker of this registry. + pub(super) fn inject(&self, injected_job: JobRef) { + // It should not be possible for `state.terminate` to be true + // here. It is only set to true when the user creates (and + // drops) a `ThreadPool`; and, in that case, they cannot be + // calling `inject()` later, since they dropped their + // `ThreadPool`. 
+ debug_assert_ne!( + self.terminate_count.load(Ordering::Acquire), + 0, + "inject() sees state.terminate as true" + ); + + let queue_was_empty = self.injected_jobs.is_empty(); + + self.injected_jobs.push(injected_job); + self.sleep.new_injected_jobs(1, queue_was_empty); + } + + pub(crate) fn has_injected_job(&self) -> bool { + !self.injected_jobs.is_empty() + } + + fn pop_injected_job(&self) -> Option<JobRef> { + loop { + match self.injected_jobs.steal() { + Steal::Success(job) => return Some(job), + Steal::Empty => return None, + Steal::Retry => {} + } + } + } + + /// Push a job into each thread's own "external jobs" queue; it will be + /// executed only on that thread, when it has nothing else to do locally, + /// before it tries to steal other work. + /// + /// **Panics** if not given exactly as many jobs as there are threads. + pub(super) fn inject_broadcast(&self, injected_jobs: impl ExactSizeIterator<Item = JobRef>) { + assert_eq!(self.num_threads(), injected_jobs.len()); + { + let broadcasts = self.broadcasts.lock().unwrap(); + + // It should not be possible for `state.terminate` to be true + // here. It is only set to true when the user creates (and + // drops) a `ThreadPool`; and, in that case, they cannot be + // calling `inject_broadcast()` later, since they dropped their + // `ThreadPool`. + debug_assert_ne!( + self.terminate_count.load(Ordering::Acquire), + 0, + "inject_broadcast() sees state.terminate as true" + ); + + assert_eq!(broadcasts.len(), injected_jobs.len()); + for (worker, job_ref) in broadcasts.iter().zip(injected_jobs) { + worker.push(job_ref); + } + } + for i in 0..self.num_threads() { + self.sleep.notify_worker_latch_is_set(i); + } + } + + /// If already in a worker-thread of this registry, just execute `op`. + /// Otherwise, inject `op` in this thread-pool. Either way, block until `op` + /// completes and return its return value. If `op` panics, that panic will + /// be propagated as well. The second argument indicates `true` if injection + /// was performed, `false` if executed directly. + pub(super) fn in_worker<OP, R>(&self, op: OP) -> R + where + OP: FnOnce(&WorkerThread, bool) -> R + Send, + R: Send, + { + unsafe { + let worker_thread = WorkerThread::current(); + if worker_thread.is_null() { + self.in_worker_cold(op) + } else if (*worker_thread).registry().id() != self.id() { + self.in_worker_cross(&*worker_thread, op) + } else { + // Perfectly valid to give them a `&T`: this is the + // current thread, so we know the data structure won't be + // invalidated until we return. + op(&*worker_thread, false) + } + } + } + + #[cold] + unsafe fn in_worker_cold<OP, R>(&self, op: OP) -> R + where + OP: FnOnce(&WorkerThread, bool) -> R + Send, + R: Send, + { + thread_local!(static LOCK_LATCH: LockLatch = LockLatch::new()); + + LOCK_LATCH.with(|l| { + // This thread isn't a member of *any* thread pool, so just block. + debug_assert!(WorkerThread::current().is_null()); + let job = StackJob::new( + Tlv::null(), + |injected| { + let worker_thread = WorkerThread::current(); + assert!(injected && !worker_thread.is_null()); + op(unsafe { &*worker_thread }, true) + }, + LatchRef::new(l), + ); + self.inject(unsafe { job.as_job_ref() }); + self.release_thread(); + job.latch.wait_and_reset(); // Make sure we can use the same latch again next time. 
+ self.acquire_thread(); + + unsafe { job.into_result() } + }) + } + + #[cold] + unsafe fn in_worker_cross<OP, R>(&self, current_thread: &WorkerThread, op: OP) -> R + where + OP: FnOnce(&WorkerThread, bool) -> R + Send, + R: Send, + { + // This thread is a member of a different pool, so let it process + // other work while waiting for this `op` to complete. + debug_assert!(current_thread.registry().id() != self.id()); + let latch = SpinLatch::cross(current_thread); + let job = StackJob::new( + Tlv::null(), + |injected| { + let worker_thread = WorkerThread::current(); + assert!(injected && !worker_thread.is_null()); + op(unsafe { &*worker_thread }, true) + }, + latch, + ); + self.inject(unsafe { job.as_job_ref() }); + unsafe { current_thread.wait_until(&job.latch) }; + unsafe { job.into_result() } + } + + /// Increments the terminate counter. This increment should be + /// balanced by a call to `terminate`, which will decrement. This + /// is used when spawning asynchronous work, which needs to + /// prevent the registry from terminating so long as it is active. + /// + /// Note that blocking functions such as `join` and `scope` do not + /// need to concern themselves with this fn; their context is + /// responsible for ensuring the current thread-pool will not + /// terminate until they return. + /// + /// The global thread-pool always has an outstanding reference + /// (the initial one). Custom thread-pools have one outstanding + /// reference that is dropped when the `ThreadPool` is dropped: + /// since installing the thread-pool blocks until any joins/scopes + /// complete, this ensures that joins/scopes are covered. + /// + /// The exception is `::spawn()`, which can create a job outside + /// of any blocking scope. In that case, the job itself holds a + /// terminate count and is responsible for invoking `terminate()` + /// when finished. + pub(super) fn increment_terminate_count(&self) { + let previous = self.terminate_count.fetch_add(1, Ordering::AcqRel); + debug_assert!(previous != 0, "registry ref count incremented from zero"); + assert!(previous != usize::MAX, "overflow in registry ref count"); + } + + /// Signals that the thread-pool which owns this registry has been + /// dropped. The worker threads will gradually terminate, once any + /// extant work is completed. + pub(super) fn terminate(&self) { + if self.terminate_count.fetch_sub(1, Ordering::AcqRel) == 1 { + for (i, thread_info) in self.thread_infos.iter().enumerate() { + unsafe { OnceLatch::set_and_tickle_one(&thread_info.terminate, self, i) }; + } + } + } + + /// Notify the worker that the latch they are sleeping on has been "set". + pub(super) fn notify_worker_latch_is_set(&self, target_worker_index: usize) { + self.sleep.notify_worker_latch_is_set(target_worker_index); + } +} + +/// Mark a Rayon worker thread as blocked. This triggers the deadlock handler +/// if no other worker thread is active +#[inline] +pub fn mark_blocked() { + let worker_thread = WorkerThread::current(); + assert!(!worker_thread.is_null()); + unsafe { + let registry = &(*worker_thread).registry; + registry.sleep.mark_blocked(®istry.deadlock_handler) + } +} + +/// Mark a previously blocked Rayon worker thread as unblocked +#[inline] +pub fn mark_unblocked(registry: &Registry) { + registry.sleep.mark_unblocked() +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub(super) struct RegistryId { + addr: usize, +} + +struct ThreadInfo { + /// Latch set once thread has started and we are entering into the + /// main loop. 
Used to wait for worker threads to become primed, + /// primarily of interest for benchmarking. + primed: LockLatch, + + /// Latch is set once worker thread has completed. Used to wait + /// until workers have stopped; only used for tests. + stopped: LockLatch, + + /// The latch used to signal that terminated has been requested. + /// This latch is *set* by the `terminate` method on the + /// `Registry`, once the registry's main "terminate" counter + /// reaches zero. + terminate: OnceLatch, + + /// the "stealer" half of the worker's deque + stealer: Stealer<JobRef>, +} + +impl ThreadInfo { + fn new(stealer: Stealer<JobRef>) -> ThreadInfo { + ThreadInfo { + primed: LockLatch::new(), + stopped: LockLatch::new(), + terminate: OnceLatch::new(), + stealer, + } + } +} + +/// //////////////////////////////////////////////////////////////////////// +/// WorkerThread identifiers + +pub(super) struct WorkerThread { + /// the "worker" half of our local deque + worker: Worker<JobRef>, + + /// the "stealer" half of the worker's broadcast deque + stealer: Stealer<JobRef>, + + /// local queue used for `spawn_fifo` indirection + fifo: JobFifo, + + pub(crate) index: usize, + + /// A weak random number generator. + rng: XorShift64Star, + + pub(crate) registry: Arc<Registry>, +} + +// This is a bit sketchy, but basically: the WorkerThread is +// allocated on the stack of the worker on entry and stored into this +// thread local variable. So it will remain valid at least until the +// worker is fully unwound. Using an unsafe pointer avoids the need +// for a RefCell<T> etc. +thread_local! { + static WORKER_THREAD_STATE: Cell<*const WorkerThread> = const { Cell::new(ptr::null()) }; +} + +impl From<ThreadBuilder> for WorkerThread { + fn from(thread: ThreadBuilder) -> Self { + Self { + worker: thread.worker, + stealer: thread.stealer, + fifo: JobFifo::new(), + index: thread.index, + rng: XorShift64Star::new(), + registry: thread.registry, + } + } +} + +impl Drop for WorkerThread { + fn drop(&mut self) { + // Undo `set_current` + WORKER_THREAD_STATE.with(|t| { + assert!(t.get().eq(&(self as *const _))); + t.set(ptr::null()); + }); + } +} + +impl WorkerThread { + /// Gets the `WorkerThread` index for the current thread; returns + /// NULL if this is not a worker thread. This pointer is valid + /// anywhere on the current thread. + #[inline] + pub(super) fn current() -> *const WorkerThread { + WORKER_THREAD_STATE.with(Cell::get) + } + + /// Sets `self` as the worker thread index for the current thread. + /// This is done during worker thread startup. + unsafe fn set_current(thread: *const WorkerThread) { + WORKER_THREAD_STATE.with(|t| { + assert!(t.get().is_null()); + t.set(thread); + }); + } + + /// Returns the registry that owns this worker thread. + #[inline] + pub(super) fn registry(&self) -> &Arc<Registry> { + &self.registry + } + + /// Our index amongst the worker threads (ranges from `0..self.num_threads()`). 
+    #[inline]
+    pub(super) fn index(&self) -> usize {
+        self.index
+    }
+
+    #[inline]
+    pub(super) unsafe fn push(&self, job: JobRef) {
+        let queue_was_empty = self.worker.is_empty();
+        self.worker.push(job);
+        self.registry.sleep.new_internal_jobs(1, queue_was_empty);
+    }
+
+    #[inline]
+    pub(super) unsafe fn push_fifo(&self, job: JobRef) {
+        unsafe { self.push(self.fifo.push(job)) };
+    }
+
+    #[inline]
+    pub(super) fn local_deque_is_empty(&self) -> bool {
+        self.worker.is_empty()
+    }
+
+    /// Attempts to obtain a "local" job -- typically this means
+    /// popping from the top of the stack, though if we are configured
+    /// for breadth-first execution, it would mean dequeuing from the
+    /// bottom.
+    #[inline]
+    pub(super) fn take_local_job(&self) -> Option<JobRef> {
+        let popped_job = self.worker.pop();
+
+        if popped_job.is_some() {
+            return popped_job;
+        }
+
+        loop {
+            match self.stealer.steal() {
+                Steal::Success(job) => return Some(job),
+                Steal::Empty => return None,
+                Steal::Retry => {}
+            }
+        }
+    }
+
+    pub(super) fn has_injected_job(&self) -> bool {
+        !self.stealer.is_empty() || self.registry.has_injected_job()
+    }
+
+    /// Wait until the latch is set. Try to keep busy by popping and
+    /// stealing tasks as necessary.
+    #[inline]
+    pub(super) unsafe fn wait_until<L: AsCoreLatch + ?Sized>(&self, latch: &L) {
+        let latch = latch.as_core_latch();
+        if !latch.probe() {
+            unsafe { self.wait_until_cold(latch) };
+        }
+    }
+
+    #[cold]
+    unsafe fn wait_until_cold(&self, latch: &CoreLatch) {
+        // the code below should swallow all panics and hence never
+        // unwind; but if something goes wrong, we want to abort,
+        // because otherwise other code in rayon may assume that the
+        // latch has been signaled, and that can lead to random memory
+        // accesses, which would be *very bad*
+        let abort_guard = unwind::AbortIfPanic;
+
+        'outer: while !latch.probe() {
+            // Check for local work *before* we start marking ourselves idle,
+            // especially to avoid modifying shared sleep state.
+            if let Some(job) = self.take_local_job() {
+                unsafe { self.execute(job) };
+                continue;
+            }
+
+            let mut idle_state = self.registry.sleep.start_looking(self.index);
+            while !latch.probe() {
+                if let Some(job) = self.find_work() {
+                    self.registry.sleep.work_found();
+                    unsafe { self.execute(job) };
+                    // The job might have injected local work, so go back to the outer loop.
+                    continue 'outer;
+                } else {
+                    self.registry.sleep.no_work_found(&mut idle_state, latch, &self)
+                }
+            }
+
+            // If we were sleepy, we are not anymore. We "found work" --
+            // whatever the surrounding thread was doing before it had to wait.
+            self.registry.sleep.work_found();
+            break;
+        }
+
+        mem::forget(abort_guard); // successful execution, do not abort
+    }
+
+    unsafe fn wait_until_out_of_work(&self) {
+        debug_assert_eq!(self as *const _, WorkerThread::current());
+        let registry = &*self.registry;
+        let index = self.index;
+
+        registry.acquire_thread();
+        unsafe { self.wait_until(&registry.thread_infos[index].terminate) };
+
+        // Should not be any work left in our queue.
+        debug_assert!(self.take_local_job().is_none());
+
+        // Let registry know we are done
+        unsafe { Latch::set(&registry.thread_infos[index].stopped) };
+    }
+
+    fn find_work(&self) -> Option<JobRef> {
+        // Try to find some work to do. We give preference first
+        // to things in our local deque, then in other workers'
+        // deques, and finally to injected jobs from the
+        // outside. The idea is to finish what we started before
+        // we take on something new.
+        self.take_local_job().or_else(|| self.steal()).or_else(|| self.registry.pop_injected_job())
+    }
+
+    pub(super) fn yield_now(&self) -> Yield {
+        match self.find_work() {
+            Some(job) => unsafe {
+                self.execute(job);
+                Yield::Executed
+            },
+            None => Yield::Idle,
+        }
+    }
+
+    pub(super) fn yield_local(&self) -> Yield {
+        match self.take_local_job() {
+            Some(job) => unsafe {
+                self.execute(job);
+                Yield::Executed
+            },
+            None => Yield::Idle,
+        }
+    }
+
+    #[inline]
+    pub(super) unsafe fn execute(&self, job: JobRef) {
+        unsafe { job.execute() };
+    }
+
+    /// Try to steal a single job and return it.
+    ///
+    /// This should only be done as a last resort, when there is no
+    /// local work to do.
+    fn steal(&self) -> Option<JobRef> {
+        // we only steal when we don't have any work to do locally
+        debug_assert!(self.local_deque_is_empty());
+
+        // otherwise, try to steal
+        let thread_infos = &self.registry.thread_infos.as_slice();
+        let num_threads = thread_infos.len();
+        if num_threads <= 1 {
+            return None;
+        }
+
+        loop {
+            let mut retry = false;
+            let start = self.rng.next_usize(num_threads);
+            let job = (start..num_threads)
+                .chain(0..start)
+                .filter(move |&i| i != self.index)
+                .find_map(|victim_index| {
+                    let victim = &thread_infos[victim_index];
+                    match victim.stealer.steal() {
+                        Steal::Success(job) => Some(job),
+                        Steal::Empty => None,
+                        Steal::Retry => {
+                            retry = true;
+                            None
+                        }
+                    }
+                });
+            if job.is_some() || !retry {
+                return job;
+            }
+        }
+    }
+}
+
+/// ////////////////////////////////////////////////////////////////////////
+
+unsafe fn main_loop(thread: ThreadBuilder) {
+    let worker_thread = &WorkerThread::from(thread);
+    unsafe { WorkerThread::set_current(worker_thread) };
+    let registry = &*worker_thread.registry;
+    let index = worker_thread.index;
+
+    // let registry know we are ready to do work
+    unsafe { Latch::set(&registry.thread_infos[index].primed) };
+
+    // Worker threads should not panic. If they do, just abort, as the
+    // internal state of the threadpool is corrupted. Note that if
+    // **user code** panics, we should catch that and redirect.
+    let abort_guard = unwind::AbortIfPanic;
+
+    // Inform a user callback that we started a thread.
+    if let Some(ref handler) = registry.start_handler {
+        registry.catch_unwind(|| handler(index));
+    }
+
+    unsafe { worker_thread.wait_until_out_of_work() };
+
+    // Normal termination, do not abort.
+    mem::forget(abort_guard);
+
+    // Inform a user callback that we exited a thread.
+    if let Some(ref handler) = registry.exit_handler {
+        registry.catch_unwind(|| handler(index));
+        // We're already exiting the thread, there's nothing else to do.
+    }
+
+    registry.release_thread();
+}
+
+/// If already in a worker-thread, just execute `op`. Otherwise,
+/// execute `op` in the default thread-pool. Either way, block until
+/// `op` completes and return its return value. If `op` panics, that
+/// panic will be propagated as well. The second argument indicates
+/// `true` if injection was performed, `false` if executed directly.
+pub(super) fn in_worker<OP, R>(op: OP) -> R
+where
+    OP: FnOnce(&WorkerThread, bool) -> R + Send,
+    R: Send,
+{
+    unsafe {
+        let owner_thread = WorkerThread::current();
+        if !owner_thread.is_null() {
+            // Perfectly valid to give them a `&T`: this is the
+            // current thread, so we know the data structure won't be
+            // invalidated until we return.
+ op(&*owner_thread, false) + } else { + global_registry().in_worker(op) + } + } +} + +/// [xorshift*] is a fast pseudorandom number generator which will +/// even tolerate weak seeding, as long as it's not zero. +/// +/// [xorshift*]: https://en.wikipedia.org/wiki/Xorshift#xorshift* +struct XorShift64Star { + state: Cell<u64>, +} + +impl XorShift64Star { + fn new() -> Self { + // Any non-zero seed will do -- this uses the hash of a global counter. + let mut seed = 0; + while seed == 0 { + let mut hasher = DefaultHasher::new(); + static COUNTER: AtomicUsize = AtomicUsize::new(0); + hasher.write_usize(COUNTER.fetch_add(1, Ordering::Relaxed)); + seed = hasher.finish(); + } + + XorShift64Star { state: Cell::new(seed) } + } + + fn next(&self) -> u64 { + let mut x = self.state.get(); + debug_assert_ne!(x, 0); + x ^= x >> 12; + x ^= x << 25; + x ^= x >> 27; + self.state.set(x); + x.wrapping_mul(0x2545_f491_4f6c_dd1d) + } + + /// Return a value from `0..n`. + fn next_usize(&self, n: usize) -> usize { + (self.next() % n as u64) as usize + } +} diff --git a/compiler/rustc_thread_pool/src/scope/mod.rs b/compiler/rustc_thread_pool/src/scope/mod.rs new file mode 100644 index 00000000000..55e58b3509d --- /dev/null +++ b/compiler/rustc_thread_pool/src/scope/mod.rs @@ -0,0 +1,783 @@ +//! Methods for custom fork-join scopes, created by the [`scope()`] +//! and [`in_place_scope()`] functions. These are a more flexible alternative to [`join()`]. +//! +//! [`scope()`]: fn.scope.html +//! [`in_place_scope()`]: fn.in_place_scope.html +//! [`join()`]: ../join/join.fn.html + +use std::any::Any; +use std::marker::PhantomData; +use std::mem::ManuallyDrop; +use std::sync::Arc; +use std::sync::atomic::{AtomicPtr, Ordering}; +use std::{fmt, ptr}; + +use crate::broadcast::BroadcastContext; +use crate::job::{ArcJob, HeapJob, JobFifo, JobRef}; +use crate::latch::{CountLatch, Latch}; +use crate::registry::{Registry, WorkerThread, global_registry, in_worker}; +use crate::tlv::{self, Tlv}; +use crate::unwind; + +#[cfg(test)] +mod tests; + +/// Represents a fork-join scope which can be used to spawn any number of tasks. +/// See [`scope()`] for more information. +/// +///[`scope()`]: fn.scope.html +pub struct Scope<'scope> { + base: ScopeBase<'scope>, +} + +/// Represents a fork-join scope which can be used to spawn any number of tasks. +/// Those spawned from the same thread are prioritized in relative FIFO order. +/// See [`scope_fifo()`] for more information. +/// +///[`scope_fifo()`]: fn.scope_fifo.html +pub struct ScopeFifo<'scope> { + base: ScopeBase<'scope>, + fifos: Vec<JobFifo>, +} + +struct ScopeBase<'scope> { + /// thread registry where `scope()` was executed or where `in_place_scope()` + /// should spawn jobs. + registry: Arc<Registry>, + + /// if some job panicked, the error is stored here; it will be + /// propagated to the one who created the scope + panic: AtomicPtr<Box<dyn Any + Send + 'static>>, + + /// latch to track job counts + job_completed_latch: CountLatch, + + /// You can think of a scope as containing a list of closures to execute, + /// all of which outlive `'scope`. They're not actually required to be + /// `Sync`, but it's still safe to let the `Scope` implement `Sync` because + /// the closures are only *moved* across threads to be executed. + #[allow(clippy::type_complexity)] + marker: PhantomData<Box<dyn FnOnce(&Scope<'scope>) + Send + Sync + 'scope>>, + + /// The TLV at the scope's creation. Used to set the TLV for spawned jobs. 
+ tlv: Tlv, +} + +/// Creates a "fork-join" scope `s` and invokes the closure with a +/// reference to `s`. This closure can then spawn asynchronous tasks +/// into `s`. Those tasks may run asynchronously with respect to the +/// closure; they may themselves spawn additional tasks into `s`. When +/// the closure returns, it will block until all tasks that have been +/// spawned into `s` complete. +/// +/// `scope()` is a more flexible building block compared to `join()`, +/// since a loop can be used to spawn any number of tasks without +/// recursing. However, that flexibility comes at a performance price: +/// tasks spawned using `scope()` must be allocated onto the heap, +/// whereas `join()` can make exclusive use of the stack. **Prefer +/// `join()` (or, even better, parallel iterators) where possible.** +/// +/// # Example +/// +/// The Rayon `join()` function launches two closures and waits for them +/// to stop. One could implement `join()` using a scope like so, although +/// it would be less efficient than the real implementation: +/// +/// ```rust +/// # use rustc_thread_pool as rayon; +/// pub fn join<A,B,RA,RB>(oper_a: A, oper_b: B) -> (RA, RB) +/// where A: FnOnce() -> RA + Send, +/// B: FnOnce() -> RB + Send, +/// RA: Send, +/// RB: Send, +/// { +/// let mut result_a: Option<RA> = None; +/// let mut result_b: Option<RB> = None; +/// rayon::scope(|s| { +/// s.spawn(|_| result_a = Some(oper_a())); +/// s.spawn(|_| result_b = Some(oper_b())); +/// }); +/// (result_a.unwrap(), result_b.unwrap()) +/// } +/// ``` +/// +/// # A note on threading +/// +/// The closure given to `scope()` executes in the Rayon thread-pool, +/// as do those given to `spawn()`. This means that you can't access +/// thread-local variables (well, you can, but they may have +/// unexpected values). +/// +/// # Task execution +/// +/// Task execution potentially starts as soon as `spawn()` is called. +/// The task will end sometime before `scope()` returns. Note that the +/// *closure* given to scope may return much earlier. 
In general +/// the lifetime of a scope created like `scope(body)` goes something like this: +/// +/// - Scope begins when `scope(body)` is called +/// - Scope body `body()` is invoked +/// - Scope tasks may be spawned +/// - Scope body returns +/// - Scope tasks execute, possibly spawning more tasks +/// - Once all tasks are done, scope ends and `scope()` returns +/// +/// To see how and when tasks are joined, consider this example: +/// +/// ```rust +/// # use rustc_thread_pool as rayon; +/// // point start +/// rayon::scope(|s| { +/// s.spawn(|s| { // task s.1 +/// s.spawn(|s| { // task s.1.1 +/// rayon::scope(|t| { +/// t.spawn(|_| ()); // task t.1 +/// t.spawn(|_| ()); // task t.2 +/// }); +/// }); +/// }); +/// s.spawn(|s| { // task s.2 +/// }); +/// // point mid +/// }); +/// // point end +/// ``` +/// +/// The various tasks that are run will execute roughly like so: +/// +/// ```notrust +/// | (start) +/// | +/// | (scope `s` created) +/// +-----------------------------------------------+ (task s.2) +/// +-------+ (task s.1) | +/// | | | +/// | +---+ (task s.1.1) | +/// | | | | +/// | | | (scope `t` created) | +/// | | +----------------+ (task t.2) | +/// | | +---+ (task t.1) | | +/// | (mid) | | | | | +/// : | + <-+------------+ (scope `t` ends) | +/// : | | | +/// |<------+---+-----------------------------------+ (scope `s` ends) +/// | +/// | (end) +/// ``` +/// +/// The point here is that everything spawned into scope `s` will +/// terminate (at latest) at the same point -- right before the +/// original call to `rayon::scope` returns. This includes new +/// subtasks created by other subtasks (e.g., task `s.1.1`). If a new +/// scope is created (such as `t`), the things spawned into that scope +/// will be joined before that scope returns, which in turn occurs +/// before the creating task (task `s.1.1` in this case) finishes. +/// +/// There is no guaranteed order of execution for spawns in a scope, +/// given that other threads may steal tasks at any time. However, they +/// are generally prioritized in a LIFO order on the thread from which +/// they were spawned. So in this example, absent any stealing, we can +/// expect `s.2` to execute before `s.1`, and `t.2` before `t.1`. Other +/// threads always steal from the other end of the deque, like FIFO +/// order. The idea is that "recent" tasks are most likely to be fresh +/// in the local CPU's cache, while other threads can steal older +/// "stale" tasks. For an alternate approach, consider +/// [`scope_fifo()`] instead. +/// +/// [`scope_fifo()`]: fn.scope_fifo.html +/// +/// # Accessing stack data +/// +/// In general, spawned tasks may access stack data in place that +/// outlives the scope itself. Other data must be fully owned by the +/// spawned task. +/// +/// ```rust +/// # use rustc_thread_pool as rayon; +/// let ok: Vec<i32> = vec![1, 2, 3]; +/// rayon::scope(|s| { +/// let bad: Vec<i32> = vec![4, 5, 6]; +/// s.spawn(|_| { +/// // We can access `ok` because outlives the scope `s`. +/// println!("ok: {:?}", ok); +/// +/// // If we just try to use `bad` here, the closure will borrow `bad` +/// // (because we are just printing it out, and that only requires a +/// // borrow), which will result in a compilation error. Read on +/// // for options. +/// // println!("bad: {:?}", bad); +/// }); +/// }); +/// ``` +/// +/// As the comments example above suggest, to reference `bad` we must +/// take ownership of it. One way to do this is to detach the closure +/// from the surrounding stack frame, using the `move` keyword. 
This +/// will cause it to take ownership of *all* the variables it touches, +/// in this case including both `ok` *and* `bad`: +/// +/// ```rust +/// # use rustc_thread_pool as rayon; +/// let ok: Vec<i32> = vec![1, 2, 3]; +/// rayon::scope(|s| { +/// let bad: Vec<i32> = vec![4, 5, 6]; +/// s.spawn(move |_| { +/// println!("ok: {:?}", ok); +/// println!("bad: {:?}", bad); +/// }); +/// +/// // That closure is fine, but now we can't use `ok` anywhere else, +/// // since it is owned by the previous task: +/// // s.spawn(|_| println!("ok: {:?}", ok)); +/// }); +/// ``` +/// +/// While this works, it could be a problem if we want to use `ok` elsewhere. +/// There are two choices. We can keep the closure as a `move` closure, but +/// instead of referencing the variable `ok`, we create a shadowed variable that +/// is a borrow of `ok` and capture *that*: +/// +/// ```rust +/// # use rustc_thread_pool as rayon; +/// let ok: Vec<i32> = vec![1, 2, 3]; +/// rayon::scope(|s| { +/// let bad: Vec<i32> = vec![4, 5, 6]; +/// let ok: &Vec<i32> = &ok; // shadow the original `ok` +/// s.spawn(move |_| { +/// println!("ok: {:?}", ok); // captures the shadowed version +/// println!("bad: {:?}", bad); +/// }); +/// +/// // Now we too can use the shadowed `ok`, since `&Vec<i32>` references +/// // can be shared freely. Note that we need a `move` closure here though, +/// // because otherwise we'd be trying to borrow the shadowed `ok`, +/// // and that doesn't outlive `scope`. +/// s.spawn(move |_| println!("ok: {:?}", ok)); +/// }); +/// ``` +/// +/// Another option is not to use the `move` keyword but instead to take ownership +/// of individual variables: +/// +/// ```rust +/// # use rustc_thread_pool as rayon; +/// let ok: Vec<i32> = vec![1, 2, 3]; +/// rayon::scope(|s| { +/// let bad: Vec<i32> = vec![4, 5, 6]; +/// s.spawn(|_| { +/// // Transfer ownership of `bad` into a local variable (also named `bad`). +/// // This will force the closure to take ownership of `bad` from the environment. +/// let bad = bad; +/// println!("ok: {:?}", ok); // `ok` is only borrowed. +/// println!("bad: {:?}", bad); // refers to our local variable, above. +/// }); +/// +/// s.spawn(|_| println!("ok: {:?}", ok)); // we too can borrow `ok` +/// }); +/// ``` +/// +/// # Panics +/// +/// If a panic occurs, either in the closure given to `scope()` or in +/// any of the spawned jobs, that panic will be propagated and the +/// call to `scope()` will panic. If multiple panics occurs, it is +/// non-deterministic which of their panic values will propagate. +/// Regardless, once a task is spawned using `scope.spawn()`, it will +/// execute, even if the spawning task should later panic. `scope()` +/// returns once all spawned jobs have completed, and any panics are +/// propagated at that point. +pub fn scope<'scope, OP, R>(op: OP) -> R +where + OP: FnOnce(&Scope<'scope>) -> R + Send, + R: Send, +{ + in_worker(|owner_thread, _| { + let scope = Scope::<'scope>::new(Some(owner_thread), None); + scope.base.complete(Some(owner_thread), || op(&scope)) + }) +} + +/// Creates a "fork-join" scope `s` with FIFO order, and invokes the +/// closure with a reference to `s`. This closure can then spawn +/// asynchronous tasks into `s`. Those tasks may run asynchronously with +/// respect to the closure; they may themselves spawn additional tasks +/// into `s`. When the closure returns, it will block until all tasks +/// that have been spawned into `s` complete. 
+/// +/// # Task execution +/// +/// Tasks in a `scope_fifo()` run similarly to [`scope()`], but there's a +/// difference in the order of execution. Consider a similar example: +/// +/// [`scope()`]: fn.scope.html +/// +/// ```rust +/// # use rustc_thread_pool as rayon; +/// // point start +/// rayon::scope_fifo(|s| { +/// s.spawn_fifo(|s| { // task s.1 +/// s.spawn_fifo(|s| { // task s.1.1 +/// rayon::scope_fifo(|t| { +/// t.spawn_fifo(|_| ()); // task t.1 +/// t.spawn_fifo(|_| ()); // task t.2 +/// }); +/// }); +/// }); +/// s.spawn_fifo(|s| { // task s.2 +/// }); +/// // point mid +/// }); +/// // point end +/// ``` +/// +/// The various tasks that are run will execute roughly like so: +/// +/// ```notrust +/// | (start) +/// | +/// | (FIFO scope `s` created) +/// +--------------------+ (task s.1) +/// +-------+ (task s.2) | +/// | | +---+ (task s.1.1) +/// | | | | +/// | | | | (FIFO scope `t` created) +/// | | | +----------------+ (task t.1) +/// | | | +---+ (task t.2) | +/// | (mid) | | | | | +/// : | | + <-+------------+ (scope `t` ends) +/// : | | | +/// |<------+------------+---+ (scope `s` ends) +/// | +/// | (end) +/// ``` +/// +/// Under `scope_fifo()`, the spawns are prioritized in a FIFO order on +/// the thread from which they were spawned, as opposed to `scope()`'s +/// LIFO. So in this example, we can expect `s.1` to execute before +/// `s.2`, and `t.1` before `t.2`. Other threads also steal tasks in +/// FIFO order, as usual. Overall, this has roughly the same order as +/// the now-deprecated [`breadth_first`] option, except the effect is +/// isolated to a particular scope. If spawns are intermingled from any +/// combination of `scope()` and `scope_fifo()`, or from different +/// threads, their order is only specified with respect to spawns in the +/// same scope and thread. +/// +/// For more details on this design, see Rayon [RFC #1]. +/// +/// [`breadth_first`]: struct.ThreadPoolBuilder.html#method.breadth_first +/// [RFC #1]: https://github.com/rayon-rs/rfcs/blob/master/accepted/rfc0001-scope-scheduling.md +/// +/// # Panics +/// +/// If a panic occurs, either in the closure given to `scope_fifo()` or +/// in any of the spawned jobs, that panic will be propagated and the +/// call to `scope_fifo()` will panic. If multiple panics occurs, it is +/// non-deterministic which of their panic values will propagate. +/// Regardless, once a task is spawned using `scope.spawn_fifo()`, it +/// will execute, even if the spawning task should later panic. +/// `scope_fifo()` returns once all spawned jobs have completed, and any +/// panics are propagated at that point. +pub fn scope_fifo<'scope, OP, R>(op: OP) -> R +where + OP: FnOnce(&ScopeFifo<'scope>) -> R + Send, + R: Send, +{ + in_worker(|owner_thread, _| { + let scope = ScopeFifo::<'scope>::new(Some(owner_thread), None); + scope.base.complete(Some(owner_thread), || op(&scope)) + }) +} + +/// Creates a "fork-join" scope `s` and invokes the closure with a +/// reference to `s`. This closure can then spawn asynchronous tasks +/// into `s`. Those tasks may run asynchronously with respect to the +/// closure; they may themselves spawn additional tasks into `s`. When +/// the closure returns, it will block until all tasks that have been +/// spawned into `s` complete. +/// +/// This is just like `scope()` except the closure runs on the same thread +/// that calls `in_place_scope()`. Only work that it spawns runs in the +/// thread pool. 
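+///
+/// # Example
+///
+/// A minimal sketch of that split (for illustration only; the atomic counter
+/// and the local variable below are arbitrary):
+///
+/// ```rust
+/// # use rustc_thread_pool as rayon;
+/// use std::sync::atomic::{AtomicUsize, Ordering};
+///
+/// let counter = AtomicUsize::new(0);
+/// let mut local = 0;
+/// rayon::in_place_scope(|s| {
+///     // The spawned job runs on some thread of the global pool...
+///     s.spawn(|_| {
+///         counter.fetch_add(1, Ordering::Relaxed);
+///     });
+///     // ...while this closure stays on the calling thread, so it can
+///     // freely mutate locals borrowed from this stack frame.
+///     local += 1;
+/// });
+/// // `in_place_scope` does not return until all spawned jobs are done.
+/// assert_eq!(counter.load(Ordering::Relaxed), 1);
+/// assert_eq!(local, 1);
+/// ```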
+/// +/// # Panics +/// +/// If a panic occurs, either in the closure given to `in_place_scope()` or in +/// any of the spawned jobs, that panic will be propagated and the +/// call to `in_place_scope()` will panic. If multiple panics occurs, it is +/// non-deterministic which of their panic values will propagate. +/// Regardless, once a task is spawned using `scope.spawn()`, it will +/// execute, even if the spawning task should later panic. `in_place_scope()` +/// returns once all spawned jobs have completed, and any panics are +/// propagated at that point. +pub fn in_place_scope<'scope, OP, R>(op: OP) -> R +where + OP: FnOnce(&Scope<'scope>) -> R, +{ + do_in_place_scope(None, op) +} + +pub(crate) fn do_in_place_scope<'scope, OP, R>(registry: Option<&Arc<Registry>>, op: OP) -> R +where + OP: FnOnce(&Scope<'scope>) -> R, +{ + let thread = unsafe { WorkerThread::current().as_ref() }; + let scope = Scope::<'scope>::new(thread, registry); + scope.base.complete(thread, || op(&scope)) +} + +/// Creates a "fork-join" scope `s` with FIFO order, and invokes the +/// closure with a reference to `s`. This closure can then spawn +/// asynchronous tasks into `s`. Those tasks may run asynchronously with +/// respect to the closure; they may themselves spawn additional tasks +/// into `s`. When the closure returns, it will block until all tasks +/// that have been spawned into `s` complete. +/// +/// This is just like `scope_fifo()` except the closure runs on the same thread +/// that calls `in_place_scope_fifo()`. Only work that it spawns runs in the +/// thread pool. +/// +/// # Panics +/// +/// If a panic occurs, either in the closure given to `in_place_scope_fifo()` or in +/// any of the spawned jobs, that panic will be propagated and the +/// call to `in_place_scope_fifo()` will panic. If multiple panics occurs, it is +/// non-deterministic which of their panic values will propagate. +/// Regardless, once a task is spawned using `scope.spawn_fifo()`, it will +/// execute, even if the spawning task should later panic. `in_place_scope_fifo()` +/// returns once all spawned jobs have completed, and any panics are +/// propagated at that point. +pub fn in_place_scope_fifo<'scope, OP, R>(op: OP) -> R +where + OP: FnOnce(&ScopeFifo<'scope>) -> R, +{ + do_in_place_scope_fifo(None, op) +} + +pub(crate) fn do_in_place_scope_fifo<'scope, OP, R>(registry: Option<&Arc<Registry>>, op: OP) -> R +where + OP: FnOnce(&ScopeFifo<'scope>) -> R, +{ + let thread = unsafe { WorkerThread::current().as_ref() }; + let scope = ScopeFifo::<'scope>::new(thread, registry); + scope.base.complete(thread, || op(&scope)) +} + +impl<'scope> Scope<'scope> { + fn new(owner: Option<&WorkerThread>, registry: Option<&Arc<Registry>>) -> Self { + let base = ScopeBase::new(owner, registry); + Scope { base } + } + + /// Spawns a job into the fork-join scope `self`. This job will + /// execute sometime before the fork-join scope completes. The + /// job is specified as a closure, and this closure receives its + /// own reference to the scope `self` as argument. This can be + /// used to inject new jobs into `self`. + /// + /// # Returns + /// + /// Nothing. The spawned closures cannot pass back values to the + /// caller directly, though they can write to local variables on + /// the stack (if those variables outlive the scope) or + /// communicate through shared channels. + /// + /// (The intention is to eventually integrate with Rust futures to + /// support spawns of functions that compute a value.) 
+ /// + /// # Examples + /// + /// ```rust + /// # use rustc_thread_pool as rayon; + /// let mut value_a = None; + /// let mut value_b = None; + /// let mut value_c = None; + /// rayon::scope(|s| { + /// s.spawn(|s1| { + /// // ^ this is the same scope as `s`; this handle `s1` + /// // is intended for use by the spawned task, + /// // since scope handles cannot cross thread boundaries. + /// + /// value_a = Some(22); + /// + /// // the scope `s` will not end until all these tasks are done + /// s1.spawn(|_| { + /// value_b = Some(44); + /// }); + /// }); + /// + /// s.spawn(|_| { + /// value_c = Some(66); + /// }); + /// }); + /// assert_eq!(value_a, Some(22)); + /// assert_eq!(value_b, Some(44)); + /// assert_eq!(value_c, Some(66)); + /// ``` + /// + /// # See also + /// + /// The [`scope` function] has more extensive documentation about + /// task spawning. + /// + /// [`scope` function]: fn.scope.html + pub fn spawn<BODY>(&self, body: BODY) + where + BODY: FnOnce(&Scope<'scope>) + Send + 'scope, + { + let scope_ptr = ScopePtr(self); + let job = HeapJob::new(self.base.tlv, move || unsafe { + // SAFETY: this job will execute before the scope ends. + let scope = scope_ptr.as_ref(); + ScopeBase::execute_job(&scope.base, move || body(scope)) + }); + let job_ref = self.base.heap_job_ref(job); + + // Since `Scope` implements `Sync`, we can't be sure that we're still in a + // thread of this pool, so we can't just push to the local worker thread. + // Also, this might be an in-place scope. + self.base.registry.inject_or_push(job_ref); + } + + /// Spawns a job into every thread of the fork-join scope `self`. This job will + /// execute on each thread sometime before the fork-join scope completes. The + /// job is specified as a closure, and this closure receives its own reference + /// to the scope `self` as argument, as well as a `BroadcastContext`. + pub fn spawn_broadcast<BODY>(&self, body: BODY) + where + BODY: Fn(&Scope<'scope>, BroadcastContext<'_>) + Send + Sync + 'scope, + { + let scope_ptr = ScopePtr(self); + let job = ArcJob::new(move || unsafe { + // SAFETY: this job will execute before the scope ends. + let scope = scope_ptr.as_ref(); + let body = &body; + let func = move || BroadcastContext::with(move |ctx| body(scope, ctx)); + ScopeBase::execute_job(&scope.base, func) + }); + self.base.inject_broadcast(job) + } +} + +impl<'scope> ScopeFifo<'scope> { + fn new(owner: Option<&WorkerThread>, registry: Option<&Arc<Registry>>) -> Self { + let base = ScopeBase::new(owner, registry); + let num_threads = base.registry.num_threads(); + let fifos = (0..num_threads).map(|_| JobFifo::new()).collect(); + ScopeFifo { base, fifos } + } + + /// Spawns a job into the fork-join scope `self`. This job will + /// execute sometime before the fork-join scope completes. The + /// job is specified as a closure, and this closure receives its + /// own reference to the scope `self` as argument. This can be + /// used to inject new jobs into `self`. + /// + /// # See also + /// + /// This method is akin to [`Scope::spawn()`], but with a FIFO + /// priority. The [`scope_fifo` function] has more details about + /// this distinction. + /// + /// [`Scope::spawn()`]: struct.Scope.html#method.spawn + /// [`scope_fifo` function]: fn.scope_fifo.html + pub fn spawn_fifo<BODY>(&self, body: BODY) + where + BODY: FnOnce(&ScopeFifo<'scope>) + Send + 'scope, + { + let scope_ptr = ScopePtr(self); + let job = HeapJob::new(self.base.tlv, move || unsafe { + // SAFETY: this job will execute before the scope ends. 
+ let scope = scope_ptr.as_ref(); + ScopeBase::execute_job(&scope.base, move || body(scope)) + }); + let job_ref = self.base.heap_job_ref(job); + + // If we're in the pool, use our scope's private fifo for this thread to execute + // in a locally-FIFO order. Otherwise, just use the pool's global injector. + match self.base.registry.current_thread() { + Some(worker) => { + let fifo = &self.fifos[worker.index()]; + // SAFETY: this job will execute before the scope ends. + unsafe { worker.push(fifo.push(job_ref)) }; + } + None => self.base.registry.inject(job_ref), + } + } + + /// Spawns a job into every thread of the fork-join scope `self`. This job will + /// execute on each thread sometime before the fork-join scope completes. The + /// job is specified as a closure, and this closure receives its own reference + /// to the scope `self` as argument, as well as a `BroadcastContext`. + pub fn spawn_broadcast<BODY>(&self, body: BODY) + where + BODY: Fn(&ScopeFifo<'scope>, BroadcastContext<'_>) + Send + Sync + 'scope, + { + let scope_ptr = ScopePtr(self); + let job = ArcJob::new(move || unsafe { + // SAFETY: this job will execute before the scope ends. + let scope = scope_ptr.as_ref(); + let body = &body; + let func = move || BroadcastContext::with(move |ctx| body(scope, ctx)); + ScopeBase::execute_job(&scope.base, func) + }); + self.base.inject_broadcast(job) + } +} + +impl<'scope> ScopeBase<'scope> { + /// Creates the base of a new scope for the given registry + fn new(owner: Option<&WorkerThread>, registry: Option<&Arc<Registry>>) -> Self { + let registry = registry.unwrap_or_else(|| match owner { + Some(owner) => owner.registry(), + None => global_registry(), + }); + + ScopeBase { + registry: Arc::clone(registry), + panic: AtomicPtr::new(ptr::null_mut()), + job_completed_latch: CountLatch::new(owner), + marker: PhantomData, + tlv: tlv::get(), + } + } + + fn heap_job_ref<FUNC>(&self, job: Box<HeapJob<FUNC>>) -> JobRef + where + FUNC: FnOnce() + Send + 'scope, + { + unsafe { + self.job_completed_latch.increment(); + job.into_job_ref() + } + } + + fn inject_broadcast<FUNC>(&self, job: Arc<ArcJob<FUNC>>) + where + FUNC: Fn() + Send + Sync + 'scope, + { + let n_threads = self.registry.num_threads(); + let job_refs = (0..n_threads).map(|_| unsafe { + self.job_completed_latch.increment(); + ArcJob::as_job_ref(&job) + }); + + self.registry.inject_broadcast(job_refs); + } + + /// Executes `func` as a job, either aborting or executing as + /// appropriate. + fn complete<FUNC, R>(&self, owner: Option<&WorkerThread>, func: FUNC) -> R + where + FUNC: FnOnce() -> R, + { + let result = unsafe { Self::execute_job_closure(self, func) }; + self.job_completed_latch.wait(owner); + + // Restore the TLV if we ran some jobs while waiting + tlv::set(self.tlv); + + self.maybe_propagate_panic(); + result.unwrap() // only None if `op` panicked, and that would have been propagated + } + + /// Executes `func` as a job, either aborting or executing as + /// appropriate. + unsafe fn execute_job<FUNC>(this: *const Self, func: FUNC) + where + FUNC: FnOnce(), + { + let _: Option<()> = unsafe { Self::execute_job_closure(this, func) }; + } + + /// Executes `func` as a job in scope. Adjusts the "job completed" + /// counters and also catches any panic and stores it into + /// `scope`. 
+ unsafe fn execute_job_closure<FUNC, R>(this: *const Self, func: FUNC) -> Option<R> + where + FUNC: FnOnce() -> R, + { + let result = match unwind::halt_unwinding(func) { + Ok(r) => Some(r), + Err(err) => { + unsafe { (*this).job_panicked(err) }; + None + } + }; + unsafe { Latch::set(&(*this).job_completed_latch) }; + result + } + + fn job_panicked(&self, err: Box<dyn Any + Send + 'static>) { + // capture the first error we see, free the rest + if self.panic.load(Ordering::Relaxed).is_null() { + let nil = ptr::null_mut(); + let mut err = ManuallyDrop::new(Box::new(err)); // box up the fat ptr + let err_ptr: *mut Box<dyn Any + Send + 'static> = &mut **err; + if self + .panic + .compare_exchange(nil, err_ptr, Ordering::Release, Ordering::Relaxed) + .is_ok() + { + // ownership now transferred into self.panic + } else { + // another panic raced in ahead of us, so drop ours + let _: Box<Box<_>> = ManuallyDrop::into_inner(err); + } + } + } + + fn maybe_propagate_panic(&self) { + // propagate panic, if any occurred; at this point, all + // outstanding jobs have completed, so we can use a relaxed + // ordering: + let panic = self.panic.swap(ptr::null_mut(), Ordering::Relaxed); + if !panic.is_null() { + let value = unsafe { Box::from_raw(panic) }; + + // Restore the TLV if we ran some jobs while waiting + tlv::set(self.tlv); + + unwind::resume_unwinding(*value); + } + } +} + +impl<'scope> fmt::Debug for Scope<'scope> { + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt.debug_struct("Scope") + .field("pool_id", &self.base.registry.id()) + .field("panic", &self.base.panic) + .field("job_completed_latch", &self.base.job_completed_latch) + .finish() + } +} + +impl<'scope> fmt::Debug for ScopeFifo<'scope> { + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt.debug_struct("ScopeFifo") + .field("num_fifos", &self.fifos.len()) + .field("pool_id", &self.base.registry.id()) + .field("panic", &self.base.panic) + .field("job_completed_latch", &self.base.job_completed_latch) + .finish() + } +} + +/// Used to capture a scope `&Self` pointer in jobs, without faking a lifetime. +/// +/// Unsafe code is still required to dereference the pointer, but that's fine in +/// scope jobs that are guaranteed to execute before the scope ends. 
+struct ScopePtr<T>(*const T); + +// SAFETY: !Send for raw pointers is not for safety, just as a lint +unsafe impl<T: Sync> Send for ScopePtr<T> {} + +// SAFETY: !Sync for raw pointers is not for safety, just as a lint +unsafe impl<T: Sync> Sync for ScopePtr<T> {} + +impl<T> ScopePtr<T> { + // Helper to avoid disjoint captures of `scope_ptr.0` + unsafe fn as_ref(&self) -> &T { + unsafe { &*self.0 } + } +} diff --git a/compiler/rustc_thread_pool/src/scope/tests.rs b/compiler/rustc_thread_pool/src/scope/tests.rs new file mode 100644 index 00000000000..2df3bc67e29 --- /dev/null +++ b/compiler/rustc_thread_pool/src/scope/tests.rs @@ -0,0 +1,607 @@ +use std::iter::once; +use std::sync::atomic::{AtomicUsize, Ordering}; +use std::sync::{Barrier, Mutex}; +use std::vec; + +use rand::{Rng, SeedableRng}; +use rand_xorshift::XorShiftRng; + +use crate::{Scope, ScopeFifo, ThreadPoolBuilder, scope, scope_fifo, unwind}; + +#[test] +fn scope_empty() { + scope(|_| {}); +} + +#[test] +fn scope_result() { + let x = scope(|_| 22); + assert_eq!(x, 22); +} + +#[test] +fn scope_two() { + let counter = &AtomicUsize::new(0); + scope(|s| { + s.spawn(move |_| { + counter.fetch_add(1, Ordering::SeqCst); + }); + s.spawn(move |_| { + counter.fetch_add(10, Ordering::SeqCst); + }); + }); + + let v = counter.load(Ordering::SeqCst); + assert_eq!(v, 11); +} + +#[test] +fn scope_divide_and_conquer() { + let counter_p = &AtomicUsize::new(0); + scope(|s| s.spawn(move |s| divide_and_conquer(s, counter_p, 1024))); + + let counter_s = &AtomicUsize::new(0); + divide_and_conquer_seq(counter_s, 1024); + + let p = counter_p.load(Ordering::SeqCst); + let s = counter_s.load(Ordering::SeqCst); + assert_eq!(p, s); +} + +fn divide_and_conquer<'scope>(scope: &Scope<'scope>, counter: &'scope AtomicUsize, size: usize) { + if size > 1 { + scope.spawn(move |scope| divide_and_conquer(scope, counter, size / 2)); + scope.spawn(move |scope| divide_and_conquer(scope, counter, size / 2)); + } else { + // count the leaves + counter.fetch_add(1, Ordering::SeqCst); + } +} + +fn divide_and_conquer_seq(counter: &AtomicUsize, size: usize) { + if size > 1 { + divide_and_conquer_seq(counter, size / 2); + divide_and_conquer_seq(counter, size / 2); + } else { + // count the leaves + counter.fetch_add(1, Ordering::SeqCst); + } +} + +struct Tree<T: Send> { + value: T, + children: Vec<Tree<T>>, +} + +impl<T: Send> Tree<T> { + fn iter(&self) -> vec::IntoIter<&T> { + once(&self.value) + .chain(self.children.iter().flat_map(Tree::iter)) + .collect::<Vec<_>>() // seems like it shouldn't be needed... 
but prevents overflow + .into_iter() + } + + fn update<OP>(&mut self, op: OP) + where + OP: Fn(&mut T) + Sync, + T: Send, + { + scope(|s| self.update_in_scope(&op, s)); + } + + fn update_in_scope<'scope, OP>(&'scope mut self, op: &'scope OP, scope: &Scope<'scope>) + where + OP: Fn(&mut T) + Sync, + { + let Tree { ref mut value, ref mut children } = *self; + scope.spawn(move |scope| { + for child in children { + scope.spawn(move |scope| child.update_in_scope(op, scope)); + } + }); + + op(value); + } +} + +fn random_tree(depth: usize) -> Tree<u32> { + assert!(depth > 0); + let mut seed = <XorShiftRng as SeedableRng>::Seed::default(); + (0..).zip(seed.as_mut()).for_each(|(i, x)| *x = i); + let mut rng = XorShiftRng::from_seed(seed); + random_tree1(depth, &mut rng) +} + +fn random_tree1(depth: usize, rng: &mut XorShiftRng) -> Tree<u32> { + let children = if depth == 0 { + vec![] + } else { + (0..rng.random_range(0..4)) // somewhere between 0 and 3 children at each level + .map(|_| random_tree1(depth - 1, rng)) + .collect() + }; + + Tree { value: rng.random_range(0..1_000_000), children } +} + +#[test] +fn update_tree() { + let mut tree: Tree<u32> = random_tree(10); + let values: Vec<u32> = tree.iter().cloned().collect(); + tree.update(|v| *v += 1); + let new_values: Vec<u32> = tree.iter().cloned().collect(); + assert_eq!(values.len(), new_values.len()); + for (&i, &j) in values.iter().zip(&new_values) { + assert_eq!(i + 1, j); + } +} + +/// Check that if you have a chain of scoped tasks where T0 spawns T1 +/// spawns T2 and so forth down to Tn, the stack space should not grow +/// linearly with N. We test this by some unsafe hackery and +/// permitting an approx 10% change with a 10x input change. +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn linear_stack_growth() { + let builder = ThreadPoolBuilder::new().num_threads(1); + let pool = builder.build().unwrap(); + pool.install(|| { + let mut max_diff = Mutex::new(0); + let bottom_of_stack = 0; + scope(|s| the_final_countdown(s, &bottom_of_stack, &max_diff, 5)); + let diff_when_5 = *max_diff.get_mut().unwrap() as f64; + + scope(|s| the_final_countdown(s, &bottom_of_stack, &max_diff, 500)); + let diff_when_500 = *max_diff.get_mut().unwrap() as f64; + + let ratio = diff_when_5 / diff_when_500; + assert!(ratio > 0.9 && ratio < 1.1, "stack usage ratio out of bounds: {}", ratio); + }); +} + +fn the_final_countdown<'scope>( + s: &Scope<'scope>, + bottom_of_stack: &'scope i32, + max: &'scope Mutex<usize>, + n: usize, +) { + let top_of_stack = 0; + let p = bottom_of_stack as *const i32 as usize; + let q = &top_of_stack as *const i32 as usize; + let diff = if p > q { p - q } else { q - p }; + + let mut data = max.lock().unwrap(); + *data = Ord::max(diff, *data); + + if n > 0 { + s.spawn(move |s| the_final_countdown(s, bottom_of_stack, max, n - 1)); + } +} + +#[test] +#[should_panic(expected = "Hello, world!")] +fn panic_propagate_scope() { + scope(|_| panic!("Hello, world!")); +} + +#[test] +#[should_panic(expected = "Hello, world!")] +fn panic_propagate_spawn() { + scope(|s| s.spawn(|_| panic!("Hello, world!"))); +} + +#[test] +#[should_panic(expected = "Hello, world!")] +fn panic_propagate_nested_spawn() { + scope(|s| s.spawn(|s| s.spawn(|s| s.spawn(|_| panic!("Hello, world!"))))); +} + +#[test] +#[should_panic(expected = "Hello, world!")] +fn panic_propagate_nested_scope_spawn() { + scope(|s| s.spawn(|_| scope(|s| s.spawn(|_| panic!("Hello, world!"))))); +} + +#[test] +#[cfg_attr(not(panic = "unwind"), 
ignore)] +fn panic_propagate_still_execute_1() { + let mut x = false; + let result = unwind::halt_unwinding(|| { + scope(|s| { + s.spawn(|_| panic!("Hello, world!")); // job A + s.spawn(|_| x = true); // job B, should still execute even though A panics + }); + }); + match result { + Ok(_) => panic!("failed to propagate panic"), + Err(_) => assert!(x, "job b failed to execute"), + } +} + +#[test] +#[cfg_attr(not(panic = "unwind"), ignore)] +fn panic_propagate_still_execute_2() { + let mut x = false; + let result = unwind::halt_unwinding(|| { + scope(|s| { + s.spawn(|_| x = true); // job B, should still execute even though A panics + s.spawn(|_| panic!("Hello, world!")); // job A + }); + }); + match result { + Ok(_) => panic!("failed to propagate panic"), + Err(_) => assert!(x, "job b failed to execute"), + } +} + +#[test] +#[cfg_attr(not(panic = "unwind"), ignore)] +fn panic_propagate_still_execute_3() { + let mut x = false; + let result = unwind::halt_unwinding(|| { + scope(|s| { + s.spawn(|_| x = true); // spawned job should still execute despite later panic + panic!("Hello, world!"); + }); + }); + match result { + Ok(_) => panic!("failed to propagate panic"), + Err(_) => assert!(x, "panic after spawn, spawn failed to execute"), + } +} + +#[test] +#[cfg_attr(not(panic = "unwind"), ignore)] +fn panic_propagate_still_execute_4() { + let mut x = false; + let result = unwind::halt_unwinding(|| { + scope(|s| { + s.spawn(|_| panic!("Hello, world!")); + x = true; + }); + }); + match result { + Ok(_) => panic!("failed to propagate panic"), + Err(_) => assert!(x, "panic in spawn tainted scope"), + } +} + +macro_rules! test_order { + ($scope:ident => $spawn:ident) => {{ + let builder = ThreadPoolBuilder::new().num_threads(1); + let pool = builder.build().unwrap(); + pool.install(|| { + let vec = Mutex::new(vec![]); + $scope(|scope| { + let vec = &vec; + for i in 0..10 { + scope.$spawn(move |scope| { + for j in 0..10 { + scope.$spawn(move |_| { + vec.lock().unwrap().push(i * 10 + j); + }); + } + }); + } + }); + vec.into_inner().unwrap() + }) + }}; +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn lifo_order() { + // In the absence of stealing, `scope()` runs its `spawn()` jobs in LIFO order. + let vec = test_order!(scope => spawn); + let expected: Vec<i32> = (0..100).rev().collect(); // LIFO -> reversed + assert_eq!(vec, expected); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn fifo_order() { + // In the absence of stealing, `scope_fifo()` runs its `spawn_fifo()` jobs in FIFO order. + let vec = test_order!(scope_fifo => spawn_fifo); + let expected: Vec<i32> = (0..100).collect(); // FIFO -> natural order + assert_eq!(vec, expected); +} + +macro_rules! test_nested_order { + ($outer_scope:ident => $outer_spawn:ident, + $inner_scope:ident => $inner_spawn:ident) => {{ + let builder = ThreadPoolBuilder::new().num_threads(1); + let pool = builder.build().unwrap(); + pool.install(|| { + let vec = Mutex::new(vec![]); + $outer_scope(|scope| { + let vec = &vec; + for i in 0..10 { + scope.$outer_spawn(move |_| { + $inner_scope(|scope| { + for j in 0..10 { + scope.$inner_spawn(move |_| { + vec.lock().unwrap().push(i * 10 + j); + }); + } + }); + }); + } + }); + vec.into_inner().unwrap() + }) + }}; +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn nested_lifo_order() { + // In the absence of stealing, `scope()` runs its `spawn()` jobs in LIFO order. 
+ let vec = test_nested_order!(scope => spawn, scope => spawn); + let expected: Vec<i32> = (0..100).rev().collect(); // LIFO -> reversed + assert_eq!(vec, expected); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn nested_fifo_order() { + // In the absence of stealing, `scope_fifo()` runs its `spawn_fifo()` jobs in FIFO order. + let vec = test_nested_order!(scope_fifo => spawn_fifo, scope_fifo => spawn_fifo); + let expected: Vec<i32> = (0..100).collect(); // FIFO -> natural order + assert_eq!(vec, expected); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn nested_lifo_fifo_order() { + // LIFO on the outside, FIFO on the inside + let vec = test_nested_order!(scope => spawn, scope_fifo => spawn_fifo); + let expected: Vec<i32> = (0..10).rev().flat_map(|i| (0..10).map(move |j| i * 10 + j)).collect(); + assert_eq!(vec, expected); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn nested_fifo_lifo_order() { + // FIFO on the outside, LIFO on the inside + let vec = test_nested_order!(scope_fifo => spawn_fifo, scope => spawn); + let expected: Vec<i32> = (0..10).flat_map(|i| (0..10).rev().map(move |j| i * 10 + j)).collect(); + assert_eq!(vec, expected); +} + +macro_rules! spawn_push { + ($scope:ident . $spawn:ident, $vec:ident, $i:expr) => {{ + $scope.$spawn(move |_| $vec.lock().unwrap().push($i)); + }}; +} + +/// Test spawns pushing a series of numbers, interleaved +/// such that negative values are using an inner scope. +macro_rules! test_mixed_order { + ($outer_scope:ident => $outer_spawn:ident, + $inner_scope:ident => $inner_spawn:ident) => {{ + let builder = ThreadPoolBuilder::new().num_threads(1); + let pool = builder.build().unwrap(); + pool.install(|| { + let vec = Mutex::new(vec![]); + $outer_scope(|outer_scope| { + let vec = &vec; + spawn_push!(outer_scope.$outer_spawn, vec, 0); + $inner_scope(|inner_scope| { + spawn_push!(inner_scope.$inner_spawn, vec, -1); + spawn_push!(outer_scope.$outer_spawn, vec, 1); + spawn_push!(inner_scope.$inner_spawn, vec, -2); + spawn_push!(outer_scope.$outer_spawn, vec, 2); + spawn_push!(inner_scope.$inner_spawn, vec, -3); + }); + spawn_push!(outer_scope.$outer_spawn, vec, 3); + }); + vec.into_inner().unwrap() + }) + }}; +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn mixed_lifo_order() { + // NB: the end of the inner scope makes us execute some of the outer scope + // before they've all been spawned, so they're not perfectly LIFO. + let vec = test_mixed_order!(scope => spawn, scope => spawn); + let expected = vec![-3, 2, -2, 1, -1, 3, 0]; + assert_eq!(vec, expected); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn mixed_fifo_order() { + let vec = test_mixed_order!(scope_fifo => spawn_fifo, scope_fifo => spawn_fifo); + let expected = vec![-1, 0, -2, 1, -3, 2, 3]; + assert_eq!(vec, expected); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn mixed_lifo_fifo_order() { + // NB: the end of the inner scope makes us execute some of the outer scope + // before they've all been spawned, so they're not perfectly LIFO. 
+ let vec = test_mixed_order!(scope => spawn, scope_fifo => spawn_fifo); + let expected = vec![-1, 2, -2, 1, -3, 3, 0]; + assert_eq!(vec, expected); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn mixed_fifo_lifo_order() { + let vec = test_mixed_order!(scope_fifo => spawn_fifo, scope => spawn); + let expected = vec![-3, 0, -2, 1, -1, 2, 3]; + assert_eq!(vec, expected); +} + +#[test] +fn static_scope() { + static COUNTER: AtomicUsize = AtomicUsize::new(0); + + let mut range = 0..100; + let sum = range.clone().sum(); + let iter = &mut range; + + COUNTER.store(0, Ordering::Relaxed); + scope(|s: &Scope<'static>| { + // While we're allowed the locally borrowed iterator, + // the spawns must be static. + for i in iter { + s.spawn(move |_| { + COUNTER.fetch_add(i, Ordering::Relaxed); + }); + } + }); + + assert_eq!(COUNTER.load(Ordering::Relaxed), sum); +} + +#[test] +fn static_scope_fifo() { + static COUNTER: AtomicUsize = AtomicUsize::new(0); + + let mut range = 0..100; + let sum = range.clone().sum(); + let iter = &mut range; + + COUNTER.store(0, Ordering::Relaxed); + scope_fifo(|s: &ScopeFifo<'static>| { + // While we're allowed the locally borrowed iterator, + // the spawns must be static. + for i in iter { + s.spawn_fifo(move |_| { + COUNTER.fetch_add(i, Ordering::Relaxed); + }); + } + }); + + assert_eq!(COUNTER.load(Ordering::Relaxed), sum); +} + +#[test] +fn mixed_lifetime_scope() { + fn increment<'slice, 'counter>(counters: &'slice [&'counter AtomicUsize]) { + scope(move |s: &Scope<'counter>| { + // We can borrow 'slice here, but the spawns can only borrow 'counter. + for &c in counters { + s.spawn(move |_| { + c.fetch_add(1, Ordering::Relaxed); + }); + } + }); + } + + let counter = AtomicUsize::new(0); + increment(&[&counter; 100]); + assert_eq!(counter.into_inner(), 100); +} + +#[test] +fn mixed_lifetime_scope_fifo() { + fn increment<'slice, 'counter>(counters: &'slice [&'counter AtomicUsize]) { + scope_fifo(move |s: &ScopeFifo<'counter>| { + // We can borrow 'slice here, but the spawns can only borrow 'counter. 
+ for &c in counters { + s.spawn_fifo(move |_| { + c.fetch_add(1, Ordering::Relaxed); + }); + } + }); + } + + let counter = AtomicUsize::new(0); + increment(&[&counter; 100]); + assert_eq!(counter.into_inner(), 100); +} + +#[test] +fn scope_spawn_broadcast() { + let sum = AtomicUsize::new(0); + let n = scope(|s| { + s.spawn_broadcast(|_, ctx| { + sum.fetch_add(ctx.index(), Ordering::Relaxed); + }); + crate::current_num_threads() + }); + assert_eq!(sum.into_inner(), n * (n - 1) / 2); +} + +#[test] +fn scope_fifo_spawn_broadcast() { + let sum = AtomicUsize::new(0); + let n = scope_fifo(|s| { + s.spawn_broadcast(|_, ctx| { + sum.fetch_add(ctx.index(), Ordering::Relaxed); + }); + crate::current_num_threads() + }); + assert_eq!(sum.into_inner(), n * (n - 1) / 2); +} + +#[test] +fn scope_spawn_broadcast_nested() { + let sum = AtomicUsize::new(0); + let n = scope(|s| { + s.spawn_broadcast(|s, _| { + s.spawn_broadcast(|_, ctx| { + sum.fetch_add(ctx.index(), Ordering::Relaxed); + }); + }); + crate::current_num_threads() + }); + assert_eq!(sum.into_inner(), n * n * (n - 1) / 2); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn scope_spawn_broadcast_barrier() { + let barrier = Barrier::new(8); + let pool = ThreadPoolBuilder::new().num_threads(7).build().unwrap(); + pool.in_place_scope(|s| { + s.spawn_broadcast(|_, _| { + barrier.wait(); + }); + barrier.wait(); + }); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn scope_spawn_broadcast_panic_one() { + let count = AtomicUsize::new(0); + let pool = ThreadPoolBuilder::new().num_threads(7).build().unwrap(); + let result = crate::unwind::halt_unwinding(|| { + pool.scope(|s| { + s.spawn_broadcast(|_, ctx| { + count.fetch_add(1, Ordering::Relaxed); + if ctx.index() == 3 { + panic!("Hello, world!"); + } + }); + }); + }); + assert_eq!(count.into_inner(), 7); + assert!(result.is_err(), "broadcast panic should propagate!"); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn scope_spawn_broadcast_panic_many() { + let count = AtomicUsize::new(0); + let pool = ThreadPoolBuilder::new().num_threads(7).build().unwrap(); + let result = crate::unwind::halt_unwinding(|| { + pool.scope(|s| { + s.spawn_broadcast(|_, ctx| { + count.fetch_add(1, Ordering::Relaxed); + if ctx.index() % 2 == 0 { + panic!("Hello, world!"); + } + }); + }); + }); + assert_eq!(count.into_inner(), 7); + assert!(result.is_err(), "broadcast panic should propagate!"); +} diff --git a/compiler/rustc_thread_pool/src/sleep/README.md b/compiler/rustc_thread_pool/src/sleep/README.md new file mode 100644 index 00000000000..1e11da55f4a --- /dev/null +++ b/compiler/rustc_thread_pool/src/sleep/README.md @@ -0,0 +1,252 @@ +# Introduction: the sleep module + +The code in this module governs when worker threads should go to +sleep. The system used in this code was introduced in [Rayon RFC #5]. +There is also a [video walkthrough] available. Both of those may be +valuable resources to understanding the code, though naturally they +will also grow stale over time. The comments in this file are +extracted from the RFC and meant to be kept up to date. + +[Rayon RFC #5]: https://github.com/rayon-rs/rfcs/pull/5 +[video walkthrough]: https://youtu.be/HvmQsE5M4cY + +# The `Sleep` struct + +The `Sleep` struct is embedded into each registry. It performs several functions: + +* It tracks when workers are awake or asleep. 
+* It decides how long a worker should look for work before it goes to sleep,
+  via a callback that is invoked periodically from the worker's search loop.
+* It is notified when latches are set, jobs are published, or other
+  events occur, and it will go and wake the appropriate threads if
+  they are sleeping.
+
+# Thread states
+
+There are three main thread states:
+
+* An **active** thread is one that is actively executing a job.
+* An **idle** thread is one that is searching for work to do. It will be
+  trying to steal work or pop work from the global injector queue.
+* A **sleeping** thread is one that is blocked on a condition variable,
+  waiting to be awoken.
+
+We sometimes refer to the final two states collectively as **inactive**.
+Threads begin as active but transition to idle and finally to sleeping when
+they're unable to find work to do.
+
+## Sleepy threads
+
+There is one other special state worth mentioning. During the idle state,
+threads can get **sleepy**. A sleepy thread is still idle, in that it is still
+searching for work, but it is *about* to go to sleep after it does one more
+search (or some other number, potentially). When a thread enters the sleepy
+state, it signals (via the **jobs event counter**, described below) that it is
+about to go to sleep. If new work is published, this will lead to the counter
+being adjusted. When the thread actually goes to sleep, it will (hopefully, but
+not guaranteed) see that the counter has changed and elect not to sleep, but
+instead to search again. See the section on the **jobs event counter** for more
+details.
+
+# The counters
+
+One of the key structs in the sleep module is `AtomicCounters`, found in
+`counters.rs`. It packs three counters into one atomically managed value:
+
+* Two **thread counters**, which track the number of threads in a particular state.
+* The **jobs event counter**, which is used to signal when new work is available.
+  It (sort of) tracks the number of jobs posted, but not quite, and it can roll over.
+
+## Thread counters
+
+There are two thread counters, one that tracks **inactive** threads and one that
+tracks **sleeping** threads. From this, one can deduce the number of threads
+that are idle by subtracting sleeping threads from inactive threads. We track
+the counters in this way because it permits simpler atomic operations. One can
+increment the number of sleeping threads (and thus decrease the number of idle
+threads) simply by doing one atomic increment, for example. Similarly, one can
+decrease the number of sleeping threads (and increase the number of idle
+threads) through one atomic decrement.
+
+These counters are adjusted as follows:
+
+* When a thread enters the idle state: increment the inactive thread counter.
+* When a thread enters the sleeping state: increment the sleeping thread counter.
+* When a thread awakens a sleeping thread: decrement the sleeping thread counter.
+  * Subtle point: the thread that *awakens* the sleeping thread decrements the
+    counter, not the thread that is *sleeping*. This is because there is a delay
+    between signaling a thread to wake and the thread actually waking:
+    decrementing the counter when awakening the thread means that other threads
+    that may be posting work will see the up-to-date value that much faster.
+* When a thread finds work, exiting the idle state: decrement the inactive
+  thread counter.
+
+## Jobs event counter
+
+The final counter is the **jobs event counter**.
The role of this counter is to +help sleepy threads detect when new work is posted in a lightweight fashion. In +its simplest form, we would simply have a counter that gets incremented each +time a new job is posted. This way, when a thread gets sleepy, it could read the +counter, and then compare to see if the value has changed before it actually +goes to sleep. But this [turns out to be too expensive] in practice, so we use a +somewhat more complex scheme. + +[turns out to be too expensive]: https://github.com/rayon-rs/rayon/pull/746#issuecomment-624802747 + +The idea is that the counter toggles between two states, depending on whether +its value is even or odd (or, equivalently, on the value of its low bit): + +* Even -- If the low bit is zero, then it means that there has been no new work + since the last thread got sleepy. +* Odd -- If the low bit is one, then it means that new work was posted since + the last thread got sleepy. + +### New work is posted + +When new work is posted, we check the value of the counter: if it is even, +then we increment it by one, so that it becomes odd. + +### Worker thread gets sleepy + +When a worker thread gets sleepy, it will read the value of the counter. If the +counter is odd, it will increment the counter so that it is even. Either way, it +remembers the final value of the counter. The final value will be used later, +when the thread is going to sleep. If at that time the counter has not changed, +then we can assume no new jobs have been posted (though note the remote +possibility of rollover, discussed in detail below). + +# Protocol for a worker thread to post work + +The full protocol for a thread to post work is as follows + +* If the work is posted into the injection queue, then execute a seq-cst fence (see below). +* Load the counters, incrementing the JEC if it is even so that it is odd. +* Check if there are idle threads available to handle this new job. If not, + and there are sleeping threads, then wake one or more threads. + +# Protocol for a worker thread to fall asleep + +The full protocol for a thread to fall asleep is as follows: + +* After completing all its jobs, the worker goes idle and begins to + search for work. As it searches, it counts "rounds". In each round, + it searches all other work threads' queues, plus the 'injector queue' for + work injected from the outside. If work is found in this search, the thread + becomes active again and hence restarts this protocol from the top. +* After a certain number of rounds, the thread "gets sleepy" and executes `get_sleepy` + above, remembering the `final_value` of the JEC. It does one more search for work. +* If no work is found, the thread atomically: + * Checks the JEC to see that it has not changed from `final_value`. + * If it has, then the thread goes back to searching for work. We reset to + just before we got sleepy, so that we will do one more search + before attempting to sleep again (rather than searching for many rounds). + * Increments the number of sleeping threads by 1. +* The thread then executes a seq-cst fence operation (see below). +* The thread then does one final check for injected jobs (see below). If any + are available, it returns to the 'pre-sleepy' state as if the JEC had changed. +* The thread waits to be signaled. Once signaled, it returns to the idle state. 
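To make the counter layout and the sleepy/posting handshake described above concrete, here is a minimal, self-contained sketch. It packs the two thread counters and the JEC into a single `AtomicUsize` as the README describes, but the field widths, helper names, and the simplified flow in `main` are illustrative assumptions only; they are not the actual constants or functions in `counters.rs`, and the seq-cst fences discussed below are omitted.

```rust
use std::sync::atomic::{AtomicUsize, Ordering};

// Illustrative layout: low bits = sleeping threads, next bits = inactive
// threads, remaining high bits = jobs event counter (JEC).
const THREADS_BITS: usize = 8;
const THREADS_MAX: usize = (1 << THREADS_BITS) - 1;
const INACTIVE_SHIFT: usize = THREADS_BITS;
const JEC_SHIFT: usize = 2 * THREADS_BITS;
const ONE_INACTIVE: usize = 1 << INACTIVE_SHIFT;
const ONE_JEC: usize = 1 << JEC_SHIFT;

fn sleeping(word: usize) -> usize { word & THREADS_MAX }
fn inactive(word: usize) -> usize { (word >> INACTIVE_SHIFT) & THREADS_MAX }
fn jec(word: usize) -> usize { word >> JEC_SHIFT }

/// Getting sleepy: if the JEC is odd ("work was posted"), bump it so it is
/// even, and remember the value we end up seeing; it is re-checked before
/// the thread actually blocks.
fn get_sleepy(counters: &AtomicUsize) -> usize {
    loop {
        let old = counters.load(Ordering::SeqCst);
        if jec(old) % 2 == 0 {
            return jec(old);
        }
        let new = old.wrapping_add(ONE_JEC);
        if counters.compare_exchange(old, new, Ordering::SeqCst, Ordering::SeqCst).is_ok() {
            return jec(new);
        }
    }
}

/// Posting work: if the JEC is even ("someone announced they are sleepy"),
/// bump it so it is odd, letting sleepy threads notice that work appeared.
fn announce_new_work(counters: &AtomicUsize) {
    loop {
        let old = counters.load(Ordering::SeqCst);
        if jec(old) % 2 == 1 {
            return;
        }
        let new = old.wrapping_add(ONE_JEC);
        if counters.compare_exchange(old, new, Ordering::SeqCst, Ordering::SeqCst).is_ok() {
            return;
        }
    }
}

fn main() {
    let counters = AtomicUsize::new(0);

    // A worker enters its idle loop: one atomic add marks it inactive.
    counters.fetch_add(ONE_INACTIVE, Ordering::SeqCst);

    // After some fruitless rounds it gets sleepy and records the JEC.
    let observed = get_sleepy(&counters);

    // Meanwhile another thread posts a job, toggling the JEC to odd.
    announce_new_work(&counters);

    // Final check before blocking: the JEC changed, so the worker goes back
    // to searching instead of bumping the sleeping count and blocking.
    let word = counters.load(Ordering::SeqCst);
    assert_ne!(jec(word), observed);
    assert_eq!((inactive(word), sleeping(word)), (1, 0));
}
```

The point of the even/odd toggle is that a thread posting work only pays for an extra compare-exchange when some worker has actually announced sleepiness, which is what makes this cheaper than counting every posted job.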
+
+# The jobs event counter and deadlock
+
+As described in the section on the JEC, the main concern around going to sleep
+is avoiding a race condition wherein:
+
+* Thread A looks for work, finds none.
+* Thread B posts work but sees no sleeping threads.
+* Thread A goes to sleep.
+
+The JEC protocol largely prevents this, but due to rollover, this prevention is
+not complete. It is possible -- if unlikely -- that enough activity occurs for
+Thread A to observe the same JEC value that it saw when getting sleepy. If the
+new work being published came from *inside* the thread-pool, then this race
+condition isn't too harmful. It means that we have fewer workers processing the
+work than we should, but we won't deadlock. This seems like an acceptable risk
+given that this is unlikely in practice.
+
+However, if the work was posted as an *external* job, that is a problem. In that
+case, it's possible that all of our workers could go to sleep, and the external
+job would never get processed. To prevent that, the sleeping protocol includes
+one final check to see if the injector queue is empty before fully falling
+asleep. Note that this final check occurs **after** the number of sleeping
+threads has been incremented. We are not concerned therefore with races against
+injections that occur after that increment, only before.
+
+Unfortunately, there is one rather subtle point concerning this final check:
+we wish to avoid the possibility that:
+
+* work is pushed into the injection queue by an outside thread X,
+* the sleepy thread S reads the JEC, but it has rolled over and so appears
+  equal to the value S saw when getting sleepy,
+* the sleepy thread S reads the injection queue but does not see the work posted by X.
+
+This is possible because the C++ memory model typically offers guarantees of the
+form "if you see the access A, then you must see those other accesses" -- but it
+doesn't guarantee that you will see the access A (i.e., if you think of
+processors with independent caches, you may be operating on very out of date
+cache state).
+
+## Using seq-cst fences to prevent deadlock
+
+To overcome this problem, we have inserted two sequentially consistent fence
+operations into the protocols above:
+
+* One fence occurs after work is posted into the injection queue, but before the
+  counters are read (including the number of sleeping threads).
+  * Note that no fence is needed for work posted to internal queues, since it is ok
+    to overlook work in that case.
+* One fence occurs after the number of sleeping threads is incremented, but
+  before the injection queue is read.
+
+### Proof sketch
+
+What follows is a "proof sketch" that the protocol is deadlock free. We model
+two relevant bits of memory, the job injector queue J and the atomic counters C.
+
+Consider the actions of the injecting thread:
+
+* PushJob: Job is injected, which can be modeled as an atomic write to J with release semantics.
+* PushFence: A sequentially consistent fence is executed.
+* ReadSleepers: The counters C are read (they may also be incremented, but we just consider the read that comes first).
+
+Meanwhile, the sleepy thread does the following:
+
+* IncSleepers: The number of sleeping threads is incremented, which is an atomic exchange on C.
+* SleepFence: A sequentially consistent fence is executed.
+* ReadJob: We look to see if the queue is empty, which is a read of J with acquire semantics.
+
+Either PushFence or SleepFence must come first:
+
+* If PushFence comes first, then PushJob must be visible to ReadJob.
+* If SleepFence comes first, then IncSleepers is visible to ReadSleepers. + +# Deadlock detection + +This module tracks a number of variables in order to detect deadlocks due to user code blocking. +These variables are stored in the `SleepData` struct which itself is kept behind a mutex. +It contains the following fields: +- `worker_count` - The number of threads in the thread pool. +- `active_threads` - The number of threads in the thread pool which are running + and aren't blocked in user code or sleeping. +- `blocked_threads` - The number of threads which are blocked in user code. + This doesn't include threads blocked by Rayon. + +User code can indicate blocking by calling `mark_blocked` before blocking and +calling `mark_unblocked` before unblocking a thread. +This will adjust `active_threads` and `blocked_threads` accordingly. + +When we tickle the thread pool in `Sleep::tickle_cold`, we set `active_threads` to +`worker_count` - `blocked_threads` since we wake up all Rayon threads, but not thread blocked +by user code. + +A deadlock is detected by checking if `active_threads` is 0 and `blocked_threads` is above 0. +If we ignored `blocked_threads` we would have a deadlock +immediately when creating the thread pool. +We would also deadlock once the thread pool ran out of work. +It is not possible for Rayon itself to deadlock. +Deadlocks can only be caused by user code blocking, so this condition doesn't miss any deadlocks. + +We check for the deadlock condition when +threads fall asleep in `mark_unblocked` and in `Sleep::sleep`. +If there's a deadlock detected we call the user provided deadlock handler while we hold the +lock to `SleepData`. This means the deadlock handler cannot call `mark_blocked` and +`mark_unblocked`. The user is expected to handle the deadlock in some non-Rayon thread. +Once the deadlock handler returns, the thread which called the deadlock handler will go to sleep. diff --git a/compiler/rustc_thread_pool/src/sleep/counters.rs b/compiler/rustc_thread_pool/src/sleep/counters.rs new file mode 100644 index 00000000000..f2682028b96 --- /dev/null +++ b/compiler/rustc_thread_pool/src/sleep/counters.rs @@ -0,0 +1,273 @@ +use std::sync::atomic::{AtomicUsize, Ordering}; + +pub(super) struct AtomicCounters { + /// Packs together a number of counters. The counters are ordered as + /// follows, from least to most significant bits (here, we assuming + /// that [`THREADS_BITS`] is equal to 10): + /// + /// * Bits 0..10: Stores the number of **sleeping threads** + /// * Bits 10..20: Stores the number of **inactive threads** + /// * Bits 20..: Stores the **job event counter** (JEC) + /// + /// This uses 10 bits ([`THREADS_BITS`]) to encode the number of threads. Note + /// that the total number of bits (and hence the number of bits used for the + /// JEC) will depend on whether we are using a 32- or 64-bit architecture. + value: AtomicUsize, +} + +#[derive(Copy, Clone)] +pub(super) struct Counters { + word: usize, +} + +/// A value read from the **Jobs Event Counter**. +/// See the [`README.md`](README.md) for more +/// coverage of how the jobs event counter works. +#[derive(Copy, Clone, Debug, PartialEq, PartialOrd)] +pub(super) struct JobsEventCounter(usize); + +impl JobsEventCounter { + pub(super) const DUMMY: JobsEventCounter = JobsEventCounter(usize::MAX); + + #[inline] + pub(super) fn as_usize(self) -> usize { + self.0 + } + + /// The JEC "is sleepy" if the last thread to increment it was in the + /// process of becoming sleepy. This is indicated by its value being *even*. 
+ /// When new jobs are posted, they check if the JEC is sleepy, and if so + /// they incremented it. + #[inline] + pub(super) fn is_sleepy(self) -> bool { + (self.as_usize() & 1) == 0 + } + + /// The JEC "is active" if the last thread to increment it was posting new + /// work. This is indicated by its value being *odd*. When threads get + /// sleepy, they will check if the JEC is active, and increment it. + #[inline] + pub(super) fn is_active(self) -> bool { + !self.is_sleepy() + } +} + +/// Number of bits used for the thread counters. +#[cfg(target_pointer_width = "64")] +const THREADS_BITS: usize = 16; + +#[cfg(target_pointer_width = "32")] +const THREADS_BITS: usize = 8; + +/// Bits to shift to select the sleeping threads +/// (used with `select_bits`). +#[allow(clippy::erasing_op)] +const SLEEPING_SHIFT: usize = 0 * THREADS_BITS; + +/// Bits to shift to select the inactive threads +/// (used with `select_bits`). +#[allow(clippy::identity_op)] +const INACTIVE_SHIFT: usize = 1 * THREADS_BITS; + +/// Bits to shift to select the JEC +/// (use JOBS_BITS). +const JEC_SHIFT: usize = 2 * THREADS_BITS; + +/// Max value for the thread counters. +pub(crate) const THREADS_MAX: usize = (1 << THREADS_BITS) - 1; + +/// Constant that can be added to add one sleeping thread. +const ONE_SLEEPING: usize = 1; + +/// Constant that can be added to add one inactive thread. +/// An inactive thread is either idle, sleepy, or sleeping. +const ONE_INACTIVE: usize = 1 << INACTIVE_SHIFT; + +/// Constant that can be added to add one to the JEC. +const ONE_JEC: usize = 1 << JEC_SHIFT; + +impl AtomicCounters { + #[inline] + pub(super) fn new() -> AtomicCounters { + AtomicCounters { value: AtomicUsize::new(0) } + } + + /// Load and return the current value of the various counters. + /// This value can then be given to other method which will + /// attempt to update the counters via compare-and-swap. + #[inline] + pub(super) fn load(&self, ordering: Ordering) -> Counters { + Counters::new(self.value.load(ordering)) + } + + #[inline] + fn try_exchange(&self, old_value: Counters, new_value: Counters, ordering: Ordering) -> bool { + self.value + .compare_exchange(old_value.word, new_value.word, ordering, Ordering::Relaxed) + .is_ok() + } + + /// Adds an inactive thread. This cannot fail. + /// + /// This should be invoked when a thread enters its idle loop looking + /// for work. It is decremented when work is found. Note that it is + /// not decremented if the thread transitions from idle to sleepy or sleeping; + /// so the number of inactive threads is always greater-than-or-equal + /// to the number of sleeping threads. + #[inline] + pub(super) fn add_inactive_thread(&self) { + self.value.fetch_add(ONE_INACTIVE, Ordering::SeqCst); + } + + /// Increments the jobs event counter if `increment_when`, when applied to + /// the current value, is true. Used to toggle the JEC from even (sleepy) to + /// odd (active) or vice versa. Returns the final value of the counters, for + /// which `increment_when` is guaranteed to return false. + pub(super) fn increment_jobs_event_counter_if( + &self, + increment_when: impl Fn(JobsEventCounter) -> bool, + ) -> Counters { + loop { + let old_value = self.load(Ordering::SeqCst); + if increment_when(old_value.jobs_counter()) { + let new_value = old_value.increment_jobs_counter(); + if self.try_exchange(old_value, new_value, Ordering::SeqCst) { + return new_value; + } + } else { + return old_value; + } + } + } + + /// Subtracts an inactive thread. This cannot fail. 
It is invoked + /// when a thread finds work and hence becomes active. It returns the + /// number of sleeping threads to wake up (if any). + /// + /// See `add_inactive_thread`. + #[inline] + pub(super) fn sub_inactive_thread(&self) -> usize { + let old_value = Counters::new(self.value.fetch_sub(ONE_INACTIVE, Ordering::SeqCst)); + debug_assert!( + old_value.inactive_threads() > 0, + "sub_inactive_thread: old_value {:?} has no inactive threads", + old_value, + ); + debug_assert!( + old_value.sleeping_threads() <= old_value.inactive_threads(), + "sub_inactive_thread: old_value {:?} had {} sleeping threads and {} inactive threads", + old_value, + old_value.sleeping_threads(), + old_value.inactive_threads(), + ); + + // Current heuristic: whenever an inactive thread goes away, if + // there are any sleeping threads, wake 'em up. + let sleeping_threads = old_value.sleeping_threads(); + Ord::min(sleeping_threads, 2) + } + + /// Subtracts a sleeping thread. This cannot fail, but it is only + /// safe to do if you you know the number of sleeping threads is + /// non-zero (i.e., because you have just awoken a sleeping + /// thread). + #[inline] + pub(super) fn sub_sleeping_thread(&self) { + let old_value = Counters::new(self.value.fetch_sub(ONE_SLEEPING, Ordering::SeqCst)); + debug_assert!( + old_value.sleeping_threads() > 0, + "sub_sleeping_thread: old_value {:?} had no sleeping threads", + old_value, + ); + debug_assert!( + old_value.sleeping_threads() <= old_value.inactive_threads(), + "sub_sleeping_thread: old_value {:?} had {} sleeping threads and {} inactive threads", + old_value, + old_value.sleeping_threads(), + old_value.inactive_threads(), + ); + } + + #[inline] + pub(super) fn try_add_sleeping_thread(&self, old_value: Counters) -> bool { + debug_assert!( + old_value.inactive_threads() > 0, + "try_add_sleeping_thread: old_value {:?} has no inactive threads", + old_value, + ); + debug_assert!( + old_value.sleeping_threads() < THREADS_MAX, + "try_add_sleeping_thread: old_value {:?} has too many sleeping threads", + old_value, + ); + + let mut new_value = old_value; + new_value.word += ONE_SLEEPING; + + self.try_exchange(old_value, new_value, Ordering::SeqCst) + } +} + +#[inline] +fn select_thread(word: usize, shift: usize) -> usize { + (word >> shift) & THREADS_MAX +} + +#[inline] +fn select_jec(word: usize) -> usize { + word >> JEC_SHIFT +} + +impl Counters { + #[inline] + fn new(word: usize) -> Counters { + Counters { word } + } + + #[inline] + fn increment_jobs_counter(self) -> Counters { + // We can freely add to JEC because it occupies the most significant bits. + // Thus it doesn't overflow into the other counters, just wraps itself. + Counters { word: self.word.wrapping_add(ONE_JEC) } + } + + #[inline] + pub(super) fn jobs_counter(self) -> JobsEventCounter { + JobsEventCounter(select_jec(self.word)) + } + + /// The number of threads that are not actively + /// executing work. They may be idle, sleepy, or asleep. 
+ #[inline] + pub(super) fn inactive_threads(self) -> usize { + select_thread(self.word, INACTIVE_SHIFT) + } + + #[inline] + pub(super) fn awake_but_idle_threads(self) -> usize { + debug_assert!( + self.sleeping_threads() <= self.inactive_threads(), + "sleeping threads: {} > raw idle threads {}", + self.sleeping_threads(), + self.inactive_threads() + ); + self.inactive_threads() - self.sleeping_threads() + } + + #[inline] + pub(super) fn sleeping_threads(self) -> usize { + select_thread(self.word, SLEEPING_SHIFT) + } +} + +impl std::fmt::Debug for Counters { + fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let word = format!("{:016x}", self.word); + fmt.debug_struct("Counters") + .field("word", &word) + .field("jobs", &self.jobs_counter().0) + .field("inactive", &self.inactive_threads()) + .field("sleeping", &self.sleeping_threads()) + .finish() + } +} diff --git a/compiler/rustc_thread_pool/src/sleep/mod.rs b/compiler/rustc_thread_pool/src/sleep/mod.rs new file mode 100644 index 00000000000..a9cdf68cc7e --- /dev/null +++ b/compiler/rustc_thread_pool/src/sleep/mod.rs @@ -0,0 +1,386 @@ +//! Code that decides when workers should go to sleep. See README.md +//! for an overview. + +use std::sync::atomic::Ordering; +use std::sync::{Condvar, Mutex}; +use std::thread; + +use crossbeam_utils::CachePadded; + +use crate::DeadlockHandler; +use crate::latch::CoreLatch; +use crate::registry::WorkerThread; + +mod counters; +pub(crate) use self::counters::THREADS_MAX; +use self::counters::{AtomicCounters, JobsEventCounter}; + +struct SleepData { + /// The number of threads in the thread pool. + worker_count: usize, + + /// The number of threads in the thread pool which are running and + /// aren't blocked in user code or sleeping. + active_threads: usize, + + /// The number of threads which are blocked in user code. + /// This doesn't include threads blocked by this module. + blocked_threads: usize, +} + +impl SleepData { + /// Checks if the conditions for a deadlock holds and if so calls the deadlock handler + #[inline] + pub(super) fn deadlock_check(&self, deadlock_handler: &Option<Box<DeadlockHandler>>) { + if self.active_threads == 0 && self.blocked_threads > 0 { + (deadlock_handler.as_ref().unwrap())(); + } + } +} + +/// The `Sleep` struct is embedded into each registry. It governs the waking and sleeping +/// of workers. It has callbacks that are invoked periodically at significant events, +/// such as when workers are looping and looking for work, when latches are set, or when +/// jobs are published, and it either blocks threads or wakes them in response to these +/// events. See the [`README.md`] in this module for more details. +/// +/// [`README.md`] README.md +pub(super) struct Sleep { + /// One "sleep state" per worker. Used to track if a worker is sleeping and to have + /// them block. + worker_sleep_states: Vec<CachePadded<WorkerSleepState>>, + + counters: AtomicCounters, + + data: Mutex<SleepData>, +} + +/// An instance of this struct is created when a thread becomes idle. +/// It is consumed when the thread finds work, and passed by `&mut` +/// reference for operations that preserve the idle state. (In other +/// words, producing one of these structs is evidence the thread is +/// idle.) It tracks state such as how long the thread has been idle. +pub(super) struct IdleState { + /// What is worker index of the idle thread? + worker_index: usize, + + /// How many rounds have we been circling without sleeping? 
+ rounds: u32, + + /// Once we become sleepy, what was the sleepy counter value? + /// Set to `INVALID_SLEEPY_COUNTER` otherwise. + jobs_counter: JobsEventCounter, +} + +/// The "sleep state" for an individual worker. +#[derive(Default)] +struct WorkerSleepState { + /// Set to true when the worker goes to sleep; set to false when + /// the worker is notified or when it wakes. + is_blocked: Mutex<bool>, + + condvar: Condvar, +} + +const ROUNDS_UNTIL_SLEEPY: u32 = 32; +const ROUNDS_UNTIL_SLEEPING: u32 = ROUNDS_UNTIL_SLEEPY + 1; + +impl Sleep { + pub(super) fn new(n_threads: usize) -> Sleep { + assert!(n_threads <= THREADS_MAX); + Sleep { + worker_sleep_states: (0..n_threads).map(|_| Default::default()).collect(), + counters: AtomicCounters::new(), + data: Mutex::new(SleepData { + worker_count: n_threads, + active_threads: n_threads, + blocked_threads: 0, + }), + } + } + + /// Mark a Rayon worker thread as blocked. This triggers the deadlock handler + /// if no other worker thread is active + #[inline] + pub(super) fn mark_blocked(&self, deadlock_handler: &Option<Box<DeadlockHandler>>) { + let mut data = self.data.lock().unwrap(); + debug_assert!(data.active_threads > 0); + debug_assert!(data.blocked_threads < data.worker_count); + debug_assert!(data.active_threads > 0); + data.active_threads -= 1; + data.blocked_threads += 1; + + data.deadlock_check(deadlock_handler); + } + + /// Mark a previously blocked Rayon worker thread as unblocked + #[inline] + pub(super) fn mark_unblocked(&self) { + let mut data = self.data.lock().unwrap(); + debug_assert!(data.active_threads < data.worker_count); + debug_assert!(data.blocked_threads > 0); + data.active_threads += 1; + data.blocked_threads -= 1; + } + + #[inline] + pub(super) fn start_looking(&self, worker_index: usize) -> IdleState { + self.counters.add_inactive_thread(); + + IdleState { worker_index, rounds: 0, jobs_counter: JobsEventCounter::DUMMY } + } + + #[inline] + pub(super) fn work_found(&self) { + // If we were the last idle thread and other threads are still sleeping, + // then we should wake up another thread. + let threads_to_wake = self.counters.sub_inactive_thread(); + self.wake_any_threads(threads_to_wake as u32); + } + + #[inline] + pub(super) fn no_work_found( + &self, + idle_state: &mut IdleState, + latch: &CoreLatch, + thread: &WorkerThread, + ) { + if idle_state.rounds < ROUNDS_UNTIL_SLEEPY { + thread::yield_now(); + idle_state.rounds += 1; + } else if idle_state.rounds == ROUNDS_UNTIL_SLEEPY { + idle_state.jobs_counter = self.announce_sleepy(); + idle_state.rounds += 1; + thread::yield_now(); + } else if idle_state.rounds < ROUNDS_UNTIL_SLEEPING { + idle_state.rounds += 1; + thread::yield_now(); + } else { + debug_assert_eq!(idle_state.rounds, ROUNDS_UNTIL_SLEEPING); + self.sleep(idle_state, latch, thread); + } + } + + #[cold] + fn announce_sleepy(&self) -> JobsEventCounter { + self.counters.increment_jobs_event_counter_if(JobsEventCounter::is_active).jobs_counter() + } + + #[cold] + fn sleep(&self, idle_state: &mut IdleState, latch: &CoreLatch, thread: &WorkerThread) { + let worker_index = idle_state.worker_index; + + if !latch.get_sleepy() { + return; + } + + let sleep_state = &self.worker_sleep_states[worker_index]; + let mut is_blocked = sleep_state.is_blocked.lock().unwrap(); + debug_assert!(!*is_blocked); + + // Our latch was signalled. We should wake back up fully as we + // will have some stuff to do. 
+ if !latch.fall_asleep() { + idle_state.wake_fully(); + return; + } + + loop { + let counters = self.counters.load(Ordering::SeqCst); + + // Check if the JEC has changed since we got sleepy. + debug_assert!(idle_state.jobs_counter.is_sleepy()); + if counters.jobs_counter() != idle_state.jobs_counter { + // JEC has changed, so a new job was posted, but for some reason + // we didn't see it. We should return to just before the SLEEPY + // state so we can do another search and (if we fail to find + // work) go back to sleep. + idle_state.wake_partly(); + latch.wake_up(); + return; + } + + // Otherwise, let's move from IDLE to SLEEPING. + if self.counters.try_add_sleeping_thread(counters) { + break; + } + } + + // Successfully registered as asleep. + + // We have one last check for injected jobs to do. This protects against + // deadlock in the very unlikely event that + // + // - an external job is being injected while we are sleepy + // - that job triggers the rollover over the JEC such that we don't see it + // - we are the last active worker thread + std::sync::atomic::fence(Ordering::SeqCst); + if thread.has_injected_job() { + // If we see an externally injected job, then we have to 'wake + // ourselves up'. (Ordinarily, `sub_sleeping_thread` is invoked by + // the one that wakes us.) + self.counters.sub_sleeping_thread(); + } else { + { + // Decrement the number of active threads and check for a deadlock + let mut data = self.data.lock().unwrap(); + data.active_threads -= 1; + data.deadlock_check(&thread.registry.deadlock_handler); + } + + // If we don't see an injected job (the normal case), then flag + // ourselves as asleep and wait till we are notified. + // + // (Note that `is_blocked` is held under a mutex and the mutex was + // acquired *before* we incremented the "sleepy counter". This means + // that whomever is coming to wake us will have to wait until we + // release the mutex in the call to `wait`, so they will see this + // boolean as true.) + thread.registry.release_thread(); + *is_blocked = true; + while *is_blocked { + is_blocked = sleep_state.condvar.wait(is_blocked).unwrap(); + } + + // Drop `is_blocked` now in case `acquire_thread` blocks + drop(is_blocked); + + thread.registry.acquire_thread(); + } + + // Update other state: + idle_state.wake_fully(); + latch.wake_up(); + } + + /// Notify the given thread that it should wake up (if it is + /// sleeping). When this method is invoked, we typically know the + /// thread is asleep, though in rare cases it could have been + /// awoken by (e.g.) new work having been posted. + pub(super) fn notify_worker_latch_is_set(&self, target_worker_index: usize) { + self.wake_specific_thread(target_worker_index); + } + + /// Signals that `num_jobs` new jobs were injected into the thread + /// pool from outside. This function will ensure that there are + /// threads available to process them, waking threads from sleep + /// if necessary. + /// + /// # Parameters + /// + /// - `num_jobs` -- lower bound on number of jobs available for stealing. + /// We'll try to get at least one thread per job. + #[inline] + pub(super) fn new_injected_jobs(&self, num_jobs: u32, queue_was_empty: bool) { + // This fence is needed to guarantee that threads + // as they are about to fall asleep, observe any + // new jobs that may have been injected. + std::sync::atomic::fence(Ordering::SeqCst); + + self.new_jobs(num_jobs, queue_was_empty) + } + + /// Signals that `num_jobs` new jobs were pushed onto a thread's + /// local deque. 
This function will try to ensure that there are + /// threads available to process them, waking threads from sleep + /// if necessary. However, this is not guaranteed: under certain + /// race conditions, the function may fail to wake any new + /// threads; in that case the existing thread should eventually + /// pop the job. + /// + /// # Parameters + /// + /// - `num_jobs` -- lower bound on number of jobs available for stealing. + /// We'll try to get at least one thread per job. + #[inline] + pub(super) fn new_internal_jobs(&self, num_jobs: u32, queue_was_empty: bool) { + self.new_jobs(num_jobs, queue_was_empty) + } + + /// Common helper for `new_injected_jobs` and `new_internal_jobs`. + #[inline] + fn new_jobs(&self, num_jobs: u32, queue_was_empty: bool) { + // Read the counters and -- if sleepy workers have announced themselves + // -- announce that there is now work available. The final value of `counters` + // with which we exit the loop thus corresponds to a state when + let counters = self.counters.increment_jobs_event_counter_if(JobsEventCounter::is_sleepy); + let num_awake_but_idle = counters.awake_but_idle_threads(); + let num_sleepers = counters.sleeping_threads(); + + if num_sleepers == 0 { + // nobody to wake + return; + } + + // Promote from u16 to u32 so we can interoperate with + // num_jobs more easily. + let num_awake_but_idle = num_awake_but_idle as u32; + let num_sleepers = num_sleepers as u32; + + // If the queue is non-empty, then we always wake up a worker + // -- clearly the existing idle jobs aren't enough. Otherwise, + // check to see if we have enough idle workers. + if !queue_was_empty { + let num_to_wake = Ord::min(num_jobs, num_sleepers); + self.wake_any_threads(num_to_wake); + } else if num_awake_but_idle < num_jobs { + let num_to_wake = Ord::min(num_jobs - num_awake_but_idle, num_sleepers); + self.wake_any_threads(num_to_wake); + } + } + + #[cold] + fn wake_any_threads(&self, mut num_to_wake: u32) { + if num_to_wake > 0 { + for i in 0..self.worker_sleep_states.len() { + if self.wake_specific_thread(i) { + num_to_wake -= 1; + if num_to_wake == 0 { + return; + } + } + } + } + } + + fn wake_specific_thread(&self, index: usize) -> bool { + let sleep_state = &self.worker_sleep_states[index]; + + let mut is_blocked = sleep_state.is_blocked.lock().unwrap(); + if *is_blocked { + *is_blocked = false; + + // Increment the number of active threads + self.data.lock().unwrap().active_threads += 1; + + sleep_state.condvar.notify_one(); + + // When the thread went to sleep, it will have incremented + // this value. When we wake it, its our job to decrement + // it. We could have the thread do it, but that would + // introduce a delay between when the thread was + // *notified* and when this counter was decremented. That + // might mislead people with new work into thinking that + // there are sleeping threads that they should try to + // wake, when in fact there is nothing left for them to + // do. 
+ self.counters.sub_sleeping_thread(); + + true + } else { + false + } + } +} + +impl IdleState { + fn wake_fully(&mut self) { + self.rounds = 0; + self.jobs_counter = JobsEventCounter::DUMMY; + } + + fn wake_partly(&mut self) { + self.rounds = ROUNDS_UNTIL_SLEEPY; + self.jobs_counter = JobsEventCounter::DUMMY; + } +} diff --git a/compiler/rustc_thread_pool/src/spawn/mod.rs b/compiler/rustc_thread_pool/src/spawn/mod.rs new file mode 100644 index 00000000000..040a02bfa67 --- /dev/null +++ b/compiler/rustc_thread_pool/src/spawn/mod.rs @@ -0,0 +1,165 @@ +use std::mem; +use std::sync::Arc; + +use crate::job::*; +use crate::registry::Registry; +use crate::tlv::Tlv; +use crate::unwind; + +/// Puts the task into the Rayon threadpool's job queue in the "static" +/// or "global" scope. Just like a standard thread, this task is not +/// tied to the current stack frame, and hence it cannot hold any +/// references other than those with `'static` lifetime. If you want +/// to spawn a task that references stack data, use [the `scope()` +/// function][scope] to create a scope. +/// +/// [scope]: fn.scope.html +/// +/// Since tasks spawned with this function cannot hold references into +/// the enclosing stack frame, you almost certainly want to use a +/// `move` closure as their argument (otherwise, the closure will +/// typically hold references to any variables from the enclosing +/// function that you happen to use). +/// +/// This API assumes that the closure is executed purely for its +/// side-effects (i.e., it might send messages, modify data protected +/// by a mutex, or some such thing). +/// +/// There is no guaranteed order of execution for spawns, given that +/// other threads may steal tasks at any time. However, they are +/// generally prioritized in a LIFO order on the thread from which +/// they were spawned. Other threads always steal from the other end of +/// the deque, like FIFO order. The idea is that "recent" tasks are +/// most likely to be fresh in the local CPU's cache, while other +/// threads can steal older "stale" tasks. For an alternate approach, +/// consider [`spawn_fifo()`] instead. +/// +/// [`spawn_fifo()`]: fn.spawn_fifo.html +/// +/// # Panic handling +/// +/// If this closure should panic, the resulting panic will be +/// propagated to the panic handler registered in the `ThreadPoolBuilder`, +/// if any. See [`ThreadPoolBuilder::panic_handler()`][ph] for more +/// details. +/// +/// [ph]: struct.ThreadPoolBuilder.html#method.panic_handler +/// +/// # Examples +/// +/// This code creates a Rayon task that increments a global counter. +/// +/// ```rust +/// # use rustc_thread_pool as rayon; +/// use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT}; +/// +/// static GLOBAL_COUNTER: AtomicUsize = ATOMIC_USIZE_INIT; +/// +/// rayon::spawn(move || { +/// GLOBAL_COUNTER.fetch_add(1, Ordering::SeqCst); +/// }); +/// ``` +pub fn spawn<F>(func: F) +where + F: FnOnce() + Send + 'static, +{ + // We assert that current registry has not terminated. + unsafe { spawn_in(func, &Registry::current()) } +} + +/// Spawns an asynchronous job in `registry.` +/// +/// Unsafe because `registry` must not yet have terminated. 
+pub(super) unsafe fn spawn_in<F>(func: F, registry: &Arc<Registry>) +where + F: FnOnce() + Send + 'static, +{ + // We assert that this does not hold any references (we know + // this because of the `'static` bound in the interface); + // moreover, we assert that the code below is not supposed to + // be able to panic, and hence the data won't leak but will be + // enqueued into some deque for later execution. + let abort_guard = unwind::AbortIfPanic; // just in case we are wrong, and code CAN panic + let job_ref = unsafe { spawn_job(func, registry) }; + registry.inject_or_push(job_ref); + mem::forget(abort_guard); +} + +unsafe fn spawn_job<F>(func: F, registry: &Arc<Registry>) -> JobRef +where + F: FnOnce() + Send + 'static, +{ + // Ensure that registry cannot terminate until this job has + // executed. This ref is decremented at the (*) below. + registry.increment_terminate_count(); + + HeapJob::new(Tlv::null(), { + let registry = Arc::clone(registry); + move || { + registry.catch_unwind(func); + registry.terminate(); // (*) permit registry to terminate now + } + }) + .into_static_job_ref() +} + +/// Fires off a task into the Rayon threadpool in the "static" or +/// "global" scope. Just like a standard thread, this task is not +/// tied to the current stack frame, and hence it cannot hold any +/// references other than those with `'static` lifetime. If you want +/// to spawn a task that references stack data, use [the `scope_fifo()` +/// function](fn.scope_fifo.html) to create a scope. +/// +/// The behavior is essentially the same as [the `spawn` +/// function](fn.spawn.html), except that calls from the same thread +/// will be prioritized in FIFO order. This is similar to the now- +/// deprecated [`breadth_first`] option, except the effect is isolated +/// to relative `spawn_fifo` calls, not all threadpool tasks. +/// +/// For more details on this design, see Rayon [RFC #1]. +/// +/// [`breadth_first`]: struct.ThreadPoolBuilder.html#method.breadth_first +/// [RFC #1]: https://github.com/rayon-rs/rfcs/blob/master/accepted/rfc0001-scope-scheduling.md +/// +/// # Panic handling +/// +/// If this closure should panic, the resulting panic will be +/// propagated to the panic handler registered in the `ThreadPoolBuilder`, +/// if any. See [`ThreadPoolBuilder::panic_handler()`][ph] for more +/// details. +/// +/// [ph]: struct.ThreadPoolBuilder.html#method.panic_handler +pub fn spawn_fifo<F>(func: F) +where + F: FnOnce() + Send + 'static, +{ + // We assert that current registry has not terminated. + unsafe { spawn_fifo_in(func, &Registry::current()) } +} + +/// Spawns an asynchronous FIFO job in `registry.` +/// +/// Unsafe because `registry` must not yet have terminated. +pub(super) unsafe fn spawn_fifo_in<F>(func: F, registry: &Arc<Registry>) +where + F: FnOnce() + Send + 'static, +{ + // We assert that this does not hold any references (we know + // this because of the `'static` bound in the interface); + // moreover, we assert that the code below is not supposed to + // be able to panic, and hence the data won't leak but will be + // enqueued into some deque for later execution. + let abort_guard = unwind::AbortIfPanic; // just in case we are wrong, and code CAN panic + let job_ref = unsafe { spawn_job(func, registry) }; + + // If we're in the pool, use our thread's private fifo for this thread to execute + // in a locally-FIFO order. Otherwise, just use the pool's global injector. 
+ match registry.current_thread() { + Some(worker) => unsafe { worker.push_fifo(job_ref) }, + None => registry.inject(job_ref), + } + mem::forget(abort_guard); +} + +#[cfg(test)] +mod tests; diff --git a/compiler/rustc_thread_pool/src/spawn/tests.rs b/compiler/rustc_thread_pool/src/spawn/tests.rs new file mode 100644 index 00000000000..8a70d2faf9c --- /dev/null +++ b/compiler/rustc_thread_pool/src/spawn/tests.rs @@ -0,0 +1,246 @@ +use std::any::Any; +use std::sync::Mutex; +use std::sync::mpsc::channel; + +use super::{spawn, spawn_fifo}; +use crate::{ThreadPoolBuilder, scope}; + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn spawn_then_join_in_worker() { + let (tx, rx) = channel(); + scope(move |_| { + spawn(move || tx.send(22).unwrap()); + }); + assert_eq!(22, rx.recv().unwrap()); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn spawn_then_join_outside_worker() { + let (tx, rx) = channel(); + spawn(move || tx.send(22).unwrap()); + assert_eq!(22, rx.recv().unwrap()); +} + +#[test] +#[cfg_attr(not(panic = "unwind"), ignore)] +fn panic_fwd() { + let (tx, rx) = channel(); + + let tx = Mutex::new(tx); + let panic_handler = move |err: Box<dyn Any + Send>| { + let tx = tx.lock().unwrap(); + if let Some(&msg) = err.downcast_ref::<&str>() { + if msg == "Hello, world!" { + tx.send(1).unwrap(); + } else { + tx.send(2).unwrap(); + } + } else { + tx.send(3).unwrap(); + } + }; + + let builder = ThreadPoolBuilder::new().panic_handler(panic_handler); + + builder.build().unwrap().spawn(move || panic!("Hello, world!")); + + assert_eq!(1, rx.recv().unwrap()); +} + +/// Test what happens when the thread-pool is dropped but there are +/// still active asynchronous tasks. We expect the thread-pool to stay +/// alive and executing until those threads are complete. +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn termination_while_things_are_executing() { + let (tx0, rx0) = channel(); + let (tx1, rx1) = channel(); + + // Create a thread-pool and spawn some code in it, but then drop + // our reference to it. + { + let thread_pool = ThreadPoolBuilder::new().build().unwrap(); + thread_pool.spawn(move || { + let data = rx0.recv().unwrap(); + + // At this point, we know the "main" reference to the + // `ThreadPool` has been dropped, but there are still + // active threads. Launch one more. + spawn(move || { + tx1.send(data).unwrap(); + }); + }); + } + + tx0.send(22).unwrap(); + let v = rx1.recv().unwrap(); + assert_eq!(v, 22); +} + +#[test] +#[cfg_attr(not(panic = "unwind"), ignore)] +fn custom_panic_handler_and_spawn() { + let (tx, rx) = channel(); + + // Create a parallel closure that will send panics on the + // channel; since the closure is potentially executed in parallel + // with itself, we have to wrap `tx` in a mutex. + let tx = Mutex::new(tx); + let panic_handler = move |e: Box<dyn Any + Send>| { + tx.lock().unwrap().send(e).unwrap(); + }; + + // Execute an async that will panic. + let builder = ThreadPoolBuilder::new().panic_handler(panic_handler); + builder.build().unwrap().spawn(move || { + panic!("Hello, world!"); + }); + + // Check that we got back the panic we expected. 
+ let error = rx.recv().unwrap(); + if let Some(&msg) = error.downcast_ref::<&str>() { + assert_eq!(msg, "Hello, world!"); + } else { + panic!("did not receive a string from panic handler"); + } +} + +#[test] +#[cfg_attr(not(panic = "unwind"), ignore)] +fn custom_panic_handler_and_nested_spawn() { + let (tx, rx) = channel(); + + // Create a parallel closure that will send panics on the + // channel; since the closure is potentially executed in parallel + // with itself, we have to wrap `tx` in a mutex. + let tx = Mutex::new(tx); + let panic_handler = move |e| { + tx.lock().unwrap().send(e).unwrap(); + }; + + // Execute an async that will (eventually) panic. + const PANICS: usize = 3; + let builder = ThreadPoolBuilder::new().panic_handler(panic_handler); + builder.build().unwrap().spawn(move || { + // launch 3 nested spawn-asyncs; these should be in the same + // thread-pool and hence inherit the same panic handler + for _ in 0..PANICS { + spawn(move || { + panic!("Hello, world!"); + }); + } + }); + + // Check that we get back the panics we expected. + for _ in 0..PANICS { + let error = rx.recv().unwrap(); + if let Some(&msg) = error.downcast_ref::<&str>() { + assert_eq!(msg, "Hello, world!"); + } else { + panic!("did not receive a string from panic handler"); + } + } +} + +macro_rules! test_order { + ($outer_spawn:ident, $inner_spawn:ident) => {{ + let builder = ThreadPoolBuilder::new().num_threads(1); + let pool = builder.build().unwrap(); + let (tx, rx) = channel(); + pool.install(move || { + for i in 0..10 { + let tx = tx.clone(); + $outer_spawn(move || { + for j in 0..10 { + let tx = tx.clone(); + $inner_spawn(move || { + tx.send(i * 10 + j).unwrap(); + }); + } + }); + } + }); + rx.iter().collect::<Vec<i32>>() + }}; +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn lifo_order() { + // In the absence of stealing, `spawn()` jobs on a thread will run in LIFO order. + let vec = test_order!(spawn, spawn); + let expected: Vec<i32> = (0..100).rev().collect(); // LIFO -> reversed + assert_eq!(vec, expected); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn fifo_order() { + // In the absence of stealing, `spawn_fifo()` jobs on a thread will run in FIFO order. + let vec = test_order!(spawn_fifo, spawn_fifo); + let expected: Vec<i32> = (0..100).collect(); // FIFO -> natural order + assert_eq!(vec, expected); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn lifo_fifo_order() { + // LIFO on the outside, FIFO on the inside + let vec = test_order!(spawn, spawn_fifo); + let expected: Vec<i32> = (0..10).rev().flat_map(|i| (0..10).map(move |j| i * 10 + j)).collect(); + assert_eq!(vec, expected); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn fifo_lifo_order() { + // FIFO on the outside, LIFO on the inside + let vec = test_order!(spawn_fifo, spawn); + let expected: Vec<i32> = (0..10).flat_map(|i| (0..10).rev().map(move |j| i * 10 + j)).collect(); + assert_eq!(vec, expected); +} + +macro_rules! spawn_send { + ($spawn:ident, $tx:ident, $i:expr) => {{ + let tx = $tx.clone(); + $spawn(move || tx.send($i).unwrap()); + }}; +} + +/// Test mixed spawns pushing a series of numbers, interleaved such +/// such that negative values are using the second kind of spawn. +macro_rules! 
test_mixed_order { + ($pos_spawn:ident, $neg_spawn:ident) => {{ + let builder = ThreadPoolBuilder::new().num_threads(1); + let pool = builder.build().unwrap(); + let (tx, rx) = channel(); + pool.install(move || { + spawn_send!($pos_spawn, tx, 0); + spawn_send!($neg_spawn, tx, -1); + spawn_send!($pos_spawn, tx, 1); + spawn_send!($neg_spawn, tx, -2); + spawn_send!($pos_spawn, tx, 2); + spawn_send!($neg_spawn, tx, -3); + spawn_send!($pos_spawn, tx, 3); + }); + rx.iter().collect::<Vec<i32>>() + }}; +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn mixed_lifo_fifo_order() { + let vec = test_mixed_order!(spawn, spawn_fifo); + let expected = vec![3, -1, 2, -2, 1, -3, 0]; + assert_eq!(vec, expected); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn mixed_fifo_lifo_order() { + let vec = test_mixed_order!(spawn_fifo, spawn); + let expected = vec![0, -3, 1, -2, 2, -1, 3]; + assert_eq!(vec, expected); +} diff --git a/compiler/rustc_thread_pool/src/tests.rs b/compiler/rustc_thread_pool/src/tests.rs new file mode 100644 index 00000000000..3082f11a167 --- /dev/null +++ b/compiler/rustc_thread_pool/src/tests.rs @@ -0,0 +1,197 @@ +#![cfg(test)] + +use std::sync::atomic::{AtomicUsize, Ordering}; +use std::sync::{Arc, Barrier}; + +use crate::{ThreadPoolBuildError, ThreadPoolBuilder}; + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn worker_thread_index() { + let pool = ThreadPoolBuilder::new().num_threads(22).build().unwrap(); + assert_eq!(pool.current_num_threads(), 22); + assert_eq!(pool.current_thread_index(), None); + let index = pool.install(|| pool.current_thread_index().unwrap()); + assert!(index < 22); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn start_callback_called() { + let n_threads = 16; + let n_called = Arc::new(AtomicUsize::new(0)); + // Wait for all the threads in the pool plus the one running tests. + let barrier = Arc::new(Barrier::new(n_threads + 1)); + + let b = Arc::clone(&barrier); + let nc = Arc::clone(&n_called); + let start_handler = move |_| { + nc.fetch_add(1, Ordering::SeqCst); + b.wait(); + }; + + let conf = ThreadPoolBuilder::new().num_threads(n_threads).start_handler(start_handler); + let _ = conf.build().unwrap(); + + // Wait for all the threads to have been scheduled to run. + barrier.wait(); + + // The handler must have been called on every started thread. + assert_eq!(n_called.load(Ordering::SeqCst), n_threads); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn exit_callback_called() { + let n_threads = 16; + let n_called = Arc::new(AtomicUsize::new(0)); + // Wait for all the threads in the pool plus the one running tests. + let barrier = Arc::new(Barrier::new(n_threads + 1)); + + let b = Arc::clone(&barrier); + let nc = Arc::clone(&n_called); + let exit_handler = move |_| { + nc.fetch_add(1, Ordering::SeqCst); + b.wait(); + }; + + let conf = ThreadPoolBuilder::new().num_threads(n_threads).exit_handler(exit_handler); + { + let _ = conf.build().unwrap(); + // Drop the pool so it stops the running threads. + } + + // Wait for all the threads to have been scheduled to run. + barrier.wait(); + + // The handler must have been called on every exiting thread. 
+ assert_eq!(n_called.load(Ordering::SeqCst), n_threads); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn handler_panics_handled_correctly() { + let n_threads = 16; + let n_called = Arc::new(AtomicUsize::new(0)); + // Wait for all the threads in the pool plus the one running tests. + let start_barrier = Arc::new(Barrier::new(n_threads + 1)); + let exit_barrier = Arc::new(Barrier::new(n_threads + 1)); + + let start_handler = move |_| { + panic!("ensure panic handler is called when starting"); + }; + let exit_handler = move |_| { + panic!("ensure panic handler is called when exiting"); + }; + + let sb = Arc::clone(&start_barrier); + let eb = Arc::clone(&exit_barrier); + let nc = Arc::clone(&n_called); + let panic_handler = move |_| { + let val = nc.fetch_add(1, Ordering::SeqCst); + if val < n_threads { + sb.wait(); + } else { + eb.wait(); + } + }; + + let conf = ThreadPoolBuilder::new() + .num_threads(n_threads) + .start_handler(start_handler) + .exit_handler(exit_handler) + .panic_handler(panic_handler); + { + let _ = conf.build().unwrap(); + + // Wait for all the threads to start, panic in the start handler, + // and been taken care of by the panic handler. + start_barrier.wait(); + + // Drop the pool so it stops the running threads. + } + + // Wait for all the threads to exit, panic in the exit handler, + // and been taken care of by the panic handler. + exit_barrier.wait(); + + // The panic handler must have been called twice on every thread. + assert_eq!(n_called.load(Ordering::SeqCst), 2 * n_threads); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn check_config_build() { + let pool = ThreadPoolBuilder::new().num_threads(22).build().unwrap(); + assert_eq!(pool.current_num_threads(), 22); +} + +/// Helper used by check_error_send_sync to ensure ThreadPoolBuildError is Send + Sync +fn _send_sync<T: Send + Sync>() {} + +#[test] +fn check_error_send_sync() { + _send_sync::<ThreadPoolBuildError>(); +} + +#[allow(deprecated)] +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn configuration() { + let start_handler = move |_| {}; + let exit_handler = move |_| {}; + let panic_handler = move |_| {}; + let thread_name = move |i| format!("thread_name_{}", i); + + // Ensure we can call all public methods on Configuration + crate::Configuration::new() + .thread_name(thread_name) + .num_threads(5) + .panic_handler(panic_handler) + .stack_size(4e6 as usize) + .breadth_first() + .start_handler(start_handler) + .exit_handler(exit_handler) + .build() + .unwrap(); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn default_pool() { + ThreadPoolBuilder::default().build().unwrap(); +} + +/// Test that custom spawned threads get their `WorkerThread` cleared once +/// the pool is done with them, allowing them to be used with rayon again +/// later. e.g. WebAssembly want to have their own pool of available threads. +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn cleared_current_thread() -> Result<(), ThreadPoolBuildError> { + let n_threads = 5; + let mut handles = vec![]; + let pool = ThreadPoolBuilder::new() + .num_threads(n_threads) + .spawn_handler(|thread| { + let handle = std::thread::spawn(move || { + thread.run(); + + // Afterward, the current thread shouldn't be set anymore. 
+ assert_eq!(crate::current_thread_index(), None); + }); + handles.push(handle); + Ok(()) + }) + .build()?; + assert_eq!(handles.len(), n_threads); + + pool.install(|| assert!(crate::current_thread_index().is_some())); + drop(pool); + + // Wait for all threads to make their assertions and exit + for handle in handles { + handle.join().unwrap(); + } + + Ok(()) +} diff --git a/compiler/rustc_thread_pool/src/thread_pool/mod.rs b/compiler/rustc_thread_pool/src/thread_pool/mod.rs new file mode 100644 index 00000000000..3294e2a77cb --- /dev/null +++ b/compiler/rustc_thread_pool/src/thread_pool/mod.rs @@ -0,0 +1,513 @@ +//! Contains support for user-managed thread pools, represented by the +//! the [`ThreadPool`] type (see that struct for details). +//! +//! [`ThreadPool`]: struct.ThreadPool.html + +use std::error::Error; +use std::fmt; +use std::sync::Arc; + +use crate::broadcast::{self, BroadcastContext}; +use crate::registry::{Registry, ThreadSpawn, WorkerThread}; +use crate::scope::{do_in_place_scope, do_in_place_scope_fifo}; +use crate::{ + Scope, ScopeFifo, ThreadPoolBuildError, ThreadPoolBuilder, join, scope, scope_fifo, spawn, +}; + +mod tests; + +/// Represents a user created [thread-pool]. +/// +/// Use a [`ThreadPoolBuilder`] to specify the number and/or names of threads +/// in the pool. After calling [`ThreadPoolBuilder::build()`], you can then +/// execute functions explicitly within this [`ThreadPool`] using +/// [`ThreadPool::install()`]. By contrast, top level rayon functions +/// (like `join()`) will execute implicitly within the current thread-pool. +/// +/// +/// ## Creating a ThreadPool +/// +/// ```rust +/// # use rustc_thread_pool as rayon; +/// let pool = rayon::ThreadPoolBuilder::new().num_threads(8).build().unwrap(); +/// ``` +/// +/// [`install()`][`ThreadPool::install()`] executes a closure in one of the `ThreadPool`'s +/// threads. In addition, any other rayon operations called inside of `install()` will also +/// execute in the context of the `ThreadPool`. +/// +/// When the `ThreadPool` is dropped, that's a signal for the threads it manages to terminate, +/// they will complete executing any remaining work that you have spawned, and automatically +/// terminate. +/// +/// +/// [thread-pool]: https://en.wikipedia.org/wiki/Thread_pool +/// [`ThreadPool`]: struct.ThreadPool.html +/// [`ThreadPool::new()`]: struct.ThreadPool.html#method.new +/// [`ThreadPoolBuilder`]: struct.ThreadPoolBuilder.html +/// [`ThreadPoolBuilder::build()`]: struct.ThreadPoolBuilder.html#method.build +/// [`ThreadPool::install()`]: struct.ThreadPool.html#method.install +pub struct ThreadPool { + registry: Arc<Registry>, +} + +impl ThreadPool { + #[deprecated(note = "Use `ThreadPoolBuilder::build`")] + #[allow(deprecated)] + /// Deprecated in favor of `ThreadPoolBuilder::build`. + pub fn new(configuration: crate::Configuration) -> Result<ThreadPool, Box<dyn Error>> { + Self::build(configuration.into_builder()).map_err(Box::from) + } + + pub(super) fn build<S>( + builder: ThreadPoolBuilder<S>, + ) -> Result<ThreadPool, ThreadPoolBuildError> + where + S: ThreadSpawn, + { + let registry = Registry::new(builder)?; + Ok(ThreadPool { registry }) + } + + /// Executes `op` within the threadpool. Any attempts to use + /// `join`, `scope`, or parallel iterators will then operate + /// within that threadpool. + /// + /// # Warning: thread-local data + /// + /// Because `op` is executing within the Rayon thread-pool, + /// thread-local data from the current thread will not be + /// accessible. 
+ /// + /// # Warning: execution order + /// + /// If the current thread is part of a different thread pool, it will try to + /// keep busy while the `op` completes in its target pool, similar to + /// calling [`ThreadPool::yield_now()`] in a loop. Therefore, it may + /// potentially schedule other tasks to run on the current thread in the + /// meantime. For example + /// + /// ```rust + /// # use rustc_thread_pool as rayon; + /// fn main() { + /// rayon::ThreadPoolBuilder::new().num_threads(1).build_global().unwrap(); + /// let pool = rustc_thread_pool::ThreadPoolBuilder::default().build().unwrap(); + /// let do_it = || { + /// print!("one "); + /// pool.install(||{}); + /// print!("two "); + /// }; + /// rayon::join(|| do_it(), || do_it()); + /// } + /// ``` + /// + /// Since we configured just one thread in the global pool, one might + /// expect `do_it()` to run sequentially, producing: + /// + /// ```ascii + /// one two one two + /// ``` + /// + /// However each call to `install()` yields implicitly, allowing rayon to + /// run multiple instances of `do_it()` concurrently on the single, global + /// thread. The following output would be equally valid: + /// + /// ```ascii + /// one one two two + /// ``` + /// + /// # Panics + /// + /// If `op` should panic, that panic will be propagated. + /// + /// ## Using `install()` + /// + /// ```rust + /// # use rustc_thread_pool as rayon; + /// fn main() { + /// let pool = rayon::ThreadPoolBuilder::new().num_threads(8).build().unwrap(); + /// let n = pool.install(|| fib(20)); + /// println!("{}", n); + /// } + /// + /// fn fib(n: usize) -> usize { + /// if n == 0 || n == 1 { + /// return n; + /// } + /// let (a, b) = rayon::join(|| fib(n - 1), || fib(n - 2)); // runs inside of `pool` + /// return a + b; + /// } + /// ``` + pub fn install<OP, R>(&self, op: OP) -> R + where + OP: FnOnce() -> R + Send, + R: Send, + { + self.registry.in_worker(|_, _| op()) + } + + /// Executes `op` within every thread in the threadpool. Any attempts to use + /// `join`, `scope`, or parallel iterators will then operate within that + /// threadpool. + /// + /// Broadcasts are executed on each thread after they have exhausted their + /// local work queue, before they attempt work-stealing from other threads. + /// The goal of that strategy is to run everywhere in a timely manner + /// *without* being too disruptive to current work. There may be alternative + /// broadcast styles added in the future for more or less aggressive + /// injection, if the need arises. + /// + /// # Warning: thread-local data + /// + /// Because `op` is executing within the Rayon thread-pool, + /// thread-local data from the current thread will not be + /// accessible. + /// + /// # Panics + /// + /// If `op` should panic on one or more threads, exactly one panic + /// will be propagated, only after all threads have completed + /// (or panicked) their own `op`. + /// + /// # Examples + /// + /// ``` + /// # use rustc_thread_pool as rayon; + /// use std::sync::atomic::{AtomicUsize, Ordering}; + /// + /// fn main() { + /// let pool = rayon::ThreadPoolBuilder::new().num_threads(5).build().unwrap(); + /// + /// // The argument gives context, including the index of each thread. 
+ /// let v: Vec<usize> = pool.broadcast(|ctx| ctx.index() * ctx.index()); + /// assert_eq!(v, &[0, 1, 4, 9, 16]); + /// + /// // The closure can reference the local stack + /// let count = AtomicUsize::new(0); + /// pool.broadcast(|_| count.fetch_add(1, Ordering::Relaxed)); + /// assert_eq!(count.into_inner(), 5); + /// } + /// ``` + pub fn broadcast<OP, R>(&self, op: OP) -> Vec<R> + where + OP: Fn(BroadcastContext<'_>) -> R + Sync, + R: Send, + { + // We assert that `self.registry` has not terminated. + unsafe { broadcast::broadcast_in(op, &self.registry) } + } + + /// Returns the (current) number of threads in the thread pool. + /// + /// # Future compatibility note + /// + /// Note that unless this thread-pool was created with a + /// [`ThreadPoolBuilder`] that specifies the number of threads, + /// then this number may vary over time in future versions (see [the + /// `num_threads()` method for details][snt]). + /// + /// [snt]: struct.ThreadPoolBuilder.html#method.num_threads + /// [`ThreadPoolBuilder`]: struct.ThreadPoolBuilder.html + #[inline] + pub fn current_num_threads(&self) -> usize { + self.registry.num_threads() + } + + /// If called from a Rayon worker thread in this thread-pool, + /// returns the index of that thread; if not called from a Rayon + /// thread, or called from a Rayon thread that belongs to a + /// different thread-pool, returns `None`. + /// + /// The index for a given thread will not change over the thread's + /// lifetime. However, multiple threads may share the same index if + /// they are in distinct thread-pools. + /// + /// # Future compatibility note + /// + /// Currently, every thread-pool (including the global + /// thread-pool) has a fixed number of threads, but this may + /// change in future Rayon versions (see [the `num_threads()` method + /// for details][snt]). In that case, the index for a + /// thread would not change during its lifetime, but thread + /// indices may wind up being reused if threads are terminated and + /// restarted. + /// + /// [snt]: struct.ThreadPoolBuilder.html#method.num_threads + #[inline] + pub fn current_thread_index(&self) -> Option<usize> { + let curr = self.registry.current_thread()?; + Some(curr.index()) + } + + /// Returns true if the current worker thread currently has "local + /// tasks" pending. This can be useful as part of a heuristic for + /// deciding whether to spawn a new task or execute code on the + /// current thread, particularly in breadth-first + /// schedulers. However, keep in mind that this is an inherently + /// racy check, as other worker threads may be actively "stealing" + /// tasks from our local deque. + /// + /// **Background:** Rayon's uses a [work-stealing] scheduler. The + /// key idea is that each thread has its own [deque] of + /// tasks. Whenever a new task is spawned -- whether through + /// `join()`, `Scope::spawn()`, or some other means -- that new + /// task is pushed onto the thread's *local* deque. Worker threads + /// have a preference for executing their own tasks; if however + /// they run out of tasks, they will go try to "steal" tasks from + /// other threads. This function therefore has an inherent race + /// with other active worker threads, which may be removing items + /// from the local deque. 
+ /// + /// [work-stealing]: https://en.wikipedia.org/wiki/Work_stealing + /// [deque]: https://en.wikipedia.org/wiki/Double-ended_queue + #[inline] + pub fn current_thread_has_pending_tasks(&self) -> Option<bool> { + let curr = self.registry.current_thread()?; + Some(!curr.local_deque_is_empty()) + } + + /// Execute `oper_a` and `oper_b` in the thread-pool and return + /// the results. Equivalent to `self.install(|| join(oper_a, + /// oper_b))`. + pub fn join<A, B, RA, RB>(&self, oper_a: A, oper_b: B) -> (RA, RB) + where + A: FnOnce() -> RA + Send, + B: FnOnce() -> RB + Send, + RA: Send, + RB: Send, + { + self.install(|| join(oper_a, oper_b)) + } + + /// Creates a scope that executes within this thread-pool. + /// Equivalent to `self.install(|| scope(...))`. + /// + /// See also: [the `scope()` function][scope]. + /// + /// [scope]: fn.scope.html + pub fn scope<'scope, OP, R>(&self, op: OP) -> R + where + OP: FnOnce(&Scope<'scope>) -> R + Send, + R: Send, + { + self.install(|| scope(op)) + } + + /// Creates a scope that executes within this thread-pool. + /// Spawns from the same thread are prioritized in relative FIFO order. + /// Equivalent to `self.install(|| scope_fifo(...))`. + /// + /// See also: [the `scope_fifo()` function][scope_fifo]. + /// + /// [scope_fifo]: fn.scope_fifo.html + pub fn scope_fifo<'scope, OP, R>(&self, op: OP) -> R + where + OP: FnOnce(&ScopeFifo<'scope>) -> R + Send, + R: Send, + { + self.install(|| scope_fifo(op)) + } + + /// Creates a scope that spawns work into this thread-pool. + /// + /// See also: [the `in_place_scope()` function][in_place_scope]. + /// + /// [in_place_scope]: fn.in_place_scope.html + pub fn in_place_scope<'scope, OP, R>(&self, op: OP) -> R + where + OP: FnOnce(&Scope<'scope>) -> R, + { + do_in_place_scope(Some(&self.registry), op) + } + + /// Creates a scope that spawns work into this thread-pool in FIFO order. + /// + /// See also: [the `in_place_scope_fifo()` function][in_place_scope_fifo]. + /// + /// [in_place_scope_fifo]: fn.in_place_scope_fifo.html + pub fn in_place_scope_fifo<'scope, OP, R>(&self, op: OP) -> R + where + OP: FnOnce(&ScopeFifo<'scope>) -> R, + { + do_in_place_scope_fifo(Some(&self.registry), op) + } + + /// Spawns an asynchronous task in this thread-pool. This task will + /// run in the implicit, global scope, which means that it may outlast + /// the current stack frame -- therefore, it cannot capture any references + /// onto the stack (you will likely need a `move` closure). + /// + /// See also: [the `spawn()` function defined on scopes][spawn]. + /// + /// [spawn]: struct.Scope.html#method.spawn + pub fn spawn<OP>(&self, op: OP) + where + OP: FnOnce() + Send + 'static, + { + // We assert that `self.registry` has not terminated. + unsafe { spawn::spawn_in(op, &self.registry) } + } + + /// Spawns an asynchronous task in this thread-pool. This task will + /// run in the implicit, global scope, which means that it may outlast + /// the current stack frame -- therefore, it cannot capture any references + /// onto the stack (you will likely need a `move` closure). + /// + /// See also: [the `spawn_fifo()` function defined on scopes][spawn_fifo]. + /// + /// [spawn_fifo]: struct.ScopeFifo.html#method.spawn_fifo + pub fn spawn_fifo<OP>(&self, op: OP) + where + OP: FnOnce() + Send + 'static, + { + // We assert that `self.registry` has not terminated. + unsafe { spawn::spawn_fifo_in(op, &self.registry) } + } + + /// Spawns an asynchronous task on every thread in this thread-pool. 
This task
+    /// will run in the implicit, global scope, which means that it may outlast the
+    /// current stack frame -- therefore, it cannot capture any references onto the
+    /// stack (you will likely need a `move` closure).
+    pub fn spawn_broadcast<OP>(&self, op: OP)
+    where
+        OP: Fn(BroadcastContext<'_>) + Send + Sync + 'static,
+    {
+        // We assert that `self.registry` has not terminated.
+        unsafe { broadcast::spawn_broadcast_in(op, &self.registry) }
+    }
+
+    /// Cooperatively yields execution to Rayon.
+    ///
+    /// This is similar to the general [`yield_now()`], but only if the current
+    /// thread is part of *this* thread pool.
+    ///
+    /// Returns `Some(Yield::Executed)` if anything was executed, `Some(Yield::Idle)` if
+    /// nothing was available, or `None` if the current thread is not part of this pool.
+    pub fn yield_now(&self) -> Option<Yield> {
+        let curr = self.registry.current_thread()?;
+        Some(curr.yield_now())
+    }
+
+    /// Cooperatively yields execution to local Rayon work.
+    ///
+    /// This is similar to the general [`yield_local()`], but only if the current
+    /// thread is part of *this* thread pool.
+    ///
+    /// Returns `Some(Yield::Executed)` if anything was executed, `Some(Yield::Idle)` if
+    /// nothing was available, or `None` if the current thread is not part of this pool.
+    pub fn yield_local(&self) -> Option<Yield> {
+        let curr = self.registry.current_thread()?;
+        Some(curr.yield_local())
+    }
+
+    pub(crate) fn wait_until_stopped(self) {
+        let registry = Arc::clone(&self.registry);
+        drop(self);
+        registry.wait_until_stopped();
+    }
+}
+
+impl Drop for ThreadPool {
+    fn drop(&mut self) {
+        self.registry.terminate();
+    }
+}
+
+impl fmt::Debug for ThreadPool {
+    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt.debug_struct("ThreadPool")
+            .field("num_threads", &self.current_num_threads())
+            .field("id", &self.registry.id())
+            .finish()
+    }
+}
+
+/// If called from a Rayon worker thread, returns the index of that
+/// thread within its current pool; if not called from a Rayon thread,
+/// returns `None`.
+///
+/// The index for a given thread will not change over the thread's
+/// lifetime. However, multiple threads may share the same index if
+/// they are in distinct thread-pools.
+///
+/// See also: [the `ThreadPool::current_thread_index()` method][m].
+///
+/// [m]: struct.ThreadPool.html#method.current_thread_index
+///
+/// # Future compatibility note
+///
+/// Currently, every thread-pool (including the global
+/// thread-pool) has a fixed number of threads, but this may
+/// change in future Rayon versions (see [the `num_threads()` method
+/// for details][snt]). In that case, the index for a
+/// thread would not change during its lifetime, but thread
+/// indices may wind up being reused if threads are terminated and
+/// restarted.
+///
+/// [snt]: struct.ThreadPoolBuilder.html#method.num_threads
+#[inline]
+pub fn current_thread_index() -> Option<usize> {
+    unsafe {
+        let curr = WorkerThread::current().as_ref()?;
+        Some(curr.index())
+    }
+}
+
+/// If called from a Rayon worker thread, indicates whether that
+/// thread's local deque still has pending tasks. Otherwise, returns
+/// `None`. For more information, see [the
+/// `ThreadPool::current_thread_has_pending_tasks()` method][m].
+/// +/// [m]: struct.ThreadPool.html#method.current_thread_has_pending_tasks +#[inline] +pub fn current_thread_has_pending_tasks() -> Option<bool> { + unsafe { + let curr = WorkerThread::current().as_ref()?; + Some(!curr.local_deque_is_empty()) + } +} + +/// Cooperatively yields execution to Rayon. +/// +/// If the current thread is part of a rayon thread pool, this looks for a +/// single unit of pending work in the pool, then executes it. Completion of +/// that work might include nested work or further work stealing. +/// +/// This is similar to [`std::thread::yield_now()`], but does not literally make +/// that call. If you are implementing a polling loop, you may want to also +/// yield to the OS scheduler yourself if no Rayon work was found. +/// +/// Returns `Some(Yield::Executed)` if anything was executed, `Some(Yield::Idle)` if +/// nothing was available, or `None` if this thread is not part of any pool at all. +pub fn yield_now() -> Option<Yield> { + unsafe { + let thread = WorkerThread::current().as_ref()?; + Some(thread.yield_now()) + } +} + +/// Cooperatively yields execution to local Rayon work. +/// +/// If the current thread is part of a rayon thread pool, this looks for a +/// single unit of pending work in this thread's queue, then executes it. +/// Completion of that work might include nested work or further work stealing. +/// +/// This is similar to [`yield_now()`], but does not steal from other threads. +/// +/// Returns `Some(Yield::Executed)` if anything was executed, `Some(Yield::Idle)` if +/// nothing was available, or `None` if this thread is not part of any pool at all. +pub fn yield_local() -> Option<Yield> { + unsafe { + let thread = WorkerThread::current().as_ref()?; + Some(thread.yield_local()) + } +} + +/// Result of [`yield_now()`] or [`yield_local()`]. +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum Yield { + /// Work was found and executed. + Executed, + /// No available work was found. 
+ Idle, +} diff --git a/compiler/rustc_thread_pool/src/thread_pool/tests.rs b/compiler/rustc_thread_pool/src/thread_pool/tests.rs new file mode 100644 index 00000000000..42c99565088 --- /dev/null +++ b/compiler/rustc_thread_pool/src/thread_pool/tests.rs @@ -0,0 +1,416 @@ +#![cfg(test)] + +use std::sync::atomic::{AtomicUsize, Ordering}; +use std::sync::mpsc::channel; +use std::sync::{Arc, Mutex}; + +use crate::{Scope, ScopeFifo, ThreadPool, ThreadPoolBuilder, join}; + +#[test] +#[should_panic(expected = "Hello, world!")] +fn panic_propagate() { + let thread_pool = ThreadPoolBuilder::new().build().unwrap(); + thread_pool.install(|| { + panic!("Hello, world!"); + }); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn workers_stop() { + let registry; + + { + // once we exit this block, thread-pool will be dropped + let thread_pool = ThreadPoolBuilder::new().num_threads(22).build().unwrap(); + registry = thread_pool.install(|| { + // do some work on these threads + join_a_lot(22); + + Arc::clone(&thread_pool.registry) + }); + assert_eq!(registry.num_threads(), 22); + } + + // once thread-pool is dropped, registry should terminate, which + // should lead to worker threads stopping + registry.wait_until_stopped(); +} + +fn join_a_lot(n: usize) { + if n > 0 { + join(|| join_a_lot(n - 1), || join_a_lot(n - 1)); + } +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn sleeper_stop() { + use std::{thread, time}; + + let registry; + + { + // once we exit this block, thread-pool will be dropped + let thread_pool = ThreadPoolBuilder::new().num_threads(22).build().unwrap(); + registry = Arc::clone(&thread_pool.registry); + + // Give time for at least some of the thread pool to fall asleep. + thread::sleep(time::Duration::from_secs(1)); + } + + // once thread-pool is dropped, registry should terminate, which + // should lead to worker threads stopping + registry.wait_until_stopped(); +} + +/// Creates a start/exit handler that increments an atomic counter. +fn count_handler() -> (Arc<AtomicUsize>, impl Fn(usize)) { + let count = Arc::new(AtomicUsize::new(0)); + (Arc::clone(&count), move |_| { + count.fetch_add(1, Ordering::SeqCst); + }) +} + +/// Wait until a counter is no longer shared, then return its value. +fn wait_for_counter(mut counter: Arc<AtomicUsize>) -> usize { + use std::{thread, time}; + + for _ in 0..60 { + counter = match Arc::try_unwrap(counter) { + Ok(counter) => return counter.into_inner(), + Err(counter) => { + thread::sleep(time::Duration::from_secs(1)); + counter + } + }; + } + + // That's too long! + panic!("Counter is still shared!"); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn failed_thread_stack() { + // Note: we first tried to force failure with a `usize::MAX` stack, but + // macOS and Windows weren't fazed, or at least didn't fail the way we want. + // They work with `isize::MAX`, but 32-bit platforms may feasibly allocate a + // 2GB stack, so it might not fail until the second thread. 
+ let stack_size = ::std::isize::MAX as usize; + + let (start_count, start_handler) = count_handler(); + let (exit_count, exit_handler) = count_handler(); + let builder = ThreadPoolBuilder::new() + .num_threads(10) + .stack_size(stack_size) + .start_handler(start_handler) + .exit_handler(exit_handler); + + let pool = builder.build(); + assert!(pool.is_err(), "thread stack should have failed!"); + + // With such a huge stack, 64-bit will probably fail on the first thread; + // 32-bit might manage the first 2GB, but certainly fail the second. + let start_count = wait_for_counter(start_count); + assert!(start_count <= 1); + assert_eq!(start_count, wait_for_counter(exit_count)); +} + +#[test] +#[cfg_attr(not(panic = "unwind"), ignore)] +fn panic_thread_name() { + let (start_count, start_handler) = count_handler(); + let (exit_count, exit_handler) = count_handler(); + let builder = ThreadPoolBuilder::new() + .num_threads(10) + .start_handler(start_handler) + .exit_handler(exit_handler) + .thread_name(|i| { + if i >= 5 { + panic!(); + } + format!("panic_thread_name#{}", i) + }); + + let pool = crate::unwind::halt_unwinding(|| builder.build()); + assert!(pool.is_err(), "thread-name panic should propagate!"); + + // Assuming they're created in order, threads 0 through 4 should have + // been started already, and then terminated by the panic. + assert_eq!(5, wait_for_counter(start_count)); + assert_eq!(5, wait_for_counter(exit_count)); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn self_install() { + let pool = ThreadPoolBuilder::new().num_threads(1).build().unwrap(); + + // If the inner `install` blocks, then nothing will actually run it! + assert!(pool.install(|| pool.install(|| true))); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn mutual_install() { + let pool1 = ThreadPoolBuilder::new().num_threads(1).build().unwrap(); + let pool2 = ThreadPoolBuilder::new().num_threads(1).build().unwrap(); + + let ok = pool1.install(|| { + // This creates a dependency from `pool1` -> `pool2` + pool2.install(|| { + // This creates a dependency from `pool2` -> `pool1` + pool1.install(|| { + // If they blocked on inter-pool installs, there would be no + // threads left to run this! + true + }) + }) + }); + assert!(ok); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn mutual_install_sleepy() { + use std::{thread, time}; + + let pool1 = ThreadPoolBuilder::new().num_threads(1).build().unwrap(); + let pool2 = ThreadPoolBuilder::new().num_threads(1).build().unwrap(); + + let ok = pool1.install(|| { + // This creates a dependency from `pool1` -> `pool2` + pool2.install(|| { + // Give `pool1` time to fall asleep. + thread::sleep(time::Duration::from_secs(1)); + + // This creates a dependency from `pool2` -> `pool1` + pool1.install(|| { + // Give `pool2` time to fall asleep. + thread::sleep(time::Duration::from_secs(1)); + + // If they blocked on inter-pool installs, there would be no + // threads left to run this! + true + }) + }) + }); + assert!(ok); +} + +#[test] +#[allow(deprecated)] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn check_thread_pool_new() { + let pool = ThreadPool::new(crate::Configuration::new().num_threads(22)).unwrap(); + assert_eq!(pool.current_num_threads(), 22); +} + +macro_rules! 
test_scope_order { + ($scope:ident => $spawn:ident) => {{ + let builder = ThreadPoolBuilder::new().num_threads(1); + let pool = builder.build().unwrap(); + pool.install(|| { + let vec = Mutex::new(vec![]); + pool.$scope(|scope| { + let vec = &vec; + for i in 0..10 { + scope.$spawn(move |_| { + vec.lock().unwrap().push(i); + }); + } + }); + vec.into_inner().unwrap() + }) + }}; +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn scope_lifo_order() { + let vec = test_scope_order!(scope => spawn); + let expected: Vec<i32> = (0..10).rev().collect(); // LIFO -> reversed + assert_eq!(vec, expected); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn scope_fifo_order() { + let vec = test_scope_order!(scope_fifo => spawn_fifo); + let expected: Vec<i32> = (0..10).collect(); // FIFO -> natural order + assert_eq!(vec, expected); +} + +macro_rules! test_spawn_order { + ($spawn:ident) => {{ + let builder = ThreadPoolBuilder::new().num_threads(1); + let pool = &builder.build().unwrap(); + let (tx, rx) = channel(); + pool.install(move || { + for i in 0..10 { + let tx = tx.clone(); + pool.$spawn(move || { + tx.send(i).unwrap(); + }); + } + }); + rx.iter().collect::<Vec<i32>>() + }}; +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn spawn_lifo_order() { + let vec = test_spawn_order!(spawn); + let expected: Vec<i32> = (0..10).rev().collect(); // LIFO -> reversed + assert_eq!(vec, expected); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn spawn_fifo_order() { + let vec = test_spawn_order!(spawn_fifo); + let expected: Vec<i32> = (0..10).collect(); // FIFO -> natural order + assert_eq!(vec, expected); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn nested_scopes() { + // Create matching scopes for every thread pool. + fn nest<'scope, OP>(pools: &[ThreadPool], scopes: Vec<&Scope<'scope>>, op: OP) + where + OP: FnOnce(&[&Scope<'scope>]) + Send, + { + if let Some((pool, tail)) = pools.split_first() { + pool.scope(move |s| { + // This move reduces the reference lifetimes by variance to match s, + // but the actual scopes are still tied to the invariant 'scope. + let mut scopes = scopes; + scopes.push(s); + nest(tail, scopes, op) + }) + } else { + (op)(&scopes) + } + } + + let pools: Vec<_> = + (0..10).map(|_| ThreadPoolBuilder::new().num_threads(1).build().unwrap()).collect(); + + let counter = AtomicUsize::new(0); + nest(&pools, vec![], |scopes| { + for &s in scopes { + s.spawn(|_| { + // Our 'scope lets us borrow the counter in every pool. + counter.fetch_add(1, Ordering::Relaxed); + }); + } + }); + assert_eq!(counter.into_inner(), pools.len()); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn nested_fifo_scopes() { + // Create matching fifo scopes for every thread pool. + fn nest<'scope, OP>(pools: &[ThreadPool], scopes: Vec<&ScopeFifo<'scope>>, op: OP) + where + OP: FnOnce(&[&ScopeFifo<'scope>]) + Send, + { + if let Some((pool, tail)) = pools.split_first() { + pool.scope_fifo(move |s| { + // This move reduces the reference lifetimes by variance to match s, + // but the actual scopes are still tied to the invariant 'scope. 
+                let mut scopes = scopes;
+                scopes.push(s);
+                nest(tail, scopes, op)
+            })
+        } else {
+            (op)(&scopes)
+        }
+    }
+
+    let pools: Vec<_> =
+        (0..10).map(|_| ThreadPoolBuilder::new().num_threads(1).build().unwrap()).collect();
+
+    let counter = AtomicUsize::new(0);
+    nest(&pools, vec![], |scopes| {
+        for &s in scopes {
+            s.spawn_fifo(|_| {
+                // Our 'scope lets us borrow the counter in every pool.
+                counter.fetch_add(1, Ordering::Relaxed);
+            });
+        }
+    });
+    assert_eq!(counter.into_inner(), pools.len());
+}
+
+#[test]
+#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)]
+fn in_place_scope_no_deadlock() {
+    let pool = ThreadPoolBuilder::new().num_threads(1).build().unwrap();
+    let (tx, rx) = channel();
+    let rx_ref = &rx;
+    pool.in_place_scope(move |s| {
+        // With regular scopes this closure would never run because this scope op
+        // itself would block the only worker thread.
+        s.spawn(move |_| {
+            tx.send(()).unwrap();
+        });
+        rx_ref.recv().unwrap();
+    });
+}
+
+#[test]
+#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)]
+fn in_place_scope_fifo_no_deadlock() {
+    let pool = ThreadPoolBuilder::new().num_threads(1).build().unwrap();
+    let (tx, rx) = channel();
+    let rx_ref = &rx;
+    pool.in_place_scope_fifo(move |s| {
+        // With regular scopes this closure would never run because this scope op
+        // itself would block the only worker thread.
+        s.spawn_fifo(move |_| {
+            tx.send(()).unwrap();
+        });
+        rx_ref.recv().unwrap();
+    });
+}
+
+#[test]
+fn yield_now_to_spawn() {
+    let (tx, rx) = channel();
+
+    // Queue a regular spawn.
+    crate::spawn(move || tx.send(22).unwrap());
+
+    // The single-threaded fallback mode (for wasm etc.) won't
+    // get a chance to run the spawn if we never yield to it.
+    crate::registry::in_worker(move |_, _| {
+        crate::yield_now();
+    });
+
+    // The spawn **must** have started by now, but we still might have to wait
+    // for it to finish if a different thread stole it first.
+    assert_eq!(22, rx.recv().unwrap());
+}
+
+#[test]
+fn yield_local_to_spawn() {
+    let (tx, rx) = channel();
+
+    // Queue a regular spawn.
+    crate::spawn(move || tx.send(22).unwrap());
+
+    // The single-threaded fallback mode (for wasm etc.) won't
+    // get a chance to run the spawn if we never yield to it.
+    crate::registry::in_worker(move |_, _| {
+        crate::yield_local();
+    });
+
+    // The spawn **must** have started by now, but we still might have to wait
+    // for it to finish if a different thread stole it first.
+    assert_eq!(22, rx.recv().unwrap());
+}
diff --git a/compiler/rustc_thread_pool/src/tlv.rs b/compiler/rustc_thread_pool/src/tlv.rs
new file mode 100644
index 00000000000..b5f63479e2f
--- /dev/null
+++ b/compiler/rustc_thread_pool/src/tlv.rs
@@ -0,0 +1,32 @@
+//! Allows access to Rayon's thread-local value
which is preserved when moving jobs across threads + +use std::cell::Cell; +use std::ptr; + +thread_local!(pub static TLV: Cell<*const ()> = const { Cell::new(ptr::null()) }); + +#[derive(Copy, Clone)] +pub(crate) struct Tlv(pub(crate) *const ()); + +impl Tlv { + #[inline] + pub(crate) fn null() -> Self { + Self(ptr::null()) + } +} + +unsafe impl Sync for Tlv {} +unsafe impl Send for Tlv {} + +/// Sets the current thread-local value +#[inline] +pub(crate) fn set(value: Tlv) { + TLV.with(|tlv| tlv.set(value.0)); +} + +/// Returns the current thread-local value +#[inline] +pub(crate) fn get() -> Tlv { + TLV.with(|tlv| Tlv(tlv.get())) +} diff --git a/compiler/rustc_thread_pool/src/unwind.rs b/compiler/rustc_thread_pool/src/unwind.rs new file mode 100644 index 00000000000..9671fa57821 --- /dev/null +++ b/compiler/rustc_thread_pool/src/unwind.rs @@ -0,0 +1,31 @@ +//! Package up unwind recovery. Note that if you are in some sensitive +//! place, you can use the `AbortIfPanic` helper to protect against +//! accidental panics in the rayon code itself. + +use std::any::Any; +use std::panic::{self, AssertUnwindSafe}; +use std::thread; + +/// Executes `f` and captures any panic, translating that panic into a +/// `Err` result. The assumption is that any panic will be propagated +/// later with `resume_unwinding`, and hence `f` can be treated as +/// exception safe. +pub(super) fn halt_unwinding<F, R>(func: F) -> thread::Result<R> +where + F: FnOnce() -> R, +{ + panic::catch_unwind(AssertUnwindSafe(func)) +} + +pub(super) fn resume_unwinding(payload: Box<dyn Any + Send>) -> ! { + panic::resume_unwind(payload) +} + +pub(super) struct AbortIfPanic; + +impl Drop for AbortIfPanic { + fn drop(&mut self) { + eprintln!("Rayon: detected unexpected panic; aborting"); + ::std::process::abort(); + } +} diff --git a/compiler/rustc_thread_pool/src/worker_local.rs b/compiler/rustc_thread_pool/src/worker_local.rs new file mode 100644 index 00000000000..d108c91f9ee --- /dev/null +++ b/compiler/rustc_thread_pool/src/worker_local.rs @@ -0,0 +1,75 @@ +use std::fmt; +use std::ops::Deref; +use std::sync::Arc; + +use crate::registry::{Registry, WorkerThread}; + +#[repr(align(64))] +#[derive(Debug)] +struct CacheAligned<T>(T); + +/// Holds worker-locals values for each thread in a thread pool. +/// You can only access the worker local value through the Deref impl +/// on the thread pool it was constructed on. It will panic otherwise +pub struct WorkerLocal<T> { + locals: Vec<CacheAligned<T>>, + registry: Arc<Registry>, +} + +/// We prevent concurrent access to the underlying value in the +/// Deref impl, thus any values safe to send across threads can +/// be used with WorkerLocal. +unsafe impl<T: Send> Sync for WorkerLocal<T> {} + +impl<T> WorkerLocal<T> { + /// Creates a new worker local where the `initial` closure computes the + /// value this worker local should take for each thread in the thread pool. 
+ #[inline] + pub fn new<F: FnMut(usize) -> T>(mut initial: F) -> WorkerLocal<T> { + let registry = Registry::current(); + WorkerLocal { + locals: (0..registry.num_threads()).map(|i| CacheAligned(initial(i))).collect(), + registry, + } + } + + /// Returns the worker-local value for each thread + #[inline] + pub fn into_inner(self) -> Vec<T> { + self.locals.into_iter().map(|c| c.0).collect() + } + + fn current(&self) -> &T { + unsafe { + let worker_thread = WorkerThread::current(); + if worker_thread.is_null() + || &*(*worker_thread).registry as *const _ != &*self.registry as *const _ + { + panic!("WorkerLocal can only be used on the thread pool it was created on") + } + &self.locals[(*worker_thread).index].0 + } + } +} + +impl<T> WorkerLocal<Vec<T>> { + /// Joins the elements of all the worker locals into one Vec + pub fn join(self) -> Vec<T> { + self.into_inner().into_iter().flat_map(|v| v).collect() + } +} + +impl<T: fmt::Debug> fmt::Debug for WorkerLocal<T> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("WorkerLocal").field("registry", &self.registry.id()).finish() + } +} + +impl<T> Deref for WorkerLocal<T> { + type Target = T; + + #[inline(always)] + fn deref(&self) -> &T { + self.current() + } +} diff --git a/compiler/rustc_thread_pool/tests/double_init_fail.rs b/compiler/rustc_thread_pool/tests/double_init_fail.rs new file mode 100644 index 00000000000..85e509518d4 --- /dev/null +++ b/compiler/rustc_thread_pool/tests/double_init_fail.rs @@ -0,0 +1,15 @@ +#![allow(unused_crate_dependencies)] + +use std::error::Error; + +use rustc_thread_pool::ThreadPoolBuilder; + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn double_init_fail() { + let result1 = ThreadPoolBuilder::new().build_global(); + assert!(result1.is_ok()); + let err = ThreadPoolBuilder::new().build_global().unwrap_err(); + assert!(err.source().is_none()); + assert_eq!(err.to_string(), "The global thread pool has already been initialized.",); +} diff --git a/compiler/rustc_thread_pool/tests/init_zero_threads.rs b/compiler/rustc_thread_pool/tests/init_zero_threads.rs new file mode 100644 index 00000000000..261493fcb7b --- /dev/null +++ b/compiler/rustc_thread_pool/tests/init_zero_threads.rs @@ -0,0 +1,9 @@ +#![allow(unused_crate_dependencies)] + +use rustc_thread_pool::ThreadPoolBuilder; + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn init_zero_threads() { + ThreadPoolBuilder::new().num_threads(0).build_global().unwrap(); +} diff --git a/compiler/rustc_thread_pool/tests/scope_join.rs b/compiler/rustc_thread_pool/tests/scope_join.rs new file mode 100644 index 00000000000..83468da81c0 --- /dev/null +++ b/compiler/rustc_thread_pool/tests/scope_join.rs @@ -0,0 +1,47 @@ +#![allow(unused_crate_dependencies)] + +/// Test that one can emulate join with `scope`: +fn pseudo_join<F, G>(f: F, g: G) +where + F: FnOnce() + Send, + G: FnOnce() + Send, +{ + rustc_thread_pool::scope(|s| { + s.spawn(|_| g()); + f(); + }); +} + +fn quick_sort<T: PartialOrd + Send>(v: &mut [T]) { + if v.len() <= 1 { + return; + } + + let mid = partition(v); + let (lo, hi) = v.split_at_mut(mid); + pseudo_join(|| quick_sort(lo), || quick_sort(hi)); +} + +fn partition<T: PartialOrd + Send>(v: &mut [T]) -> usize { + let pivot = v.len() - 1; + let mut i = 0; + for j in 0..pivot { + if v[j] <= v[pivot] { + v.swap(i, j); + i += 1; + } + } + v.swap(i, pivot); + i +} + +fn is_sorted<T: Send + Ord>(v: &[T]) -> bool { + (1..v.len()).all(|i| v[i - 1] <= v[i]) +} 
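The `pseudo_join` helper above emulates `join` using `scope`. For comparison, a minimal sketch of the same divide-and-conquer written directly against the crate's `join` primitive could look like the following; the `quick_sort_join` name is made up for illustration and it reuses the `partition` helper defined above, so this is not part of the diff itself.

```rust
// Sketch only: the same recursion as `quick_sort` above, but calling
// `rustc_thread_pool::join` directly instead of emulating it with `scope`.
// Assumes the `partition` helper defined in this test file.
fn quick_sort_join<T: PartialOrd + Send>(v: &mut [T]) {
    if v.len() <= 1 {
        return;
    }
    let mid = partition(v);
    let (lo, hi) = v.split_at_mut(mid);
    // Both halves may run in parallel; `join` returns once both closures finish.
    rustc_thread_pool::join(|| quick_sort_join(lo), || quick_sort_join(hi));
}
```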
+ +#[test] +fn scope_join() { + let mut v: Vec<i32> = (0..256).rev().collect(); + quick_sort(&mut v); + assert!(is_sorted(&v)); +} diff --git a/compiler/rustc_thread_pool/tests/scoped_threadpool.rs b/compiler/rustc_thread_pool/tests/scoped_threadpool.rs new file mode 100644 index 00000000000..295da650e88 --- /dev/null +++ b/compiler/rustc_thread_pool/tests/scoped_threadpool.rs @@ -0,0 +1,99 @@ +#![allow(unused_crate_dependencies)] + +use crossbeam_utils::thread; +use rustc_thread_pool::ThreadPoolBuilder; + +#[derive(PartialEq, Eq, Debug)] +struct Local(i32); + +scoped_tls::scoped_thread_local!(static LOCAL: Local); + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn missing_scoped_tls() { + LOCAL.set(&Local(42), || { + let pool = ThreadPoolBuilder::new().build().expect("thread pool created"); + + // `LOCAL` is not set in the pool. + pool.install(|| { + assert!(!LOCAL.is_set()); + }); + }); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn spawn_scoped_tls_threadpool() { + LOCAL.set(&Local(42), || { + LOCAL.with(|x| { + thread::scope(|scope| { + let pool = ThreadPoolBuilder::new() + .spawn_handler(move |thread| { + scope + .builder() + .spawn(move |_| { + // Borrow the same local value in the thread pool. + LOCAL.set(x, || thread.run()) + }) + .map(|_| ()) + }) + .build() + .expect("thread pool created"); + + // The pool matches our local value. + pool.install(|| { + assert!(LOCAL.is_set()); + LOCAL.with(|y| { + assert_eq!(x, y); + }); + }); + + // If we change our local value, the pool is not affected. + LOCAL.set(&Local(-1), || { + pool.install(|| { + assert!(LOCAL.is_set()); + LOCAL.with(|y| { + assert_eq!(x, y); + }); + }); + }); + }) + .expect("scope threads ok"); + // `thread::scope` will wait for the threads to exit before returning. + }); + }); +} + +#[test] +#[cfg_attr(any(target_os = "emscripten", target_family = "wasm"), ignore)] +fn build_scoped_tls_threadpool() { + LOCAL.set(&Local(42), || { + LOCAL.with(|x| { + ThreadPoolBuilder::new() + .build_scoped( + move |thread| LOCAL.set(x, || thread.run()), + |pool| { + // The pool matches our local value. + pool.install(|| { + assert!(LOCAL.is_set()); + LOCAL.with(|y| { + assert_eq!(x, y); + }); + }); + + // If we change our local value, the pool is not affected. + LOCAL.set(&Local(-1), || { + pool.install(|| { + assert!(LOCAL.is_set()); + LOCAL.with(|y| { + assert_eq!(x, y); + }); + }); + }); + }, + ) + .expect("thread pool created"); + // Internally, `std::thread::scope` will wait for the threads to exit before returning. 
+ }); + }); +} diff --git a/compiler/rustc_thread_pool/tests/simple_panic.rs b/compiler/rustc_thread_pool/tests/simple_panic.rs new file mode 100644 index 00000000000..b35b4d632d2 --- /dev/null +++ b/compiler/rustc_thread_pool/tests/simple_panic.rs @@ -0,0 +1,9 @@ +#![allow(unused_crate_dependencies)] + +use rustc_thread_pool::join; + +#[test] +#[should_panic(expected = "should panic")] +fn simple_panic() { + join(|| {}, || panic!("should panic")); +} diff --git a/compiler/rustc_thread_pool/tests/stack_overflow_crash.rs b/compiler/rustc_thread_pool/tests/stack_overflow_crash.rs new file mode 100644 index 00000000000..805b6d8ee3f --- /dev/null +++ b/compiler/rustc_thread_pool/tests/stack_overflow_crash.rs @@ -0,0 +1,87 @@ +#![allow(unused_crate_dependencies)] + +use std::env; +#[cfg(target_os = "linux")] +use std::os::unix::process::ExitStatusExt; +use std::process::{Command, ExitStatus, Stdio}; + +use rustc_thread_pool::ThreadPoolBuilder; + +fn force_stack_overflow(depth: u32) { + let mut buffer = [0u8; 1024 * 1024]; + #[allow(clippy::incompatible_msrv)] + std::hint::black_box(&mut buffer); + if depth > 0 { + force_stack_overflow(depth - 1); + } +} + +#[cfg(unix)] +fn disable_core() { + unsafe { + libc::setrlimit(libc::RLIMIT_CORE, &libc::rlimit { rlim_cur: 0, rlim_max: 0 }); + } +} + +#[cfg(unix)] +fn overflow_code() -> Option<i32> { + None +} + +#[cfg(windows)] +fn overflow_code() -> Option<i32> { + use std::os::windows::process::ExitStatusExt; + + ExitStatus::from_raw(0xc00000fd /*STATUS_STACK_OVERFLOW*/).code() +} + +#[test] +#[cfg_attr(not(any(unix, windows)), ignore)] +fn stack_overflow_crash() { + // First check that the recursive call actually causes a stack overflow, + // and does not get optimized away. + let status = run_ignored("run_with_small_stack"); + assert!(!status.success()); + #[cfg(any(unix, windows))] + assert_eq!(status.code(), overflow_code()); + #[cfg(target_os = "linux")] + assert!(matches!(status.signal(), Some(libc::SIGABRT | libc::SIGSEGV))); + + // Now run with a larger stack and verify correct operation. 
+ let status = run_ignored("run_with_large_stack"); + assert_eq!(status.code(), Some(0)); + #[cfg(target_os = "linux")] + assert_eq!(status.signal(), None); +} + +fn run_ignored(test: &str) -> ExitStatus { + Command::new(env::current_exe().unwrap()) + .arg("--ignored") + .arg("--exact") + .arg(test) + .stdout(Stdio::null()) + .stderr(Stdio::null()) + .status() + .unwrap() +} + +#[test] +#[ignore] +fn run_with_small_stack() { + run_with_stack(8); +} + +#[test] +#[ignore] +fn run_with_large_stack() { + run_with_stack(48); +} + +fn run_with_stack(stack_size_in_mb: usize) { + let pool = ThreadPoolBuilder::new().stack_size(stack_size_in_mb * 1024 * 1024).build().unwrap(); + pool.install(|| { + #[cfg(unix)] + disable_core(); + force_stack_overflow(32); + }); +} diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/region.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/region.rs index b8207c4f816..5c669678ccc 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/infer/region.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/infer/region.rs @@ -891,7 +891,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { (b'a'..=b'z') .map(|c| format!("'{}", c as char)) .find(|candidate| !used_names.iter().any(|e| e.as_str() == candidate)) - .unwrap_or("'lt".to_string()) + .unwrap_or_else(|| "'lt".to_string()) }; let mut visitor = LifetimeReplaceVisitor { diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs index ee5a5b247ce..2bbf90ed3ed 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs @@ -2721,6 +2721,13 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { ObligationCauseCode::TupleElem => { err.note("only the last element of a tuple may have a dynamically sized type"); } + ObligationCauseCode::DynCompatible(span) => { + err.multipart_suggestion( + "you might have meant to use `Self` to refer to the implementing type", + vec![(span, "Self".into())], + Applicability::MachineApplicable, + ); + } ObligationCauseCode::WhereClause(item_def_id, span) | ObligationCauseCode::WhereClauseInExpr(item_def_id, span, ..) | ObligationCauseCode::HostEffectInExpr(item_def_id, span, ..) @@ -2872,13 +2879,23 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { _ => (), } } - let descr = format!("required by {a} bound in `{item_name}`"); - if span.is_visible(sm) { - let msg = format!("required by {this} in `{short_item_name}`"); - multispan.push_span_label(span, msg); - err.span_note(multispan, descr); + + // If this is from a format string literal desugaring, + // we've already said "required by this formatting parameter" + let is_in_fmt_lit = if let Some(s) = err.span.primary_span() { + matches!(s.desugaring_kind(), Some(DesugaringKind::FormatLiteral { .. 
}))
            } else {
-                err.span_note(tcx.def_span(item_def_id), descr);
+                false
+            };
+            if !is_in_fmt_lit {
+                let descr = format!("required by {a} bound in `{item_name}`");
+                if span.is_visible(sm) {
+                    let msg = format!("required by {this} in `{short_item_name}`");
+                    multispan.push_span_label(span, msg);
+                    err.span_note(multispan, descr);
+                } else {
+                    err.span_note(tcx.def_span(item_def_id), descr);
+                }
             }
             if let Some(note) = note {
                 err.note(note);
@@ -3575,11 +3592,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
             }
             ObligationCauseCode::TrivialBound => {
                 err.help("see issue #48214");
-                tcx.disabled_nightly_features(
-                    err,
-                    Some(tcx.local_def_id_to_hir_id(body_id)),
-                    [(String::new(), sym::trivial_bounds)],
-                );
+                tcx.disabled_nightly_features(err, [(String::new(), sym::trivial_bounds)]);
             }
             ObligationCauseCode::OpaqueReturnType(expr_info) => {
                 let (expr_ty, expr) = if let Some((expr_ty, hir_id)) = expr_info {
@@ -3977,7 +3990,15 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
             ) = expr.kind
             {
                 if Some(*span) != err.span.primary_span() {
-                    err.span_label(*span, "required by a bound introduced by this call");
+                    let msg = if span.is_desugaring(DesugaringKind::FormatLiteral { source: true })
+                    {
+                        "required by this formatting parameter"
+                    } else if span.is_desugaring(DesugaringKind::FormatLiteral { source: false }) {
+                        "required by a formatting parameter in this expression"
+                    } else {
+                        "required by a bound introduced by this call"
+                    };
+                    err.span_label(*span, msg);
                 }
             }
diff --git a/compiler/rustc_trait_selection/src/traits/dyn_compatibility.rs b/compiler/rustc_trait_selection/src/traits/dyn_compatibility.rs
index ee30956295a..bdfe48a3928 100644
--- a/compiler/rustc_trait_selection/src/traits/dyn_compatibility.rs
+++ b/compiler/rustc_trait_selection/src/traits/dyn_compatibility.rs
@@ -31,7 +31,7 @@ use crate::traits::{
 ///
 /// Currently that is `Self` in supertraits. This is needed
 /// because `dyn_compatibility_violations` can't be used during
-/// type collection.
+/// type collection, as type collection is needed for `dyn_compatibility_violations` itself.
#[instrument(level = "debug", skip(tcx), ret)] pub fn hir_ty_lowering_dyn_compatibility_violations( tcx: TyCtxt<'_>, diff --git a/compiler/rustc_trait_selection/src/traits/normalize.rs b/compiler/rustc_trait_selection/src/traits/normalize.rs index 35a43b294ee..9e02ce32b21 100644 --- a/compiler/rustc_trait_selection/src/traits/normalize.rs +++ b/compiler/rustc_trait_selection/src/traits/normalize.rs @@ -224,7 +224,7 @@ impl<'a, 'b, 'tcx> AssocTypeNormalizer<'a, 'b, 'tcx> { ) .ok() .flatten() - .unwrap_or(proj.to_term(infcx.tcx)); + .unwrap_or_else(|| proj.to_term(infcx.tcx)); PlaceholderReplacer::replace_placeholders( infcx, diff --git a/compiler/rustc_ty_utils/src/layout.rs b/compiler/rustc_ty_utils/src/layout.rs index 9774263e4c9..d5222822461 100644 --- a/compiler/rustc_ty_utils/src/layout.rs +++ b/compiler/rustc_ty_utils/src/layout.rs @@ -896,10 +896,9 @@ fn variant_info_for_coroutine<'tcx>( variant_size = variant_size.max(offset + field_layout.size); FieldInfo { kind: FieldKind::CoroutineLocal, - name: field_name.unwrap_or(Symbol::intern(&format!( - ".coroutine_field{}", - local.as_usize() - ))), + name: field_name.unwrap_or_else(|| { + Symbol::intern(&format!(".coroutine_field{}", local.as_usize())) + }), offset: offset.bytes(), size: field_layout.size.bytes(), align: field_layout.align.abi.bytes(), diff --git a/compiler/rustc_type_ir/src/fast_reject.rs b/compiler/rustc_type_ir/src/fast_reject.rs index fa5e8d43702..d88c88fc6f3 100644 --- a/compiler/rustc_type_ir/src/fast_reject.rs +++ b/compiler/rustc_type_ir/src/fast_reject.rs @@ -240,6 +240,10 @@ impl<I: Interner, const INSTANTIATE_LHS_WITH_INFER: bool, const INSTANTIATE_RHS_ self.types_may_unify_inner(lhs, rhs, Self::STARTING_DEPTH) } + pub fn types_may_unify_with_depth(self, lhs: I::Ty, rhs: I::Ty, depth_limit: usize) -> bool { + self.types_may_unify_inner(lhs, rhs, depth_limit) + } + fn args_may_unify_inner( self, obligation_args: I::GenericArgs, diff --git a/compiler/rustc_type_ir/src/lib.rs b/compiler/rustc_type_ir/src/lib.rs index 792090effcf..3863a6d7c5a 100644 --- a/compiler/rustc_type_ir/src/lib.rs +++ b/compiler/rustc_type_ir/src/lib.rs @@ -1,3 +1,4 @@ +#![cfg_attr(feature = "nightly", rustc_diagnostic_item = "type_ir")] // tidy-alphabetical-start #![allow(rustc::usage_of_ty_tykind)] #![allow(rustc::usage_of_type_ir_inherent)] @@ -7,6 +8,7 @@ feature(associated_type_defaults, never_type, rustc_attrs, negative_impls) )] #![cfg_attr(feature = "nightly", allow(internal_features))] +#![cfg_attr(not(bootstrap), allow(rustc::direct_use_of_rustc_type_ir))] // tidy-alphabetical-end extern crate self as rustc_type_ir; diff --git a/compiler/rustc_type_ir/src/search_graph/global_cache.rs b/compiler/rustc_type_ir/src/search_graph/global_cache.rs index a2442660259..1b99cc820f1 100644 --- a/compiler/rustc_type_ir/src/search_graph/global_cache.rs +++ b/compiler/rustc_type_ir/src/search_graph/global_cache.rs @@ -2,6 +2,7 @@ use derive_where::derive_where; use super::{AvailableDepth, Cx, NestedGoals}; use crate::data_structures::HashMap; +use crate::search_graph::EvaluationResult; struct Success<X: Cx> { required_depth: usize, @@ -43,28 +44,26 @@ impl<X: Cx> GlobalCache<X> { &mut self, cx: X, input: X::Input, - - origin_result: X::Result, + evaluation_result: EvaluationResult<X>, dep_node: X::DepNodeIndex, - - required_depth: usize, - encountered_overflow: bool, - nested_goals: NestedGoals<X>, ) { - let result = cx.mk_tracked(origin_result, dep_node); + let EvaluationResult { encountered_overflow, required_depth, heads, nested_goals, result 
} =
+            evaluation_result;
+        debug_assert!(heads.is_empty());
+        let result = cx.mk_tracked(result, dep_node);
         let entry = self.map.entry(input).or_default();
         if encountered_overflow {
             let with_overflow = WithOverflow { nested_goals, result };
             let prev = entry.with_overflow.insert(required_depth, with_overflow);
             if let Some(prev) = &prev {
                 assert!(cx.evaluation_is_concurrent());
-                assert_eq!(cx.get_tracked(&prev.result), origin_result);
+                assert_eq!(cx.get_tracked(&prev.result), evaluation_result.result);
             }
         } else {
             let prev = entry.success.replace(Success { required_depth, nested_goals, result });
             if let Some(prev) = &prev {
                 assert!(cx.evaluation_is_concurrent());
-                assert_eq!(cx.get_tracked(&prev.result), origin_result);
+                assert_eq!(cx.get_tracked(&prev.result), evaluation_result.result);
             }
         }
     }
diff --git a/compiler/rustc_type_ir/src/search_graph/mod.rs b/compiler/rustc_type_ir/src/search_graph/mod.rs
index b59b4f92854..a857da2fcd5 100644
--- a/compiler/rustc_type_ir/src/search_graph/mod.rs
+++ b/compiler/rustc_type_ir/src/search_graph/mod.rs
@@ -1,16 +1,16 @@
-/// The search graph is responsible for caching and cycle detection in the trait
-/// solver. Making sure that caching doesn't result in soundness bugs or unstable
-/// query results is very challenging and makes this one of the most-involved
-/// self-contained components of the compiler.
-///
-/// We added fuzzing support to test its correctness. The fuzzers used to verify
-/// the current implementation can be found in https://github.com/lcnr/search_graph_fuzz.
-///
-/// This is just a quick overview of the general design, please check out the relevant
-/// [rustc-dev-guide chapter](https://rustc-dev-guide.rust-lang.org/solve/caching.html) for
-/// more details. Caching is split between a global cache and the per-cycle `provisional_cache`.
-/// The global cache has to be completely unobservable, while the per-cycle cache may impact
-/// behavior as long as the resulting behavior is still correct.
+//! The search graph is responsible for caching and cycle detection in the trait
+//! solver. Making sure that caching doesn't result in soundness bugs or unstable
+//! query results is very challenging and makes this one of the most-involved
+//! self-contained components of the compiler.
+//!
+//! We added fuzzing support to test its correctness. The fuzzers used to verify
+//! the current implementation can be found in <https://github.com/lcnr/search_graph_fuzz>.
+//!
+//! This is just a quick overview of the general design, please check out the relevant
+//! [rustc-dev-guide chapter](https://rustc-dev-guide.rust-lang.org/solve/caching.html) for
+//! more details. Caching is split between a global cache and the per-cycle `provisional_cache`.
+//! The global cache has to be completely unobservable, while the per-cycle cache may impact
+//! behavior as long as the resulting behavior is still correct.
 use std::cmp::Ordering;
 use std::collections::BTreeMap;
 use std::collections::hash_map::Entry;
@@ -381,18 +381,16 @@ impl PathsToNested {
 /// The nested goals of each stack entry and the path from the
 /// stack entry to that nested goal.
 ///
+/// They are used when checking whether reevaluating a global cache entry
+/// would encounter a cycle or use a provisional cache entry given the
+/// current search graph state. We need to disable the global cache
+/// in this case as it could otherwise result in behavioral differences.
+/// Cycles can impact behavior.
The cycle ABA may have different final +/// results from a the cycle BAB depending on the cycle root. +/// /// We only start tracking nested goals once we've either encountered /// overflow or a solver cycle. This is a performance optimization to /// avoid tracking nested goals on the happy path. -/// -/// We use nested goals for two reasons: -/// - when rebasing provisional cache entries -/// - when checking whether we have to ignore a global cache entry as reevaluating -/// it would encounter a cycle or use a provisional cache entry. -/// -/// We need to disable the global cache if using it would hide a cycle, as -/// cycles can impact behavior. The cycle ABA may have different final -/// results from a the cycle BAB depending on the cycle root. #[derive_where(Debug, Default, Clone; X: Cx)] struct NestedGoals<X: Cx> { nested_goals: HashMap<X::Input, PathsToNested>, @@ -450,6 +448,43 @@ struct ProvisionalCacheEntry<X: Cx> { result: X::Result, } +/// The final result of evaluating a goal. +/// +/// We reset `encountered_overflow` when reevaluating a goal, +/// but need to track whether we've hit the recursion limit at +/// all for correctness. +/// +/// We've previously simply returned the final `StackEntry` but this +/// made it easy to accidentally drop information from the previous +/// evaluation. +#[derive_where(Debug; X: Cx)] +struct EvaluationResult<X: Cx> { + encountered_overflow: bool, + required_depth: usize, + heads: CycleHeads, + nested_goals: NestedGoals<X>, + result: X::Result, +} + +impl<X: Cx> EvaluationResult<X> { + fn finalize( + final_entry: StackEntry<X>, + encountered_overflow: bool, + result: X::Result, + ) -> EvaluationResult<X> { + EvaluationResult { + encountered_overflow, + // Unlike `encountered_overflow`, we share `heads`, `required_depth`, + // and `nested_goals` between evaluations. + required_depth: final_entry.required_depth, + heads: final_entry.heads, + nested_goals: final_entry.nested_goals, + // We only care about the final result. + result, + } + } +} + pub struct SearchGraph<D: Delegate<Cx = X>, X: Cx = <D as Delegate>::Cx> { root_depth: AvailableDepth, /// The stack of goals currently being computed. @@ -562,7 +597,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> { input: X::Input, step_kind_from_parent: PathKind, inspect: &mut D::ProofTreeBuilder, - mut evaluate_goal: impl FnMut(&mut Self, &mut D::ProofTreeBuilder) -> X::Result, + evaluate_goal: impl Fn(&mut Self, X, X::Input, &mut D::ProofTreeBuilder) -> X::Result + Copy, ) -> X::Result { let Some(available_depth) = AvailableDepth::allowed_depth_for_nested::<D>(self.root_depth, &self.stack) @@ -616,12 +651,12 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> { input, step_kind_from_parent, available_depth, + provisional_result: None, required_depth: 0, heads: Default::default(), encountered_overflow: false, has_been_used: None, nested_goals: Default::default(), - provisional_result: None, }); // This is for global caching, so we properly track query dependencies. @@ -630,35 +665,41 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> { // not tracked by the cache key and from outside of this anon task, it // must not be added to the global cache. Notably, this is the case for // trait solver cycles participants. 
- let ((final_entry, result), dep_node) = cx.with_cached_task(|| { - self.evaluate_goal_in_task(cx, input, inspect, &mut evaluate_goal) - }); + let (evaluation_result, dep_node) = + cx.with_cached_task(|| self.evaluate_goal_in_task(cx, input, inspect, evaluate_goal)); // We've finished computing the goal and have popped it from the stack, // lazily update its parent goal. Self::update_parent_goal( &mut self.stack, - final_entry.step_kind_from_parent, - final_entry.required_depth, - &final_entry.heads, - final_entry.encountered_overflow, - UpdateParentGoalCtxt::Ordinary(&final_entry.nested_goals), + step_kind_from_parent, + evaluation_result.required_depth, + &evaluation_result.heads, + evaluation_result.encountered_overflow, + UpdateParentGoalCtxt::Ordinary(&evaluation_result.nested_goals), ); + let result = evaluation_result.result; // We're now done with this goal. We only add the root of cycles to the global cache. // In case this goal is involved in a larger cycle add it to the provisional cache. - if final_entry.heads.is_empty() { + if evaluation_result.heads.is_empty() { if let Some((_scope, expected)) = validate_cache { // Do not try to move a goal into the cache again if we're testing // the global cache. - assert_eq!(result, expected, "input={input:?}"); + assert_eq!(evaluation_result.result, expected, "input={input:?}"); } else if D::inspect_is_noop(inspect) { - self.insert_global_cache(cx, final_entry, result, dep_node) + self.insert_global_cache(cx, input, evaluation_result, dep_node) } } else if D::ENABLE_PROVISIONAL_CACHE { debug_assert!(validate_cache.is_none(), "unexpected non-root: {input:?}"); let entry = self.provisional_cache.entry(input).or_default(); - let StackEntry { heads, encountered_overflow, .. } = final_entry; + let EvaluationResult { + encountered_overflow, + required_depth: _, + heads, + nested_goals: _, + result, + } = evaluation_result; let path_from_head = Self::cycle_path_kind( &self.stack, step_kind_from_parent, @@ -1023,19 +1064,25 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> { cx: X, input: X::Input, inspect: &mut D::ProofTreeBuilder, - mut evaluate_goal: impl FnMut(&mut Self, &mut D::ProofTreeBuilder) -> X::Result, - ) -> (StackEntry<X>, X::Result) { + evaluate_goal: impl Fn(&mut Self, X, X::Input, &mut D::ProofTreeBuilder) -> X::Result + Copy, + ) -> EvaluationResult<X> { + // We reset `encountered_overflow` each time we rerun this goal + // but need to make sure we currently propagate it to the global + // cache even if only some of the evaluations actually reach the + // recursion limit. + let mut encountered_overflow = false; let mut i = 0; loop { - let result = evaluate_goal(self, inspect); + let result = evaluate_goal(self, cx, input, inspect); let stack_entry = self.stack.pop(); + encountered_overflow |= stack_entry.encountered_overflow; debug_assert_eq!(stack_entry.input, input); // If the current goal is not the root of a cycle, we are done. // // There are no provisional cache entries which depend on this goal. let Some(usage_kind) = stack_entry.has_been_used else { - return (stack_entry, result); + return EvaluationResult::finalize(stack_entry, encountered_overflow, result); }; // If it is a cycle head, we have to keep trying to prove it until @@ -1051,7 +1098,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> { // final result is equal to the initial response for that case. 
             if self.reached_fixpoint(cx, &stack_entry, usage_kind, result) {
                 self.rebase_provisional_cache_entries(&stack_entry, |_, result| result);
-                return (stack_entry, result);
+                return EvaluationResult::finalize(stack_entry, encountered_overflow, result);
             }
 
             // If computing this goal results in ambiguity with no constraints,
@@ -1070,7 +1117,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
                 self.rebase_provisional_cache_entries(&stack_entry, |input, _| {
                     D::propagate_ambiguity(cx, input, result)
                 });
-                return (stack_entry, result);
+                return EvaluationResult::finalize(stack_entry, encountered_overflow, result);
             };
 
             // If we've reached the fixpoint step limit, we bail with overflow and taint all
@@ -1082,7 +1129,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
                 self.rebase_provisional_cache_entries(&stack_entry, |input, _| {
                     D::on_fixpoint_overflow(cx, input)
                 });
-                return (stack_entry, result);
+                return EvaluationResult::finalize(stack_entry, encountered_overflow, result);
             }
 
             // Clear all provisional cache entries which depend on a previous provisional
@@ -1091,9 +1138,22 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
 
             debug!(?result, "fixpoint changed provisional results");
             self.stack.push(StackEntry {
-                has_been_used: None,
+                input,
+                step_kind_from_parent: stack_entry.step_kind_from_parent,
+                available_depth: stack_entry.available_depth,
                 provisional_result: Some(result),
-                ..stack_entry
+                // We can keep these goals from previous iterations as they are only
+                // ever read after finalizing this evaluation.
+                required_depth: stack_entry.required_depth,
+                heads: stack_entry.heads,
+                nested_goals: stack_entry.nested_goals,
+                // We reset these two fields when rerunning this goal. We could
+                // keep `encountered_overflow` as it's only used as a performance
+                // optimization. However, given that the proof tree will likely look
+                // similar to the previous iterations when reevaluating, it's better
+                // for caching if the reevaluation also starts out with `false`.
+                encountered_overflow: false,
+                has_been_used: None,
             });
         }
     }
@@ -1109,21 +1169,11 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
     fn insert_global_cache(
         &mut self,
         cx: X,
-        final_entry: StackEntry<X>,
-        result: X::Result,
+        input: X::Input,
+        evaluation_result: EvaluationResult<X>,
         dep_node: X::DepNodeIndex,
     ) {
-        debug!(?final_entry, ?result, "insert global cache");
-        cx.with_global_cache(|cache| {
-            cache.insert(
-                cx,
-                final_entry.input,
-                result,
-                dep_node,
-                final_entry.required_depth,
-                final_entry.encountered_overflow,
-                final_entry.nested_goals,
-            )
-        })
+        debug!(?evaluation_result, "insert global cache");
+        cx.with_global_cache(|cache| cache.insert(cx, input, evaluation_result, dep_node))
     }
 }
diff --git a/compiler/rustc_type_ir/src/search_graph/stack.rs b/compiler/rustc_type_ir/src/search_graph/stack.rs
index 8bb247bf055..e0fd934df69 100644
--- a/compiler/rustc_type_ir/src/search_graph/stack.rs
+++ b/compiler/rustc_type_ir/src/search_graph/stack.rs
@@ -26,6 +26,10 @@ pub(super) struct StackEntry<X: Cx> {
     /// The available depth of a given goal, immutable.
     pub available_depth: AvailableDepth,
 
+    /// Starts out as `None` and gets set when rerunning this
+    /// goal in case we encounter a cycle.
+    pub provisional_result: Option<X::Result>,
+
     /// The maximum depth required while evaluating this goal.
     pub required_depth: usize,
 
@@ -42,10 +46,6 @@ pub(super) struct StackEntry<X: Cx> {
 
     /// The nested goals of this goal, see the doc comment of the type.
     pub nested_goals: NestedGoals<X>,
-
-    /// Starts out as `None` and gets set when rerunning this
-    /// goal in case we encounter a cycle.
-    pub provisional_result: Option<X::Result>,
 }
 
 #[derive_where(Default; X: Cx)]
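
One detail from the reevaluation hunk above is that pushing the next `StackEntry` now spells out every field instead of using `..stack_entry`. A minimal sketch with made-up field names shows the difference: functional-update syntax keeps compiling when a field is added and silently carries the stale value into the next iteration, while exhaustive construction forces an explicit keep-or-reset decision for each field.

```rust
struct Entry {
    provisional_result: Option<u32>,
    encountered_overflow: bool,
    required_depth: usize,
}

fn rerun_with_struct_update(prev: Entry, result: u32) -> Entry {
    // If a new field is ever added to `Entry`, this silently keeps the
    // previous iteration's value, which may be wrong for a rerun.
    Entry { provisional_result: Some(result), ..prev }
}

fn rerun_explicitly(prev: Entry, result: u32) -> Entry {
    // Adding a field to `Entry` turns this into a compile error until the
    // new field is either kept from `prev` or reset on purpose.
    Entry {
        provisional_result: Some(result),
        encountered_overflow: false,
        required_depth: prev.required_depth,
    }
}
```
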
