[prev in list] [next in list] [prev in thread] [next in thread]
List: cfe-commits
Subject: [PATCH] D122852: [OPENMP] Fix assertion in clang::ASTContext::getTypeInfoImpl
From: Jennifer Yu via Phabricator via cfe-commits <cfe-commits () lists ! llvm ! org>
Date: 2022-03-31 22:06:03
Message-ID: ex19bmXhQeKghjCIDHRbfQ () geopod-ismtpd-1-2
[Download RAW message or body]
jyu2 created this revision.
jyu2 added reviewers: ABataev, jdoerfert, mikerice.
jyu2 added a project: OpenMP.
Herald added subscribers: guansong, yaxunl.
Herald added a project: All.
jyu2 requested review of this revision.
Herald added a subscriber: sstefan1.
The problem is that, when mapping an array section, we currently call
getTypeSizeInChars with BuiltinType::OMPArraySection, causing an assert.
One way to fix this is to use the array element type instead.
Note: even with this fix, the test fails at runtime with libomptarget.so
with the error message: double free or corruption (out)
However, it passes with Intel's customized libomptarget.so.
I am not sure whether it is a clang problem. I will file an issue against
libomptarget after this is checked in, for further investigation.
Repository:
rC Clang
https://reviews.llvm.org/D122852
Files:
clang/lib/CodeGen/CGOpenMPRuntime.cpp
clang/test/OpenMP/target_map_codegen_36.cpp
["D122852.419576.patch" (D122852.419576.patch)]
Index: clang/test/OpenMP/target_map_codegen_36.cpp
===================================================================
--- /dev/null
+++ clang/test/OpenMP/target_map_codegen_36.cpp
@@ -0,0 +1,514 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
+// expected-no-diagnostics
+#ifndef HEADER
+#define HEADER
+
+///==========================================================================///
+// RUN: %clang_cc1 -DCK36 -verify -fopenmp -fopenmp-version=50 \
-fopenmp-targets=powerpc64le-ibm-linux-gnu -x c++ -triple powerpc64le-unknown-unknown \
-emit-llvm %s -o - | FileCheck %s --check-prefix CK36 --check-prefix CK36-64 +// RUN: \
%clang_cc1 -DCK36 -fopenmp -fopenmp-version=50 \
-fopenmp-targets=powerpc64le-ibm-linux-gnu -x c++ -std=c++11 -triple \
powerpc64le-unknown-unknown -emit-pch -o %t %s +// RUN: %clang_cc1 -fopenmp \
-fopenmp-version=50 -fopenmp-targets=powerpc64le-ibm-linux-gnu -x c++ -triple \
powerpc64le-unknown-unknown -std=c++11 -include-pch %t -verify %s -emit-llvm -o - | \
FileCheck %s --check-prefix CK36 --check-prefix CK36-64 +// RUN: %clang_cc1 -DCK36 \
-verify -fopenmp -fopenmp-version=50 -fopenmp-targets=i386-pc-linux-gnu -x c++ \
-triple i386-unknown-unknown -emit-llvm %s -o - | FileCheck %s --check-prefix CK36 \
--check-prefix CK36-32 +// RUN: %clang_cc1 -DCK36 -fopenmp -fopenmp-version=50 \
-fopenmp-targets=i386-pc-linux-gnu -x c++ -std=c++11 -triple i386-unknown-unknown \
-emit-pch -o %t %s +// RUN: %clang_cc1 -fopenmp -fopenmp-version=50 \
-fopenmp-targets=i386-pc-linux-gnu -x c++ -triple i386-unknown-unknown -std=c++11 \
-include-pch %t -verify %s -emit-llvm -o - | FileCheck %s --check-prefix CK36 \
--check-prefix CK36-32 +
+// RUN: %clang_cc1 -DCK36 -verify -fopenmp-simd -fopenmp-version=50 \
-fopenmp-targets=powerpc64le-ibm-linux-gnu -x c++ -triple powerpc64le-unknown-unknown \
-emit-llvm %s -o - | FileCheck --check-prefix SIMD-ONLY32 %s +// RUN: %clang_cc1 \
-DCK36 -fopenmp-simd -fopenmp-version=50 -fopenmp-targets=powerpc64le-ibm-linux-gnu \
-x c++ -std=c++11 -triple powerpc64le-unknown-unknown -emit-pch -o %t %s +// RUN: \
%clang_cc1 -fopenmp-simd -fopenmp-version=50 \
-fopenmp-targets=powerpc64le-ibm-linux-gnu -x c++ -triple powerpc64le-unknown-unknown \
-std=c++11 -include-pch %t -verify %s -emit-llvm -o - | FileCheck --check-prefix \
SIMD-ONLY32 %s +// RUN: %clang_cc1 -DCK36 -verify -fopenmp-simd -fopenmp-version=50 \
-fopenmp-targets=i386-pc-linux-gnu -x c++ -triple i386-unknown-unknown -emit-llvm %s \
-o - | FileCheck --check-prefix SIMD-ONLY32 %s +// RUN: %clang_cc1 -DCK36 \
-fopenmp-simd -fopenmp-version=50 -fopenmp-targets=i386-pc-linux-gnu -x c++ \
-std=c++11 -triple i386-unknown-unknown -emit-pch -o %t %s +// RUN: %clang_cc1 \
-fopenmp-simd -fopenmp-version=50 -fopenmp-targets=i386-pc-linux-gnu -x c++ -triple \
i386-unknown-unknown -std=c++11 -include-pch %t -verify %s -emit-llvm -o - | \
FileCheck --check-prefix SIMD-ONLY32 %s +// SIMD-ONLY32-NOT: {{__kmpc|__tgt}}
+#ifdef CK36
+typedef struct {
+ int a;
+ double *b;
+} C;
+#pragma omp declare mapper(C s) map(to : s.a) map(tofrom : s.b [0:2])
+
+typedef struct {
+ int e;
+ int h;
+ C f;
+} D;
+// CK36-DAG: [[SIZE_TO:@.+]] = private {{.*}}constant [4 x i64] [i64 0, i64 0, i64 \
0, i64 {{48|32}}] +// CK36--DAG: [[MTYPE_TO:@.+]] = {{.+}}constant [4 x i64] [i64 32, \
i64 281474976710659, i64 281474976710659, i64 281474976711171] +
+// CK36-64-LABEL: @_Z4testv(
+// CK36-64-NEXT: entry:
+// CK36-64-NEXT: [[SA:%.*]] = alloca [10 x %struct.D], align 8
+// CK36-64-NEXT: [[X:%.*]] = alloca [2 x double], align 8
+// CK36-64-NEXT: [[Y:%.*]] = alloca [2 x double], align 8
+// CK36-64-NEXT: [[DOTOFFLOAD_BASEPTRS:%.*]] = alloca [4 x i8*], align 8
+// CK36-64-NEXT: [[DOTOFFLOAD_PTRS:%.*]] = alloca [4 x i8*], align 8
+// CK36-64-NEXT: [[DOTOFFLOAD_MAPPERS:%.*]] = alloca [4 x i8*], align 8
+// CK36-64-NEXT: [[DOTOFFLOAD_SIZES:%.*]] = alloca [4 x i64], align 8
+// CK36-64-NEXT: [[SAAA:%.*]] = alloca [10 x %struct.D], align 8
+// CK36-64-NEXT: [[SAA:%.*]] = alloca %struct.D*, align 8
+// CK36-64-NEXT: [[DOTOFFLOAD_BASEPTRS32:%.*]] = alloca [4 x i8*], align 8
+// CK36-64-NEXT: [[DOTOFFLOAD_PTRS33:%.*]] = alloca [4 x i8*], align 8
+// CK36-64-NEXT: [[DOTOFFLOAD_MAPPERS34:%.*]] = alloca [4 x i8*], align 8
+// CK36-64-NEXT: [[DOTOFFLOAD_SIZES35:%.*]] = alloca [4 x i64], align 8
+// CK36-64-NEXT: [[ARRAYIDX:%.*]] = getelementptr inbounds [2 x double], [2 x \
double]* [[X]], i64 0, i64 1 +// CK36-64-NEXT: store double 2.000000e+01, double* \
[[ARRAYIDX]], align 8 +// CK36-64-NEXT: [[ARRAYIDX1:%.*]] = getelementptr inbounds \
[2 x double], [2 x double]* [[Y]], i64 0, i64 1 +// CK36-64-NEXT: store double \
2.000000e+01, double* [[ARRAYIDX1]], align 8 +// CK36-64-NEXT: [[ARRAYIDX2:%.*]] = \
getelementptr inbounds [10 x %struct.D], [10 x %struct.D]* [[SA]], i64 0, i64 1 +// \
CK36-64-NEXT: [[E:%.*]] = getelementptr inbounds [[STRUCT_D:%.*]], %struct.D* \
[[ARRAYIDX2]], i32 0, i32 0 +// CK36-64-NEXT: store i32 111, i32* [[E]], align 8
+// CK36-64-NEXT: [[ARRAYIDX3:%.*]] = getelementptr inbounds [10 x %struct.D], [10 \
x %struct.D]* [[SA]], i64 0, i64 1 +// CK36-64-NEXT: [[F:%.*]] = getelementptr \
inbounds [[STRUCT_D]], %struct.D* [[ARRAYIDX3]], i32 0, i32 2 +// CK36-64-NEXT: \
[[A:%.*]] = getelementptr inbounds [[STRUCT_C:%.*]], %struct.C* [[F]], i32 0, i32 0 \
+// CK36-64-NEXT: store i32 222, i32* [[A]], align 8 +// CK36-64-NEXT: \
[[ARRAYIDX4:%.*]] = getelementptr inbounds [2 x double], [2 x double]* [[X]], i64 0, \
i64 0 +// CK36-64-NEXT: [[ARRAYIDX5:%.*]] = getelementptr inbounds [10 x \
%struct.D], [10 x %struct.D]* [[SA]], i64 0, i64 1 +// CK36-64-NEXT: [[F6:%.*]] = \
getelementptr inbounds [[STRUCT_D]], %struct.D* [[ARRAYIDX5]], i32 0, i32 2 +// \
CK36-64-NEXT: [[B:%.*]] = getelementptr inbounds [[STRUCT_C]], %struct.C* [[F6]], \
i32 0, i32 1 +// CK36-64-NEXT: store double* [[ARRAYIDX4]], double** [[B]], align \
8 +// CK36-64-NEXT: [[ARRAYIDX7:%.*]] = getelementptr inbounds [2 x double], [2 x \
double]* [[Y]], i64 0, i64 0 +// CK36-64-NEXT: [[ARRAYIDX8:%.*]] = getelementptr \
inbounds [10 x %struct.D], [10 x %struct.D]* [[SA]], i64 0, i64 2 +// CK36-64-NEXT: \
[[F9:%.*]] = getelementptr inbounds [[STRUCT_D]], %struct.D* [[ARRAYIDX8]], i32 0, \
i32 2 +// CK36-64-NEXT: [[B10:%.*]] = getelementptr inbounds [[STRUCT_C]], \
%struct.C* [[F9]], i32 0, i32 1 +// CK36-64-NEXT: store double* [[ARRAYIDX7]], \
double** [[B10]], align 8 +// CK36-64-NEXT: [[ARRAYIDX11:%.*]] = getelementptr \
inbounds [10 x %struct.D], [10 x %struct.D]* [[SA]], i64 0, i64 1 +// CK36-64-NEXT: \
[[TMP0:%.*]] = bitcast %struct.D* [[ARRAYIDX11]] to i8* +// CK36-64-NEXT: \
[[TMP1:%.*]] = getelementptr i8, i8* [[TMP0]], i64 23 +// CK36-64-NEXT: \
[[ARRAYIDX12:%.*]] = getelementptr inbounds [10 x %struct.D], [10 x %struct.D]* \
[[SA]], i64 0, i64 1 +// CK36-64-NEXT: [[F13:%.*]] = getelementptr inbounds \
[[STRUCT_D]], %struct.D* [[ARRAYIDX12]], i32 0, i32 2 +// CK36-64-NEXT: \
[[TMP3:%.*]] = bitcast %struct.D* [[ARRAYIDX11]] to i8* +// CK36-64-NEXT: \
[[TMP2:%.*]] = bitcast %struct.C* [[F13]] to i8* +// CK36-64-NEXT: [[TMP4:%.*]] = \
ptrtoint i8* [[TMP2]] to i64 +// CK36-64-NEXT: [[TMP5:%.*]] = ptrtoint i8* \
[[TMP3]] to i64 +// CK36-64-NEXT: [[TMP6:%.*]] = sub i64 [[TMP4]], [[TMP5]]
+// CK36-64-NEXT: [[TMP7:%.*]] = sdiv exact i64 [[TMP6]], ptrtoint (i8* \
getelementptr (i8, i8* null, i32 1) to i64) +// CK36-64-NEXT: [[TMP8:%.*]] = \
getelementptr [[STRUCT_C]], %struct.C* [[F13]], i64 1 +// CK36-64-NEXT: \
[[TMP9:%.*]] = bitcast %struct.C* [[TMP8]] to i8* +// CK36-64-NEXT: [[TMP10:%.*]] \
= getelementptr i8, i8* [[TMP1]], i64 1 +// CK36-64-NEXT: [[TMP11:%.*]] = ptrtoint \
i8* [[TMP10]] to i64 +// CK36-64-NEXT: [[TMP12:%.*]] = ptrtoint i8* [[TMP9]] to \
i64 +// CK36-64-NEXT: [[TMP13:%.*]] = sub i64 [[TMP11]], [[TMP12]]
+// CK36-64-NEXT: [[TMP14:%.*]] = sdiv exact i64 [[TMP13]], ptrtoint (i8* \
getelementptr (i8, i8* null, i32 1) to i64) +// CK36-64-NEXT: [[ARRAYIDX14:%.*]] = \
getelementptr inbounds [10 x %struct.D], [10 x %struct.D]* [[SA]], i64 0, i64 1 +// \
CK36-64-NEXT: [[TMP15:%.*]] = getelementptr [[STRUCT_D]], %struct.D* \
[[ARRAYIDX11]], i32 1 +// CK36-64-NEXT: [[TMP16:%.*]] = bitcast %struct.D* \
[[ARRAYIDX11]] to i8* +// CK36-64-NEXT: [[TMP17:%.*]] = bitcast %struct.D* \
[[TMP15]] to i8* +// CK36-64-NEXT: [[TMP18:%.*]] = ptrtoint i8* [[TMP17]] to i64
+// CK36-64-NEXT: [[TMP19:%.*]] = ptrtoint i8* [[TMP16]] to i64
+// CK36-64-NEXT: [[TMP20:%.*]] = sub i64 [[TMP18]], [[TMP19]]
+// CK36-64-NEXT: [[TMP21:%.*]] = sdiv exact i64 [[TMP20]], ptrtoint (i8* \
getelementptr (i8, i8* null, i32 1) to i64) +// CK36-64-NEXT: [[TMP22:%.*]] = \
bitcast [4 x i64]* [[DOTOFFLOAD_SIZES]] to i8* +// CK36-64-NEXT: call void \
@llvm.memcpy.p0i8.p0i8.i64(i8* align 8 [[TMP22]], i8* align 8 bitcast ([4 x i64]* \
@.offload_sizes to i8*), i64 32, i1 false) +// CK36-64-NEXT: [[TMP23:%.*]] = \
getelementptr inbounds [4 x i8*], [4 x i8*]* [[DOTOFFLOAD_BASEPTRS]], i32 0, i32 0 \
+// CK36-64-NEXT: [[TMP24:%.*]] = bitcast i8** [[TMP23]] to [10 x %struct.D]** +// \
CK36-64-NEXT: store [10 x %struct.D]* [[SA]], [10 x %struct.D]** [[TMP24]], align \
8 +// CK36-64-NEXT: [[TMP25:%.*]] = getelementptr inbounds [4 x i8*], [4 x i8*]* \
[[DOTOFFLOAD_PTRS]], i32 0, i32 0 +// CK36-64-NEXT: [[TMP26:%.*]] = bitcast i8** \
[[TMP25]] to %struct.D** +// CK36-64-NEXT: store %struct.D* [[ARRAYIDX11]], \
%struct.D** [[TMP26]], align 8 +// CK36-64-NEXT: [[TMP27:%.*]] = getelementptr \
inbounds [4 x i64], [4 x i64]* [[DOTOFFLOAD_SIZES]], i32 0, i32 0 +// CK36-64-NEXT: \
store i64 [[TMP21]], i64* [[TMP27]], align 8 +// CK36-64-NEXT: [[TMP28:%.*]] = \
getelementptr inbounds [4 x i8*], [4 x i8*]* [[DOTOFFLOAD_MAPPERS]], i64 0, i64 0 +// \
CK36-64-NEXT: store i8* null, i8** [[TMP28]], align 8 +// CK36-64-NEXT: \
[[TMP29:%.*]] = getelementptr inbounds [4 x i8*], [4 x i8*]* [[DOTOFFLOAD_BASEPTRS]], \
i32 0, i32 1 +// CK36-64-NEXT: [[TMP30:%.*]] = bitcast i8** [[TMP29]] to [10 x \
%struct.D]** +// CK36-64-NEXT: store [10 x %struct.D]* [[SA]], [10 x %struct.D]** \
[[TMP30]], align 8 +// CK36-64-NEXT: [[TMP31:%.*]] = getelementptr inbounds [4 x \
i8*], [4 x i8*]* [[DOTOFFLOAD_PTRS]], i32 0, i32 1 +// CK36-64-NEXT: [[TMP32:%.*]] \
= bitcast i8** [[TMP31]] to %struct.D** +// CK36-64-NEXT: store %struct.D* \
[[ARRAYIDX11]], %struct.D** [[TMP32]], align 8 +// CK36-64-NEXT: [[TMP33:%.*]] = \
getelementptr inbounds [4 x i64], [4 x i64]* [[DOTOFFLOAD_SIZES]], i32 0, i32 1 +// \
CK36-64-NEXT: store i64 [[TMP7]], i64* [[TMP33]], align 8 +// CK36-64-NEXT: \
[[TMP34:%.*]] = getelementptr inbounds [4 x i8*], [4 x i8*]* [[DOTOFFLOAD_MAPPERS]], \
i64 0, i64 1 +// CK36-64-NEXT: store i8* null, i8** [[TMP34]], align 8
+// CK36-64-NEXT: [[TMP35:%.*]] = getelementptr inbounds [4 x i8*], [4 x i8*]* \
[[DOTOFFLOAD_BASEPTRS]], i32 0, i32 2 +// CK36-64-NEXT: [[TMP36:%.*]] = bitcast \
i8** [[TMP35]] to [10 x %struct.D]** +// CK36-64-NEXT: store [10 x %struct.D]* \
[[SA]], [10 x %struct.D]** [[TMP36]], align 8 +// CK36-64-NEXT: [[TMP37:%.*]] = \
getelementptr inbounds [4 x i8*], [4 x i8*]* [[DOTOFFLOAD_PTRS]], i32 0, i32 2 +// \
CK36-64-NEXT: [[TMP38:%.*]] = bitcast i8** [[TMP37]] to %struct.C** +// \
CK36-64-NEXT: store %struct.C* [[TMP8]], %struct.C** [[TMP38]], align 8 +// \
CK36-64-NEXT: [[TMP39:%.*]] = getelementptr inbounds [4 x i64], [4 x i64]* \
[[DOTOFFLOAD_SIZES]], i32 0, i32 2 +// CK36-64-NEXT: store i64 [[TMP14]], i64* \
[[TMP39]], align 8 +// CK36-64-NEXT: [[TMP40:%.*]] = getelementptr inbounds [4 x \
i8*], [4 x i8*]* [[DOTOFFLOAD_MAPPERS]], i64 0, i64 2 +// CK36-64-NEXT: store i8* \
null, i8** [[TMP40]], align 8 +// CK36-64-NEXT: [[TMP41:%.*]] = getelementptr \
inbounds [4 x i8*], [4 x i8*]* [[DOTOFFLOAD_BASEPTRS]], i32 0, i32 3 +// \
CK36-64-NEXT: [[TMP42:%.*]] = bitcast i8** [[TMP41]] to [10 x %struct.D]** +// \
CK36-64-NEXT: store [10 x %struct.D]* [[SA]], [10 x %struct.D]** [[TMP42]], align \
8 +// CK36-64-NEXT: [[TMP43:%.*]] = getelementptr inbounds [4 x i8*], [4 x i8*]* \
[[DOTOFFLOAD_PTRS]], i32 0, i32 3 +// CK36-64-NEXT: [[TMP44:%.*]] = bitcast i8** \
[[TMP43]] to %struct.D** +// CK36-64-NEXT: store %struct.D* [[ARRAYIDX14]], \
%struct.D** [[TMP44]], align 8 +// CK36-64-NEXT: [[TMP45:%.*]] = getelementptr \
inbounds [4 x i8*], [4 x i8*]* [[DOTOFFLOAD_MAPPERS]], i64 0, i64 3 +// CK36-64-NEXT: \
store i8* null, i8** [[TMP45]], align 8 +// CK36-64-NEXT: [[TMP46:%.*]] = \
getelementptr inbounds [4 x i8*], [4 x i8*]* [[DOTOFFLOAD_BASEPTRS]], i32 0, i32 0 \
+// CK36-64-NEXT: [[TMP47:%.*]] = getelementptr inbounds [4 x i8*], [4 x i8*]* \
[[DOTOFFLOAD_PTRS]], i32 0, i32 0 +// CK36-64-NEXT: [[TMP48:%.*]] = getelementptr \
inbounds [4 x i64], [4 x i64]* [[DOTOFFLOAD_SIZES]], i32 0, i32 0 +// CK36-64-NEXT: \
[[TMP49:%.*]] = call i32 @__tgt_target_mapper(%struct.ident_t* @[[GLOB1:[0-9]+]], i64 \
-1, i8* @.__omp_offloading_{{.*}}__Z4testv_{{.*}}.region_id, i32 4, i8** [[TMP46]], \
i8** [[TMP47]], i64* [[TMP48]], i64* getelementptr inbounds ([4 x i64], [4 x i64]* \
@.offload_maptypes, i32 0, i32 0), i8** null, i8** null) +// CK36-64-NEXT: \
[[TMP50:%.*]] = icmp ne i32 [[TMP49]], 0 +// CK36-64-NEXT: br i1 [[TMP50]], label \
[[OMP_OFFLOAD_FAILED:%.*]], label [[OMP_OFFLOAD_CONT:%.*]] +// CK36-64: \
omp_offload.failed: +// CK36-64-NEXT: call void \
@__omp_offloading_{{.*}}__Z4testv_{{.*}}([10 x %struct.D]* [[SA]]) #[[ATTR3:[0-9]+]] \
+// CK36-64-NEXT: br label [[OMP_OFFLOAD_CONT]] +// CK36-64: \
omp_offload.cont: +// CK36-64-NEXT: [[ARRAYDECAY:%.*]] = getelementptr inbounds \
[10 x %struct.D], [10 x %struct.D]* [[SAAA]], i64 0, i64 0 +// CK36-64-NEXT: store \
%struct.D* [[ARRAYDECAY]], %struct.D** [[SAA]], align 8 +// CK36-64-NEXT: \
[[TMP51:%.*]] = load %struct.D*, %struct.D** [[SAA]], align 8 +// CK36-64-NEXT: \
[[ARRAYIDX15:%.*]] = getelementptr inbounds [[STRUCT_D]], %struct.D* [[TMP51]], i64 1 \
+// CK36-64-NEXT: [[E16:%.*]] = getelementptr inbounds [[STRUCT_D]], %struct.D* \
[[ARRAYIDX15]], i32 0, i32 0 +// CK36-64-NEXT: store i32 111, i32* [[E16]], align \
8 +// CK36-64-NEXT: [[TMP52:%.*]] = load %struct.D*, %struct.D** [[SAA]], align 8
+// CK36-64-NEXT: [[ARRAYIDX17:%.*]] = getelementptr inbounds [[STRUCT_D]], \
%struct.D* [[TMP52]], i64 1 +// CK36-64-NEXT: [[F18:%.*]] = getelementptr inbounds \
[[STRUCT_D]], %struct.D* [[ARRAYIDX17]], i32 0, i32 2 +// CK36-64-NEXT: \
[[A19:%.*]] = getelementptr inbounds [[STRUCT_C]], %struct.C* [[F18]], i32 0, i32 0 \
+// CK36-64-NEXT: store i32 222, i32* [[A19]], align 8 +// CK36-64-NEXT: \
[[ARRAYIDX20:%.*]] = getelementptr inbounds [2 x double], [2 x double]* [[X]], i64 0, \
i64 0 +// CK36-64-NEXT: [[TMP53:%.*]] = load %struct.D*, %struct.D** [[SAA]], \
align 8 +// CK36-64-NEXT: [[ARRAYIDX21:%.*]] = getelementptr inbounds \
[[STRUCT_D]], %struct.D* [[TMP53]], i64 1 +// CK36-64-NEXT: [[F22:%.*]] = \
getelementptr inbounds [[STRUCT_D]], %struct.D* [[ARRAYIDX21]], i32 0, i32 2 +// \
CK36-64-NEXT: [[B23:%.*]] = getelementptr inbounds [[STRUCT_C]], %struct.C* \
[[F22]], i32 0, i32 1 +// CK36-64-NEXT: store double* [[ARRAYIDX20]], double** \
[[B23]], align 8 +// CK36-64-NEXT: [[ARRAYIDX24:%.*]] = getelementptr inbounds [2 \
x double], [2 x double]* [[Y]], i64 0, i64 0 +// CK36-64-NEXT: [[TMP54:%.*]] = \
load %struct.D*, %struct.D** [[SAA]], align 8 +// CK36-64-NEXT: [[ARRAYIDX25:%.*]] \
= getelementptr inbounds [[STRUCT_D]], %struct.D* [[TMP54]], i64 2 +// CK36-64-NEXT: \
[[F26:%.*]] = getelementptr inbounds [[STRUCT_D]], %struct.D* [[ARRAYIDX25]], i32 0, \
i32 2 +// CK36-64-NEXT: [[B27:%.*]] = getelementptr inbounds [[STRUCT_C]], \
%struct.C* [[F26]], i32 0, i32 1 +// CK36-64-NEXT: store double* [[ARRAYIDX24]], \
double** [[B27]], align 8 +// CK36-64-NEXT: [[TMP55:%.*]] = load %struct.D*, \
%struct.D** [[SAA]], align 8 +// CK36-64-NEXT: [[TMP56:%.*]] = load %struct.D*, \
%struct.D** [[SAA]], align 8 +// CK36-64-NEXT: [[TMP57:%.*]] = load %struct.D*, \
%struct.D** [[SAA]], align 8 +// CK36-64-NEXT: [[ARRAYIDX28:%.*]] = getelementptr \
inbounds [[STRUCT_D]], %struct.D* [[TMP57]], i64 1 +// CK36-64-NEXT: [[TMP58:%.*]] \
= bitcast %struct.D* [[ARRAYIDX28]] to i8* +// CK36-64-NEXT: [[TMP59:%.*]] = \
getelementptr i8, i8* [[TMP58]], i64 23 +// CK36-64-NEXT: [[TMP60:%.*]] = load \
%struct.D*, %struct.D** [[SAA]], align 8 +// CK36-64-NEXT: [[ARRAYIDX29:%.*]] = \
getelementptr inbounds [[STRUCT_D]], %struct.D* [[TMP60]], i64 1 +// CK36-64-NEXT: \
[[F30:%.*]] = getelementptr inbounds [[STRUCT_D]], %struct.D* [[ARRAYIDX29]], i32 0, \
i32 2 +// CK36-64-NEXT: [[TMP61:%.*]] = bitcast %struct.D* [[ARRAYIDX28]] to i8*
+// CK36-64-NEXT: [[TMP62:%.*]] = bitcast %struct.C* [[F30]] to i8*
+// CK36-64-NEXT: [[TMP63:%.*]] = ptrtoint i8* [[TMP62]] to i64
+// CK36-64-NEXT: [[TMP64:%.*]] = ptrtoint i8* [[TMP61]] to i64
+// CK36-64-NEXT: [[TMP65:%.*]] = sub i64 [[TMP63]], [[TMP64]]
+// CK36-64-NEXT: [[TMP66:%.*]] = sdiv exact i64 [[TMP65]], ptrtoint (i8* \
getelementptr (i8, i8* null, i32 1) to i64) +// CK36-64-NEXT: [[TMP67:%.*]] = \
getelementptr [[STRUCT_C]], %struct.C* [[F30]], i64 1 +// CK36-64-NEXT: \
[[TMP68:%.*]] = bitcast %struct.C* [[TMP67]] to i8* +// CK36-64-NEXT: \
[[TMP69:%.*]] = getelementptr i8, i8* [[TMP59]], i64 1 +// CK36-64-NEXT: \
[[TMP70:%.*]] = ptrtoint i8* [[TMP69]] to i64 +// CK36-64-NEXT: [[TMP71:%.*]] = \
ptrtoint i8* [[TMP68]] to i64 +// CK36-64-NEXT: [[TMP72:%.*]] = sub i64 [[TMP70]], \
[[TMP71]] +// CK36-64-NEXT: [[TMP73:%.*]] = sdiv exact i64 [[TMP72]], ptrtoint \
(i8* getelementptr (i8, i8* null, i32 1) to i64) +// CK36-64-NEXT: [[TMP74:%.*]] = \
load %struct.D*, %struct.D** [[SAA]], align 8 +// CK36-64-NEXT: [[TMP75:%.*]] = \
load %struct.D*, %struct.D** [[SAA]], align 8 +// CK36-64-NEXT: [[ARRAYIDX31:%.*]] \
= getelementptr inbounds [[STRUCT_D]], %struct.D* [[TMP75]], i64 1 +// CK36-64-NEXT: \
[[TMP76:%.*]] = getelementptr [[STRUCT_D]], %struct.D* [[ARRAYIDX28]], i32 1 +// \
CK36-64-NEXT: [[TMP77:%.*]] = bitcast %struct.D* [[ARRAYIDX28]] to i8* +// \
CK36-64-NEXT: [[TMP78:%.*]] = bitcast %struct.D* [[TMP76]] to i8* +// \
CK36-64-NEXT: [[TMP79:%.*]] = ptrtoint i8* [[TMP78]] to i64 +// CK36-64-NEXT: \
[[TMP80:%.*]] = ptrtoint i8* [[TMP77]] to i64 +// CK36-64-NEXT: [[TMP81:%.*]] = \
sub i64 [[TMP79]], [[TMP80]] +// CK36-64-NEXT: [[TMP82:%.*]] = sdiv exact i64 \
[[TMP81]], ptrtoint (i8* getelementptr (i8, i8* null, i32 1) to i64) +// \
CK36-64-NEXT: [[TMP83:%.*]] = bitcast [4 x i64]* [[DOTOFFLOAD_SIZES35]] to i8* +// \
CK36-64-NEXT: call void @llvm.memcpy.p0i8.p0i8.i64(i8* align 8 [[TMP83]], i8* \
align 8 bitcast ([4 x i64]* @.offload_sizes.1 to i8*), i64 32, i1 false) +// \
CK36-64-NEXT: [[TMP84:%.*]] = getelementptr inbounds [4 x i8*], [4 x i8*]* \
[[DOTOFFLOAD_BASEPTRS32]], i32 0, i32 0 +// CK36-64-NEXT: [[TMP85:%.*]] = bitcast \
i8** [[TMP84]] to %struct.D** +// CK36-64-NEXT: store %struct.D* [[TMP56]], \
%struct.D** [[TMP85]], align 8 +// CK36-64-NEXT: [[TMP86:%.*]] = getelementptr \
inbounds [4 x i8*], [4 x i8*]* [[DOTOFFLOAD_PTRS33]], i32 0, i32 0 +// CK36-64-NEXT: \
[[TMP87:%.*]] = bitcast i8** [[TMP86]] to %struct.D** +// CK36-64-NEXT: store \
%struct.D* [[ARRAYIDX28]], %struct.D** [[TMP87]], align 8 +// CK36-64-NEXT: \
[[TMP88:%.*]] = getelementptr inbounds [4 x i64], [4 x i64]* [[DOTOFFLOAD_SIZES35]], \
i32 0, i32 0 +// CK36-64-NEXT: store i64 [[TMP82]], i64* [[TMP88]], align 8
+// CK36-64-NEXT: [[TMP89:%.*]] = getelementptr inbounds [4 x i8*], [4 x i8*]* \
[[DOTOFFLOAD_MAPPERS34]], i64 0, i64 0 +// CK36-64-NEXT: store i8* null, i8** \
[[TMP89]], align 8 +// CK36-64-NEXT: [[TMP90:%.*]] = getelementptr inbounds [4 x \
i8*], [4 x i8*]* [[DOTOFFLOAD_BASEPTRS32]], i32 0, i32 1 +// CK36-64-NEXT: \
[[TMP91:%.*]] = bitcast i8** [[TMP90]] to %struct.D** +// CK36-64-NEXT: store \
%struct.D* [[TMP56]], %struct.D** [[TMP91]], align 8 +// CK36-64-NEXT: \
[[TMP92:%.*]] = getelementptr inbounds [4 x i8*], [4 x i8*]* [[DOTOFFLOAD_PTRS33]], \
i32 0, i32 1 +// CK36-64-NEXT: [[TMP93:%.*]] = bitcast i8** [[TMP92]] to \
%struct.D** +// CK36-64-NEXT: store %struct.D* [[ARRAYIDX28]], %struct.D** \
[[TMP93]], align 8 +// CK36-64-NEXT: [[TMP94:%.*]] = getelementptr inbounds [4 x \
i64], [4 x i64]* [[DOTOFFLOAD_SIZES35]], i32 0, i32 1 +// CK36-64-NEXT: store i64 \
[[TMP66]], i64* [[TMP94]], align 8 +// CK36-64-NEXT: [[TMP95:%.*]] = getelementptr \
inbounds [4 x i8*], [4 x i8*]* [[DOTOFFLOAD_MAPPERS34]], i64 0, i64 1 +// \
CK36-64-NEXT: store i8* null, i8** [[TMP95]], align 8 +// CK36-64-NEXT: \
[[TMP96:%.*]] = getelementptr inbounds [4 x i8*], [4 x i8*]* \
[[DOTOFFLOAD_BASEPTRS32]], i32 0, i32 2 +// CK36-64-NEXT: [[TMP97:%.*]] = bitcast \
i8** [[TMP96]] to %struct.D** +// CK36-64-NEXT: store %struct.D* [[TMP56]], \
%struct.D** [[TMP97]], align 8 +// CK36-64-NEXT: [[TMP98:%.*]] = getelementptr \
inbounds [4 x i8*], [4 x i8*]* [[DOTOFFLOAD_PTRS33]], i32 0, i32 2 +// CK36-64-NEXT: \
[[TMP99:%.*]] = bitcast i8** [[TMP98]] to %struct.C** +// CK36-64-NEXT: store \
%struct.C* [[TMP67]], %struct.C** [[TMP99]], align 8 +// CK36-64-NEXT: \
[[TMP100:%.*]] = getelementptr inbounds [4 x i64], [4 x i64]* [[DOTOFFLOAD_SIZES35]], \
i32 0, i32 2 +// CK36-64-NEXT: store i64 [[TMP73]], i64* [[TMP100]], align 8
+// CK36-64-NEXT: [[TMP101:%.*]] = getelementptr inbounds [4 x i8*], [4 x i8*]* \
[[DOTOFFLOAD_MAPPERS34]], i64 0, i64 2 +// CK36-64-NEXT: store i8* null, i8** \
[[TMP101]], align 8 +// CK36-64-NEXT: [[TMP102:%.*]] = getelementptr inbounds [4 x \
i8*], [4 x i8*]* [[DOTOFFLOAD_BASEPTRS32]], i32 0, i32 3 +// CK36-64-NEXT: \
[[TMP103:%.*]] = bitcast i8** [[TMP102]] to %struct.D** +// CK36-64-NEXT: store \
%struct.D* [[TMP74]], %struct.D** [[TMP103]], align 8 +// CK36-64-NEXT: \
[[TMP104:%.*]] = getelementptr inbounds [4 x i8*], [4 x i8*]* [[DOTOFFLOAD_PTRS33]], \
i32 0, i32 3 +// CK36-64-NEXT: [[TMP105:%.*]] = bitcast i8** [[TMP104]] to \
%struct.D** +// CK36-64-NEXT: store %struct.D* [[ARRAYIDX31]], %struct.D** \
[[TMP105]], align 8 +// CK36-64-NEXT: [[TMP106:%.*]] = getelementptr inbounds [4 x \
i8*], [4 x i8*]* [[DOTOFFLOAD_MAPPERS34]], i64 0, i64 3 +// CK36-64-NEXT: store \
i8* null, i8** [[TMP106]], align 8 +// CK36-64-NEXT: [[TMP107:%.*]] = \
getelementptr inbounds [4 x i8*], [4 x i8*]* [[DOTOFFLOAD_BASEPTRS32]], i32 0, i32 0 \
+// CK36-64-NEXT: [[TMP108:%.*]] = getelementptr inbounds [4 x i8*], [4 x i8*]* \
[[DOTOFFLOAD_PTRS33]], i32 0, i32 0 +// CK36-64-NEXT: [[TMP109:%.*]] = \
getelementptr inbounds [4 x i64], [4 x i64]* [[DOTOFFLOAD_SIZES35]], i32 0, i32 0 +// \
CK36-64-NEXT: [[TMP110:%.*]] = call i32 @__tgt_target_mapper(%struct.ident_t* \
@[[GLOB1]], i64 -1, i8* @.__omp_offloading_{{.*}}__Z4testv_{{.*}}.region_id, i32 4, \
i8** [[TMP107]], i8** [[TMP108]], i64* [[TMP109]], i64* getelementptr inbounds ([4 x \
i64], [4 x i64]* @.offload_maptypes.2, i32 0, i32 0), i8** null, i8** null) +// \
CK36-64-NEXT: [[TMP111:%.*]] = icmp ne i32 [[TMP110]], 0 +// CK36-64-NEXT: br \
i1 [[TMP111]], label [[OMP_OFFLOAD_FAILED36:%.*]], label [[OMP_OFFLOAD_CONT37:%.*]] \
+// CK36-64: omp_offload.failed36: +// CK36-64-NEXT: call void \
@__omp_offloading_{{.*}}_Z4testv_{{.*}}(%struct.D* [[TMP55]]) #[[ATTR3]] +// \
CK36-64-NEXT: br label [[OMP_OFFLOAD_CONT37]] +// CK36-64: \
omp_offload.cont37: +// CK36-64-NEXT: ret void
+//
+// CK36-32-LABEL: @_Z4testv(
+// CK36-32-NEXT: entry:
+// CK36-32-NEXT: [[SA:%.*]] = alloca [10 x %struct.D], align 4
+// CK36-32-NEXT: [[X:%.*]] = alloca [2 x double], align 8
+// CK36-32-NEXT: [[Y:%.*]] = alloca [2 x double], align 8
+// CK36-32-NEXT: [[DOTOFFLOAD_BASEPTRS:%.*]] = alloca [4 x i8*], align 4
+// CK36-32-NEXT: [[DOTOFFLOAD_PTRS:%.*]] = alloca [4 x i8*], align 4
+// CK36-32-NEXT: [[DOTOFFLOAD_MAPPERS:%.*]] = alloca [4 x i8*], align 4
+// CK36-32-NEXT: [[DOTOFFLOAD_SIZES:%.*]] = alloca [4 x i64], align 4
+// CK36-32-NEXT: [[SAAA:%.*]] = alloca [10 x %struct.D], align 4
+// CK36-32-NEXT: [[SAA:%.*]] = alloca %struct.D*, align 4
+// CK36-32-NEXT: [[DOTOFFLOAD_BASEPTRS32:%.*]] = alloca [4 x i8*], align 4
+// CK36-32-NEXT: [[DOTOFFLOAD_PTRS33:%.*]] = alloca [4 x i8*], align 4
+// CK36-32-NEXT: [[DOTOFFLOAD_MAPPERS34:%.*]] = alloca [4 x i8*], align 4
+// CK36-32-NEXT: [[DOTOFFLOAD_SIZES35:%.*]] = alloca [4 x i64], align 4
+// CK36-32-NEXT: [[ARRAYIDX:%.*]] = getelementptr inbounds [2 x double], [2 x \
double]* [[X]], i32 0, i32 1 +// CK36-32-NEXT: store double 2.000000e+01, double* \
[[ARRAYIDX]], align 8 +// CK36-32-NEXT: [[ARRAYIDX1:%.*]] = getelementptr inbounds \
[2 x double], [2 x double]* [[Y]], i32 0, i32 1 +// CK36-32-NEXT: store double \
2.000000e+01, double* [[ARRAYIDX1]], align 8 +// CK36-32-NEXT: [[ARRAYIDX2:%.*]] = \
getelementptr inbounds [10 x %struct.D], [10 x %struct.D]* [[SA]], i32 0, i32 1 +// \
CK36-32-NEXT: [[E:%.*]] = getelementptr inbounds [[STRUCT_D:%.*]], %struct.D* \
[[ARRAYIDX2]], i32 0, i32 0 +// CK36-32-NEXT: store i32 111, i32* [[E]], align 4
+// CK36-32-NEXT: [[ARRAYIDX3:%.*]] = getelementptr inbounds [10 x %struct.D], [10 \
x %struct.D]* [[SA]], i32 0, i32 1 +// CK36-32-NEXT: [[F:%.*]] = getelementptr \
inbounds [[STRUCT_D]], %struct.D* [[ARRAYIDX3]], i32 0, i32 2 +// CK36-32-NEXT: \
[[A:%.*]] = getelementptr inbounds [[STRUCT_C:%.*]], %struct.C* [[F]], i32 0, i32 0 \
+// CK36-32-NEXT: store i32 222, i32* [[A]], align 4 +// CK36-32-NEXT: \
[[ARRAYIDX4:%.*]] = getelementptr inbounds [2 x double], [2 x double]* [[X]], i32 0, \
i32 0 +// CK36-32-NEXT: [[ARRAYIDX5:%.*]] = getelementptr inbounds [10 x \
%struct.D], [10 x %struct.D]* [[SA]], i32 0, i32 1 +// CK36-32-NEXT: [[F6:%.*]] = \
getelementptr inbounds [[STRUCT_D]], %struct.D* [[ARRAYIDX5]], i32 0, i32 2 +// \
CK36-32-NEXT: [[B:%.*]] = getelementptr inbounds [[STRUCT_C]], %struct.C* [[F6]], \
i32 0, i32 1 +// CK36-32-NEXT: store double* [[ARRAYIDX4]], double** [[B]], align \
4 +// CK36-32-NEXT: [[ARRAYIDX7:%.*]] = getelementptr inbounds [2 x double], [2 x \
double]* [[Y]], i32 0, i32 0 +// CK36-32-NEXT: [[ARRAYIDX8:%.*]] = getelementptr \
inbounds [10 x %struct.D], [10 x %struct.D]* [[SA]], i32 0, i32 2 +// CK36-32-NEXT: \
[[F9:%.*]] = getelementptr inbounds [[STRUCT_D]], %struct.D* [[ARRAYIDX8]], i32 0, \
i32 2 +// CK36-32-NEXT: [[B10:%.*]] = getelementptr inbounds [[STRUCT_C]], \
%struct.C* [[F9]], i32 0, i32 1 +// CK36-32-NEXT: store double* [[ARRAYIDX7]], \
double** [[B10]], align 4 +// CK36-32-NEXT: [[ARRAYIDX11:%.*]] = getelementptr \
inbounds [10 x %struct.D], [10 x %struct.D]* [[SA]], i32 0, i32 1 +// CK36-32-NEXT: \
[[TMP0:%.*]] = bitcast %struct.D* [[ARRAYIDX11]] to i8* +// CK36-32-NEXT: \
[[TMP1:%.*]] = getelementptr i8, i8* [[TMP0]], i32 15 +// CK36-32-NEXT: \
[[ARRAYIDX12:%.*]] = getelementptr inbounds [10 x %struct.D], [10 x %struct.D]* \
[[SA]], i32 0, i32 1 +// CK36-32-NEXT: [[F13:%.*]] = getelementptr inbounds \
[[STRUCT_D]], %struct.D* [[ARRAYIDX12]], i32 0, i32 2 +// CK36-32-DAG: \
[[TMP2:%.*]] = bitcast %struct.C* [[F13]] to i8* +// CK36-32-DAG: [[TMP3:%.*]] = \
bitcast %struct.D* [[ARRAYIDX11]] to i8* +// CK36-32-NEXT: [[TMP4:%.*]] = ptrtoint \
i8* [[TMP2]] to i64 +// CK36-32-NEXT: [[TMP5:%.*]] = ptrtoint i8* [[TMP3]] to i64
+// CK36-32-NEXT: [[TMP6:%.*]] = sub i64 [[TMP4]], [[TMP5]]
+// CK36-32-NEXT: [[TMP7:%.*]] = sdiv exact i64 [[TMP6]], ptrtoint (i8* \
getelementptr (i8, i8* null, i32 1) to i64) +// CK36-32-NEXT: [[TMP8:%.*]] = \
getelementptr [[STRUCT_C]], %struct.C* [[F13]], i32 1 +// CK36-32-DAG: \
[[TMP9:%.*]] = getelementptr i8, i8* [[TMP1]], i32 1 +// CK36-32-DAG: \
[[TMP10:%.*]] = bitcast %struct.C* [[TMP8]] to i8* +// CK36-32-NEXT: [[TMP11:%.*]] \
= ptrtoint i8* [[TMP9]] to i64 +// CK36-32-NEXT: [[TMP12:%.*]] = ptrtoint i8* \
[[TMP10]] to i64 +// CK36-32-NEXT: [[TMP13:%.*]] = sub i64 [[TMP11]], [[TMP12]]
+// CK36-32-NEXT: [[TMP14:%.*]] = sdiv exact i64 [[TMP13]], ptrtoint (i8* \
getelementptr (i8, i8* null, i32 1) to i64) +// CK36-32-NEXT: [[ARRAYIDX14:%.*]] = \
getelementptr inbounds [10 x %struct.D], [10 x %struct.D]* [[SA]], i32 0, i32 1 +// \
CK36-32-NEXT: [[TMP15:%.*]] = getelementptr [[STRUCT_D]], %struct.D* \
[[ARRAYIDX11]], i32 1 +// CK36-32-NEXT: [[TMP16:%.*]] = bitcast %struct.D* \
[[ARRAYIDX11]] to i8* +// CK36-32-NEXT: [[TMP17:%.*]] = bitcast %struct.D* \
[[TMP15]] to i8* +// CK36-32-NEXT: [[TMP18:%.*]] = ptrtoint i8* [[TMP17]] to i64
+// CK36-32-NEXT: [[TMP19:%.*]] = ptrtoint i8* [[TMP16]] to i64
+// CK36-32-NEXT: [[TMP20:%.*]] = sub i64 [[TMP18]], [[TMP19]]
+// CK36-32-NEXT: [[TMP21:%.*]] = sdiv exact i64 [[TMP20]], ptrtoint (i8* \
getelementptr (i8, i8* null, i32 1) to i64) +// CK36-32-NEXT: [[TMP22:%.*]] = \
bitcast [4 x i64]* [[DOTOFFLOAD_SIZES]] to i8* +// CK36-32-NEXT: call void \
@llvm.memcpy.p0i8.p0i8.i32(i8* align 4 [[TMP22]], i8* align 4 bitcast ([4 x i64]* \
@.offload_sizes to i8*), i32 32, i1 false) +// CK36-32-NEXT: [[TMP23:%.*]] = \
getelementptr inbounds [4 x i8*], [4 x i8*]* [[DOTOFFLOAD_BASEPTRS]], i32 0, i32 0 \
+// CK36-32-NEXT: [[TMP24:%.*]] = bitcast i8** [[TMP23]] to [10 x %struct.D]** +// \
CK36-32-NEXT: store [10 x %struct.D]* [[SA]], [10 x %struct.D]** [[TMP24]], align \
4 +// CK36-32-NEXT: [[TMP25:%.*]] = getelementptr inbounds [4 x i8*], [4 x i8*]* \
[[DOTOFFLOAD_PTRS]], i32 0, i32 0 +// CK36-32-NEXT: [[TMP26:%.*]] = bitcast i8** \
[[TMP25]] to %struct.D** +// CK36-32-NEXT: store %struct.D* [[ARRAYIDX11]], \
%struct.D** [[TMP26]], align 4 +// CK36-32-NEXT: [[TMP27:%.*]] = getelementptr \
inbounds [4 x i64], [4 x i64]* [[DOTOFFLOAD_SIZES]], i32 0, i32 0 +// CK36-32-NEXT: \
store i64 [[TMP21]], i64* [[TMP27]], align 4 +// CK36-32-NEXT: [[TMP28:%.*]] = \
getelementptr inbounds [4 x i8*], [4 x i8*]* [[DOTOFFLOAD_MAPPERS]], i32 0, i32 0 +// \
CK36-32-NEXT: store i8* null, i8** [[TMP28]], align 4 +// CK36-32-NEXT: \
[[TMP29:%.*]] = getelementptr inbounds [4 x i8*], [4 x i8*]* [[DOTOFFLOAD_BASEPTRS]], \
i32 0, i32 1 +// CK36-32-NEXT: [[TMP30:%.*]] = bitcast i8** [[TMP29]] to [10 x \
%struct.D]** +// CK36-32-NEXT: store [10 x %struct.D]* [[SA]], [10 x %struct.D]** \
[[TMP30]], align 4 +// CK36-32-NEXT: [[TMP31:%.*]] = getelementptr inbounds [4 x \
i8*], [4 x i8*]* [[DOTOFFLOAD_PTRS]], i32 0, i32 1 +// CK36-32-NEXT: [[TMP32:%.*]] \
= bitcast i8** [[TMP31]] to %struct.D** +// CK36-32-NEXT: store %struct.D* \
[[ARRAYIDX11]], %struct.D** [[TMP32]], align 4 +// CK36-32-NEXT: [[TMP33:%.*]] = \
getelementptr inbounds [4 x i64], [4 x i64]* [[DOTOFFLOAD_SIZES]], i32 0, i32 1 +// \
CK36-32-NEXT: store i64 [[TMP7]], i64* [[TMP33]], align 4 +// CK36-32-NEXT: \
[[TMP34:%.*]] = getelementptr inbounds [4 x i8*], [4 x i8*]* [[DOTOFFLOAD_MAPPERS]], \
i32 0, i32 1 +// CK36-32-NEXT: store i8* null, i8** [[TMP34]], align 4
+// CK36-32-NEXT: [[TMP35:%.*]] = getelementptr inbounds [4 x i8*], [4 x i8*]* \
[[DOTOFFLOAD_BASEPTRS]], i32 0, i32 2 +// CK36-32-NEXT: [[TMP36:%.*]] = bitcast \
i8** [[TMP35]] to [10 x %struct.D]** +// CK36-32-NEXT: store [10 x %struct.D]* \
[[SA]], [10 x %struct.D]** [[TMP36]], align 4 +// CK36-32-NEXT: [[TMP37:%.*]] = \
getelementptr inbounds [4 x i8*], [4 x i8*]* [[DOTOFFLOAD_PTRS]], i32 0, i32 2 +// \
CK36-32-NEXT: [[TMP38:%.*]] = bitcast i8** [[TMP37]] to %struct.C** +// \
CK36-32-NEXT: store %struct.C* [[TMP8]], %struct.C** [[TMP38]], align 4 +// \
CK36-32-NEXT: [[TMP39:%.*]] = getelementptr inbounds [4 x i64], [4 x i64]* \
[[DOTOFFLOAD_SIZES]], i32 0, i32 2 +// CK36-32-NEXT: store i64 [[TMP14]], i64* \
[[TMP39]], align 4 +// CK36-32-NEXT: [[TMP40:%.*]] = getelementptr inbounds [4 x \
i8*], [4 x i8*]* [[DOTOFFLOAD_MAPPERS]], i32 0, i32 2 +// CK36-32-NEXT: store i8* \
null, i8** [[TMP40]], align 4 +// CK36-32-NEXT: [[TMP41:%.*]] = getelementptr \
inbounds [4 x i8*], [4 x i8*]* [[DOTOFFLOAD_BASEPTRS]], i32 0, i32 3 +// \
CK36-32-NEXT: [[TMP42:%.*]] = bitcast i8** [[TMP41]] to [10 x %struct.D]** +// \
CK36-32-NEXT: store [10 x %struct.D]* [[SA]], [10 x %struct.D]** [[TMP42]], align \
4 +// CK36-32-NEXT: [[TMP43:%.*]] = getelementptr inbounds [4 x i8*], [4 x i8*]* \
[[DOTOFFLOAD_PTRS]], i32 0, i32 3 +// CK36-32-NEXT: [[TMP44:%.*]] = bitcast i8** \
[[TMP43]] to %struct.D** +// CK36-32-NEXT: store %struct.D* [[ARRAYIDX14]], \
%struct.D** [[TMP44]], align 4 +// CK36-32-NEXT: [[TMP45:%.*]] = getelementptr \
inbounds [4 x i8*], [4 x i8*]* [[DOTOFFLOAD_MAPPERS]], i32 0, i32 3 +// CK36-32-NEXT: \
store i8* null, i8** [[TMP45]], align 4 +// CK36-32-NEXT: [[TMP46:%.*]] = \
getelementptr inbounds [4 x i8*], [4 x i8*]* [[DOTOFFLOAD_BASEPTRS]], i32 0, i32 0 \
+// CK36-32-NEXT: [[TMP47:%.*]] = getelementptr inbounds [4 x i8*], [4 x i8*]* \
[[DOTOFFLOAD_PTRS]], i32 0, i32 0 +// CK36-32-NEXT: [[TMP48:%.*]] = getelementptr \
inbounds [4 x i64], [4 x i64]* [[DOTOFFLOAD_SIZES]], i32 0, i32 0 +// CK36-32-NEXT: \
[[TMP49:%.*]] = call i32 @__tgt_target_mapper(%struct.ident_t* @[[GLOB1:[0-9]+]], i64 \
-1, i8* @.__omp_offloading_{{.*}}__Z4testv_{{.*}}.region_id, i32 4, i8** [[TMP46]], \
i8** [[TMP47]], i64* [[TMP48]], i64* getelementptr inbounds ([4 x i64], [4 x i64]* \
@.offload_maptypes, i32 0, i32 0), i8** null, i8** null) +// CK36-32-NEXT: \
[[TMP50:%.*]] = icmp ne i32 [[TMP49]], 0 +// CK36-32-NEXT: br i1 [[TMP50]], label \
[[OMP_OFFLOAD_FAILED:%.*]], label [[OMP_OFFLOAD_CONT:%.*]] +// CK36-32: \
omp_offload.failed: +// CK36-32-NEXT: call void \
@__omp_offloading_{{.*}}__Z4testv_{{.*}}([10 x %struct.D]* [[SA]]) #[[ATTR3:[0-9]+]] \
+// CK36-32-NEXT: br label [[OMP_OFFLOAD_CONT]] +// CK36-32: \
omp_offload.cont: +// CK36-32-NEXT: [[ARRAYDECAY:%.*]] = getelementptr inbounds \
[10 x %struct.D], [10 x %struct.D]* [[SAAA]], i32 0, i32 0 +// CK36-32-NEXT: store \
%struct.D* [[ARRAYDECAY]], %struct.D** [[SAA]], align 4 +// CK36-32-NEXT: \
[[TMP51:%.*]] = load %struct.D*, %struct.D** [[SAA]], align 4 +// CK36-32-NEXT: \
[[ARRAYIDX15:%.*]] = getelementptr inbounds [[STRUCT_D]], %struct.D* [[TMP51]], i32 1 \
+// CK36-32-NEXT: [[E16:%.*]] = getelementptr inbounds [[STRUCT_D]], %struct.D* \
[[ARRAYIDX15]], i32 0, i32 0 +// CK36-32-NEXT: store i32 111, i32* [[E16]], align \
4 +// CK36-32-NEXT: [[TMP52:%.*]] = load %struct.D*, %struct.D** [[SAA]], align 4
+// CK36-32-NEXT: [[ARRAYIDX17:%.*]] = getelementptr inbounds [[STRUCT_D]], \
%struct.D* [[TMP52]], i32 1 +// CK36-32-NEXT: [[F18:%.*]] = getelementptr inbounds \
[[STRUCT_D]], %struct.D* [[ARRAYIDX17]], i32 0, i32 2 +// CK36-32-NEXT: \
[[A19:%.*]] = getelementptr inbounds [[STRUCT_C]], %struct.C* [[F18]], i32 0, i32 0 \
+// CK36-32-NEXT: store i32 222, i32* [[A19]], align 4 +// CK36-32-NEXT: \
[[ARRAYIDX20:%.*]] = getelementptr inbounds [2 x double], [2 x double]* [[X]], i32 0, \
i32 0 +// CK36-32-NEXT: [[TMP53:%.*]] = load %struct.D*, %struct.D** [[SAA]], \
align 4 +// CK36-32-NEXT: [[ARRAYIDX21:%.*]] = getelementptr inbounds \
[[STRUCT_D]], %struct.D* [[TMP53]], i32 1 +// CK36-32-NEXT: [[F22:%.*]] = \
getelementptr inbounds [[STRUCT_D]], %struct.D* [[ARRAYIDX21]], i32 0, i32 2 +// \
CK36-32-NEXT: [[B23:%.*]] = getelementptr inbounds [[STRUCT_C]], %struct.C* \
[[F22]], i32 0, i32 1 +// CK36-32-NEXT: store double* [[ARRAYIDX20]], double** \
[[B23]], align 4 +// CK36-32-NEXT: [[ARRAYIDX24:%.*]] = getelementptr inbounds [2 \
x double], [2 x double]* [[Y]], i32 0, i32 0 +// CK36-32-NEXT: [[TMP54:%.*]] = \
load %struct.D*, %struct.D** [[SAA]], align 4 +// CK36-32-NEXT: [[ARRAYIDX25:%.*]] \
= getelementptr inbounds [[STRUCT_D]], %struct.D* [[TMP54]], i32 2 +// CK36-32-NEXT: \
[[F26:%.*]] = getelementptr inbounds [[STRUCT_D]], %struct.D* [[ARRAYIDX25]], i32 0, \
i32 2 +// CK36-32-NEXT: [[B27:%.*]] = getelementptr inbounds [[STRUCT_C]], \
%struct.C* [[F26]], i32 0, i32 1 +// CK36-32-NEXT: store double* [[ARRAYIDX24]], \
double** [[B27]], align 4 +// CK36-32-NEXT: [[TMP55:%.*]] = load %struct.D*, \
%struct.D** [[SAA]], align 4 +// CK36-32-NEXT: [[TMP56:%.*]] = load %struct.D*, \
%struct.D** [[SAA]], align 4 +// CK36-32-NEXT: [[TMP57:%.*]] = load %struct.D*, \
%struct.D** [[SAA]], align 4 +// CK36-32-NEXT: [[ARRAYIDX28:%.*]] = getelementptr \
inbounds [[STRUCT_D]], %struct.D* [[TMP57]], i32 1 +// CK36-32-NEXT: [[TMP58:%.*]] \
= bitcast %struct.D* [[ARRAYIDX28]] to i8* +// CK36-32-NEXT: [[TMP59:%.*]] = \
getelementptr i8, i8* [[TMP58]], i32 15 +// CK36-32-NEXT: [[TMP60:%.*]] = load \
%struct.D*, %struct.D** [[SAA]], align 4 +// CK36-32-NEXT: [[ARRAYIDX29:%.*]] = \
getelementptr inbounds [[STRUCT_D]], %struct.D* [[TMP60]], i32 1 +// CK36-32-NEXT: \
[[F30:%.*]] = getelementptr inbounds [[STRUCT_D]], %struct.D* [[ARRAYIDX29]], i32 0, \
i32 2 +// CK36-32-DAG: [[TMP61:%.*]] = bitcast %struct.C* [[F30]] to i8*
+// CK36-32-DAG: [[TMP62:%.*]] = bitcast %struct.D* [[ARRAYIDX28]] to i8*
+// CK36-32-NEXT: [[TMP63:%.*]] = ptrtoint i8* [[TMP61]] to i64
+// CK36-32-NEXT: [[TMP64:%.*]] = ptrtoint i8* [[TMP62]] to i64
+// CK36-32-NEXT: [[TMP65:%.*]] = sub i64 [[TMP63]], [[TMP64]]
+// CK36-32-NEXT: [[TMP66:%.*]] = sdiv exact i64 [[TMP65]], ptrtoint (i8* \
getelementptr (i8, i8* null, i32 1) to i64) +// CK36-32-NEXT: [[TMP67:%.*]] = \
getelementptr [[STRUCT_C]], %struct.C* [[F30]], i32 1 +// CK36-32-DAG: \
[[TMP68:%.*]] = getelementptr i8, i8* [[TMP59]], i32 1 +// CK36-32-DAG: \
[[TMP69:%.*]] = bitcast %struct.C* [[TMP67]] to i8* +// CK36-32-NEXT: \
[[TMP70:%.*]] = ptrtoint i8* [[TMP68]] to i64 +// CK36-32-NEXT: [[TMP71:%.*]] = \
ptrtoint i8* [[TMP69]] to i64 +// CK36-32-NEXT: [[TMP72:%.*]] = sub i64 [[TMP70]], \
[[TMP71]] +// CK36-32-NEXT: [[TMP73:%.*]] = sdiv exact i64 [[TMP72]], ptrtoint \
(i8* getelementptr (i8, i8* null, i32 1) to i64) +// CK36-32-NEXT: [[TMP74:%.*]] = \
load %struct.D*, %struct.D** [[SAA]], align 4 +// CK36-32-NEXT: [[TMP75:%.*]] = \
load %struct.D*, %struct.D** [[SAA]], align 4 +// CK36-32-NEXT: [[ARRAYIDX31:%.*]] \
= getelementptr inbounds [[STRUCT_D]], %struct.D* [[TMP75]], i32 1 +// CK36-32-NEXT: \
[[TMP76:%.*]] = getelementptr [[STRUCT_D]], %struct.D* [[ARRAYIDX28]], i32 1 +// \
CK36-32-NEXT: [[TMP77:%.*]] = bitcast %struct.D* [[ARRAYIDX28]] to i8* +// \
CK36-32-NEXT: [[TMP78:%.*]] = bitcast %struct.D* [[TMP76]] to i8* +// \
CK36-32-NEXT: [[TMP79:%.*]] = ptrtoint i8* [[TMP78]] to i64 +// CK36-32-NEXT: \
[[TMP80:%.*]] = ptrtoint i8* [[TMP77]] to i64 +// CK36-32-NEXT: [[TMP81:%.*]] = \
sub i64 [[TMP79]], [[TMP80]] +// CK36-32-NEXT: [[TMP82:%.*]] = sdiv exact i64 \
[[TMP81]], ptrtoint (i8* getelementptr (i8, i8* null, i32 1) to i64) +// \
CK36-32-NEXT: [[TMP83:%.*]] = bitcast [4 x i64]* [[DOTOFFLOAD_SIZES35]] to i8* +// \
CK36-32-NEXT: call void @llvm.memcpy.p0i8.p0i8.i32(i8* align 4 [[TMP83]], i8* \
align 4 bitcast ([4 x i64]* @.offload_sizes.1 to i8*), i32 32, i1 false) +// \
CK36-32-NEXT: [[TMP84:%.*]] = getelementptr inbounds [4 x i8*], [4 x i8*]* \
[[DOTOFFLOAD_BASEPTRS32]], i32 0, i32 0 +// CK36-32-NEXT: [[TMP85:%.*]] = bitcast \
i8** [[TMP84]] to %struct.D** +// CK36-32-NEXT: store %struct.D* [[TMP56]], \
%struct.D** [[TMP85]], align 4 +// CK36-32-NEXT: [[TMP86:%.*]] = getelementptr \
inbounds [4 x i8*], [4 x i8*]* [[DOTOFFLOAD_PTRS33]], i32 0, i32 0 +// CK36-32-NEXT: \
[[TMP87:%.*]] = bitcast i8** [[TMP86]] to %struct.D** +// CK36-32-NEXT: store \
%struct.D* [[ARRAYIDX28]], %struct.D** [[TMP87]], align 4 +// CK36-32-NEXT: \
[[TMP88:%.*]] = getelementptr inbounds [4 x i64], [4 x i64]* [[DOTOFFLOAD_SIZES35]], \
i32 0, i32 0 +// CK36-32-NEXT: store i64 [[TMP82]], i64* [[TMP88]], align 4
+// CK36-32-NEXT: [[TMP89:%.*]] = getelementptr inbounds [4 x i8*], [4 x i8*]* \
[[DOTOFFLOAD_MAPPERS34]], i32 0, i32 0 +// CK36-32-NEXT: store i8* null, i8** \
[[TMP89]], align 4 +// CK36-32-NEXT: [[TMP90:%.*]] = getelementptr inbounds [4 x \
i8*], [4 x i8*]* [[DOTOFFLOAD_BASEPTRS32]], i32 0, i32 1 +// CK36-32-NEXT: \
[[TMP91:%.*]] = bitcast i8** [[TMP90]] to %struct.D** +// CK36-32-NEXT: store \
%struct.D* [[TMP56]], %struct.D** [[TMP91]], align 4 +// CK36-32-NEXT: \
[[TMP92:%.*]] = getelementptr inbounds [4 x i8*], [4 x i8*]* [[DOTOFFLOAD_PTRS33]], \
i32 0, i32 1 +// CK36-32-NEXT: [[TMP93:%.*]] = bitcast i8** [[TMP92]] to \
%struct.D** +// CK36-32-NEXT: store %struct.D* [[ARRAYIDX28]], %struct.D** \
[[TMP93]], align 4 +// CK36-32-NEXT: [[TMP94:%.*]] = getelementptr inbounds [4 x \
i64], [4 x i64]* [[DOTOFFLOAD_SIZES35]], i32 0, i32 1 +// CK36-32-NEXT: store i64 \
[[TMP66]], i64* [[TMP94]], align 4 +// CK36-32-NEXT: [[TMP95:%.*]] = getelementptr \
inbounds [4 x i8*], [4 x i8*]* [[DOTOFFLOAD_MAPPERS34]], i32 0, i32 1 +// \
CK36-32-NEXT: store i8* null, i8** [[TMP95]], align 4 +// CK36-32-NEXT: \
[[TMP96:%.*]] = getelementptr inbounds [4 x i8*], [4 x i8*]* \
[[DOTOFFLOAD_BASEPTRS32]], i32 0, i32 2 +// CK36-32-NEXT: [[TMP97:%.*]] = bitcast \
i8** [[TMP96]] to %struct.D** +// CK36-32-NEXT: store %struct.D* [[TMP56]], \
%struct.D** [[TMP97]], align 4 +// CK36-32-NEXT: [[TMP98:%.*]] = getelementptr \
inbounds [4 x i8*], [4 x i8*]* [[DOTOFFLOAD_PTRS33]], i32 0, i32 2 +// CK36-32-NEXT: \
[[TMP99:%.*]] = bitcast i8** [[TMP98]] to %struct.C** +// CK36-32-NEXT: store \
%struct.C* [[TMP67]], %struct.C** [[TMP99]], align 4 +// CK36-32-NEXT: \
[[TMP100:%.*]] = getelementptr inbounds [4 x i64], [4 x i64]* [[DOTOFFLOAD_SIZES35]], \
i32 0, i32 2 +// CK36-32-NEXT: store i64 [[TMP73]], i64* [[TMP100]], align 4
+// CK36-32-NEXT: [[TMP101:%.*]] = getelementptr inbounds [4 x i8*], [4 x i8*]* \
[[DOTOFFLOAD_MAPPERS34]], i32 0, i32 2 +// CK36-32-NEXT: store i8* null, i8** \
[[TMP101]], align 4 +// CK36-32-NEXT: [[TMP102:%.*]] = getelementptr inbounds [4 x \
i8*], [4 x i8*]* [[DOTOFFLOAD_BASEPTRS32]], i32 0, i32 3 +// CK36-32-NEXT: \
[[TMP103:%.*]] = bitcast i8** [[TMP102]] to %struct.D** +// CK36-32-NEXT: store \
%struct.D* [[TMP74]], %struct.D** [[TMP103]], align 4 +// CK36-32-NEXT: \
[[TMP104:%.*]] = getelementptr inbounds [4 x i8*], [4 x i8*]* [[DOTOFFLOAD_PTRS33]], \
i32 0, i32 3 +// CK36-32-NEXT: [[TMP105:%.*]] = bitcast i8** [[TMP104]] to \
%struct.D** +// CK36-32-NEXT: store %struct.D* [[ARRAYIDX31]], %struct.D** \
[[TMP105]], align 4 +// CK36-32-NEXT: [[TMP106:%.*]] = getelementptr inbounds [4 x \
i8*], [4 x i8*]* [[DOTOFFLOAD_MAPPERS34]], i32 0, i32 3 +// CK36-32-NEXT: store \
i8* null, i8** [[TMP106]], align 4 +// CK36-32-NEXT: [[TMP107:%.*]] = \
getelementptr inbounds [4 x i8*], [4 x i8*]* [[DOTOFFLOAD_BASEPTRS32]], i32 0, i32 0 \
+// CK36-32-NEXT: [[TMP108:%.*]] = getelementptr inbounds [4 x i8*], [4 x i8*]* \
[[DOTOFFLOAD_PTRS33]], i32 0, i32 0 +// CK36-32-NEXT: [[TMP109:%.*]] = \
getelementptr inbounds [4 x i64], [4 x i64]* [[DOTOFFLOAD_SIZES35]], i32 0, i32 0 +// \
CK36-32-NEXT: [[TMP110:%.*]] = call i32 @__tgt_target_mapper(%struct.ident_t* \
@[[GLOB1]], i64 -1, i8* @.__omp_offloading_{{.*}}__Z4testv_{{.*}}.region_id, i32 4, \
i8** [[TMP107]], i8** [[TMP108]], i64* [[TMP109]], i64* getelementptr inbounds ([4 x \
i64], [4 x i64]* @.offload_maptypes.2, i32 0, i32 0), i8** null, i8** null) +// \
CK36-32-NEXT: [[TMP111:%.*]] = icmp ne i32 [[TMP110]], 0 +// CK36-32-NEXT: br \
i1 [[TMP111]], label [[OMP_OFFLOAD_FAILED36:%.*]], label [[OMP_OFFLOAD_CONT37:%.*]] \
+// CK36-32: omp_offload.failed36: +// CK36-32-NEXT: call void \
@__omp_offloading_{{.*}}__Z4testv_{{.*}}(%struct.D* [[TMP55]]) #[[ATTR3]] +// \
CK36-32-NEXT: br label [[OMP_OFFLOAD_CONT37]] +// CK36-32: \
omp_offload.cont37: +// CK36-32-NEXT: ret void
+//
+// Regression test for D122852: mapping an array section of structs used to
+// call getTypeSizeInChars() on BuiltinType::OMPArraySection and assert in
+// clang::ASTContext::getTypeInfoImpl; the fix computes the mapped size from
+// the section's element type instead.
+void test() {
+ // Case 1: section of a fixed-size array of aggregates (sa[1:2] maps the
+ // two elements sa[1] and sa[2]).
+ D sa[10];
+ double x[2], y[2];
+ y[1] = x[1] = 20;
+
+ // Seed distinct host-side values so the tofrom mapping is observable.
+ sa[1].e = 111;
+ sa[1].f.a = 222;
+ // f.b points into host arrays x/y; the pointee is reachable on the device
+ // through the mapped struct members.
+ sa[1].f.b = &x[0];
+ sa[2].f.b = &y[0];
+
+#pragma omp target map(tofrom : sa[1:2])
+ {
+ // Mutate both mapped elements (indices 1 and 2) inside the region.
+ sa[1].e = 166;
+ sa[1].f.a = 177;
+ sa[1].f.b[1] = 140.;
+ sa[2].e = 266;
+ sa[2].f.a = 277;
+ sa[2].f.b[1] = 240.;
+ }
+
+ // Case 2: the same section shape taken through a pointer base (saa[1:2]),
+ // which exercises the getPointeeType() branch of the fix rather than the
+ // array-element-type branch.
+ D saaa[10];
+ D *saa = saaa;
+ saa[1].e = 111;
+ saa[1].f.a = 222;
+ saa[1].f.b = &x[0];
+ saa[2].f.b = &y[0];
+
+#pragma omp target map(tofrom : saa[1:2])
+ {
+ saa[1].e = 166;
+ saa[1].f.a = 177;
+ saa[1].f.b[1] = 140.;
+ saa[2].e = 266;
+ saa[2].f.a = 277;
+ saa[2].f.b[1] = 240.;
+ }
+}
+
+
+#endif // CK36
+#endif
Index: clang/lib/CodeGen/CGOpenMPRuntime.cpp
===================================================================
--- clang/lib/CodeGen/CGOpenMPRuntime.cpp
+++ clang/lib/CodeGen/CGOpenMPRuntime.cpp
@@ -8068,8 +8068,20 @@
// Mark the whole struct as the struct that requires allocation on the
// device.
PartialStruct.LowestElem = {0, LowestElem};
- CharUnits TypeSize = CGF.getContext().getTypeSizeInChars(
- I->getAssociatedExpression()->getType());
+ // For array section, size of array of element type is used.
+ QualType CanonType =
+ I->getAssociatedExpression()->getType().getCanonicalType();
+ if (CanonType->isSpecificBuiltinType(BuiltinType::OMPArraySection)) {
+ const auto *OASE = cast<OMPArraySectionExpr>(
+ I->getAssociatedExpression()->IgnoreParenImpCasts());
+ QualType BaseType =
+ OMPArraySectionExpr::getBaseOriginalType(OASE->getBase());
+ if (const auto *ATy = BaseType->getAsArrayTypeUnsafe())
+ CanonType = ATy->getElementType();
+ else
+ CanonType = BaseType->getPointeeType();
+ }
+ CharUnits TypeSize = CGF.getContext().getTypeSizeInChars(CanonType);
Address HB = CGF.Builder.CreateConstGEP(
CGF.Builder.CreatePointerBitCastOrAddrSpaceCast(
LowestElem, CGF.VoidPtrTy, CGF.Int8Ty),
[Attachment #4 (text/plain)]
_______________________________________________
cfe-commits mailing list
cfe-commits@lists.llvm.org
https://lists.llvm.org/cgi-bin/mailman/listinfo/cfe-commits
[prev in list] [next in list] [prev in thread] [next in thread]
Configure |
About |
News |
Add a list |
Sponsored by KoreLogic