@@ -8861,7 +8861,8 @@ build_omp_struct_comp_nodes (enum tree_code code, tree grp_start, tree grp_end,
static tree
extract_base_bit_offset (tree base, poly_int64 *bitposp,
- poly_offset_int *poffsetp)
+ poly_offset_int *poffsetp,
+ bool *variable_offset)
{
tree offset;
poly_int64 bitsize, bitpos;
@@ -8879,10 +8880,13 @@ extract_base_bit_offset (tree base, poly_int64 *bitposp,
if (offset && poly_int_tree_p (offset))
{
poffset = wi::to_poly_offset (offset);
- offset = NULL_TREE;
+ *variable_offset = false;
}
else
- poffset = 0;
+ {
+ poffset = 0;
+ *variable_offset = (offset != NULL_TREE);
+ }
if (maybe_ne (bitpos, 0))
poffset += bits_to_bytes_round_down (bitpos);
@@ -9038,6 +9042,7 @@ omp_get_attachment (omp_mapping_group *grp)
return error_mark_node;
case GOMP_MAP_STRUCT:
+ case GOMP_MAP_STRUCT_UNORD:
case GOMP_MAP_FORCE_DEVICEPTR:
case GOMP_MAP_DEVICE_RESIDENT:
case GOMP_MAP_LINK:
@@ -9123,6 +9128,7 @@ omp_group_last (tree *start_p)
break;
case GOMP_MAP_STRUCT:
+ case GOMP_MAP_STRUCT_UNORD:
{
unsigned HOST_WIDE_INT num_mappings
= tree_to_uhwi (OMP_CLAUSE_SIZE (c));
@@ -9282,6 +9288,7 @@ omp_group_base (omp_mapping_group *grp, unsigned int *chained,
return error_mark_node;
case GOMP_MAP_STRUCT:
+ case GOMP_MAP_STRUCT_UNORD:
{
unsigned HOST_WIDE_INT num_mappings
= tree_to_uhwi (OMP_CLAUSE_SIZE (node));
@@ -9917,7 +9924,8 @@ omp_directive_maps_explicitly (hash_map<tree_operand_hash,
/* We might be called during omp_build_struct_sibling_lists, when
GOMP_MAP_STRUCT might have been inserted at the start of the group.
Skip over that, and also possibly the node after it. */
- if (OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_STRUCT)
+ if (OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_STRUCT
+ || OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_STRUCT_UNORD)
{
grp_first = OMP_CLAUSE_CHAIN (grp_first);
if (OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_FIRSTPRIVATE_POINTER
@@ -10619,7 +10627,9 @@ omp_accumulate_sibling_list (enum omp_region_type region_type,
}
}
- tree base = extract_base_bit_offset (ocd, &cbitpos, &coffset);
+ bool variable_offset;
+ tree base
+ = extract_base_bit_offset (ocd, &cbitpos, &coffset, &variable_offset);
int base_token;
for (base_token = addr_tokens.length () - 1; base_token >= 0; base_token--)
@@ -10655,14 +10665,20 @@ omp_accumulate_sibling_list (enum omp_region_type region_type,
if (!struct_map_to_clause || struct_map_to_clause->get (base) == NULL)
{
- tree l = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
-
- OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
- OMP_CLAUSE_DECL (l) = unshare_expr (base);
- OMP_CLAUSE_SIZE (l) = size_int (1);
+ enum gomp_map_kind str_kind = GOMP_MAP_STRUCT;
if (struct_map_to_clause == NULL)
struct_map_to_clause = new hash_map<tree_operand_hash, tree>;
+
+ if (variable_offset)
+ str_kind = GOMP_MAP_STRUCT_UNORD;
+
+ tree l = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
+
+ OMP_CLAUSE_SET_MAP_KIND (l, str_kind);
+ OMP_CLAUSE_DECL (l) = unshare_expr (base);
+ OMP_CLAUSE_SIZE (l) = size_int (1);
+
struct_map_to_clause->put (base, l);
/* On first iterating through the clause list, we insert the struct node
@@ -10899,6 +10915,11 @@ omp_accumulate_sibling_list (enum omp_region_type region_type,
{
tree *osc = struct_map_to_clause->get (base);
tree *sc = NULL, *scp = NULL;
+ bool unordered = false;
+
+ if (osc && OMP_CLAUSE_MAP_KIND (*osc) == GOMP_MAP_STRUCT_UNORD)
+ unordered = true;
+
unsigned HOST_WIDE_INT i, elems = tree_to_uhwi (OMP_CLAUSE_SIZE (*osc));
sc = &OMP_CLAUSE_CHAIN (*osc);
/* The struct mapping might be immediately followed by a
@@ -10939,12 +10960,20 @@ omp_accumulate_sibling_list (enum omp_region_type region_type,
== REFERENCE_TYPE))
sc_decl = TREE_OPERAND (sc_decl, 0);
- tree base2 = extract_base_bit_offset (sc_decl, &bitpos, &offset);
+ bool variable_offset2;
+ tree base2 = extract_base_bit_offset (sc_decl, &bitpos, &offset,
+ &variable_offset2);
if (!base2 || !operand_equal_p (base2, base, 0))
break;
if (scp)
continue;
- if ((region_type & ORT_ACC) != 0)
+ if (variable_offset2)
+ {
+ OMP_CLAUSE_SET_MAP_KIND (*osc, GOMP_MAP_STRUCT_UNORD);
+ unordered = true;
+ break;
+ }
+ else if ((region_type & ORT_ACC) != 0)
{
/* For OpenACC, allow (ignore) duplicate struct accesses in
the middle of a mapping clause, e.g. "mystruct->foo" in:
@@ -10976,6 +11005,15 @@ omp_accumulate_sibling_list (enum omp_region_type region_type,
}
}
+ /* If this is an unordered struct, just insert the new element at the
+ end of the list. */
+ if (unordered)
+ {
+ for (; i < elems; i++)
+ sc = &OMP_CLAUSE_CHAIN (*sc);
+ scp = NULL;
+ }
+
OMP_CLAUSE_SIZE (*osc)
= size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc), size_one_node);
@@ -11363,14 +11401,42 @@ omp_build_struct_sibling_lists (enum tree_code code,
/* This is the first sorted node in the struct sibling list. Use it
to recalculate the correct bias to use.
- (&first_node - attach_decl). */
- tree first_node = OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (attach));
- first_node = build_fold_addr_expr (first_node);
- first_node = fold_convert (ptrdiff_type_node, first_node);
+	 (&first_node - attach_decl).
+	 For GOMP_MAP_STRUCT_UNORD the sibling elements are not sorted by
+	 address, so we must compute the minimum address over all mapped
+	 elements rather than simply taking the first one.  */
+ tree lowest_addr;
+ if (OMP_CLAUSE_MAP_KIND (struct_node) == GOMP_MAP_STRUCT_UNORD)
+ {
+ tree first_node = OMP_CLAUSE_CHAIN (attach);
+ unsigned HOST_WIDE_INT num_mappings
+ = tree_to_uhwi (OMP_CLAUSE_SIZE (struct_node));
+ lowest_addr = OMP_CLAUSE_DECL (first_node);
+ lowest_addr = build_fold_addr_expr (lowest_addr);
+ lowest_addr = fold_convert (pointer_sized_int_node, lowest_addr);
+ tree next_node = OMP_CLAUSE_CHAIN (first_node);
+ while (num_mappings > 1)
+ {
+ tree tmp = OMP_CLAUSE_DECL (next_node);
+ tmp = build_fold_addr_expr (tmp);
+ tmp = fold_convert (pointer_sized_int_node, tmp);
+ lowest_addr = fold_build2 (MIN_EXPR, pointer_sized_int_node,
+ lowest_addr, tmp);
+ next_node = OMP_CLAUSE_CHAIN (next_node);
+ num_mappings--;
+ }
+ lowest_addr = fold_convert (ptrdiff_type_node, lowest_addr);
+ }
+ else
+ {
+ tree first_node = OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (attach));
+ first_node = build_fold_addr_expr (first_node);
+ lowest_addr = fold_convert (ptrdiff_type_node, first_node);
+ }
tree attach_decl = OMP_CLAUSE_DECL (attach);
attach_decl = fold_convert (ptrdiff_type_node, attach_decl);
OMP_CLAUSE_SIZE (attach)
- = fold_build2 (MINUS_EXPR, ptrdiff_type_node, first_node,
+ = fold_build2 (MINUS_EXPR, ptrdiff_type_node, lowest_addr,
attach_decl);
/* Remove GOMP_MAP_ATTACH node from after struct node. */
@@ -11918,7 +11984,8 @@ gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
GOVD_FIRSTPRIVATE | GOVD_SEEN);
}
- if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
+ if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
+ || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT_UNORD)
&& (addr_tokens[0]->type == STRUCTURE_BASE
|| addr_tokens[0]->type == ARRAY_BASE)
&& addr_tokens[0]->u.structure_base_kind == BASE_DECL)
@@ -13505,7 +13572,8 @@ gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
}
}
}
- if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
+ if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
+ || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT_UNORD)
&& (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA))
{
remove = true;
@@ -13549,7 +13617,8 @@ gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
in target block and none of the mapping has always modifier,
remove all the struct element mappings, which immediately
follow the GOMP_MAP_STRUCT map clause. */
- if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
+ if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
+ || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT_UNORD)
{
HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
while (cnt--)
@@ -16328,6 +16397,7 @@ gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
have_clause = false;
break;
case GOMP_MAP_STRUCT:
+ case GOMP_MAP_STRUCT_UNORD:
have_clause = false;
break;
default:
@@ -12780,6 +12780,7 @@ lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
case GOMP_MAP_FIRSTPRIVATE_POINTER:
case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
case GOMP_MAP_STRUCT:
+ case GOMP_MAP_STRUCT_UNORD:
case GOMP_MAP_ALWAYS_POINTER:
case GOMP_MAP_ATTACH:
case GOMP_MAP_DETACH:
@@ -967,6 +967,9 @@ dump_omp_clause (pretty_printer *pp, tree clause, int spc, dump_flags_t flags)
case GOMP_MAP_STRUCT:
pp_string (pp, "struct");
break;
+ case GOMP_MAP_STRUCT_UNORD:
+ pp_string (pp, "struct_unord");
+ break;
case GOMP_MAP_ALWAYS_POINTER:
pp_string (pp, "always_pointer");
break;
@@ -138,6 +138,12 @@ enum gomp_map_kind
(address of the last adjacent entry plus its size). */
GOMP_MAP_STRUCT = (GOMP_MAP_FLAG_SPECIAL_2
| GOMP_MAP_FLAG_SPECIAL | 0),
+  /* As GOMP_MAP_STRUCT, but the adjacent entries that follow are not sorted
+     by address.  At present, this is used only to diagnose incorrect usage
+     of variable indices into arrays of structs.  */
+ GOMP_MAP_STRUCT_UNORD = (GOMP_MAP_FLAG_SPECIAL_3
+ | GOMP_MAP_FLAG_SPECIAL_2
+ | GOMP_MAP_FLAG_SPECIAL | 0),
/* On a location of a pointer/reference that is assumed to be already mapped
earlier, store the translated address of the preceeding mapping.
No refcount is bumped by this, and the store is done unconditionally. */
@@ -1028,6 +1028,7 @@ find_group_last (int pos, size_t mapnum, size_t *sizes, unsigned short *kinds)
break;
case GOMP_MAP_STRUCT:
+ case GOMP_MAP_STRUCT_UNORD:
pos += sizes[pos];
break;
@@ -1088,6 +1089,7 @@ goacc_enter_data_internal (struct gomp_device_descr *acc_dev, size_t mapnum,
switch (kinds[i] & 0xff)
{
case GOMP_MAP_STRUCT:
+ case GOMP_MAP_STRUCT_UNORD:
{
size = (uintptr_t) hostaddrs[group_last] + sizes[group_last]
- (uintptr_t) hostaddrs[i];
@@ -1297,6 +1299,7 @@ goacc_exit_data_internal (struct gomp_device_descr *acc_dev, size_t mapnum,
break;
case GOMP_MAP_STRUCT:
+ case GOMP_MAP_STRUCT_UNORD:
/* Skip the 'GOMP_MAP_STRUCT' itself, and use the regular processing
for all its entries. This special handling exists for GCC 10.1
compatibility; afterwards, we're not generating these no-op
@@ -1435,7 +1438,8 @@ GOACC_enter_exit_data (int flags_m, size_t mapnum, void **hostaddrs,
if (kind == GOMP_MAP_POINTER
|| kind == GOMP_MAP_TO_PSET
- || kind == GOMP_MAP_STRUCT)
+ || kind == GOMP_MAP_STRUCT
+ || kind == GOMP_MAP_STRUCT_UNORD)
continue;
if (kind == GOMP_MAP_FORCE_ALLOC
@@ -1061,7 +1061,8 @@ gomp_map_vars_internal (struct gomp_device_descr *devicep,
tgt->list[i].offset = 0;
continue;
}
- else if ((kind & typemask) == GOMP_MAP_STRUCT)
+ else if ((kind & typemask) == GOMP_MAP_STRUCT
+ || (kind & typemask) == GOMP_MAP_STRUCT_UNORD)
{
size_t first = i + 1;
size_t last = i + sizes[i];
@@ -1440,6 +1441,20 @@ gomp_map_vars_internal (struct gomp_device_descr *devicep,
tgt->list[i].offset = OFFSET_INLINED;
}
continue;
+ case GOMP_MAP_STRUCT_UNORD:
+ if (sizes[i] > 1)
+ {
+ void *first = hostaddrs[i + 1];
+ for (size_t j = i + 1; j < i + sizes[i]; j++)
+ if (hostaddrs[j + 1] != first)
+ {
+ gomp_mutex_unlock (&devicep->lock);
+ gomp_fatal ("Mapped array elements must be the "
+ "same (%p vs %p)", first,
+ hostaddrs[j + 1]);
+ }
+ }
+ /* Fallthrough. */
case GOMP_MAP_STRUCT:
first = i + 1;
last = i + sizes[i];
@@ -1557,9 +1572,40 @@ gomp_map_vars_internal (struct gomp_device_descr *devicep,
k->host_end = k->host_start + sizeof (void *);
splay_tree_key n = splay_tree_lookup (mem_map, k);
if (n && n->refcount != REFCOUNT_LINK)
- gomp_map_vars_existing (devicep, aq, n, k, &tgt->list[i],
- kind & typemask, false, implicit, cbufp,
- refcount_set);
+ {
+ if (field_tgt_clear != FIELD_TGT_EMPTY)
+ {
+ /* For this condition to be true, there must be a
+ duplicate struct element mapping. This can happen with
+ GOMP_MAP_STRUCT_UNORD mappings, for example. */
+ tgt->list[i].key = n;
+ if (openmp_p)
+ {
+ assert ((n->refcount & REFCOUNT_STRUCTELEM) != 0);
+ assert (field_tgt_structelem_first != NULL);
+
+ if (i == field_tgt_clear)
+ {
+ n->refcount |= REFCOUNT_STRUCTELEM_FLAG_LAST;
+ field_tgt_structelem_first = NULL;
+ }
+ }
+ if (i == field_tgt_clear)
+ field_tgt_clear = FIELD_TGT_EMPTY;
+ gomp_increment_refcount (n, refcount_set);
+ tgt->list[i].copy_from
+ = GOMP_MAP_COPY_FROM_P (kind & typemask);
+ tgt->list[i].always_copy_from
+ = GOMP_MAP_ALWAYS_FROM_P (kind & typemask);
+ tgt->list[i].is_attach = false;
+ tgt->list[i].offset = 0;
+ tgt->list[i].length = k->host_end - k->host_start;
+ }
+ else
+ gomp_map_vars_existing (devicep, aq, n, k, &tgt->list[i],
+ kind & typemask, false, implicit,
+ cbufp, refcount_set);
+ }
else
{
k->aux = NULL;
@@ -3314,7 +3360,8 @@ GOMP_target_enter_exit_data (int device, size_t mapnum, void **hostaddrs,
size_t i, j;
if ((flags & GOMP_TARGET_FLAG_EXIT_DATA) == 0)
for (i = 0; i < mapnum; i++)
- if ((kinds[i] & 0xff) == GOMP_MAP_STRUCT)
+ if ((kinds[i] & 0xff) == GOMP_MAP_STRUCT
+ || (kinds[i] & 0xff) == GOMP_MAP_STRUCT_UNORD)
{
gomp_map_vars (devicep, sizes[i] + 1, &hostaddrs[i], NULL, &sizes[i],
&kinds[i], true, &refcount_set,
@@ -3409,7 +3456,8 @@ gomp_target_task_fn (void *data)
htab_t refcount_set = htab_create (ttask->mapnum);
if ((ttask->flags & GOMP_TARGET_FLAG_EXIT_DATA) == 0)
for (i = 0; i < ttask->mapnum; i++)
- if ((ttask->kinds[i] & 0xff) == GOMP_MAP_STRUCT)
+ if ((ttask->kinds[i] & 0xff) == GOMP_MAP_STRUCT
+ || (ttask->kinds[i] & 0xff) == GOMP_MAP_STRUCT_UNORD)
{
gomp_map_vars (devicep, ttask->sizes[i] + 1, &ttask->hostaddrs[i],
NULL, &ttask->sizes[i], &ttask->kinds[i], true,
new file mode 100644
@@ -0,0 +1,38 @@
+#include <stdlib.h>
+#include <assert.h>
+
+struct st {
+ int *p;
+};
+
+int main (void)
+{
+ struct st s[2];
+ s[0].p = (int *) calloc (5, sizeof (int));
+ s[1].p = (int *) calloc (5, sizeof (int));
+
+#pragma omp target map(s[0].p, s[1].p, s[0].p[0:2], s[1].p[1:3])
+ {
+ s[0].p[0] = 5;
+ s[1].p[1] = 7;
+ }
+
+#pragma omp target map(s, s[0].p[0:2], s[1].p[1:3])
+ {
+ s[0].p[0]++;
+ s[1].p[1]++;
+ }
+
+#pragma omp target map(s[0:2], s[0].p[0:2], s[1].p[1:3])
+ {
+ s[0].p[0]++;
+ s[1].p[1]++;
+ }
+
+ assert (s[0].p[0] == 7);
+ assert (s[1].p[1] == 9);
+
+ free (s[0].p);
+ free (s[1].p);
+ return 0;
+}
new file mode 100644
@@ -0,0 +1,58 @@
+#include <stdlib.h>
+#include <assert.h>
+
+struct st {
+ int *p;
+};
+
+int main (void)
+{
+ struct st s[10];
+
+ for (int i = 0; i < 10; i++)
+ s[i].p = (int *) calloc (5, sizeof (int));
+
+ for (int i = 0; i < 10; i++)
+ for (int j = 0; j < 10; j++)
+ for (int k = 0; k < 10; k++)
+ {
+ if (i == j || j == k || i == k)
+ continue;
+
+#pragma omp target map(s[i].p, s[j].p, s[k].p, s[i].p[0:2], s[j].p[1:3], \
+ s[k].p[2])
+ {
+ s[i].p[0]++;
+ s[j].p[1]++;
+ s[k].p[2]++;
+ }
+
+#pragma omp target map(s, s[i].p[0:2], s[j].p[1:3], s[k].p[2])
+ {
+ s[i].p[0]++;
+ s[j].p[1]++;
+ s[k].p[2]++;
+ }
+
+#pragma omp target map(s[0:10], s[i].p[0:2], s[j].p[1:3], s[k].p[2])
+ {
+ s[i].p[0]++;
+ s[j].p[1]++;
+ s[k].p[2]++;
+ }
+ }
+
+ for (int i = 0; i < 10; i++)
+ {
+ assert (s[i].p[0] == 216);
+ assert (s[i].p[1] == 216);
+ assert (s[i].p[2] == 216);
+ free (s[i].p);
+ }
+
+ return 0;
+}
+
+/* { dg-output "(\n|\r|\r\n)" } */
+/* { dg-output "libgomp: Mapped array elements must be the same .*(\n|\r|\r\n)+" } */
+/* { dg-shouldfail "" { offload_device_nonshared_as } } */
new file mode 100644
@@ -0,0 +1,68 @@
+#include <stdlib.h>
+#include <assert.h>
+
+struct st {
+ int *p;
+};
+
+struct tt {
+ struct st a[10];
+};
+
+struct ut {
+ struct tt *t;
+};
+
+int main (void)
+{
+ struct tt *t = (struct tt *) malloc (sizeof *t);
+ struct ut *u = (struct ut *) malloc (sizeof *u);
+
+ for (int i = 0; i < 10; i++)
+ t->a[i].p = (int *) calloc (5, sizeof (int));
+
+ u->t = t;
+
+ for (int i = 0; i < 10; i++)
+ for (int j = 0; j < 10; j++)
+ for (int k = 0; k < 10; k++)
+ {
+ if (i == j || j == k || i == k)
+ continue;
+
+ /* This one can use "firstprivate" for T... */
+#pragma omp target map(t->a[i].p, t->a[j].p, t->a[k].p, \
+ t->a[i].p[0:2], t->a[j].p[1:3], t->a[k].p[2])
+ {
+ t->a[i].p[0]++;
+ t->a[j].p[1]++;
+ t->a[k].p[2]++;
+ }
+
+ /* ...but this one must use attach/detach for T. */
+#pragma omp target map(u->t, u->t->a[i].p, u->t->a[j].p, u->t->a[k].p, \
+ u->t->a[i].p[0:2], u->t->a[j].p[1:3], u->t->a[k].p[2])
+ {
+ u->t->a[i].p[0]++;
+ u->t->a[j].p[1]++;
+ u->t->a[k].p[2]++;
+ }
+ }
+
+ for (int i = 0; i < 10; i++)
+ {
+ assert (t->a[i].p[0] == 144);
+ assert (t->a[i].p[1] == 144);
+ assert (t->a[i].p[2] == 144);
+ free (t->a[i].p);
+ }
+
+ free (u);
+ free (t);
+
+ return 0;
+}
+
+/* { dg-output "(\n|\r|\r\n)" } */
+/* { dg-output "libgomp: Mapped array elements must be the same .*(\n|\r|\r\n)+" } */
+/* { dg-shouldfail "" { offload_device_nonshared_as } } */
new file mode 100644
@@ -0,0 +1,54 @@
+! { dg-do run }
+
+type t
+ integer, pointer :: p(:)
+end type t
+
+type(t) :: var(3)
+integer :: i, j
+
+allocate (var(1)%p, source=[1,2,3,5])
+allocate (var(2)%p, source=[2,3,5])
+allocate (var(3)%p(1:3))
+
+var(3)%p = 0
+
+do i = 1, 3
+ do j = 1, 3
+!$omp target map(var(i)%p, var(j)%p)
+ var(i)%p(1) = 5
+ var(j)%p(2) = 7
+!$omp end target
+
+ if (i.ne.j) then
+!$omp target map(var(i)%p(1:3), var(i)%p, var(j)%p)
+ var(i)%p(1) = var(i)%p(1) + 1
+ var(j)%p(2) = var(j)%p(2) + 1
+!$omp end target
+
+!$omp target map(var(i)%p, var(j)%p, var(j)%p(1:3))
+ var(i)%p(1) = var(i)%p(1) + 1
+ var(j)%p(2) = var(j)%p(2) + 1
+!$omp end target
+
+!$omp target map(var(i)%p, var(i)%p(1:3), var(j)%p, var(j)%p(2))
+ var(i)%p(1) = var(i)%p(1) + 1
+ var(j)%p(2) = var(j)%p(2) + 1
+!$omp end target
+ end if
+
+ if (i.eq.j) then
+ if (var(i)%p(1).ne.5) stop 1
+ if (var(j)%p(2).ne.7) stop 2
+ else
+ if (var(i)%p(1).ne.8) stop 3
+ if (var(j)%p(2).ne.10) stop 4
+ end if
+ end do
+end do
+
+end
+
+! { dg-output "(\n|\r|\r\n)" }
+! { dg-output "libgomp: Mapped array elements must be the same .*(\n|\r|\r\n)+" }
+! { dg-shouldfail "" { offload_device_nonshared_as } }