Diffstat (limited to 'gcc')
-rw-r--r--  gcc/ChangeLog | 262
-rw-r--r--  gcc/bitmap.c | 16
-rw-r--r--  gcc/builtin-types.def | 18
-rw-r--r--  gcc/c-common.h | 1
-rw-r--r--  gcc/c-cppbuiltin.c | 2
-rw-r--r--  gcc/c-omp.c | 315
-rw-r--r--  gcc/c-parser.c | 393
-rw-r--r--  gcc/c-pragma.c | 2
-rw-r--r--  gcc/c-pragma.h | 8
-rw-r--r--  gcc/c-tree.h | 2
-rw-r--r--  gcc/c-typeck.c | 34
-rw-r--r--  gcc/cp/ChangeLog | 67
-rw-r--r--  gcc/cp/call.c | 2
-rw-r--r--  gcc/cp/cp-gimplify.c | 111
-rw-r--r--  gcc/cp/cp-objcp-common.h | 2
-rw-r--r--  gcc/cp/cp-tree.h | 11
-rw-r--r--  gcc/cp/decl.c | 2
-rw-r--r--  gcc/cp/parser.c | 676
-rw-r--r--  gcc/cp/pt.c | 255
-rw-r--r--  gcc/cp/semantics.c | 691
-rw-r--r--  gcc/fortran/ChangeLog | 84
-rw-r--r--  gcc/fortran/dump-parse-tree.c | 13
-rw-r--r--  gcc/fortran/f95-lang.c | 8
-rw-r--r--  gcc/fortran/gfortran.h | 15
-rw-r--r--  gcc/fortran/match.h | 2
-rw-r--r--  gcc/fortran/openmp.c | 178
-rw-r--r--  gcc/fortran/parse.c | 23
-rw-r--r--  gcc/fortran/resolve.c | 19
-rw-r--r--  gcc/fortran/scanner.c | 18
-rw-r--r--  gcc/fortran/st.c | 2
-rw-r--r--  gcc/fortran/trans-openmp.c | 535
-rw-r--r--  gcc/fortran/trans.c | 2
-rw-r--r--  gcc/fortran/trans.h | 6
-rw-r--r--  gcc/fortran/types.def | 21
-rw-r--r--  gcc/gimple-low.c | 1
-rw-r--r--  gcc/gimplify.c | 350
-rw-r--r--  gcc/hooks.c | 8
-rw-r--r--  gcc/hooks.h | 1
-rw-r--r--  gcc/ipa-struct-reorg.c | 1
-rw-r--r--  gcc/langhooks-def.h | 8
-rw-r--r--  gcc/langhooks.h | 14
-rw-r--r--  gcc/matrix-reorg.c | 4
-rw-r--r--  gcc/omp-builtins.def | 59
-rw-r--r--  gcc/omp-low.c | 1937
-rw-r--r--  gcc/testsuite/ChangeLog | 24
-rw-r--r--  gcc/testsuite/g++.dg/gomp/for-16.C | 5
-rw-r--r--  gcc/testsuite/g++.dg/gomp/pr27499.C | 2
-rw-r--r--  gcc/testsuite/g++.dg/gomp/pr34607.C | 4
-rw-r--r--  gcc/testsuite/g++.dg/gomp/predetermined-1.C | 33
-rw-r--r--  gcc/testsuite/g++.dg/gomp/task-1.C | 17
-rw-r--r--  gcc/testsuite/g++.dg/gomp/tls-3.C | 4
-rw-r--r--  gcc/testsuite/g++.dg/gomp/tls-4.C | 16
-rw-r--r--  gcc/testsuite/gcc.dg/gomp/appendix-a/a.35.4.c | 2
-rw-r--r--  gcc/testsuite/gcc.dg/gomp/appendix-a/a.35.6.c | 2
-rw-r--r--  gcc/testsuite/gcc.dg/gomp/collapse-1.c | 92
-rw-r--r--  gcc/testsuite/gcc.dg/gomp/nesting-1.c | 198
-rw-r--r--  gcc/testsuite/gcc.dg/gomp/pr27499.c | 2
-rw-r--r--  gcc/testsuite/gfortran.dg/gomp/appendix-a/a.33.4.f90 | 2
-rw-r--r--  gcc/testsuite/gfortran.dg/gomp/appendix-a/a.35.4.f90 | 2
-rw-r--r--  gcc/testsuite/gfortran.dg/gomp/appendix-a/a.35.6.f90 | 2
-rw-r--r--  gcc/testsuite/gfortran.dg/gomp/collapse1.f90 | 57
-rw-r--r--  gcc/testsuite/gfortran.dg/gomp/omp_parse1.f90 | 4
-rw-r--r--  gcc/testsuite/gfortran.dg/gomp/reduction1.f90 | 2
-rw-r--r--  gcc/testsuite/gfortran.dg/gomp/sharing-3.f90 | 37
-rw-r--r--  gcc/tree-cfg.c | 6
-rw-r--r--  gcc/tree-gimple.c | 1
-rw-r--r--  gcc/tree-inline.c | 1
-rw-r--r--  gcc/tree-nested.c | 193
-rw-r--r--  gcc/tree-parloops.c | 17
-rw-r--r--  gcc/tree-pretty-print.c | 98
-rw-r--r--  gcc/tree-ssa-operands.c | 23
-rw-r--r--  gcc/tree.c | 25
-rw-r--r--  gcc/tree.def | 17
-rw-r--r--  gcc/tree.h | 47
74 files changed, 5942 insertions, 1172 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index e4cc38734c0..ce1eae63991 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,265 @@
+2008-06-06 Jakub Jelinek <jakub@redhat.com>
+
+ * c-cppbuiltin.c (c_cpp_builtins): Change _OPENMP value to
+ 200805.
+ * langhooks.h (struct lang_hooks_for_decls): Add omp_finish_clause.
+ Add omp_private_outer_ref hook, add another argument to
+ omp_clause_default_ctor hook.
+ * langhooks-def.h (LANG_HOOKS_OMP_FINISH_CLAUSE): Define.
+ (LANG_HOOKS_OMP_PRIVATE_OUTER_REF): Define.
+ (LANG_HOOKS_OMP_CLAUSE_DEFAULT_CTOR): Change to
+ hook_tree_tree_tree_tree_null.
+ (LANG_HOOKS_DECLS): Add LANG_HOOKS_OMP_FINISH_CLAUSE and
+ LANG_HOOKS_OMP_PRIVATE_OUTER_REF.
+ * hooks.c (hook_tree_tree_tree_tree_null): New function.
+ * hooks.h (hook_tree_tree_tree_tree_null): New prototype.
+ * tree.def (OMP_TASK): New tree code.
+ * tree.h (OMP_TASK_COPYFN, OMP_TASK_ARG_SIZE, OMP_TASK_ARG_ALIGN,
+ OMP_CLAUSE_PRIVATE_OUTER_REF, OMP_CLAUSE_LASTPRIVATE_STMT,
+ OMP_CLAUSE_COLLAPSE_ITERVAR, OMP_CLAUSE_COLLAPSE_COUNT,
+ OMP_TASKREG_CHECK, OMP_TASKREG_BODY, OMP_TASKREG_CLAUSES,
+ OMP_TASKREG_FN, OMP_TASKREG_DATA_ARG, OMP_TASK_BODY,
+ OMP_TASK_CLAUSES, OMP_TASK_FN, OMP_TASK_DATA_ARG,
+ OMP_CLAUSE_COLLAPSE_EXPR): Define.
+ (enum omp_clause_default_kind): Add OMP_CLAUSE_DEFAULT_FIRSTPRIVATE.
+ (OMP_DIRECTIVE_P): Add OMP_TASK.
+ (OMP_CLAUSE_COLLAPSE, OMP_CLAUSE_UNTIED): New clause codes.
+ (OMP_CLAUSE_SCHEDULE_AUTO): New schedule kind.
+ * tree.c (omp_clause_code_name): Add OMP_CLAUSE_COLLAPSE
+ and OMP_CLAUSE_UNTIED entries.
+ (omp_clause_num_ops): Likewise. Increase OMP_CLAUSE_LASTPRIVATE
+ num_ops to 2.
+ (walk_tree_1): Handle OMP_CLAUSE_COLLAPSE and OMP_CLAUSE_UNTIED.
+ Walk OMP_CLAUSE_LASTPRIVATE_STMT.
+ * tree-pretty-print.c (dump_omp_clause): Handle
+ OMP_CLAUSE_SCHEDULE_AUTO, OMP_CLAUSE_UNTIED, OMP_CLAUSE_COLLAPSE,
+ OMP_CLAUSE_DEFAULT_FIRSTPRIVATE.
+ (dump_generic_node): Handle OMP_TASK and collapsed OMP_FOR loops.
+ * c-omp.c (c_finish_omp_for): Allow pointer iterators. Remove
+ warning about unsigned iterators. Change decl/init/cond/incr
+ arguments to TREE_VECs, check arguments for all collapsed loops.
+ (c_finish_omp_taskwait): New function.
+ (c_split_parallel_clauses): Put OMP_CLAUSE_COLLAPSE clause to
+ ws_clauses.
+ * c-parser.c (c_parser_omp_for_loop): Parse collapsed loops. Call
+ default_function_array_conversion on init. Add par_clauses argument.
+ If decl is present in parallel's lastprivate clause, change it to
+ shared and add lastprivate clause for decl to OMP_FOR_CLAUSES.
+ Add clauses argument, on success set OMP_FOR_CLAUSES to it. Look up
+ collapse count in clauses.
+ (c_parser_omp_for, c_parser_omp_parallel): Adjust
+ c_parser_omp_for_loop callers.
+ (OMP_FOR_CLAUSE_MASK): Add 1 << PRAGMA_OMP_CLAUSE_COLLAPSE.
+ (c_parser_pragma): Handle PRAGMA_OMP_TASKWAIT.
+ (c_parser_omp_clause_name): Handle collapse and untied clauses.
+ (c_parser_omp_clause_collapse, c_parser_omp_clause_untied): New
+ functions.
+ (c_parser_omp_clause_schedule): Handle schedule(auto).
+ Include correct location in the error message.
+ (c_parser_omp_all_clauses): Handle PRAGMA_OMP_CLAUSE_COLLAPSE
+ and PRAGMA_OMP_CLAUSE_UNTIED.
+ (OMP_TASK_CLAUSE_MASK): Define.
+ (c_parser_omp_task, c_parser_omp_taskwait): New functions.
+ (c_parser_omp_construct): Handle PRAGMA_OMP_TASK.
+ * tree-nested.c (convert_nonlocal_omp_clauses,
+ convert_local_omp_clauses): Handle OMP_CLAUSE_LASTPRIVATE_STMT,
+ OMP_CLAUSE_REDUCTION_INIT, OMP_CLAUSE_REDUCTION_MERGE,
+ OMP_CLAUSE_COLLAPSE and OMP_CLAUSE_UNTIED.
+ Don't handle TREE_STATIC or DECL_EXTERNAL VAR_DECLs in
+ OMP_CLAUSE_DECL.
+ (convert_nonlocal_reference, convert_local_reference,
+ convert_call_expr): Handle OMP_TASK the same as OMP_PARALLEL. Use
+ OMP_TASKREG_* macros rather than OMP_PARALLEL_*.
+ (walk_omp_for): Adjust for OMP_FOR_{INIT,COND,INCR} changes.
+ * tree-gimple.c (is_gimple_stmt): Handle OMP_TASK.
+ * c-tree.h (c_begin_omp_task, c_finish_omp_task): New prototypes.
+ * c-pragma.h (PRAGMA_OMP_TASK, PRAGMA_OMP_TASKWAIT): New.
+ (PRAGMA_OMP_CLAUSE_COLLAPSE, PRAGMA_OMP_CLAUSE_UNTIED): New.
+ * c-typeck.c (c_begin_omp_task, c_finish_omp_task): New functions.
+ (c_finish_omp_clauses): Handle OMP_CLAUSE_COLLAPSE and
+ OMP_CLAUSE_UNTIED.
+ * c-pragma.c (init_pragma): Init omp task and omp taskwait pragmas.
+ * c-common.h (c_finish_omp_taskwait): New prototype.
+ * gimple-low.c (lower_stmt): Handle OMP_TASK.
+ * tree-parloops.c (create_parallel_loop): Create 1 entry
+ vectors for OMP_FOR_{INIT,COND,INCR}.
+ * tree-cfg.c (remove_useless_stmts_1): Handle OMP_* containers.
+ (make_edges): Handle OMP_TASK.
+ * tree-ssa-operands.c (get_expr_operands): Handle collapsed OMP_FOR
+ loops, adjust for OMP_FOR_{INIT,COND,INCR} changes.
+ * tree-inline.c (estimate_num_insns_1): Handle OMP_TASK.
+ * builtin-types.def (BT_PTR_ULONGLONG, BT_PTR_FN_VOID_PTR_PTR,
+ BT_FN_BOOL_ULONGLONGPTR_ULONGLONGPTR,
+ BT_FN_BOOL_BOOL_ULL_ULL_ULL_ULLPTR_ULLPTR,
+ BT_FN_BOOL_BOOL_ULL_ULL_ULL_ULL_ULLPTR_ULLPTR,
+ BT_FN_VOID_OMPFN_PTR_OMPCPYFN_LONG_LONG_BOOL_UINT): New.
+ * omp-builtins.def (BUILT_IN_GOMP_TASK, BUILT_IN_GOMP_TASKWAIT,
+ BUILT_IN_GOMP_LOOP_ULL_STATIC_START,
+ BUILT_IN_GOMP_LOOP_ULL_DYNAMIC_START,
+ BUILT_IN_GOMP_LOOP_ULL_GUIDED_START,
+ BUILT_IN_GOMP_LOOP_ULL_RUNTIME_START,
+ BUILT_IN_GOMP_LOOP_ULL_ORDERED_STATIC_START,
+ BUILT_IN_GOMP_LOOP_ULL_ORDERED_DYNAMIC_START,
+ BUILT_IN_GOMP_LOOP_ULL_ORDERED_GUIDED_START,
+ BUILT_IN_GOMP_LOOP_ULL_ORDERED_RUNTIME_START,
+ BUILT_IN_GOMP_LOOP_ULL_STATIC_NEXT,
+ BUILT_IN_GOMP_LOOP_ULL_DYNAMIC_NEXT,
+ BUILT_IN_GOMP_LOOP_ULL_GUIDED_NEXT,
+ BUILT_IN_GOMP_LOOP_ULL_RUNTIME_NEXT,
+ BUILT_IN_GOMP_LOOP_ULL_ORDERED_STATIC_NEXT,
+ BUILT_IN_GOMP_LOOP_ULL_ORDERED_DYNAMIC_NEXT,
+ BUILT_IN_GOMP_LOOP_ULL_ORDERED_GUIDED_NEXT,
+ BUILT_IN_GOMP_LOOP_ULL_ORDERED_RUNTIME_NEXT): New builtins.
+ * gimplify.c (gimplify_omp_for): Allow pointer type for decl,
+ handle POINTER_PLUS_EXPR. If loop counter has been replaced and
+ original iterator is present in lastprivate clause or if
+ collapse > 1, set OMP_CLAUSE_LASTPRIVATE_STMT. Handle collapsed
+ OMP_FOR loops, adjust for OMP_FOR_{INIT,COND,INCR} changes.
+ (gimplify_expr): Handle OMP_SECTIONS_SWITCH and OMP_TASK.
+ (enum gimplify_omp_var_data): Add GOVD_PRIVATE_OUTER_REF.
+ (omp_notice_variable): Set GOVD_PRIVATE_OUTER_REF if needed,
+ if it is set, lookup var in outer contexts too. Handle
+ OMP_CLAUSE_DEFAULT_FIRSTPRIVATE. Handle vars that are supposed
+ to be implicitly determined firstprivate for task regions.
+ (gimplify_scan_omp_clauses): Set GOVD_PRIVATE_OUTER_REF if needed,
+ if it is set, lookup var in outer contexts too. Set
+ OMP_CLAUSE_PRIVATE_OUTER_REF if GOVD_PRIVATE_OUTER_REF is set.
+ Handle OMP_CLAUSE_LASTPRIVATE_STMT, OMP_CLAUSE_COLLAPSE and
+ OMP_CLAUSE_UNTIED. Take region_type as last argument
+ instead of in_parallel and in_combined_parallel.
+ (gimplify_omp_parallel, gimplify_omp_for, gimplify_omp_workshare):
+ Adjust callers.
+ (gimplify_adjust_omp_clauses_1): Set OMP_CLAUSE_PRIVATE_OUTER_REF if
+ GOVD_PRIVATE_OUTER_REF is set. Call omp_finish_clause
+ langhook.
+ (new_omp_context): Set default_kind to
+ OMP_CLAUSE_DEFAULT_UNSPECIFIED for OMP_TASK regions.
+ (omp_region_type): New enum.
+ (struct gimplify_omp_ctx): Remove is_parallel and is_combined_parallel
+ fields, add region_type.
+ (new_omp_context): Take region_type as argument instead of is_parallel
+ and is_combined_parallel.
+ (gimple_add_tmp_var, omp_firstprivatize_variable, omp_notice_variable,
+ omp_is_private, omp_check_private): Adjust ctx->is_parallel and
+ ctx->is_combined_parallel checks.
+ (gimplify_omp_task): New function.
+ (gimplify_adjust_omp_clauses): Handle OMP_CLAUSE_COLLAPSE and
+ OMP_CLAUSE_UNTIED.
+ * omp-low.c (extract_omp_for_data): Use schedule(static)
+ for schedule(auto). Handle pointer and unsigned iterators.
+ Compute fd->iter_type. Handle POINTER_PLUS_EXPR increments.
+ Add loops argument. Extract data for collapsed OMP_FOR loops.
+ (expand_parallel_call): Assert sched_kind isn't auto,
+ map runtime schedule to index 3.
+ (struct omp_for_data_loop): New type.
+ (struct omp_for_data): Remove v, n1, n2, step, cond_code fields.
+ Add loop, loops, collapse and iter_type fields.
+ (workshare_safe_to_combine_p): Disallow combined for if
+ iter_type is unsigned long long. Don't combine collapse > 1 loops
+ unless all bounds and steps are constant. Adjust extract_omp_for_data
+ caller.
+ (expand_omp_for_generic): Handle pointer, unsigned and long long
+ iterators. Handle collapsed OMP_FOR loops. Adjust
+ for struct omp_for_data changes. If libgomp function doesn't return
+ boolean_type_node, add comparison of the return value with 0.
+ (expand_omp_for_static_nochunk, expand_omp_for_static_chunk): Handle
+ pointer, unsigned and long long iterators. Adjust for struct
+ omp_for_data changes.
+ (expand_omp_for): Assert sched_kind isn't auto, map runtime schedule
+ to index 3. Use GOMP_loop_ull*{start,next} if iter_type is
+ unsigned long long. Allocate loops array, pass it to
+ extract_omp_for_data. For collapse > 1 loops use always
+ expand_omp_for_generic.
+ (omp_context): Add sfield_map and srecord_type fields.
+ (is_task_ctx, lookup_sfield): New functions.
+ (use_pointer_for_field): Use is_task_ctx helper. Change first
+ argument's type from const_tree to tree. Clarify comment.
+ In OMP_TASK disallow copy-in/out sharing.
+ (build_sender_ref): Call lookup_sfield instead of lookup_field.
+ (install_var_field): Add mask argument. Populate both record_type
+ and srecord_type if needed.
+ (delete_omp_context): Destroy sfield_map, clear DECL_ABSTRACT_ORIGIN
+ in srecord_type.
+ (fixup_child_record_type): Also remap FIELD_DECL's DECL_SIZE{,_UNIT}
+ and DECL_FIELD_OFFSET.
+ (scan_sharing_clauses): Adjust install_var_field callers. For
+ firstprivate clauses on explicit tasks allocate the var by value in
+ record_type unconditionally, rather than by reference.
+ Handle OMP_CLAUSE_PRIVATE_OUTER_REF. Scan OMP_CLAUSE_LASTPRIVATE_STMT.
+ Use is_taskreg_ctx instead of is_parallel_ctx.
+ Handle OMP_CLAUSE_COLLAPSE and OMP_CLAUSE_UNTIED.
+ (create_omp_child_function_name): Add task_copy argument, use
+ *_omp_cpyfn* names if it is true.
+ (create_omp_child_function): Add task_copy argument, if true create
+ *_omp_cpyfn* helper function.
+ (scan_omp_parallel): Adjust create_omp_child_function callers.
+ Rename parallel_nesting_level to taskreg_nesting_level.
+ (scan_omp_task): New function.
+ (lower_rec_input_clauses): Don't run constructors for firstprivate
+ explicit task vars which are initialized by *_omp_cpyfn*.
+ Pass outer var ref to omp_clause_default_ctor hook if
+ OMP_CLAUSE_PRIVATE_OUTER_REF or OMP_CLAUSE_LASTPRIVATE.
+ Replace OMP_CLAUSE_REDUCTION_PLACEHOLDER decls in
+ OMP_CLAUSE_REDUCTION_INIT.
+ (lower_send_clauses): Clear DECL_ABSTRACT_ORIGIN if in task to
+ avoid duplicate setting of fields. Handle
+ OMP_CLAUSE_PRIVATE_OUTER_REF.
+ (lower_send_shared_vars): Use srecord_type if non-NULL. Don't
+ copy-out if TREE_READONLY, only copy-in.
+ (expand_task_copyfn): New function.
+ (expand_task_call): New function.
+ (struct omp_taskcopy_context): New type.
+ (task_copyfn_copy_decl, task_copyfn_remap_type, create_task_copyfn):
+ New functions.
+ (lower_omp_parallel): Rename to...
+ (lower_omp_taskreg): ... this. Use OMP_TASKREG_* macros where needed.
+ Call create_task_copyfn if srecord_type is needed. Adjust
+ sender_decl type.
+ (task_shared_vars): New variable.
+ (check_omp_nesting_restrictions): Warn if work-sharing,
+ barrier, master or ordered region is closely nested inside OMP_TASK.
+ Add warnings for barrier if closely nested inside of work-sharing,
+ ordered, or master region.
+ (scan_omp_1): Call check_omp_nesting_restrictions even for
+ GOMP_barrier calls. Rename parallel_nesting_level to
+ taskreg_nesting_level. Handle OMP_TASK.
+ (lower_lastprivate_clauses): Even if some lastprivate is found on a
+ work-sharing construct, continue looking for them on parent parallel
+ construct.
+ (lower_omp_for_lastprivate): Add lastprivate clauses
+ to the beginning of dlist rather than end. Adjust for struct
+ omp_for_data changes.
+ (lower_omp_for): Add rec input clauses before OMP_FOR_PRE_BODY,
+ not after it. Handle collapsed OMP_FOR loops, adjust for
+ OMP_FOR_{INIT,COND,INCR} changes, adjust extract_omp_for_data
+ caller.
+ (get_ws_args_for): Adjust extract_omp_for_data caller.
+ (scan_omp_for): Handle collapsed OMP_FOR
+ loops, adjust for OMP_FOR_{INIT,COND,INCR} changes.
+ (lower_omp_single_simple): If libgomp function doesn't return
+ boolean_type_node, add comparison of the return value with 0.
+ (diagnose_sb_1, diagnose_sb_2): Handle collapsed OMP_FOR
+ loops, adjust for OMP_FOR_{INIT,COND,INCR} changes. Handle OMP_TASK.
+ (parallel_nesting_level): Rename to...
+ (taskreg_nesting_level): ... this.
+ (is_taskreg_ctx): New function.
+ (build_outer_var_ref, omp_copy_decl): Use is_taskreg_ctx instead
+ of is_parallel_ctx.
+ (execute_lower_omp): Rename parallel_nesting_level to
+ taskreg_nesting_level.
+ (expand_omp_parallel): Rename to...
+ (expand_omp_taskreg): ... this. Use OMP_TASKREG_* macros where needed.
+ Call expand_task_call for OMP_TASK regions.
+ (expand_omp): Adjust caller, handle OMP_TASK.
+ (lower_omp_1): Adjust lower_omp_taskreg caller, handle OMP_TASK.
+
+ * bitmap.c (bitmap_default_obstack_depth): New variable.
+ (bitmap_obstack_initialize, bitmap_obstack_release): Do nothing
+ if argument is NULL and bitmap_default_obstack is already initialized.
+ * ipa-struct-reorg.c (do_reorg_1): Call bitmap_obstack_release
+ at the end.
+ * matrix-reorg.c (matrix_reorg): Likewise.
+
2008-06-06 Uros Bizjak <ubizjak@gmail.com>
* config/i386/i386.md (*indirect_jump): Macroize using P
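For reference, and not part of the patch: a small user-level C example exercising the OpenMP 3.0 features the entry above adds front-end support for (task, taskwait, untied, collapse, schedule(auto)). It assumes a compiler with this patch applied and is built with -fopenmp.

/* Illustrative only.  */
#include <stdio.h>

int
main (void)
{
  int i, j, sum = 0;

  /* collapse(2) folds both loops into one iteration space;
     schedule(auto) leaves the schedule choice to the implementation.  */
#pragma omp parallel for collapse(2) schedule(auto) reduction(+:sum)
  for (i = 0; i < 4; i++)
    for (j = 0; j < 4; j++)
      sum += i * j;

  /* Explicit tasks; untied allows a suspended task to resume on another
     thread, and taskwait waits for the child tasks spawned here.  */
#pragma omp parallel
#pragma omp single
  {
    int k;
    for (k = 0; k < 4; k++)
      {
#pragma omp task firstprivate(k) untied
	printf ("task %d\n", k);
      }
#pragma omp taskwait
  }

  printf ("sum = %d\n", sum);
  return 0;
}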
diff --git a/gcc/bitmap.c b/gcc/bitmap.c
index c2a66f96a73..97e60de6b3c 100644
--- a/gcc/bitmap.c
+++ b/gcc/bitmap.c
@@ -119,6 +119,7 @@ register_overhead (bitmap b, int amount)
/* Global data */
bitmap_element bitmap_zero_bits; /* An element of all zero bits. */
bitmap_obstack bitmap_default_obstack; /* The default bitmap obstack. */
+static int bitmap_default_obstack_depth;
static GTY((deletable)) bitmap_element *bitmap_ggc_free; /* Freelist of
GC'd elements. */
@@ -302,7 +303,11 @@ void
bitmap_obstack_initialize (bitmap_obstack *bit_obstack)
{
if (!bit_obstack)
- bit_obstack = &bitmap_default_obstack;
+ {
+ if (bitmap_default_obstack_depth++)
+ return;
+ bit_obstack = &bitmap_default_obstack;
+ }
#if !defined(__GNUC__) || (__GNUC__ < 2)
#define __alignof__(type) 0
@@ -323,7 +328,14 @@ void
bitmap_obstack_release (bitmap_obstack *bit_obstack)
{
if (!bit_obstack)
- bit_obstack = &bitmap_default_obstack;
+ {
+ if (--bitmap_default_obstack_depth)
+ {
+ gcc_assert (bitmap_default_obstack_depth > 0);
+ return;
+ }
+ bit_obstack = &bitmap_default_obstack;
+ }
bit_obstack->elements = NULL;
bit_obstack->heads = NULL;
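For reference, a minimal usage sketch of the bitmap change above (not part of the patch; it assumes GCC's internal bitmap.h API is in scope): a NULL argument means the default bitmap obstack, and the new depth counter makes nested initialize/release pairs safe, with only the outermost pair doing real work.

/* Sketch only, assuming #include "bitmap.h" inside GCC.  */
static void
nested_default_obstack_use (void)
{
  bitmap b;

  bitmap_obstack_initialize (NULL);  /* depth 0 -> 1: really initializes.  */
  bitmap_obstack_initialize (NULL);  /* depth 1 -> 2: now a no-op.  */

  b = BITMAP_ALLOC (NULL);           /* allocated on the default obstack.  */
  bitmap_set_bit (b, 42);
  BITMAP_FREE (b);

  bitmap_obstack_release (NULL);     /* depth 2 -> 1: no-op.  */
  bitmap_obstack_release (NULL);     /* depth 1 -> 0: really releases.  */
}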
diff --git a/gcc/builtin-types.def b/gcc/builtin-types.def
index 25b5a0964f5..7d25e5aad6d 100644
--- a/gcc/builtin-types.def
+++ b/gcc/builtin-types.def
@@ -1,4 +1,4 @@
-/* Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007
+/* Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
Free Software Foundation, Inc.
This file is part of GCC.
@@ -121,6 +121,7 @@ DEF_PRIMITIVE_TYPE (BT_I16, builtin_type_for_size (BITS_PER_UNIT*16, 1))
DEF_POINTER_TYPE (BT_PTR_CONST_STRING, BT_CONST_STRING)
DEF_POINTER_TYPE (BT_PTR_LONG, BT_LONG)
+DEF_POINTER_TYPE (BT_PTR_ULONGLONG, BT_ULONGLONG)
DEF_POINTER_TYPE (BT_PTR_PTR, BT_PTR)
DEF_FUNCTION_TYPE_0 (BT_FN_VOID, BT_VOID)
@@ -308,6 +309,10 @@ DEF_FUNCTION_TYPE_2 (BT_FN_I8_VPTR_I8, BT_I8, BT_VOLATILE_PTR, BT_I8)
DEF_FUNCTION_TYPE_2 (BT_FN_I16_VPTR_I16, BT_I16, BT_VOLATILE_PTR, BT_I16)
DEF_FUNCTION_TYPE_2 (BT_FN_BOOL_LONGPTR_LONGPTR,
BT_BOOL, BT_PTR_LONG, BT_PTR_LONG)
+DEF_FUNCTION_TYPE_2 (BT_FN_BOOL_ULONGLONGPTR_ULONGLONGPTR,
+ BT_BOOL, BT_PTR_ULONGLONG, BT_PTR_ULONGLONG)
+
+DEF_POINTER_TYPE (BT_PTR_FN_VOID_PTR_PTR, BT_FN_VOID_PTR_PTR)
DEF_FUNCTION_TYPE_3 (BT_FN_STRING_STRING_CONST_STRING_SIZE,
BT_STRING, BT_STRING, BT_CONST_STRING, BT_SIZE)
@@ -410,10 +415,21 @@ DEF_FUNCTION_TYPE_6 (BT_FN_BOOL_LONG_LONG_LONG_LONG_LONGPTR_LONGPTR,
DEF_FUNCTION_TYPE_6 (BT_FN_VOID_OMPFN_PTR_UINT_LONG_LONG_LONG,
BT_VOID, BT_PTR_FN_VOID_PTR, BT_PTR, BT_UINT,
BT_LONG, BT_LONG, BT_LONG)
+DEF_FUNCTION_TYPE_6 (BT_FN_BOOL_BOOL_ULL_ULL_ULL_ULLPTR_ULLPTR,
+ BT_BOOL, BT_BOOL, BT_ULONGLONG, BT_ULONGLONG,
+ BT_ULONGLONG, BT_PTR_ULONGLONG, BT_PTR_ULONGLONG)
DEF_FUNCTION_TYPE_7 (BT_FN_VOID_OMPFN_PTR_UINT_LONG_LONG_LONG_LONG,
BT_VOID, BT_PTR_FN_VOID_PTR, BT_PTR, BT_UINT,
BT_LONG, BT_LONG, BT_LONG, BT_LONG)
+DEF_FUNCTION_TYPE_7 (BT_FN_VOID_OMPFN_PTR_OMPCPYFN_LONG_LONG_BOOL_UINT,
+ BT_VOID, BT_PTR_FN_VOID_PTR, BT_PTR,
+ BT_PTR_FN_VOID_PTR_PTR, BT_LONG, BT_LONG,
+ BT_BOOL, BT_UINT)
+DEF_FUNCTION_TYPE_7 (BT_FN_BOOL_BOOL_ULL_ULL_ULL_ULL_ULLPTR_ULLPTR,
+ BT_BOOL, BT_BOOL, BT_ULONGLONG, BT_ULONGLONG,
+ BT_ULONGLONG, BT_ULONGLONG,
+ BT_PTR_ULONGLONG, BT_PTR_ULONGLONG)
DEF_FUNCTION_TYPE_VAR_0 (BT_FN_VOID_VAR, BT_VOID)
DEF_FUNCTION_TYPE_VAR_0 (BT_FN_INT_VAR, BT_INT)
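For reference, the libgomp entry points these new builtin types appear to describe, written out as plain C prototypes. The signatures are an inference from the type definitions above, not taken from the patch itself.

/* Assumed correspondence, shown for illustration only.  */
#include <stdbool.h>

/* BT_FN_VOID_OMPFN_PTR_OMPCPYFN_LONG_LONG_BOOL_UINT */
void GOMP_task (void (*fn) (void *), void *data,
		void (*cpyfn) (void *, void *),
		long arg_size, long arg_align, bool if_clause,
		unsigned flags);

/* BT_FN_BOOL_BOOL_ULL_ULL_ULL_ULL_ULLPTR_ULLPTR */
bool GOMP_loop_ull_static_start (bool up, unsigned long long start,
				 unsigned long long end,
				 unsigned long long incr,
				 unsigned long long chunk_size,
				 unsigned long long *istart,
				 unsigned long long *iend);

/* BT_FN_BOOL_ULONGLONGPTR_ULONGLONGPTR */
bool GOMP_loop_ull_static_next (unsigned long long *istart,
				unsigned long long *iend);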
diff --git a/gcc/c-common.h b/gcc/c-common.h
index 7ad0be5a425..82c018b559f 100644
--- a/gcc/c-common.h
+++ b/gcc/c-common.h
@@ -995,6 +995,7 @@ extern tree c_finish_omp_ordered (tree);
extern void c_finish_omp_barrier (void);
extern tree c_finish_omp_atomic (enum tree_code, tree, tree);
extern void c_finish_omp_flush (void);
+extern void c_finish_omp_taskwait (void);
extern tree c_finish_omp_for (location_t, tree, tree, tree, tree, tree, tree);
extern void c_split_parallel_clauses (tree, tree *, tree *);
extern enum omp_clause_default_kind c_omp_predetermined_sharing (tree);
diff --git a/gcc/c-cppbuiltin.c b/gcc/c-cppbuiltin.c
index 5948fbd64f6..82bd5c27313 100644
--- a/gcc/c-cppbuiltin.c
+++ b/gcc/c-cppbuiltin.c
@@ -659,7 +659,7 @@ c_cpp_builtins (cpp_reader *pfile)
cpp_define (pfile, "__SSP__=1");
if (flag_openmp)
- cpp_define (pfile, "_OPENMP=200505");
+ cpp_define (pfile, "_OPENMP=200805");
builtin_define_type_sizeof ("__SIZEOF_INT__", integer_type_node);
builtin_define_type_sizeof ("__SIZEOF_LONG__", long_integer_type_node);
diff --git a/gcc/c-omp.c b/gcc/c-omp.c
index cdca2bcd4a9..1da71d27b9c 100644
--- a/gcc/c-omp.c
+++ b/gcc/c-omp.c
@@ -1,7 +1,7 @@
/* This file contains routines to construct GNU OpenMP constructs,
called from parsing in the C and C++ front ends.
- Copyright (C) 2005, 2007 Free Software Foundation, Inc.
+ Copyright (C) 2005, 2007, 2008 Free Software Foundation, Inc.
Contributed by Richard Henderson <rth@redhat.com>,
Diego Novillo <dnovillo@redhat.com>.
@@ -80,6 +80,19 @@ c_finish_omp_barrier (void)
}
+/* Complete a #pragma omp taskwait construct. */
+
+void
+c_finish_omp_taskwait (void)
+{
+ tree x;
+
+ x = built_in_decls[BUILT_IN_GOMP_TASKWAIT];
+ x = build_call_expr (x, 0);
+ add_stmt (x);
+}
+
+
/* Complete a #pragma omp atomic construct. The expression to be
implemented atomically is LHS code= RHS. The value returned is
either error_mark_node (if the construct was erroneous) or an
@@ -197,170 +210,205 @@ check_omp_for_incr_expr (tree exp, tree decl)
}
/* Validate and emit code for the OpenMP directive #pragma omp for.
- INIT, COND, INCR, BODY and PRE_BODY are the five basic elements
- of the loop (initialization expression, controlling predicate, increment
- expression, body of the loop and statements to go before the loop).
- DECL is the iteration variable. */
+ DECLV is a vector of iteration variables, for each collapsed loop.
+ INITV, CONDV and INCRV are vectors containing initialization
+ expressions, controlling predicates and increment expressions.
+ BODY is the body of the loop and PRE_BODY statements that go before
+ the loop. */
tree
-c_finish_omp_for (location_t locus, tree decl, tree init, tree cond,
- tree incr, tree body, tree pre_body)
+c_finish_omp_for (location_t locus, tree declv, tree initv, tree condv,
+ tree incrv, tree body, tree pre_body)
{
- location_t elocus = locus;
+ location_t elocus;
bool fail = false;
+ int i;
- if (EXPR_HAS_LOCATION (init))
- elocus = EXPR_LOCATION (init);
-
- /* Validate the iteration variable. */
- if (!INTEGRAL_TYPE_P (TREE_TYPE (decl)))
+ gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (initv));
+ gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (condv));
+ gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (incrv));
+ for (i = 0; i < TREE_VEC_LENGTH (declv); i++)
{
- error ("%Hinvalid type for iteration variable %qE", &elocus, decl);
- fail = true;
- }
- if (TYPE_UNSIGNED (TREE_TYPE (decl)))
- warning (0, "%Hiteration variable %qE is unsigned", &elocus, decl);
+ tree decl = TREE_VEC_ELT (declv, i);
+ tree init = TREE_VEC_ELT (initv, i);
+ tree cond = TREE_VEC_ELT (condv, i);
+ tree incr = TREE_VEC_ELT (incrv, i);
+
+ elocus = locus;
+ if (EXPR_HAS_LOCATION (init))
+ elocus = EXPR_LOCATION (init);
+
+ /* Validate the iteration variable. */
+ if (!INTEGRAL_TYPE_P (TREE_TYPE (decl))
+ && TREE_CODE (TREE_TYPE (decl)) != POINTER_TYPE)
+ {
+ error ("%Hinvalid type for iteration variable %qE", &elocus, decl);
+ fail = true;
+ }
- /* In the case of "for (int i = 0...)", init will be a decl. It should
- have a DECL_INITIAL that we can turn into an assignment. */
- if (init == decl)
- {
- elocus = DECL_SOURCE_LOCATION (decl);
+ /* In the case of "for (int i = 0...)", init will be a decl. It should
+ have a DECL_INITIAL that we can turn into an assignment. */
+ if (init == decl)
+ {
+ elocus = DECL_SOURCE_LOCATION (decl);
+
+ init = DECL_INITIAL (decl);
+ if (init == NULL)
+ {
+ error ("%H%qE is not initialized", &elocus, decl);
+ init = integer_zero_node;
+ fail = true;
+ }
- init = DECL_INITIAL (decl);
- if (init == NULL)
+ init = build_modify_expr (decl, NOP_EXPR, init);
+ SET_EXPR_LOCATION (init, elocus);
+ }
+ gcc_assert (TREE_CODE (init) == MODIFY_EXPR);
+ gcc_assert (TREE_OPERAND (init, 0) == decl);
+
+ if (cond == NULL_TREE)
{
- error ("%H%qE is not initialized", &elocus, decl);
- init = integer_zero_node;
+ error ("%Hmissing controlling predicate", &elocus);
fail = true;
}
+ else
+ {
+ bool cond_ok = false;
- init = build_modify_expr (decl, NOP_EXPR, init);
- SET_EXPR_LOCATION (init, elocus);
- }
- gcc_assert (TREE_CODE (init) == MODIFY_EXPR);
- gcc_assert (TREE_OPERAND (init, 0) == decl);
-
- if (cond == NULL_TREE)
- {
- error ("%Hmissing controlling predicate", &elocus);
- fail = true;
- }
- else
- {
- bool cond_ok = false;
+ if (EXPR_HAS_LOCATION (cond))
+ elocus = EXPR_LOCATION (cond);
- if (EXPR_HAS_LOCATION (cond))
- elocus = EXPR_LOCATION (cond);
+ if (TREE_CODE (cond) == LT_EXPR
+ || TREE_CODE (cond) == LE_EXPR
+ || TREE_CODE (cond) == GT_EXPR
+ || TREE_CODE (cond) == GE_EXPR)
+ {
+ tree op0 = TREE_OPERAND (cond, 0);
+ tree op1 = TREE_OPERAND (cond, 1);
- if (TREE_CODE (cond) == LT_EXPR
- || TREE_CODE (cond) == LE_EXPR
- || TREE_CODE (cond) == GT_EXPR
- || TREE_CODE (cond) == GE_EXPR)
- {
- tree op0 = TREE_OPERAND (cond, 0);
- tree op1 = TREE_OPERAND (cond, 1);
+ /* 2.5.1. The comparison in the condition is computed in
+ the type of DECL, otherwise the behavior is undefined.
- /* 2.5.1. The comparison in the condition is computed in the type
- of DECL, otherwise the behavior is undefined.
+ For example:
+ long n; int i;
+ i < n;
- For example:
- long n; int i;
- i < n;
+ according to ISO will be evaluated as:
+ (long)i < n;
- according to ISO will be evaluated as:
- (long)i < n;
+ We want to force:
+ i < (int)n; */
+ if (TREE_CODE (op0) == NOP_EXPR
+ && decl == TREE_OPERAND (op0, 0))
+ {
+ TREE_OPERAND (cond, 0) = TREE_OPERAND (op0, 0);
+ TREE_OPERAND (cond, 1)
+ = fold_build1 (NOP_EXPR, TREE_TYPE (decl),
+ TREE_OPERAND (cond, 1));
+ }
+ else if (TREE_CODE (op1) == NOP_EXPR
+ && decl == TREE_OPERAND (op1, 0))
+ {
+ TREE_OPERAND (cond, 1) = TREE_OPERAND (op1, 0);
+ TREE_OPERAND (cond, 0)
+ = fold_build1 (NOP_EXPR, TREE_TYPE (decl),
+ TREE_OPERAND (cond, 0));
+ }
- We want to force:
- i < (int)n; */
- if (TREE_CODE (op0) == NOP_EXPR
- && decl == TREE_OPERAND (op0, 0))
- {
- TREE_OPERAND (cond, 0) = TREE_OPERAND (op0, 0);
- TREE_OPERAND (cond, 1) = fold_build1 (NOP_EXPR, TREE_TYPE (decl),
- TREE_OPERAND (cond, 1));
- }
- else if (TREE_CODE (op1) == NOP_EXPR
- && decl == TREE_OPERAND (op1, 0))
- {
- TREE_OPERAND (cond, 1) = TREE_OPERAND (op1, 0);
- TREE_OPERAND (cond, 0) = fold_build1 (NOP_EXPR, TREE_TYPE (decl),
- TREE_OPERAND (cond, 0));
+ if (decl == TREE_OPERAND (cond, 0))
+ cond_ok = true;
+ else if (decl == TREE_OPERAND (cond, 1))
+ {
+ TREE_SET_CODE (cond,
+ swap_tree_comparison (TREE_CODE (cond)));
+ TREE_OPERAND (cond, 1) = TREE_OPERAND (cond, 0);
+ TREE_OPERAND (cond, 0) = decl;
+ cond_ok = true;
+ }
}
- if (decl == TREE_OPERAND (cond, 0))
- cond_ok = true;
- else if (decl == TREE_OPERAND (cond, 1))
+ if (!cond_ok)
{
- TREE_SET_CODE (cond, swap_tree_comparison (TREE_CODE (cond)));
- TREE_OPERAND (cond, 1) = TREE_OPERAND (cond, 0);
- TREE_OPERAND (cond, 0) = decl;
- cond_ok = true;
+ error ("%Hinvalid controlling predicate", &elocus);
+ fail = true;
}
}
- if (!cond_ok)
+ if (incr == NULL_TREE)
{
- error ("%Hinvalid controlling predicate", &elocus);
+ error ("%Hmissing increment expression", &elocus);
fail = true;
}
- }
-
- if (incr == NULL_TREE)
- {
- error ("%Hmissing increment expression", &elocus);
- fail = true;
- }
- else
- {
- bool incr_ok = false;
-
- if (EXPR_HAS_LOCATION (incr))
- elocus = EXPR_LOCATION (incr);
-
- /* Check all the valid increment expressions: v++, v--, ++v, --v,
- v = v + incr, v = incr + v and v = v - incr. */
- switch (TREE_CODE (incr))
+ else
{
- case POSTINCREMENT_EXPR:
- case PREINCREMENT_EXPR:
- case POSTDECREMENT_EXPR:
- case PREDECREMENT_EXPR:
- incr_ok = (TREE_OPERAND (incr, 0) == decl);
- break;
+ bool incr_ok = false;
- case MODIFY_EXPR:
- if (TREE_OPERAND (incr, 0) != decl)
- break;
- if (TREE_OPERAND (incr, 1) == decl)
- break;
- if (TREE_CODE (TREE_OPERAND (incr, 1)) == PLUS_EXPR
- && (TREE_OPERAND (TREE_OPERAND (incr, 1), 0) == decl
- || TREE_OPERAND (TREE_OPERAND (incr, 1), 1) == decl))
- incr_ok = true;
- else if (TREE_CODE (TREE_OPERAND (incr, 1)) == MINUS_EXPR
- && TREE_OPERAND (TREE_OPERAND (incr, 1), 0) == decl)
- incr_ok = true;
- else
+ if (EXPR_HAS_LOCATION (incr))
+ elocus = EXPR_LOCATION (incr);
+
+ /* Check all the valid increment expressions: v++, v--, ++v, --v,
+ v = v + incr, v = incr + v and v = v - incr. */
+ switch (TREE_CODE (incr))
{
- tree t = check_omp_for_incr_expr (TREE_OPERAND (incr, 1), decl);
- if (t != error_mark_node)
+ case POSTINCREMENT_EXPR:
+ case PREINCREMENT_EXPR:
+ case POSTDECREMENT_EXPR:
+ case PREDECREMENT_EXPR:
+ if (TREE_OPERAND (incr, 0) != decl)
+ break;
+
+ incr_ok = true;
+ if (POINTER_TYPE_P (TREE_TYPE (decl)))
{
- incr_ok = true;
- t = build2 (PLUS_EXPR, TREE_TYPE (decl), decl, t);
+ tree t = fold_convert (sizetype, TREE_OPERAND (incr, 1));
+
+ if (TREE_CODE (incr) == POSTDECREMENT_EXPR
+ || TREE_CODE (incr) == PREDECREMENT_EXPR)
+ t = fold_build1 (NEGATE_EXPR, sizetype, t);
+ t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (decl), decl, t);
incr = build2 (MODIFY_EXPR, void_type_node, decl, t);
}
- }
- break;
+ break;
+
+ case MODIFY_EXPR:
+ if (TREE_OPERAND (incr, 0) != decl)
+ break;
+ if (TREE_OPERAND (incr, 1) == decl)
+ break;
+ if (TREE_CODE (TREE_OPERAND (incr, 1)) == PLUS_EXPR
+ && (TREE_OPERAND (TREE_OPERAND (incr, 1), 0) == decl
+ || TREE_OPERAND (TREE_OPERAND (incr, 1), 1) == decl))
+ incr_ok = true;
+ else if ((TREE_CODE (TREE_OPERAND (incr, 1)) == MINUS_EXPR
+ || (TREE_CODE (TREE_OPERAND (incr, 1))
+ == POINTER_PLUS_EXPR))
+ && TREE_OPERAND (TREE_OPERAND (incr, 1), 0) == decl)
+ incr_ok = true;
+ else
+ {
+ tree t = check_omp_for_incr_expr (TREE_OPERAND (incr, 1),
+ decl);
+ if (t != error_mark_node)
+ {
+ incr_ok = true;
+ t = build2 (PLUS_EXPR, TREE_TYPE (decl), decl, t);
+ incr = build2 (MODIFY_EXPR, void_type_node, decl, t);
+ }
+ }
+ break;
- default:
- break;
- }
- if (!incr_ok)
- {
- error ("%Hinvalid increment expression", &elocus);
- fail = true;
+ default:
+ break;
+ }
+ if (!incr_ok)
+ {
+ error ("%Hinvalid increment expression", &elocus);
+ fail = true;
+ }
}
+
+ TREE_VEC_ELT (initv, i) = init;
+ TREE_VEC_ELT (incrv, i) = incr;
}
if (fail)
@@ -370,9 +418,9 @@ c_finish_omp_for (location_t locus, tree decl, tree init, tree cond,
tree t = make_node (OMP_FOR);
TREE_TYPE (t) = void_type_node;
- OMP_FOR_INIT (t) = init;
- OMP_FOR_COND (t) = cond;
- OMP_FOR_INCR (t) = incr;
+ OMP_FOR_INIT (t) = initv;
+ OMP_FOR_COND (t) = condv;
+ OMP_FOR_INCR (t) = incrv;
OMP_FOR_BODY (t) = body;
OMP_FOR_PRE_BODY (t) = pre_body;
@@ -416,6 +464,7 @@ c_split_parallel_clauses (tree clauses, tree *par_clauses, tree *ws_clauses)
case OMP_CLAUSE_SCHEDULE:
case OMP_CLAUSE_ORDERED:
+ case OMP_CLAUSE_COLLAPSE:
OMP_CLAUSE_CHAIN (clauses) = *ws_clauses;
*ws_clauses = clauses;
break;
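For reference, and not part of the patch: loop forms the reworked c_finish_omp_for is now expected to accept, namely a pointer-typed iteration variable (whose increment is rewritten as a POINTER_PLUS_EXPR) and a collapse(2) nest whose decl/init/cond/incr vectors are checked element by element.

/* Illustrative only.  */
void
scale_and_fill (double *first, double *last, double (*m)[8])
{
  double *p;
  int i, j;

  /* Pointer iteration variable, newly allowed.  */
#pragma omp parallel for
  for (p = first; p < last; p++)
    *p *= 2.0;

  /* Two-deep collapsed nest, validated per loop.  */
#pragma omp parallel for collapse(2)
  for (i = 0; i < 8; i++)
    for (j = 0; j < 8; j++)
      m[i][j] = i + j;
}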
diff --git a/gcc/c-parser.c b/gcc/c-parser.c
index d28ec9c91a9..7607a8dfdcf 100644
--- a/gcc/c-parser.c
+++ b/gcc/c-parser.c
@@ -1018,6 +1018,7 @@ static void c_parser_omp_construct (c_parser *);
static void c_parser_omp_threadprivate (c_parser *);
static void c_parser_omp_barrier (c_parser *);
static void c_parser_omp_flush (c_parser *);
+static void c_parser_omp_taskwait (c_parser *);
enum pragma_context { pragma_external, pragma_stmt, pragma_compound };
static bool c_parser_pragma (c_parser *, enum pragma_context);
@@ -6674,6 +6675,17 @@ c_parser_pragma (c_parser *parser, enum pragma_context context)
c_parser_omp_flush (parser);
return false;
+ case PRAGMA_OMP_TASKWAIT:
+ if (context != pragma_compound)
+ {
+ if (context == pragma_stmt)
+ c_parser_error (parser, "%<#pragma omp taskwait%> may only be "
+ "used in compound statements");
+ goto bad_stmt;
+ }
+ c_parser_omp_taskwait (parser);
+ return false;
+
case PRAGMA_OMP_THREADPRIVATE:
c_parser_omp_threadprivate (parser);
return false;
@@ -6781,7 +6793,9 @@ c_parser_omp_clause_name (c_parser *parser)
switch (p[0])
{
case 'c':
- if (!strcmp ("copyin", p))
+ if (!strcmp ("collapse", p))
+ result = PRAGMA_OMP_CLAUSE_COLLAPSE;
+ else if (!strcmp ("copyin", p))
result = PRAGMA_OMP_CLAUSE_COPYIN;
else if (!strcmp ("copyprivate", p))
result = PRAGMA_OMP_CLAUSE_COPYPRIVATE;
@@ -6818,6 +6832,10 @@ c_parser_omp_clause_name (c_parser *parser)
else if (!strcmp ("shared", p))
result = PRAGMA_OMP_CLAUSE_SHARED;
break;
+ case 'u':
+ if (!strcmp ("untied", p))
+ result = PRAGMA_OMP_CLAUSE_UNTIED;
+ break;
}
}
@@ -6906,6 +6924,41 @@ c_parser_omp_var_list_parens (c_parser *parser, enum tree_code kind, tree list)
return list;
}
+/* OpenMP 3.0:
+ collapse ( constant-expression ) */
+
+static tree
+c_parser_omp_clause_collapse (c_parser *parser, tree list)
+{
+ tree c, num = error_mark_node;
+ HOST_WIDE_INT n;
+ location_t loc;
+
+ check_no_duplicate_clause (list, OMP_CLAUSE_COLLAPSE, "collapse");
+
+ loc = c_parser_peek_token (parser)->location;
+ if (c_parser_require (parser, CPP_OPEN_PAREN, "expected %<(%>"))
+ {
+ num = c_parser_expr_no_commas (parser, NULL).value;
+ c_parser_skip_until_found (parser, CPP_CLOSE_PAREN, "expected %<)%>");
+ }
+ if (num == error_mark_node)
+ return list;
+ if (!INTEGRAL_TYPE_P (TREE_TYPE (num))
+ || !host_integerp (num, 0)
+ || (n = tree_low_cst (num, 0)) <= 0
+ || (int) n != n)
+ {
+ error ("%Hcollapse argument needs positive constant integer expression",
+ &loc);
+ return list;
+ }
+ c = build_omp_clause (OMP_CLAUSE_COLLAPSE);
+ OMP_CLAUSE_COLLAPSE_EXPR (c) = num;
+ OMP_CLAUSE_CHAIN (c) = list;
+ return c;
+}
+
/* OpenMP 2.5:
copyin ( variable-list ) */
@@ -7164,7 +7217,7 @@ c_parser_omp_clause_reduction (c_parser *parser, tree list)
schedule ( schedule-kind , expression )
schedule-kind:
- static | dynamic | guided | runtime
+ static | dynamic | guided | runtime | auto
*/
static tree
@@ -7208,6 +7261,8 @@ c_parser_omp_clause_schedule (c_parser *parser, tree list)
}
else if (c_parser_next_token_is_keyword (parser, RID_STATIC))
OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_STATIC;
+ else if (c_parser_next_token_is_keyword (parser, RID_AUTO))
+ OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_AUTO;
else
goto invalid_kind;
@@ -7223,6 +7278,9 @@ c_parser_omp_clause_schedule (c_parser *parser, tree list)
if (OMP_CLAUSE_SCHEDULE_KIND (c) == OMP_CLAUSE_SCHEDULE_RUNTIME)
error ("%Hschedule %<runtime%> does not take "
"a %<chunk_size%> parameter", &here);
+ else if (OMP_CLAUSE_SCHEDULE_KIND (c) == OMP_CLAUSE_SCHEDULE_AUTO)
+ error ("%Hschedule %<auto%> does not take "
+ "a %<chunk_size%> parameter", &here);
else if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE)
OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c) = t;
else
@@ -7253,6 +7311,22 @@ c_parser_omp_clause_shared (c_parser *parser, tree list)
return c_parser_omp_var_list_parens (parser, OMP_CLAUSE_SHARED, list);
}
+/* OpenMP 3.0:
+ untied */
+
+static tree
+c_parser_omp_clause_untied (c_parser *parser ATTRIBUTE_UNUSED, tree list)
+{
+ tree c;
+
+ /* FIXME: Should we allow duplicates? */
+ check_no_duplicate_clause (list, OMP_CLAUSE_UNTIED, "untied");
+
+ c = build_omp_clause (OMP_CLAUSE_UNTIED);
+ OMP_CLAUSE_CHAIN (c) = list;
+ return c;
+}
+
/* Parse all OpenMP clauses. The set clauses allowed by the directive
is a bitmask in MASK. Return the list of clauses found; the result
of clause default goes in *pdefault. */
@@ -7280,6 +7354,10 @@ c_parser_omp_all_clauses (c_parser *parser, unsigned int mask,
switch (c_kind)
{
+ case PRAGMA_OMP_CLAUSE_COLLAPSE:
+ clauses = c_parser_omp_clause_collapse (parser, clauses);
+ c_name = "collapse";
+ break;
case PRAGMA_OMP_CLAUSE_COPYIN:
clauses = c_parser_omp_clause_copyin (parser, clauses);
c_name = "copyin";
@@ -7332,6 +7410,10 @@ c_parser_omp_all_clauses (c_parser *parser, unsigned int mask,
clauses = c_parser_omp_clause_shared (parser, clauses);
c_name = "shared";
break;
+ case PRAGMA_OMP_CLAUSE_UNTIED:
+ clauses = c_parser_omp_clause_untied (parser, clauses);
+ c_name = "untied";
+ break;
default:
c_parser_error (parser, "expected %<#pragma omp%> clause");
goto saw_error;
@@ -7527,10 +7609,24 @@ c_parser_omp_flush (c_parser *parser)
so that we can push a new decl if necessary to make it private. */
static tree
-c_parser_omp_for_loop (c_parser *parser)
+c_parser_omp_for_loop (c_parser *parser, tree clauses, tree *par_clauses)
{
- tree decl, cond, incr, save_break, save_cont, body, init;
+ tree decl, cond, incr, save_break, save_cont, body, init, stmt, cl;
+ tree declv, condv, incrv, initv, for_block = NULL, ret = NULL;
location_t loc;
+ bool fail = false, open_brace_parsed = false;
+ int i, collapse = 1, nbraces = 0;
+
+ for (cl = clauses; cl; cl = OMP_CLAUSE_CHAIN (cl))
+ if (OMP_CLAUSE_CODE (cl) == OMP_CLAUSE_COLLAPSE)
+ collapse = tree_low_cst (OMP_CLAUSE_COLLAPSE_EXPR (cl), 0);
+
+ gcc_assert (collapse >= 1);
+
+ declv = make_tree_vec (collapse);
+ initv = make_tree_vec (collapse);
+ condv = make_tree_vec (collapse);
+ incrv = make_tree_vec (collapse);
if (!c_parser_next_token_is_keyword (parser, RID_FOR))
{
@@ -7540,61 +7636,136 @@ c_parser_omp_for_loop (c_parser *parser)
loc = c_parser_peek_token (parser)->location;
c_parser_consume_token (parser);
- if (!c_parser_require (parser, CPP_OPEN_PAREN, "expected %<(%>"))
- return NULL;
-
- /* Parse the initialization declaration or expression. */
- if (c_parser_next_token_starts_declspecs (parser))
+ for (i = 0; i < collapse; i++)
{
- c_parser_declaration_or_fndef (parser, true, true, true, true);
- decl = check_for_loop_decls ();
- if (decl == NULL)
- goto error_init;
- if (DECL_INITIAL (decl) == error_mark_node)
- decl = error_mark_node;
- init = decl;
- }
- else if (c_parser_next_token_is (parser, CPP_NAME)
- && c_parser_peek_2nd_token (parser)->type == CPP_EQ)
- {
- decl = c_parser_postfix_expression (parser).value;
+ int bracecount = 0;
- c_parser_require (parser, CPP_EQ, "expected %<=%>");
+ if (!c_parser_require (parser, CPP_OPEN_PAREN, "expected %<(%>"))
+ goto pop_scopes;
- init = c_parser_expr_no_commas (parser, NULL).value;
- init = build_modify_expr (decl, NOP_EXPR, init);
- init = c_process_expr_stmt (init);
+ /* Parse the initialization declaration or expression. */
+ if (c_parser_next_token_starts_declspecs (parser))
+ {
+ if (i > 0)
+ for_block
+ = tree_cons (NULL, c_begin_compound_stmt (true), for_block);
+ c_parser_declaration_or_fndef (parser, true, true, true, true);
+ decl = check_for_loop_decls ();
+ if (decl == NULL)
+ goto error_init;
+ if (DECL_INITIAL (decl) == error_mark_node)
+ decl = error_mark_node;
+ init = decl;
+ }
+ else if (c_parser_next_token_is (parser, CPP_NAME)
+ && c_parser_peek_2nd_token (parser)->type == CPP_EQ)
+ {
+ struct c_expr init_exp;
+
+ decl = c_parser_postfix_expression (parser).value;
+
+ c_parser_require (parser, CPP_EQ, "expected %<=%>");
+
+ init_exp = c_parser_expr_no_commas (parser, NULL);
+ init_exp = default_function_array_conversion (init_exp);
+ init = build_modify_expr (decl, NOP_EXPR, init_exp.value);
+ init = c_process_expr_stmt (init);
+ c_parser_skip_until_found (parser, CPP_SEMICOLON, "expected %<;%>");
+ }
+ else
+ {
+ error_init:
+ c_parser_error (parser,
+ "expected iteration declaration or initialization");
+ c_parser_skip_until_found (parser, CPP_CLOSE_PAREN,
+ "expected %<)%>");
+ fail = true;
+ goto parse_next;
+ }
+
+ /* Parse the loop condition. */
+ cond = NULL_TREE;
+ if (c_parser_next_token_is_not (parser, CPP_SEMICOLON))
+ {
+ cond = c_parser_expression_conv (parser).value;
+ cond = c_objc_common_truthvalue_conversion (cond);
+ if (CAN_HAVE_LOCATION_P (cond))
+ SET_EXPR_LOCATION (cond, input_location);
+ }
c_parser_skip_until_found (parser, CPP_SEMICOLON, "expected %<;%>");
- }
- else
- goto error_init;
- /* Parse the loop condition. */
- cond = NULL_TREE;
- if (c_parser_next_token_is_not (parser, CPP_SEMICOLON))
- {
- cond = c_parser_expression_conv (parser).value;
- cond = c_objc_common_truthvalue_conversion (cond);
- if (CAN_HAVE_LOCATION_P (cond))
- SET_EXPR_LOCATION (cond, input_location);
- }
- c_parser_skip_until_found (parser, CPP_SEMICOLON, "expected %<;%>");
+ /* Parse the increment expression. */
+ incr = NULL_TREE;
+ if (c_parser_next_token_is_not (parser, CPP_CLOSE_PAREN))
+ incr = c_process_expr_stmt (c_parser_expression (parser).value);
+ c_parser_skip_until_found (parser, CPP_CLOSE_PAREN, "expected %<)%>");
- /* Parse the increment expression. */
- incr = NULL_TREE;
- if (c_parser_next_token_is_not (parser, CPP_CLOSE_PAREN))
- incr = c_process_expr_stmt (c_parser_expression (parser).value);
- c_parser_skip_until_found (parser, CPP_CLOSE_PAREN, "expected %<)%>");
+ if (decl == NULL || decl == error_mark_node || init == error_mark_node)
+ fail = true;
+ else
+ {
+ TREE_VEC_ELT (declv, i) = decl;
+ TREE_VEC_ELT (initv, i) = init;
+ TREE_VEC_ELT (condv, i) = cond;
+ TREE_VEC_ELT (incrv, i) = incr;
+ }
+
+ parse_next:
+ if (i == collapse - 1)
+ break;
+
+ /* FIXME: OpenMP 3.0 draft isn't very clear on what exactly is allowed
+ in between the collapsed for loops to be still considered perfectly
+ nested. Hopefully the final version clarifies this.
+ For now handle (multiple) {'s and empty statements. */
+ do
+ {
+ if (c_parser_next_token_is_keyword (parser, RID_FOR))
+ {
+ c_parser_consume_token (parser);
+ break;
+ }
+ else if (c_parser_next_token_is (parser, CPP_OPEN_BRACE))
+ {
+ c_parser_consume_token (parser);
+ bracecount++;
+ }
+ else if (bracecount
+ && c_parser_next_token_is (parser, CPP_SEMICOLON))
+ c_parser_consume_token (parser);
+ else
+ {
+ c_parser_error (parser, "not enough perfectly nested loops");
+ if (bracecount)
+ {
+ open_brace_parsed = true;
+ bracecount--;
+ }
+ fail = true;
+ collapse = 0;
+ break;
+ }
+ }
+ while (1);
+
+ nbraces += bracecount;
+ }
- parse_body:
save_break = c_break_label;
c_break_label = size_one_node;
save_cont = c_cont_label;
c_cont_label = NULL_TREE;
body = push_stmt_list ();
- add_stmt (c_parser_c99_block_statement (parser));
+ if (open_brace_parsed)
+ {
+ stmt = c_begin_compound_stmt (true);
+ c_parser_compound_statement_nostart (parser);
+ add_stmt (c_end_compound_stmt (stmt, true));
+ }
+ else
+ add_stmt (c_parser_c99_block_statement (parser));
if (c_cont_label)
add_stmt (build1 (LABEL_EXPR, void_type_node, c_cont_label));
@@ -7602,17 +7773,82 @@ c_parser_omp_for_loop (c_parser *parser)
c_break_label = save_break;
c_cont_label = save_cont;
+ while (nbraces)
+ {
+ if (c_parser_next_token_is (parser, CPP_CLOSE_BRACE))
+ {
+ c_parser_consume_token (parser);
+ nbraces--;
+ }
+ else if (c_parser_next_token_is (parser, CPP_SEMICOLON))
+ c_parser_consume_token (parser);
+ else
+ {
+ c_parser_error (parser, "collapsed loops not perfectly nested");
+ while (nbraces)
+ {
+ stmt = c_begin_compound_stmt (true);
+ add_stmt (body);
+ c_parser_compound_statement_nostart (parser);
+ body = c_end_compound_stmt (stmt, true);
+ nbraces--;
+ }
+ goto pop_scopes;
+ }
+ }
+
/* Only bother calling c_finish_omp_for if we haven't already generated
an error from the initialization parsing. */
- if (decl != NULL && decl != error_mark_node && init != error_mark_node)
- return c_finish_omp_for (loc, decl, init, cond, incr, body, NULL);
- return NULL;
-
- error_init:
- c_parser_error (parser, "expected iteration declaration or initialization");
- c_parser_skip_until_found (parser, CPP_CLOSE_PAREN, "expected %<)%>");
- decl = init = cond = incr = NULL_TREE;
- goto parse_body;
+ if (!fail)
+ {
+ stmt = c_finish_omp_for (loc, declv, initv, condv, incrv, body, NULL);
+ if (stmt)
+ {
+ if (par_clauses != NULL)
+ {
+ tree *c;
+ for (c = par_clauses; *c ; )
+ if (OMP_CLAUSE_CODE (*c) != OMP_CLAUSE_FIRSTPRIVATE
+ && OMP_CLAUSE_CODE (*c) != OMP_CLAUSE_LASTPRIVATE)
+ c = &OMP_CLAUSE_CHAIN (*c);
+ else
+ {
+ for (i = 0; i < collapse; i++)
+ if (TREE_VEC_ELT (declv, i) == OMP_CLAUSE_DECL (*c))
+ break;
+ if (i == collapse)
+ c = &OMP_CLAUSE_CHAIN (*c);
+ else if (OMP_CLAUSE_CODE (*c) == OMP_CLAUSE_FIRSTPRIVATE)
+ {
+ error ("%Hiteration variable %qD should not be firstprivate",
+ &loc, OMP_CLAUSE_DECL (*c));
+ *c = OMP_CLAUSE_CHAIN (*c);
+ }
+ else
+ {
+ /* Copy lastprivate (decl) clause to OMP_FOR_CLAUSES,
+ change it to shared (decl) in
+ OMP_PARALLEL_CLAUSES. */
+ tree l = build_omp_clause (OMP_CLAUSE_LASTPRIVATE);
+ OMP_CLAUSE_DECL (l) = OMP_CLAUSE_DECL (*c);
+ OMP_CLAUSE_CHAIN (l) = clauses;
+ clauses = l;
+ OMP_CLAUSE_SET_CODE (*c, OMP_CLAUSE_SHARED);
+ }
+ }
+ }
+ OMP_FOR_CLAUSES (stmt) = clauses;
+ }
+ ret = stmt;
+ }
+pop_scopes:
+ while (for_block)
+ {
+ stmt = c_end_compound_stmt (TREE_VALUE (for_block), true);
+ add_stmt (stmt);
+ for_block = TREE_CHAIN (for_block);
+ }
+ return ret;
}
/* OpenMP 2.5:
@@ -7627,6 +7863,7 @@ c_parser_omp_for_loop (c_parser *parser)
| (1u << PRAGMA_OMP_CLAUSE_REDUCTION) \
| (1u << PRAGMA_OMP_CLAUSE_ORDERED) \
| (1u << PRAGMA_OMP_CLAUSE_SCHEDULE) \
+ | (1u << PRAGMA_OMP_CLAUSE_COLLAPSE) \
| (1u << PRAGMA_OMP_CLAUSE_NOWAIT))
static tree
@@ -7638,9 +7875,7 @@ c_parser_omp_for (c_parser *parser)
"#pragma omp for");
block = c_begin_compound_stmt (true);
- ret = c_parser_omp_for_loop (parser);
- if (ret)
- OMP_FOR_CLAUSES (ret) = clauses;
+ ret = c_parser_omp_for_loop (parser, clauses, NULL);
block = c_end_compound_stmt (block, true);
add_stmt (block);
@@ -7845,9 +8080,7 @@ c_parser_omp_parallel (c_parser *parser)
case PRAGMA_OMP_PARALLEL_FOR:
block = c_begin_omp_parallel ();
c_split_parallel_clauses (clauses, &par_clause, &ws_clause);
- stmt = c_parser_omp_for_loop (parser);
- if (stmt)
- OMP_FOR_CLAUSES (stmt) = ws_clause;
+ c_parser_omp_for_loop (parser, ws_clause, &par_clause);
stmt = c_finish_omp_parallel (par_clause, block);
OMP_PARALLEL_COMBINED (stmt) = 1;
break;
@@ -7894,6 +8127,43 @@ c_parser_omp_single (c_parser *parser)
return add_stmt (stmt);
}
+/* OpenMP 3.0:
+ # pragma omp task task-clause[optseq] new-line
+*/
+
+#define OMP_TASK_CLAUSE_MASK \
+ ( (1u << PRAGMA_OMP_CLAUSE_IF) \
+ | (1u << PRAGMA_OMP_CLAUSE_UNTIED) \
+ | (1u << PRAGMA_OMP_CLAUSE_DEFAULT) \
+ | (1u << PRAGMA_OMP_CLAUSE_PRIVATE) \
+ | (1u << PRAGMA_OMP_CLAUSE_FIRSTPRIVATE) \
+ | (1u << PRAGMA_OMP_CLAUSE_SHARED))
+
+static tree
+c_parser_omp_task (c_parser *parser)
+{
+ tree clauses, block;
+
+ clauses = c_parser_omp_all_clauses (parser, OMP_TASK_CLAUSE_MASK,
+ "#pragma omp task");
+
+ block = c_begin_omp_task ();
+ c_parser_statement (parser);
+ return c_finish_omp_task (clauses, block);
+}
+
+/* OpenMP 3.0:
+ # pragma omp taskwait new-line
+*/
+
+static void
+c_parser_omp_taskwait (c_parser *parser)
+{
+ c_parser_consume_pragma (parser);
+ c_parser_skip_to_pragma_eol (parser);
+
+ c_finish_omp_taskwait ();
+}
/* Main entry point to parsing most OpenMP pragmas. */
@@ -7940,6 +8210,9 @@ c_parser_omp_construct (c_parser *parser)
case PRAGMA_OMP_SINGLE:
stmt = c_parser_omp_single (parser);
break;
+ case PRAGMA_OMP_TASK:
+ stmt = c_parser_omp_task (parser);
+ break;
default:
gcc_unreachable ();
}
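For reference, and not part of the patch: a sketch of what c_parser_omp_for_loop tolerates between collapsed loops. Extra braces and empty statements are allowed; any other statement makes the nest imperfect and is rejected.

/* Illustrative only.  */
void
collapse_nesting (int a[4][4])
{
  int i, j;

#pragma omp parallel for collapse(2)
  for (i = 0; i < 4; i++)
    {				/* braces between the loops are fine */
      for (j = 0; j < 4; j++)
	a[i][j] = i + j;
    }

#if 0	/* Rejected: a real statement sits between the collapsed loops.  */
#pragma omp parallel for collapse(2)
  for (i = 0; i < 4; i++)
    {
      a[i][0] = -1;
      for (j = 0; j < 4; j++)
	a[i][j] = i + j;
    }
#endif
}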
diff --git a/gcc/c-pragma.c b/gcc/c-pragma.c
index 44e95b81c0a..81b9910b41a 100644
--- a/gcc/c-pragma.c
+++ b/gcc/c-pragma.c
@@ -896,6 +896,8 @@ static const struct omp_pragma_def omp_pragmas[] = {
{ "section", PRAGMA_OMP_SECTION },
{ "sections", PRAGMA_OMP_SECTIONS },
{ "single", PRAGMA_OMP_SINGLE },
+ { "task", PRAGMA_OMP_TASK },
+ { "taskwait", PRAGMA_OMP_TASKWAIT },
{ "threadprivate", PRAGMA_OMP_THREADPRIVATE }
};
diff --git a/gcc/c-pragma.h b/gcc/c-pragma.h
index 747a053b2e8..188afb8dbaa 100644
--- a/gcc/c-pragma.h
+++ b/gcc/c-pragma.h
@@ -41,6 +41,8 @@ typedef enum pragma_kind {
PRAGMA_OMP_SECTION,
PRAGMA_OMP_SECTIONS,
PRAGMA_OMP_SINGLE,
+ PRAGMA_OMP_TASK,
+ PRAGMA_OMP_TASKWAIT,
PRAGMA_OMP_THREADPRIVATE,
PRAGMA_GCC_PCH_PREPROCESS,
@@ -49,11 +51,12 @@ typedef enum pragma_kind {
} pragma_kind;
-/* All clauses defined by OpenMP 2.5.
+/* All clauses defined by OpenMP 2.5 and 3.0.
Used internally by both C and C++ parsers. */
typedef enum pragma_omp_clause {
PRAGMA_OMP_CLAUSE_NONE = 0,
+ PRAGMA_OMP_CLAUSE_COLLAPSE,
PRAGMA_OMP_CLAUSE_COPYIN,
PRAGMA_OMP_CLAUSE_COPYPRIVATE,
PRAGMA_OMP_CLAUSE_DEFAULT,
@@ -66,7 +69,8 @@ typedef enum pragma_omp_clause {
PRAGMA_OMP_CLAUSE_PRIVATE,
PRAGMA_OMP_CLAUSE_REDUCTION,
PRAGMA_OMP_CLAUSE_SCHEDULE,
- PRAGMA_OMP_CLAUSE_SHARED
+ PRAGMA_OMP_CLAUSE_SHARED,
+ PRAGMA_OMP_CLAUSE_UNTIED
} pragma_omp_clause;
extern struct cpp_reader* parse_in;
diff --git a/gcc/c-tree.h b/gcc/c-tree.h
index 02dfc6122d1..14df0444377 100644
--- a/gcc/c-tree.h
+++ b/gcc/c-tree.h
@@ -596,6 +596,8 @@ extern void c_end_vm_scope (unsigned int);
extern tree c_expr_to_decl (tree, bool *, bool *);
extern tree c_begin_omp_parallel (void);
extern tree c_finish_omp_parallel (tree, tree);
+extern tree c_begin_omp_task (void);
+extern tree c_finish_omp_task (tree, tree);
extern tree c_finish_omp_clauses (tree);
/* Set to 0 at beginning of a function definition, set to 1 if
diff --git a/gcc/c-typeck.c b/gcc/c-typeck.c
index b52a349ac6f..23880d16a43 100644
--- a/gcc/c-typeck.c
+++ b/gcc/c-typeck.c
@@ -8681,6 +8681,8 @@ c_begin_omp_parallel (void)
return block;
}
+/* Generate OMP_PARALLEL, with CLAUSES and BLOCK as its compound statement. */
+
tree
c_finish_omp_parallel (tree clauses, tree block)
{
@@ -8696,6 +8698,36 @@ c_finish_omp_parallel (tree clauses, tree block)
return add_stmt (stmt);
}
+/* Like c_begin_compound_stmt, except force the retention of the BLOCK. */
+
+tree
+c_begin_omp_task (void)
+{
+ tree block;
+
+ keep_next_level ();
+ block = c_begin_compound_stmt (true);
+
+ return block;
+}
+
+/* Generate OMP_TASK, with CLAUSES and BLOCK as its compound statement. */
+
+tree
+c_finish_omp_task (tree clauses, tree block)
+{
+ tree stmt;
+
+ block = c_end_compound_stmt (block, true);
+
+ stmt = make_node (OMP_TASK);
+ TREE_TYPE (stmt) = void_type_node;
+ OMP_TASK_CLAUSES (stmt) = clauses;
+ OMP_TASK_BODY (stmt) = block;
+
+ return add_stmt (stmt);
+}
+
/* For all elements of CLAUSES, validate them vs OpenMP constraints.
Remove any elements from the list that are invalid. */
@@ -8856,6 +8888,8 @@ c_finish_omp_clauses (tree clauses)
case OMP_CLAUSE_NOWAIT:
case OMP_CLAUSE_ORDERED:
case OMP_CLAUSE_DEFAULT:
+ case OMP_CLAUSE_UNTIED:
+ case OMP_CLAUSE_COLLAPSE:
pc = &OMP_CLAUSE_CHAIN (c);
continue;
diff --git a/gcc/cp/ChangeLog b/gcc/cp/ChangeLog
index 54afddc9851..936db240b42 100644
--- a/gcc/cp/ChangeLog
+++ b/gcc/cp/ChangeLog
@@ -1,3 +1,70 @@
+2008-06-06 Jakub Jelinek <jakub@redhat.com>
+
+ * cp-tree.h (cxx_omp_finish_clause, cxx_omp_create_clause_info,
+ dependent_omp_for_p, begin_omp_task, finish_omp_task,
+ finish_omp_taskwait): New prototypes.
+ (cxx_omp_clause_default_ctor): Add outer argument.
+ (finish_omp_for): Add new clauses argument.
+ * cp-gimplify.c (cxx_omp_finish_clause): New function.
+ (cxx_omp_predetermined_sharing): Moved from semantics.c, rewritten.
+ (cxx_omp_clause_default_ctor): Add outer argument.
+ (cp_genericize_r): Walk OMP_CLAUSE_LASTPRIVATE_STMT.
+ * cp-objcp-common.h (LANG_HOOKS_OMP_FINISH_CLAUSE): Define.
+ * parser.c (cp_parser_omp_for_loop): Parse collapsed for loops.
+ Add par_clauses argument. If decl is present in parallel's
+ lastprivate clause, change that clause to shared and add
+ a lastprivate clause for decl to OMP_FOR_CLAUSES.
+ Fix wording of error messages. Adjust finish_omp_for caller.
+ Add clauses argument. Parse loops with random access iterators.
+ (cp_parser_omp_clause_collapse, cp_parser_omp_clause_untied): New
+ functions.
+ (cp_parser_omp_for, cp_parser_omp_parallel): Adjust
+ cp_parser_omp_for_loop callers.
+ (cp_parser_omp_for_cond, cp_parser_omp_for_incr): New helper
+ functions.
+ (cp_parser_omp_clause_name): Handle collapse and untied
+ clauses.
+ (cp_parser_omp_clause_schedule): Handle auto schedule.
+ (cp_parser_omp_all_clauses): Handle PRAGMA_OMP_CLAUSE_COLLAPSE
+ and PRAGMA_OMP_CLAUSE_UNTIED.
+ (OMP_FOR_CLAUSE_MASK): Add PRAGMA_OMP_CLAUSE_COLLAPSE.
+ (OMP_TASK_CLAUSE_MASK): Define.
+ (cp_parser_omp_task, cp_parser_omp_taskwait): New functions.
+ (cp_parser_omp_construct): Handle PRAGMA_OMP_TASK.
+ (cp_parser_pragma): Handle PRAGMA_OMP_TASK and
+ PRAGMA_OMP_TASKWAIT.
+ * pt.c (tsubst_omp_clauses): Handle OMP_CLAUSE_COLLAPSE and
+ OMP_CLAUSE_UNTIED. Handle OMP_CLAUSE_LASTPRIVATE_STMT.
+ (tsubst_omp_for_iterator): New function.
+ (dependent_omp_for_p): New function.
+ (tsubst_expr) <case OMP_FOR>: Use it. Handle collapsed OMP_FOR
+ loops. Adjust finish_omp_for caller. Handle loops with random
+ access iterators. Adjust for OMP_FOR_{INIT,COND,INCR} changes.
+ (tsubst_expr): Handle OMP_TASK.
+ * semantics.c (cxx_omp_create_clause_info): New function.
+ (finish_omp_clauses): Call it. Handle OMP_CLAUSE_UNTIED and
+ OMP_CLAUSE_COLLAPSE.
+ (cxx_omp_predetermined_sharing): Removed.
+ * semantics.c (finish_omp_for): Allow pointer iterators. Use
+ handle_omp_for_class_iterator and dependent_omp_for_p. Handle
+ collapsed for loops. Adjust c_finish_omp_for caller. Add new
+ clauses argument. Fix check for type dependent cond or incr.
+ Set OMP_FOR_CLAUSES to clauses. Use cp_convert instead of
+ fold_convert to convert incr amount to difference_type. Only
+ fold if not in template. If decl is mentioned in lastprivate
+ clause, set OMP_CLAUSE_LASTPRIVATE_STMT. Handle loops with random
+ access iterators. Adjust for OMP_FOR_{INIT,COND,INCR}
+ changes.
+ (finish_omp_threadprivate): Allow static class members of the
+ current class.
+ (handle_omp_for_class_iterator, begin_omp_task, finish_omp_task,
+ finish_omp_taskwait): New functions.
+
+ * parser.c (cp_parser_binary_expression): Add prec argument.
+ (cp_parser_assignment_expression): Adjust caller.
+ * cp-tree.h (outer_curly_brace_block): New prototype.
+ * decl.c (outer_curly_brace_block): No longer static.
+
2008-06-02 Paolo Carlini <paolo.carlini@oracle.com>
PR c++/36404
diff --git a/gcc/cp/call.c b/gcc/cp/call.c
index 0948c790c41..1d54e7cb43e 100644
--- a/gcc/cp/call.c
+++ b/gcc/cp/call.c
@@ -333,7 +333,7 @@ build_call_a (tree function, int n, tree *argarray)
nothrow = ((decl && TREE_NOTHROW (decl))
|| TYPE_NOTHROW_P (TREE_TYPE (TREE_TYPE (function))));
- if (decl && TREE_THIS_VOLATILE (decl) && cfun)
+ if (decl && TREE_THIS_VOLATILE (decl) && cfun && cp_function_chain)
current_function_returns_abnormally = 1;
if (decl && TREE_DEPRECATED (decl))
diff --git a/gcc/cp/cp-gimplify.c b/gcc/cp/cp-gimplify.c
index cc3e8479921..c6d64dfbb75 100644
--- a/gcc/cp/cp-gimplify.c
+++ b/gcc/cp/cp-gimplify.c
@@ -694,10 +694,19 @@ cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
else if (TREE_CODE (stmt) == OMP_CLAUSE)
switch (OMP_CLAUSE_CODE (stmt))
{
+ case OMP_CLAUSE_LASTPRIVATE:
+ /* Don't dereference an invisiref in OpenMP clauses. */
+ if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
+ {
+ *walk_subtrees = 0;
+ if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
+ cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
+ cp_genericize_r, p_set, NULL);
+ }
+ break;
case OMP_CLAUSE_PRIVATE:
case OMP_CLAUSE_SHARED:
case OMP_CLAUSE_FIRSTPRIVATE:
- case OMP_CLAUSE_LASTPRIVATE:
case OMP_CLAUSE_COPYIN:
case OMP_CLAUSE_COPYPRIVATE:
/* Don't dereference an invisiref in OpenMP clauses. */
@@ -893,7 +902,8 @@ cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
NULL if there's nothing to do. */
tree
-cxx_omp_clause_default_ctor (tree clause, tree decl)
+cxx_omp_clause_default_ctor (tree clause, tree decl,
+ tree outer ATTRIBUTE_UNUSED)
{
tree info = CP_OMP_CLAUSE_INFO (clause);
tree ret = NULL;
@@ -958,3 +968,100 @@ cxx_omp_privatize_by_reference (const_tree decl)
{
return is_invisiref_parm (decl);
}
+
+/* True if OpenMP sharing attribute of DECL is predetermined. */
+
+enum omp_clause_default_kind
+cxx_omp_predetermined_sharing (tree decl)
+{
+ tree type;
+
+ /* Static data members are predetermined as shared. */
+ if (TREE_STATIC (decl))
+ {
+ tree ctx = CP_DECL_CONTEXT (decl);
+ if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
+ return OMP_CLAUSE_DEFAULT_SHARED;
+ }
+
+ type = TREE_TYPE (decl);
+ if (TREE_CODE (type) == REFERENCE_TYPE)
+ {
+ if (!is_invisiref_parm (decl))
+ return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
+ type = TREE_TYPE (type);
+
+ if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
+ {
+ /* NVR doesn't preserve const qualification of the
+ variable's type. */
+ tree outer = outer_curly_brace_block (current_function_decl);
+ tree var;
+
+ if (outer)
+ for (var = BLOCK_VARS (outer); var; var = TREE_CHAIN (var))
+ if (DECL_NAME (decl) == DECL_NAME (var)
+ && (TYPE_MAIN_VARIANT (type)
+ == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
+ {
+ if (TYPE_READONLY (TREE_TYPE (var)))
+ type = TREE_TYPE (var);
+ break;
+ }
+ }
+ }
+
+ if (type == error_mark_node)
+ return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
+
+ /* Variables with const-qualified type having no mutable member
+ are predetermined shared. */
+ if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
+ return OMP_CLAUSE_DEFAULT_SHARED;
+
+ return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
+}
+
+/* Finalize an implicitly determined clause. */
+
+void
+cxx_omp_finish_clause (tree c)
+{
+ tree decl, inner_type;
+ bool make_shared = false;
+
+ if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
+ return;
+
+ decl = OMP_CLAUSE_DECL (c);
+ decl = require_complete_type (decl);
+ inner_type = TREE_TYPE (decl);
+ if (decl == error_mark_node)
+ make_shared = true;
+ else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
+ {
+ if (is_invisiref_parm (decl))
+ inner_type = TREE_TYPE (inner_type);
+ else
+ {
+ error ("%qE implicitly determined as %<firstprivate%> has reference type",
+ decl);
+ make_shared = true;
+ }
+ }
+
+ /* We're interested in the base element, not arrays. */
+ while (TREE_CODE (inner_type) == ARRAY_TYPE)
+ inner_type = TREE_TYPE (inner_type);
+
+ /* Check for special function availability by building a call to one.
+ Save the results, because later we won't be in the right context
+ for making these queries. */
+ if (!make_shared
+ && CLASS_TYPE_P (inner_type)
+ && cxx_omp_create_clause_info (c, inner_type, false, true, false))
+ make_shared = true;
+
+ if (make_shared)
+ OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
+}
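
As a rough illustration only (not part of the patch), the sort of user code the new cxx_omp_predetermined_sharing rule applies to; the function and variable names below are made up for the example:

    // "n" is const-qualified and its type has no mutable members, so it is
    // predetermined shared and needs no explicit data-sharing clause even
    // under default(none).
    void
    fill (int *out)
    {
      const int n = 64;
    #pragma omp parallel for default(none) shared(out)
      for (int i = 0; i < n; i++)
        out[i] = n;
    }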
diff --git a/gcc/cp/cp-objcp-common.h b/gcc/cp/cp-objcp-common.h
index b2b8405fccd..1121eb08479 100644
--- a/gcc/cp/cp-objcp-common.h
+++ b/gcc/cp/cp-objcp-common.h
@@ -141,6 +141,8 @@ extern tree objcp_tsubst_copy_and_build (tree, tree, tsubst_flags_t,
#define LANG_HOOKS_OMP_CLAUSE_ASSIGN_OP cxx_omp_clause_assign_op
#undef LANG_HOOKS_OMP_CLAUSE_DTOR
#define LANG_HOOKS_OMP_CLAUSE_DTOR cxx_omp_clause_dtor
+#undef LANG_HOOKS_OMP_FINISH_CLAUSE
+#define LANG_HOOKS_OMP_FINISH_CLAUSE cxx_omp_finish_clause
#undef LANG_HOOKS_OMP_PRIVATIZE_BY_REFERENCE
#define LANG_HOOKS_OMP_PRIVATIZE_BY_REFERENCE cxx_omp_privatize_by_reference
diff --git a/gcc/cp/cp-tree.h b/gcc/cp/cp-tree.h
index 0c3d0dd21aa..952020ad045 100644
--- a/gcc/cp/cp-tree.h
+++ b/gcc/cp/cp-tree.h
@@ -4235,6 +4235,7 @@ extern void start_preparsed_function (tree, tree, int);
extern int start_function (cp_decl_specifier_seq *, const cp_declarator *, tree);
extern tree begin_function_body (void);
extern void finish_function_body (tree);
+extern tree outer_curly_brace_block (tree);
extern tree finish_function (int);
extern tree start_method (cp_decl_specifier_seq *, const cp_declarator *, tree);
extern tree finish_method (tree);
@@ -4468,6 +4469,7 @@ extern bool type_dependent_expression_p (tree);
extern bool any_type_dependent_arguments_p (const_tree);
extern bool value_dependent_expression_p (tree);
extern bool any_value_dependent_elements_p (const_tree);
+extern bool dependent_omp_for_p (tree, tree, tree, tree);
extern tree resolve_typename_type (tree, bool);
extern tree template_for_substitution (tree);
extern tree build_non_dependent_expr (tree);
@@ -4666,17 +4668,22 @@ extern tree begin_omp_structured_block (void);
extern tree finish_omp_structured_block (tree);
extern tree begin_omp_parallel (void);
extern tree finish_omp_parallel (tree, tree);
+extern tree begin_omp_task (void);
+extern tree finish_omp_task (tree, tree);
extern tree finish_omp_for (location_t, tree, tree,
- tree, tree, tree, tree);
+ tree, tree, tree, tree, tree);
extern void finish_omp_atomic (enum tree_code, tree, tree);
extern void finish_omp_barrier (void);
extern void finish_omp_flush (void);
+extern void finish_omp_taskwait (void);
extern enum omp_clause_default_kind cxx_omp_predetermined_sharing (tree);
-extern tree cxx_omp_clause_default_ctor (tree, tree);
+extern tree cxx_omp_clause_default_ctor (tree, tree, tree);
extern tree cxx_omp_clause_copy_ctor (tree, tree, tree);
extern tree cxx_omp_clause_assign_op (tree, tree, tree);
extern tree cxx_omp_clause_dtor (tree, tree);
+extern void cxx_omp_finish_clause (tree);
extern bool cxx_omp_privatize_by_reference (const_tree);
+extern bool cxx_omp_create_clause_info (tree, tree, bool, bool, bool);
extern tree baselink_for_fns (tree);
extern void finish_static_assert (tree, tree, location_t,
bool);
diff --git a/gcc/cp/decl.c b/gcc/cp/decl.c
index 0898d5d7fd2..8056518e746 100644
--- a/gcc/cp/decl.c
+++ b/gcc/cp/decl.c
@@ -11759,7 +11759,7 @@ finish_function_body (tree compstmt)
of curly braces, skipping the artificial block created for constructor
initializers. */
-static tree
+tree
outer_curly_brace_block (tree fndecl)
{
tree block = BLOCK_SUBBLOCKS (DECL_INITIAL (fndecl));
diff --git a/gcc/cp/parser.c b/gcc/cp/parser.c
index c3383e6441e..5ca1bd7e533 100644
--- a/gcc/cp/parser.c
+++ b/gcc/cp/parser.c
@@ -1611,7 +1611,7 @@ static tree cp_parser_delete_expression
static tree cp_parser_cast_expression
(cp_parser *, bool, bool);
static tree cp_parser_binary_expression
- (cp_parser *, bool);
+ (cp_parser *, bool, enum cp_parser_prec);
static tree cp_parser_question_colon_clause
(cp_parser *, tree);
static tree cp_parser_assignment_expression
@@ -6008,14 +6008,15 @@ cp_parser_cast_expression (cp_parser *parser, bool address_p, bool cast_p)
: binops_by_token[token->type].prec)
static tree
-cp_parser_binary_expression (cp_parser* parser, bool cast_p)
+cp_parser_binary_expression (cp_parser* parser, bool cast_p,
+ enum cp_parser_prec prec)
{
cp_parser_expression_stack stack;
cp_parser_expression_stack_entry *sp = &stack[0];
tree lhs, rhs;
cp_token *token;
enum tree_code tree_type, lhs_type, rhs_type;
- enum cp_parser_prec prec = PREC_NOT_OPERATOR, new_prec, lookahead_prec;
+ enum cp_parser_prec new_prec, lookahead_prec;
bool overloaded_p;
/* Parse the first expression. */
@@ -6192,7 +6193,7 @@ cp_parser_assignment_expression (cp_parser* parser, bool cast_p)
else
{
/* Parse the binary expressions (logical-or-expression). */
- expr = cp_parser_binary_expression (parser, cast_p);
+ expr = cp_parser_binary_expression (parser, cast_p, PREC_NOT_OPERATOR);
/* If the next token is a `?' then we're actually looking at a
conditional-expression. */
if (cp_lexer_next_token_is (parser->lexer, CPP_QUERY))
@@ -19493,7 +19494,9 @@ cp_parser_omp_clause_name (cp_parser *parser)
switch (p[0])
{
case 'c':
- if (!strcmp ("copyin", p))
+ if (!strcmp ("collapse", p))
+ result = PRAGMA_OMP_CLAUSE_COLLAPSE;
+ else if (!strcmp ("copyin", p))
result = PRAGMA_OMP_CLAUSE_COPYIN;
else if (!strcmp ("copyprivate", p))
result = PRAGMA_OMP_CLAUSE_COPYPRIVATE;
@@ -19526,6 +19529,10 @@ cp_parser_omp_clause_name (cp_parser *parser)
else if (!strcmp ("shared", p))
result = PRAGMA_OMP_CLAUSE_SHARED;
break;
+ case 'u':
+ if (!strcmp ("untied", p))
+ result = PRAGMA_OMP_CLAUSE_UNTIED;
+ break;
}
}
@@ -19628,6 +19635,47 @@ cp_parser_omp_var_list (cp_parser *parser, enum omp_clause_code kind, tree list)
return list;
}
+/* OpenMP 3.0:
+ collapse ( constant-expression ) */
+
+static tree
+cp_parser_omp_clause_collapse (cp_parser *parser, tree list)
+{
+ tree c, num;
+ location_t loc;
+ HOST_WIDE_INT n;
+
+ loc = cp_lexer_peek_token (parser->lexer)->location;
+ if (!cp_parser_require (parser, CPP_OPEN_PAREN, "%<(%>"))
+ return list;
+
+ num = cp_parser_constant_expression (parser, false, NULL);
+
+ if (!cp_parser_require (parser, CPP_CLOSE_PAREN, "%<)%>"))
+ cp_parser_skip_to_closing_parenthesis (parser, /*recovering=*/true,
+ /*or_comma=*/false,
+ /*consume_paren=*/true);
+
+ if (num == error_mark_node)
+ return list;
+ num = fold_non_dependent_expr (num);
+ if (!INTEGRAL_TYPE_P (TREE_TYPE (num))
+ || !host_integerp (num, 0)
+ || (n = tree_low_cst (num, 0)) <= 0
+ || (int) n != n)
+ {
+ error ("%Hcollapse argument needs positive constant integer expression", &loc);
+ return list;
+ }
+
+ check_no_duplicate_clause (list, OMP_CLAUSE_COLLAPSE, "collapse");
+ c = build_omp_clause (OMP_CLAUSE_COLLAPSE);
+ OMP_CLAUSE_CHAIN (c) = list;
+ OMP_CLAUSE_COLLAPSE_EXPR (c) = num;
+
+ return c;
+}
+
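A minimal usage sketch, not taken from the patch, of the clause the parser above accepts; the argument must be a positive integral constant expression:

    // collapse(2) folds the two nested loops into a single iteration space.
    void
    scale (int n, int m, double *a)
    {
    #pragma omp parallel for collapse(2)
      for (int i = 0; i < n; i++)
        for (int j = 0; j < m; j++)
          a[i * m + j] *= 2.0;
    }
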
/* OpenMP 2.5:
default ( shared | none ) */
@@ -19839,7 +19887,7 @@ cp_parser_omp_clause_reduction (cp_parser *parser, tree list)
schedule ( schedule-kind , expression )
schedule-kind:
- static | dynamic | guided | runtime */
+ static | dynamic | guided | runtime | auto */
static tree
cp_parser_omp_clause_schedule (cp_parser *parser, tree list)
@@ -19882,6 +19930,8 @@ cp_parser_omp_clause_schedule (cp_parser *parser, tree list)
}
else if (cp_lexer_next_token_is_keyword (parser->lexer, RID_STATIC))
OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_STATIC;
+ else if (cp_lexer_next_token_is_keyword (parser->lexer, RID_AUTO))
+ OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_AUTO;
else
goto invalid_kind;
cp_lexer_consume_token (parser->lexer);
@@ -19897,6 +19947,9 @@ cp_parser_omp_clause_schedule (cp_parser *parser, tree list)
else if (OMP_CLAUSE_SCHEDULE_KIND (c) == OMP_CLAUSE_SCHEDULE_RUNTIME)
error ("schedule %<runtime%> does not take "
"a %<chunk_size%> parameter");
+ else if (OMP_CLAUSE_SCHEDULE_KIND (c) == OMP_CLAUSE_SCHEDULE_AUTO)
+ error ("schedule %<auto%> does not take "
+ "a %<chunk_size%> parameter");
else
OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c) = t;
@@ -19919,6 +19972,21 @@ cp_parser_omp_clause_schedule (cp_parser *parser, tree list)
return list;
}
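
For orientation only (hypothetical code, not from the patch): the schedule kind added above in use; supplying a chunk_size together with auto is now diagnosed.

    // schedule(auto) leaves the scheduling decision to the implementation;
    // writing schedule(auto, 4) would trigger the new error above.
    double
    total (int n, const double *a)
    {
      double s = 0.0;
    #pragma omp parallel for schedule(auto) reduction(+:s)
      for (int i = 0; i < n; i++)
        s += a[i];
      return s;
    }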
+/* OpenMP 3.0:
+ untied */
+
+static tree
+cp_parser_omp_clause_untied (cp_parser *parser ATTRIBUTE_UNUSED, tree list)
+{
+ tree c;
+
+ check_no_duplicate_clause (list, OMP_CLAUSE_UNTIED, "untied");
+
+ c = build_omp_clause (OMP_CLAUSE_UNTIED);
+ OMP_CLAUSE_CHAIN (c) = list;
+ return c;
+}
+
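A hedged sketch of how the clause is used (not part of the patch); the node type and process function are hypothetical. untied takes no argument and may appear at most once on a task directive:

    struct node { struct node *next; };
    void process (node *);

    void
    traverse (node *head)
    {
    #pragma omp parallel
    #pragma omp single
      for (node *p = head; p; p = p->next)
        {
    #pragma omp task untied firstprivate(p)
          process (p);
        }
    }
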
/* Parse all OpenMP clauses. The set clauses allowed by the directive
is a bitmask in MASK. Return the list of clauses found; the result
of clause default goes in *pdefault. */
@@ -19944,6 +20012,10 @@ cp_parser_omp_all_clauses (cp_parser *parser, unsigned int mask,
switch (c_kind)
{
+ case PRAGMA_OMP_CLAUSE_COLLAPSE:
+ clauses = cp_parser_omp_clause_collapse (parser, clauses);
+ c_name = "collapse";
+ break;
case PRAGMA_OMP_CLAUSE_COPYIN:
clauses = cp_parser_omp_var_list (parser, OMP_CLAUSE_COPYIN, clauses);
c_name = "copyin";
@@ -20001,6 +20073,10 @@ cp_parser_omp_all_clauses (cp_parser *parser, unsigned int mask,
clauses);
c_name = "shared";
break;
+ case PRAGMA_OMP_CLAUSE_UNTIED:
+ clauses = cp_parser_omp_clause_untied (parser, clauses);
+ c_name = "untied";
+ break;
default:
cp_parser_error (parser, "expected %<#pragma omp%> clause");
goto saw_error;
@@ -20210,94 +20286,454 @@ cp_parser_omp_flush (cp_parser *parser, cp_token *pragma_tok)
finish_omp_flush ();
}
-/* Parse the restricted form of the for statment allowed by OpenMP. */
+/* Helper function, to parse the omp for condition expression. */
static tree
-cp_parser_omp_for_loop (cp_parser *parser)
+cp_parser_omp_for_cond (cp_parser *parser, tree decl)
{
- tree init, cond, incr, body, decl, pre_body;
- location_t loc;
+ tree lhs = cp_parser_cast_expression (parser, false, false), rhs;
+ enum tree_code op;
+ cp_token *token;
- if (!cp_lexer_next_token_is_keyword (parser->lexer, RID_FOR))
+ if (lhs != decl)
{
- cp_parser_error (parser, "for statement expected");
- return NULL;
+ cp_parser_skip_to_end_of_statement (parser);
+ return error_mark_node;
}
- loc = cp_lexer_consume_token (parser->lexer)->location;
- if (!cp_parser_require (parser, CPP_OPEN_PAREN, "%<(%>"))
- return NULL;
- init = decl = NULL;
- pre_body = push_stmt_list ();
- if (cp_lexer_next_token_is_not (parser->lexer, CPP_SEMICOLON))
+ token = cp_lexer_peek_token (parser->lexer);
+ op = binops_by_token [token->type].tree_type;
+ switch (op)
+ {
+ case LT_EXPR:
+ case LE_EXPR:
+ case GT_EXPR:
+ case GE_EXPR:
+ break;
+ default:
+ cp_parser_skip_to_end_of_statement (parser);
+ return error_mark_node;
+ }
+
+ cp_lexer_consume_token (parser->lexer);
+ rhs = cp_parser_binary_expression (parser, false,
+ PREC_RELATIONAL_EXPRESSION);
+ if (rhs == error_mark_node
+ || cp_lexer_next_token_is_not (parser->lexer, CPP_SEMICOLON))
{
- cp_decl_specifier_seq type_specifiers;
+ cp_parser_skip_to_end_of_statement (parser);
+ return error_mark_node;
+ }
- /* First, try to parse as an initialized declaration. See
- cp_parser_condition, from whence the bulk of this is copied. */
+ return build2 (op, boolean_type_node, lhs, rhs);
+}
- cp_parser_parse_tentatively (parser);
- cp_parser_type_specifier_seq (parser, /*is_condition=*/false,
- &type_specifiers);
- if (!cp_parser_error_occurred (parser))
+/* Helper function, to parse omp for increment expression. */
+
+static tree
+cp_parser_omp_for_incr (cp_parser *parser, tree decl)
+{
+ cp_token *token = cp_lexer_peek_token (parser->lexer);
+ enum tree_code op;
+ tree lhs, rhs;
+ cp_id_kind idk;
+ bool decl_first;
+
+ if (token->type == CPP_PLUS_PLUS || token->type == CPP_MINUS_MINUS)
+ {
+ op = (token->type == CPP_PLUS_PLUS
+ ? PREINCREMENT_EXPR : PREDECREMENT_EXPR);
+ cp_lexer_consume_token (parser->lexer);
+ lhs = cp_parser_cast_expression (parser, false, false);
+ if (lhs != decl)
+ return error_mark_node;
+ return build2 (op, TREE_TYPE (decl), decl, NULL_TREE);
+ }
+
+ lhs = cp_parser_primary_expression (parser, false, false, false, &idk);
+ if (lhs != decl)
+ return error_mark_node;
+
+ token = cp_lexer_peek_token (parser->lexer);
+ if (token->type == CPP_PLUS_PLUS || token->type == CPP_MINUS_MINUS)
+ {
+ op = (token->type == CPP_PLUS_PLUS
+ ? POSTINCREMENT_EXPR : POSTDECREMENT_EXPR);
+ cp_lexer_consume_token (parser->lexer);
+ return build2 (op, TREE_TYPE (decl), decl, NULL_TREE);
+ }
+
+ op = cp_parser_assignment_operator_opt (parser);
+ if (op == ERROR_MARK)
+ return error_mark_node;
+
+ if (op != NOP_EXPR)
+ {
+ rhs = cp_parser_assignment_expression (parser, false);
+ rhs = build2 (op, TREE_TYPE (decl), decl, rhs);
+ return build2 (MODIFY_EXPR, TREE_TYPE (decl), decl, rhs);
+ }
+
+ lhs = cp_parser_binary_expression (parser, false,
+ PREC_ADDITIVE_EXPRESSION);
+ token = cp_lexer_peek_token (parser->lexer);
+ decl_first = lhs == decl;
+ if (decl_first)
+ lhs = NULL_TREE;
+ if (token->type != CPP_PLUS
+ && token->type != CPP_MINUS)
+ return error_mark_node;
+
+ do
+ {
+ op = token->type == CPP_PLUS ? PLUS_EXPR : MINUS_EXPR;
+ cp_lexer_consume_token (parser->lexer);
+ rhs = cp_parser_binary_expression (parser, false,
+ PREC_ADDITIVE_EXPRESSION);
+ token = cp_lexer_peek_token (parser->lexer);
+ if (token->type == CPP_PLUS || token->type == CPP_MINUS || decl_first)
{
- tree asm_specification, attributes;
- cp_declarator *declarator;
-
- declarator = cp_parser_declarator (parser,
- CP_PARSER_DECLARATOR_NAMED,
- /*ctor_dtor_or_conv_p=*/NULL,
- /*parenthesized_p=*/NULL,
- /*member_p=*/false);
- attributes = cp_parser_attributes_opt (parser);
- asm_specification = cp_parser_asm_specification_opt (parser);
+ if (lhs == NULL_TREE)
+ {
+ if (op == PLUS_EXPR)
+ lhs = rhs;
+ else
+ lhs = build_x_unary_op (NEGATE_EXPR, rhs, tf_warning_or_error);
+ }
+ else
+ lhs = build_x_binary_op (op, lhs, ERROR_MARK, rhs, ERROR_MARK,
+ NULL, tf_warning_or_error);
+ }
+ }
+ while (token->type == CPP_PLUS || token->type == CPP_MINUS);
- cp_parser_require (parser, CPP_EQ, "%<=%>");
- if (cp_parser_parse_definitely (parser))
+ if (!decl_first)
+ {
+ if (rhs != decl || op == MINUS_EXPR)
+ return error_mark_node;
+ rhs = build2 (op, TREE_TYPE (decl), lhs, decl);
+ }
+ else
+ rhs = build2 (PLUS_EXPR, TREE_TYPE (decl), decl, lhs);
+
+ return build2 (MODIFY_EXPR, TREE_TYPE (decl), decl, rhs);
+}
+
+/* Parse the restricted form of the for statement allowed by OpenMP. */
+
+static tree
+cp_parser_omp_for_loop (cp_parser *parser, tree clauses, tree *par_clauses)
+{
+ tree init, cond, incr, body, decl, pre_body = NULL_TREE, ret;
+ tree for_block = NULL_TREE, real_decl, initv, condv, incrv, declv;
+ tree this_pre_body, cl;
+ location_t loc_first;
+ bool collapse_err = false;
+ int i, collapse = 1, nbraces = 0;
+
+ for (cl = clauses; cl; cl = OMP_CLAUSE_CHAIN (cl))
+ if (OMP_CLAUSE_CODE (cl) == OMP_CLAUSE_COLLAPSE)
+ collapse = tree_low_cst (OMP_CLAUSE_COLLAPSE_EXPR (cl), 0);
+
+ gcc_assert (collapse >= 1);
+
+ declv = make_tree_vec (collapse);
+ initv = make_tree_vec (collapse);
+ condv = make_tree_vec (collapse);
+ incrv = make_tree_vec (collapse);
+
+ loc_first = cp_lexer_peek_token (parser->lexer)->location;
+
+ for (i = 0; i < collapse; i++)
+ {
+ int bracecount = 0;
+ bool add_private_clause = false;
+ location_t loc;
+
+ if (!cp_lexer_next_token_is_keyword (parser->lexer, RID_FOR))
+ {
+ cp_parser_error (parser, "for statement expected");
+ return NULL;
+ }
+ loc = cp_lexer_consume_token (parser->lexer)->location;
+
+ if (!cp_parser_require (parser, CPP_OPEN_PAREN, "%<(%>"))
+ return NULL;
+
+ init = decl = real_decl = NULL;
+ this_pre_body = push_stmt_list ();
+ if (cp_lexer_next_token_is_not (parser->lexer, CPP_SEMICOLON))
+ {
+ cp_decl_specifier_seq type_specifiers;
+
+ /* First, try to parse as an initialized declaration. See
+ cp_parser_condition, from whence the bulk of this is copied. */
+
+ cp_parser_parse_tentatively (parser);
+ cp_parser_type_specifier_seq (parser, /*is_condition=*/false,
+ &type_specifiers);
+ if (!cp_parser_error_occurred (parser))
{
- tree pushed_scope;
+ tree asm_specification, attributes;
+ cp_declarator *declarator;
+
+ declarator = cp_parser_declarator (parser,
+ CP_PARSER_DECLARATOR_NAMED,
+ /*ctor_dtor_or_conv_p=*/NULL,
+ /*parenthesized_p=*/NULL,
+ /*member_p=*/false);
+ attributes = cp_parser_attributes_opt (parser);
+ asm_specification = cp_parser_asm_specification_opt (parser);
+
+ if (cp_lexer_next_token_is_not (parser->lexer, CPP_EQ))
+ cp_parser_require (parser, CPP_EQ, "%<=%>");
+ if (cp_parser_parse_definitely (parser))
+ {
+ tree pushed_scope;
+
+ decl = start_decl (declarator, &type_specifiers,
+ /*initialized_p=*/false, attributes,
+ /*prefix_attributes=*/NULL_TREE,
+ &pushed_scope);
+
+ if (CLASS_TYPE_P (TREE_TYPE (decl))
+ || type_dependent_expression_p (decl))
+ {
+ bool is_parenthesized_init, is_non_constant_init;
+
+ init = cp_parser_initializer (parser,
+ &is_parenthesized_init,
+ &is_non_constant_init);
+
+ cp_finish_decl (decl, init, !is_non_constant_init,
+ asm_specification,
+ LOOKUP_ONLYCONVERTING);
+ if (CLASS_TYPE_P (TREE_TYPE (decl)))
+ {
+ for_block
+ = tree_cons (NULL, this_pre_body, for_block);
+ init = NULL_TREE;
+ }
+ else
+ init = pop_stmt_list (this_pre_body);
+ this_pre_body = NULL_TREE;
+ }
+ else
+ {
+ cp_parser_require (parser, CPP_EQ, "%<=%>");
+ init = cp_parser_assignment_expression (parser, false);
- decl = start_decl (declarator, &type_specifiers,
- /*initialized_p=*/false, attributes,
- /*prefix_attributes=*/NULL_TREE,
- &pushed_scope);
+ if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
+ init = error_mark_node;
+ else
+ cp_finish_decl (decl, NULL_TREE,
+ /*init_const_expr_p=*/false,
+ asm_specification,
+ LOOKUP_ONLYCONVERTING);
+ }
- init = cp_parser_assignment_expression (parser, false);
+ if (pushed_scope)
+ pop_scope (pushed_scope);
+ }
+ }
+ else
+ cp_parser_abort_tentative_parse (parser);
- if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
- init = error_mark_node;
+ /* If parsing as an initialized declaration failed, try again as
+ a simple expression. */
+ if (decl == NULL)
+ {
+ cp_id_kind idk;
+ cp_parser_parse_tentatively (parser);
+ decl = cp_parser_primary_expression (parser, false, false,
+ false, &idk);
+ if (!cp_parser_error_occurred (parser)
+ && decl
+ && DECL_P (decl)
+ && CLASS_TYPE_P (TREE_TYPE (decl)))
+ {
+ tree rhs;
+
+ cp_parser_parse_definitely (parser);
+ cp_parser_require (parser, CPP_EQ, "%<=%>");
+ rhs = cp_parser_assignment_expression (parser, false);
+ finish_expr_stmt (build_x_modify_expr (decl, NOP_EXPR,
+ rhs,
+ tf_warning_or_error));
+ add_private_clause = true;
+ }
else
- cp_finish_decl (decl, NULL_TREE, /*init_const_expr_p=*/false,
- asm_specification, LOOKUP_ONLYCONVERTING);
+ {
+ decl = NULL;
+ cp_parser_abort_tentative_parse (parser);
+ init = cp_parser_expression (parser, false);
+ if (init)
+ {
+ if (TREE_CODE (init) == MODIFY_EXPR
+ || TREE_CODE (init) == MODOP_EXPR)
+ real_decl = TREE_OPERAND (init, 0);
+ }
+ }
+ }
+ }
+ cp_parser_require (parser, CPP_SEMICOLON, "%<;%>");
+ if (this_pre_body)
+ {
+ this_pre_body = pop_stmt_list (this_pre_body);
+ if (pre_body)
+ {
+ tree t = pre_body;
+ pre_body = push_stmt_list ();
+ add_stmt (t);
+ add_stmt (this_pre_body);
+ pre_body = pop_stmt_list (pre_body);
+ }
+ else
+ pre_body = this_pre_body;
+ }
- if (pushed_scope)
- pop_scope (pushed_scope);
+ if (decl)
+ real_decl = decl;
+ if (par_clauses != NULL && real_decl != NULL_TREE)
+ {
+ tree *c;
+ for (c = par_clauses; *c ; )
+ if (OMP_CLAUSE_CODE (*c) == OMP_CLAUSE_FIRSTPRIVATE
+ && OMP_CLAUSE_DECL (*c) == real_decl)
+ {
+ error ("%Hiteration variable %qD should not be firstprivate",
+ &loc, real_decl);
+ *c = OMP_CLAUSE_CHAIN (*c);
+ }
+ else if (OMP_CLAUSE_CODE (*c) == OMP_CLAUSE_LASTPRIVATE
+ && OMP_CLAUSE_DECL (*c) == real_decl)
+ {
+ /* Add lastprivate (decl) clause to OMP_FOR_CLAUSES,
+ change it to shared (decl) in OMP_PARALLEL_CLAUSES. */
+ tree l = build_omp_clause (OMP_CLAUSE_LASTPRIVATE);
+ OMP_CLAUSE_DECL (l) = real_decl;
+ OMP_CLAUSE_CHAIN (l) = clauses;
+ CP_OMP_CLAUSE_INFO (l) = CP_OMP_CLAUSE_INFO (*c);
+ clauses = l;
+ OMP_CLAUSE_SET_CODE (*c, OMP_CLAUSE_SHARED);
+ CP_OMP_CLAUSE_INFO (*c) = NULL;
+ add_private_clause = false;
+ }
+ else
+ {
+ if (OMP_CLAUSE_CODE (*c) == OMP_CLAUSE_PRIVATE
+ && OMP_CLAUSE_DECL (*c) == real_decl)
+ add_private_clause = false;
+ c = &OMP_CLAUSE_CHAIN (*c);
+ }
+ }
+
+ if (add_private_clause)
+ {
+ tree c;
+ for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
+ {
+ if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
+ || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
+ && OMP_CLAUSE_DECL (c) == decl)
+ break;
+ else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
+ && OMP_CLAUSE_DECL (c) == decl)
+ error ("%Hiteration variable %qD should not be firstprivate",
+ &loc, decl);
+ else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
+ && OMP_CLAUSE_DECL (c) == decl)
+ error ("%Hiteration variable %qD should not be reduction",
+ &loc, decl);
+ }
+ if (c == NULL)
+ {
+ c = build_omp_clause (OMP_CLAUSE_PRIVATE);
+ OMP_CLAUSE_DECL (c) = decl;
+ c = finish_omp_clauses (c);
+ if (c)
+ {
+ OMP_CLAUSE_CHAIN (c) = clauses;
+ clauses = c;
+ }
}
}
- else
- cp_parser_abort_tentative_parse (parser);
- /* If parsing as an initialized declaration failed, try again as
- a simple expression. */
- if (decl == NULL)
- init = cp_parser_expression (parser, false);
- }
- cp_parser_require (parser, CPP_SEMICOLON, "%<;%>");
- pre_body = pop_stmt_list (pre_body);
+ cond = NULL;
+ if (cp_lexer_next_token_is_not (parser->lexer, CPP_SEMICOLON))
+ {
+ /* If decl is an iterator, preserve LHS and RHS of the relational
+ expr until finish_omp_for. */
+ if (decl
+ && (type_dependent_expression_p (decl)
+ || CLASS_TYPE_P (TREE_TYPE (decl))))
+ cond = cp_parser_omp_for_cond (parser, decl);
+ else
+ cond = cp_parser_condition (parser);
+ }
+ cp_parser_require (parser, CPP_SEMICOLON, "%<;%>");
- cond = NULL;
- if (cp_lexer_next_token_is_not (parser->lexer, CPP_SEMICOLON))
- cond = cp_parser_condition (parser);
- cp_parser_require (parser, CPP_SEMICOLON, "%<;%>");
+ incr = NULL;
+ if (cp_lexer_next_token_is_not (parser->lexer, CPP_CLOSE_PAREN))
+ {
+ /* If decl is an iterator, preserve the operator on decl
+ until finish_omp_for. */
+ if (decl
+ && (type_dependent_expression_p (decl)
+ || CLASS_TYPE_P (TREE_TYPE (decl))))
+ incr = cp_parser_omp_for_incr (parser, decl);
+ else
+ incr = cp_parser_expression (parser, false);
+ }
- incr = NULL;
- if (cp_lexer_next_token_is_not (parser->lexer, CPP_CLOSE_PAREN))
- incr = cp_parser_expression (parser, false);
+ if (!cp_parser_require (parser, CPP_CLOSE_PAREN, "%<)%>"))
+ cp_parser_skip_to_closing_parenthesis (parser, /*recovering=*/true,
+ /*or_comma=*/false,
+ /*consume_paren=*/true);
- if (!cp_parser_require (parser, CPP_CLOSE_PAREN, "%<)%>"))
- cp_parser_skip_to_closing_parenthesis (parser, /*recovering=*/true,
- /*or_comma=*/false,
- /*consume_paren=*/true);
+ TREE_VEC_ELT (declv, i) = decl;
+ TREE_VEC_ELT (initv, i) = init;
+ TREE_VEC_ELT (condv, i) = cond;
+ TREE_VEC_ELT (incrv, i) = incr;
+
+ if (i == collapse - 1)
+ break;
+
+ /* FIXME: the OpenMP 3.0 draft isn't very clear on what exactly is
+ allowed between the collapsed for loops for them to still be
+ considered perfectly nested.  Hopefully the final version clarifies
+ this.  For now handle (multiple) {'s and empty statements. */
+ cp_parser_parse_tentatively (parser);
+ do
+ {
+ if (cp_lexer_next_token_is_keyword (parser->lexer, RID_FOR))
+ break;
+ else if (cp_lexer_next_token_is (parser->lexer, CPP_OPEN_BRACE))
+ {
+ cp_lexer_consume_token (parser->lexer);
+ bracecount++;
+ }
+ else if (bracecount
+ && cp_lexer_next_token_is (parser->lexer, CPP_SEMICOLON))
+ cp_lexer_consume_token (parser->lexer);
+ else
+ {
+ loc = cp_lexer_peek_token (parser->lexer)->location;
+ error ("%Hnot enough collapsed for loops", &loc);
+ collapse_err = true;
+ cp_parser_abort_tentative_parse (parser);
+ declv = NULL_TREE;
+ break;
+ }
+ }
+ while (1);
+
+ if (declv)
+ {
+ cp_parser_parse_definitely (parser);
+ nbraces += bracecount;
+ }
+ }
/* Note that we saved the original contents of this flag when we entered
the structured block, and so we don't need to re-save it here. */
@@ -20309,7 +20745,38 @@ cp_parser_omp_for_loop (cp_parser *parser)
cp_parser_statement (parser, NULL_TREE, false, NULL);
body = pop_stmt_list (body);
- return finish_omp_for (loc, decl, init, cond, incr, body, pre_body);
+ if (declv == NULL_TREE)
+ ret = NULL_TREE;
+ else
+ ret = finish_omp_for (loc_first, declv, initv, condv, incrv, body,
+ pre_body, clauses);
+
+ while (nbraces)
+ {
+ if (cp_lexer_next_token_is (parser->lexer, CPP_CLOSE_BRACE))
+ {
+ cp_lexer_consume_token (parser->lexer);
+ nbraces--;
+ }
+ else if (cp_lexer_next_token_is (parser->lexer, CPP_SEMICOLON))
+ cp_lexer_consume_token (parser->lexer);
+ else
+ {
+ if (!collapse_err)
+ error ("collapsed loops not perfectly nested");
+ collapse_err = true;
+ cp_parser_statement_seq_opt (parser, NULL);
+ cp_parser_require (parser, CPP_CLOSE_BRACE, "%<}%>");
+ }
+ }
+
+ while (for_block)
+ {
+ add_stmt (pop_stmt_list (TREE_VALUE (for_block)));
+ for_block = TREE_CHAIN (for_block);
+ }
+
+ return ret;
}
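
To make the brace handling above concrete, a hypothetical input sketch (not from the patch): only extra braces and empty statements may appear between the collapsed loops; anything else is diagnosed as "collapsed loops not perfectly nested".

    void
    clear (int n, int m, int *a)
    {
    #pragma omp parallel for collapse(2)
      for (int i = 0; i < n; i++)
        {                              // extra { } level: accepted
          for (int j = 0; j < m; j++)
            a[i * m + j] = 0;
        }
    }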
/* OpenMP 2.5:
@@ -20323,7 +20790,8 @@ cp_parser_omp_for_loop (cp_parser *parser)
| (1u << PRAGMA_OMP_CLAUSE_REDUCTION) \
| (1u << PRAGMA_OMP_CLAUSE_ORDERED) \
| (1u << PRAGMA_OMP_CLAUSE_SCHEDULE) \
- | (1u << PRAGMA_OMP_CLAUSE_NOWAIT))
+ | (1u << PRAGMA_OMP_CLAUSE_NOWAIT) \
+ | (1u << PRAGMA_OMP_CLAUSE_COLLAPSE))
static tree
cp_parser_omp_for (cp_parser *parser, cp_token *pragma_tok)
@@ -20337,9 +20805,7 @@ cp_parser_omp_for (cp_parser *parser, cp_token *pragma_tok)
sb = begin_omp_structured_block ();
save = cp_parser_begin_omp_structured_block (parser);
- ret = cp_parser_omp_for_loop (parser);
- if (ret)
- OMP_FOR_CLAUSES (ret) = clauses;
+ ret = cp_parser_omp_for_loop (parser, clauses, NULL);
cp_parser_end_omp_structured_block (parser, save);
add_stmt (finish_omp_structured_block (sb));
@@ -20537,9 +21003,7 @@ cp_parser_omp_parallel (cp_parser *parser, cp_token *pragma_tok)
case PRAGMA_OMP_PARALLEL_FOR:
c_split_parallel_clauses (clauses, &par_clause, &ws_clause);
- stmt = cp_parser_omp_for_loop (parser);
- if (stmt)
- OMP_FOR_CLAUSES (stmt) = ws_clause;
+ cp_parser_omp_for_loop (parser, ws_clause, &par_clause);
break;
case PRAGMA_OMP_PARALLEL_SECTIONS:
@@ -20584,6 +21048,43 @@ cp_parser_omp_single (cp_parser *parser, cp_token *pragma_tok)
return add_stmt (stmt);
}
+/* OpenMP 3.0:
+ # pragma omp task task-clause[optseq] new-line
+ structured-block */
+
+#define OMP_TASK_CLAUSE_MASK \
+ ( (1u << PRAGMA_OMP_CLAUSE_IF) \
+ | (1u << PRAGMA_OMP_CLAUSE_UNTIED) \
+ | (1u << PRAGMA_OMP_CLAUSE_DEFAULT) \
+ | (1u << PRAGMA_OMP_CLAUSE_PRIVATE) \
+ | (1u << PRAGMA_OMP_CLAUSE_FIRSTPRIVATE) \
+ | (1u << PRAGMA_OMP_CLAUSE_SHARED))
+
+static tree
+cp_parser_omp_task (cp_parser *parser, cp_token *pragma_tok)
+{
+ tree clauses, block;
+ unsigned int save;
+
+ clauses = cp_parser_omp_all_clauses (parser, OMP_TASK_CLAUSE_MASK,
+ "#pragma omp task", pragma_tok);
+ block = begin_omp_task ();
+ save = cp_parser_begin_omp_structured_block (parser);
+ cp_parser_statement (parser, NULL_TREE, false, NULL);
+ cp_parser_end_omp_structured_block (parser, save);
+ return finish_omp_task (clauses, block);
+}
+
+/* OpenMP 3.0:
+ # pragma omp taskwait new-line */
+
+static void
+cp_parser_omp_taskwait (cp_parser *parser, cp_token *pragma_tok)
+{
+ cp_parser_require_pragma_eol (parser, pragma_tok);
+ finish_omp_taskwait ();
+}
+
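A usage sketch of the two new directives, for orientation only; work is a hypothetical function. As enforced below in cp_parser_pragma, taskwait must appear inside a compound statement:

    int work (int);

    void
    run (int a, int b, int *r1, int *r2)
    {
    #pragma omp parallel
    #pragma omp single
      {
    #pragma omp task
        *r1 = work (a);
    #pragma omp task
        *r2 = work (b);
    #pragma omp taskwait               // wait for both child tasks
      }
    }
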
/* OpenMP 2.5:
# pragma omp threadprivate (variable-list) */
@@ -20631,6 +21132,9 @@ cp_parser_omp_construct (cp_parser *parser, cp_token *pragma_tok)
case PRAGMA_OMP_SINGLE:
stmt = cp_parser_omp_single (parser, pragma_tok);
break;
+ case PRAGMA_OMP_TASK:
+ stmt = cp_parser_omp_task (parser, pragma_tok);
+ break;
default:
gcc_unreachable ();
}
@@ -20738,6 +21242,21 @@ cp_parser_pragma (cp_parser *parser, enum pragma_context context)
}
break;
+ case PRAGMA_OMP_TASKWAIT:
+ switch (context)
+ {
+ case pragma_compound:
+ cp_parser_omp_taskwait (parser, pragma_tok);
+ return false;
+ case pragma_stmt:
+ error ("%<#pragma omp taskwait%> may only be "
+ "used in compound statements");
+ break;
+ default:
+ goto bad_stmt;
+ }
+ break;
+
case PRAGMA_OMP_THREADPRIVATE:
cp_parser_omp_threadprivate (parser, pragma_tok);
return false;
@@ -20750,6 +21269,7 @@ cp_parser_pragma (cp_parser *parser, enum pragma_context context)
case PRAGMA_OMP_PARALLEL:
case PRAGMA_OMP_SECTIONS:
case PRAGMA_OMP_SINGLE:
+ case PRAGMA_OMP_TASK:
if (context == pragma_external)
goto bad_stmt;
cp_parser_omp_construct (parser, pragma_tok);
diff --git a/gcc/cp/pt.c b/gcc/cp/pt.c
index 4bb43addb74..f141b74a6fd 100644
--- a/gcc/cp/pt.c
+++ b/gcc/cp/pt.c
@@ -10214,16 +10214,26 @@ tsubst_omp_clauses (tree clauses, tree args, tsubst_flags_t complain,
switch (OMP_CLAUSE_CODE (nc))
{
+ case OMP_CLAUSE_LASTPRIVATE:
+ if (OMP_CLAUSE_LASTPRIVATE_STMT (oc))
+ {
+ OMP_CLAUSE_LASTPRIVATE_STMT (nc) = push_stmt_list ();
+ tsubst_expr (OMP_CLAUSE_LASTPRIVATE_STMT (oc), args, complain,
+ in_decl, /*integral_constant_expression_p=*/false);
+ OMP_CLAUSE_LASTPRIVATE_STMT (nc)
+ = pop_stmt_list (OMP_CLAUSE_LASTPRIVATE_STMT (nc));
+ }
+ /* FALLTHRU */
case OMP_CLAUSE_PRIVATE:
case OMP_CLAUSE_SHARED:
case OMP_CLAUSE_FIRSTPRIVATE:
- case OMP_CLAUSE_LASTPRIVATE:
case OMP_CLAUSE_REDUCTION:
case OMP_CLAUSE_COPYIN:
case OMP_CLAUSE_COPYPRIVATE:
case OMP_CLAUSE_IF:
case OMP_CLAUSE_NUM_THREADS:
case OMP_CLAUSE_SCHEDULE:
+ case OMP_CLAUSE_COLLAPSE:
OMP_CLAUSE_OPERAND (nc, 0)
= tsubst_expr (OMP_CLAUSE_OPERAND (oc, 0), args, complain,
in_decl, /*integral_constant_expression_p=*/false);
@@ -10231,6 +10241,7 @@ tsubst_omp_clauses (tree clauses, tree args, tsubst_flags_t complain,
case OMP_CLAUSE_NOWAIT:
case OMP_CLAUSE_ORDERED:
case OMP_CLAUSE_DEFAULT:
+ case OMP_CLAUSE_UNTIED:
break;
default:
gcc_unreachable ();
@@ -10274,6 +10285,137 @@ tsubst_copy_asm_operands (tree t, tree args, tsubst_flags_t complain,
#undef RECUR
}
+/* Substitute one OMP_FOR iterator. */
+
+static void
+tsubst_omp_for_iterator (tree t, int i, tree declv, tree initv,
+ tree condv, tree incrv, tree *clauses,
+ tree args, tsubst_flags_t complain, tree in_decl,
+ bool integral_constant_expression_p)
+{
+#define RECUR(NODE) \
+ tsubst_expr ((NODE), args, complain, in_decl, \
+ integral_constant_expression_p)
+ tree decl, init, cond, incr;
+
+ init = TREE_VEC_ELT (OMP_FOR_INIT (t), i);
+ gcc_assert (TREE_CODE (init) == MODIFY_EXPR);
+ decl = RECUR (TREE_OPERAND (init, 0));
+ init = TREE_OPERAND (init, 1);
+ gcc_assert (!type_dependent_expression_p (decl));
+
+ if (!CLASS_TYPE_P (TREE_TYPE (decl)))
+ {
+ cond = RECUR (TREE_VEC_ELT (OMP_FOR_COND (t), i));
+ incr = TREE_VEC_ELT (OMP_FOR_INCR (t), i);
+ if (TREE_CODE (incr) == MODIFY_EXPR)
+ incr = build_x_modify_expr (RECUR (TREE_OPERAND (incr, 0)), NOP_EXPR,
+ RECUR (TREE_OPERAND (incr, 1)),
+ complain);
+ else
+ incr = RECUR (incr);
+ TREE_VEC_ELT (declv, i) = decl;
+ TREE_VEC_ELT (initv, i) = init;
+ TREE_VEC_ELT (condv, i) = cond;
+ TREE_VEC_ELT (incrv, i) = incr;
+ return;
+ }
+
+ if (init && TREE_CODE (init) != DECL_EXPR)
+ {
+ tree c;
+ for (c = *clauses; c ; c = OMP_CLAUSE_CHAIN (c))
+ {
+ if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
+ || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
+ && OMP_CLAUSE_DECL (c) == decl)
+ break;
+ else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
+ && OMP_CLAUSE_DECL (c) == decl)
+ error ("iteration variable %qD should not be firstprivate", decl);
+ else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
+ && OMP_CLAUSE_DECL (c) == decl)
+ error ("iteration variable %qD should not be reduction", decl);
+ }
+ if (c == NULL)
+ {
+ c = build_omp_clause (OMP_CLAUSE_PRIVATE);
+ OMP_CLAUSE_DECL (c) = decl;
+ c = finish_omp_clauses (c);
+ if (c)
+ {
+ OMP_CLAUSE_CHAIN (c) = *clauses;
+ *clauses = c;
+ }
+ }
+ }
+ cond = TREE_VEC_ELT (OMP_FOR_COND (t), i);
+ if (COMPARISON_CLASS_P (cond))
+ cond = build2 (TREE_CODE (cond), boolean_type_node,
+ RECUR (TREE_OPERAND (cond, 0)),
+ RECUR (TREE_OPERAND (cond, 1)));
+ else
+ cond = RECUR (cond);
+ incr = TREE_VEC_ELT (OMP_FOR_INCR (t), i);
+ switch (TREE_CODE (incr))
+ {
+ case PREINCREMENT_EXPR:
+ case PREDECREMENT_EXPR:
+ case POSTINCREMENT_EXPR:
+ case POSTDECREMENT_EXPR:
+ incr = build2 (TREE_CODE (incr), TREE_TYPE (decl),
+ RECUR (TREE_OPERAND (incr, 0)), NULL_TREE);
+ break;
+ case MODIFY_EXPR:
+ if (TREE_CODE (TREE_OPERAND (incr, 1)) == PLUS_EXPR
+ || TREE_CODE (TREE_OPERAND (incr, 1)) == MINUS_EXPR)
+ {
+ tree rhs = TREE_OPERAND (incr, 1);
+ incr = build2 (MODIFY_EXPR, TREE_TYPE (decl),
+ RECUR (TREE_OPERAND (incr, 0)),
+ build2 (TREE_CODE (rhs), TREE_TYPE (decl),
+ RECUR (TREE_OPERAND (rhs, 0)),
+ RECUR (TREE_OPERAND (rhs, 1))));
+ }
+ else
+ incr = RECUR (incr);
+ break;
+ case MODOP_EXPR:
+ if (TREE_CODE (TREE_OPERAND (incr, 1)) == PLUS_EXPR
+ || TREE_CODE (TREE_OPERAND (incr, 1)) == MINUS_EXPR)
+ {
+ tree lhs = RECUR (TREE_OPERAND (incr, 0));
+ incr = build2 (MODIFY_EXPR, TREE_TYPE (decl), lhs,
+ build2 (TREE_CODE (TREE_OPERAND (incr, 1)),
+ TREE_TYPE (decl), lhs,
+ RECUR (TREE_OPERAND (incr, 2))));
+ }
+ else if (TREE_CODE (TREE_OPERAND (incr, 1)) == NOP_EXPR
+ && (TREE_CODE (TREE_OPERAND (incr, 2)) == PLUS_EXPR
+ || (TREE_CODE (TREE_OPERAND (incr, 2)) == MINUS_EXPR)))
+ {
+ tree rhs = TREE_OPERAND (incr, 2);
+ incr = build2 (MODIFY_EXPR, TREE_TYPE (decl),
+ RECUR (TREE_OPERAND (incr, 0)),
+ build2 (TREE_CODE (rhs), TREE_TYPE (decl),
+ RECUR (TREE_OPERAND (rhs, 0)),
+ RECUR (TREE_OPERAND (rhs, 1))));
+ }
+ else
+ incr = RECUR (incr);
+ break;
+ default:
+ incr = RECUR (incr);
+ break;
+ }
+
+ TREE_VEC_ELT (declv, i) = decl;
+ TREE_VEC_ELT (initv, i) = init;
+ TREE_VEC_ELT (condv, i) = cond;
+ TREE_VEC_ELT (incrv, i) = incr;
+#undef RECUR
+}
+
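For illustration (hypothetical code, not in the patch): when a function template such as the following is instantiated, tsubst_omp_for_iterator above rebuilds the loop's decl/init/cond/incr vectors, here through the non-class, integral branch.

    template <typename T>
    void
    zero (T *a, int n)
    {
    #pragma omp parallel for
      for (int i = 0; i < n; i++)
        a[i] = T ();
    }

    template void zero<double> (double *, int);
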
/* Like tsubst_copy for expressions, etc. but also does semantic
processing. */
@@ -10597,21 +10739,55 @@ tsubst_expr (tree t, tree args, tsubst_flags_t complain, tree in_decl,
= OMP_PARALLEL_COMBINED (t);
break;
+ case OMP_TASK:
+ tmp = tsubst_omp_clauses (OMP_TASK_CLAUSES (t),
+ args, complain, in_decl);
+ stmt = begin_omp_task ();
+ RECUR (OMP_TASK_BODY (t));
+ finish_omp_task (tmp, stmt);
+ break;
+
case OMP_FOR:
{
- tree clauses, decl, init, cond, incr, body, pre_body;
+ tree clauses, body, pre_body;
+ tree declv, initv, condv, incrv;
+ int i;
clauses = tsubst_omp_clauses (OMP_FOR_CLAUSES (t),
args, complain, in_decl);
- init = OMP_FOR_INIT (t);
- gcc_assert (TREE_CODE (init) == MODIFY_EXPR);
- decl = RECUR (TREE_OPERAND (init, 0));
- init = RECUR (TREE_OPERAND (init, 1));
- cond = RECUR (OMP_FOR_COND (t));
- incr = RECUR (OMP_FOR_INCR (t));
+ declv = make_tree_vec (TREE_VEC_LENGTH (OMP_FOR_INIT (t)));
+ initv = make_tree_vec (TREE_VEC_LENGTH (OMP_FOR_INIT (t)));
+ condv = make_tree_vec (TREE_VEC_LENGTH (OMP_FOR_INIT (t)));
+ incrv = make_tree_vec (TREE_VEC_LENGTH (OMP_FOR_INIT (t)));
+
+ for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (t)); i++)
+ tsubst_omp_for_iterator (t, i, declv, initv, condv, incrv,
+ &clauses, args, complain, in_decl,
+ integral_constant_expression_p);
stmt = begin_omp_structured_block ();
+ for (i = 0; i < TREE_VEC_LENGTH (initv); i++)
+ if (TREE_VEC_ELT (initv, i) == NULL
+ || TREE_CODE (TREE_VEC_ELT (initv, i)) != DECL_EXPR)
+ TREE_VEC_ELT (initv, i) = RECUR (TREE_VEC_ELT (initv, i));
+ else if (CLASS_TYPE_P (TREE_TYPE (TREE_VEC_ELT (initv, i))))
+ {
+ tree init = RECUR (TREE_VEC_ELT (initv, i));
+ gcc_assert (init == TREE_VEC_ELT (declv, i));
+ TREE_VEC_ELT (initv, i) = NULL_TREE;
+ }
+ else
+ {
+ tree decl_expr = TREE_VEC_ELT (initv, i);
+ tree init = DECL_INITIAL (DECL_EXPR_DECL (decl_expr));
+ gcc_assert (init != NULL);
+ TREE_VEC_ELT (initv, i) = RECUR (init);
+ DECL_INITIAL (DECL_EXPR_DECL (decl_expr)) = NULL;
+ RECUR (decl_expr);
+ DECL_INITIAL (DECL_EXPR_DECL (decl_expr)) = init;
+ }
+
pre_body = push_stmt_list ();
RECUR (OMP_FOR_PRE_BODY (t));
pre_body = pop_stmt_list (pre_body);
@@ -10620,10 +10796,8 @@ tsubst_expr (tree t, tree args, tsubst_flags_t complain, tree in_decl,
RECUR (OMP_FOR_BODY (t));
body = pop_stmt_list (body);
- t = finish_omp_for (EXPR_LOCATION (t), decl, init, cond, incr, body,
- pre_body);
- if (t)
- OMP_FOR_CLAUSES (t) = clauses;
+ t = finish_omp_for (EXPR_LOCATION (t), declv, initv, condv, incrv,
+ body, pre_body, clauses);
add_stmt (finish_omp_structured_block (stmt));
}
@@ -16195,6 +16369,63 @@ dependent_template_id_p (tree tmpl, tree args)
|| any_dependent_template_arguments_p (args));
}
+/* Returns TRUE if OMP_FOR with DECLV, INITV, CONDV and INCRV vectors
+ is dependent. */
+
+bool
+dependent_omp_for_p (tree declv, tree initv, tree condv, tree incrv)
+{
+ int i;
+
+ if (!processing_template_decl)
+ return false;
+
+ for (i = 0; i < TREE_VEC_LENGTH (declv); i++)
+ {
+ tree decl = TREE_VEC_ELT (declv, i);
+ tree init = TREE_VEC_ELT (initv, i);
+ tree cond = TREE_VEC_ELT (condv, i);
+ tree incr = TREE_VEC_ELT (incrv, i);
+
+ if (type_dependent_expression_p (decl))
+ return true;
+
+ if (init && type_dependent_expression_p (init))
+ return true;
+
+ if (type_dependent_expression_p (cond))
+ return true;
+
+ if (COMPARISON_CLASS_P (cond)
+ && (type_dependent_expression_p (TREE_OPERAND (cond, 0))
+ || type_dependent_expression_p (TREE_OPERAND (cond, 1))))
+ return true;
+
+ if (TREE_CODE (incr) == MODOP_EXPR)
+ {
+ if (type_dependent_expression_p (TREE_OPERAND (incr, 0))
+ || type_dependent_expression_p (TREE_OPERAND (incr, 2)))
+ return true;
+ }
+ else if (type_dependent_expression_p (incr))
+ return true;
+ else if (TREE_CODE (incr) == MODIFY_EXPR)
+ {
+ if (type_dependent_expression_p (TREE_OPERAND (incr, 0)))
+ return true;
+ else if (BINARY_CLASS_P (TREE_OPERAND (incr, 1)))
+ {
+ tree t = TREE_OPERAND (incr, 1);
+ if (type_dependent_expression_p (TREE_OPERAND (t, 0))
+ || type_dependent_expression_p (TREE_OPERAND (t, 1)))
+ return true;
+ }
+ }
+ }
+
+ return false;
+}
+
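A hypothetical loop (not from the patch) for which dependent_omp_for_p returns true: the control variable's type depends on a template parameter, so finish_omp_for only builds a placeholder OMP_FOR and defers checking until instantiation.

    template <typename It>
    void
    advance_all (It first, It last)
    {
    #pragma omp parallel for
      for (It it = first; it < last; ++it)
        ++*it;
    }
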
/* TYPE is a TYPENAME_TYPE. Returns the ordinary TYPE to which the
TYPENAME_TYPE corresponds. Returns the original TYPENAME_TYPE if
no such TYPE can be found. Note that this function peers inside
diff --git a/gcc/cp/semantics.c b/gcc/cp/semantics.c
index 96999bf7e89..83d23394cb5 100644
--- a/gcc/cp/semantics.c
+++ b/gcc/cp/semantics.c
@@ -3359,6 +3359,94 @@ omp_clause_info_fndecl (tree t, tree type)
return NULL_TREE;
}
+/* Create CP_OMP_CLAUSE_INFO for clause C. Returns true if it is invalid. */
+
+bool
+cxx_omp_create_clause_info (tree c, tree type, bool need_default_ctor,
+ bool need_copy_ctor, bool need_copy_assignment)
+{
+ int save_errorcount = errorcount;
+ tree info, t;
+
+ /* Always allocate 3 elements for simplicity. These are the
+ function decls for the ctor, dtor, and assignment op.
+ This layout is known to the three lang hooks,
+ cxx_omp_clause_default_init, cxx_omp_clause_copy_init,
+ and cxx_omp_clause_assign_op. */
+ info = make_tree_vec (3);
+ CP_OMP_CLAUSE_INFO (c) = info;
+
+ if (need_default_ctor
+ || (need_copy_ctor && !TYPE_HAS_TRIVIAL_INIT_REF (type)))
+ {
+ if (need_default_ctor)
+ t = NULL;
+ else
+ {
+ t = build_int_cst (build_pointer_type (type), 0);
+ t = build1 (INDIRECT_REF, type, t);
+ t = build_tree_list (NULL, t);
+ }
+ t = build_special_member_call (NULL_TREE, complete_ctor_identifier,
+ t, type, LOOKUP_NORMAL,
+ tf_warning_or_error);
+
+ if (targetm.cxx.cdtor_returns_this () || errorcount)
+ /* Because constructors and destructors return this,
+ the call will have been cast to "void". Remove the
+ cast here. We would like to use STRIP_NOPS, but it
+ wouldn't work here because TYPE_MODE (t) and
+ TYPE_MODE (TREE_OPERAND (t, 0)) are different.
+ They are VOIDmode and Pmode, respectively. */
+ if (TREE_CODE (t) == NOP_EXPR)
+ t = TREE_OPERAND (t, 0);
+
+ TREE_VEC_ELT (info, 0) = get_callee_fndecl (t);
+ }
+
+ if ((need_default_ctor || need_copy_ctor)
+ && TYPE_HAS_NONTRIVIAL_DESTRUCTOR (type))
+ {
+ t = build_int_cst (build_pointer_type (type), 0);
+ t = build1 (INDIRECT_REF, type, t);
+ t = build_special_member_call (t, complete_dtor_identifier,
+ NULL, type, LOOKUP_NORMAL,
+ tf_warning_or_error);
+
+ if (targetm.cxx.cdtor_returns_this () || errorcount)
+ /* Because constructors and destructors return this,
+ the call will have been cast to "void". Remove the
+ cast here. We would like to use STRIP_NOPS, but it
+ wouldn't work here because TYPE_MODE (t) and
+ TYPE_MODE (TREE_OPERAND (t, 0)) are different.
+ They are VOIDmode and Pmode, respectively. */
+ if (TREE_CODE (t) == NOP_EXPR)
+ t = TREE_OPERAND (t, 0);
+
+ TREE_VEC_ELT (info, 1) = omp_clause_info_fndecl (t, type);
+ }
+
+ if (need_copy_assignment && !TYPE_HAS_TRIVIAL_ASSIGN_REF (type))
+ {
+ t = build_int_cst (build_pointer_type (type), 0);
+ t = build1 (INDIRECT_REF, type, t);
+ t = build_special_member_call (t, ansi_assopname (NOP_EXPR),
+ build_tree_list (NULL, t),
+ type, LOOKUP_NORMAL,
+ tf_warning_or_error);
+
+ /* We'll have called convert_from_reference on the call, which
+ may well have added an indirect_ref. It's unneeded here,
+ and in the way, so kill it. */
+ if (TREE_CODE (t) == INDIRECT_REF)
+ t = TREE_OPERAND (t, 0);
+
+ TREE_VEC_ELT (info, 2) = omp_clause_info_fndecl (t, type);
+ }
+
+ return errorcount != save_errorcount;
+}
+
/* For all elements of CLAUSES, validate them vs OpenMP constraints.
Remove any elements from the list that are invalid. */
@@ -3499,6 +3587,8 @@ finish_omp_clauses (tree clauses)
case OMP_CLAUSE_NOWAIT:
case OMP_CLAUSE_ORDERED:
case OMP_CLAUSE_DEFAULT:
+ case OMP_CLAUSE_UNTIED:
+ case OMP_CLAUSE_COLLAPSE:
break;
default:
@@ -3662,93 +3752,10 @@ finish_omp_clauses (tree clauses)
for making these queries. */
if (CLASS_TYPE_P (inner_type)
&& (need_default_ctor || need_copy_ctor || need_copy_assignment)
- && !type_dependent_expression_p (t))
- {
- int save_errorcount = errorcount;
- tree info;
-
- /* Always allocate 3 elements for simplicity. These are the
- function decls for the ctor, dtor, and assignment op.
- This layout is known to the three lang hooks,
- cxx_omp_clause_default_init, cxx_omp_clause_copy_init,
- and cxx_omp_clause_assign_op. */
- info = make_tree_vec (3);
- CP_OMP_CLAUSE_INFO (c) = info;
-
- if (need_default_ctor
- || (need_copy_ctor
- && !TYPE_HAS_TRIVIAL_INIT_REF (inner_type)))
- {
- if (need_default_ctor)
- t = NULL;
- else
- {
- t = build_int_cst (build_pointer_type (inner_type), 0);
- t = build1 (INDIRECT_REF, inner_type, t);
- t = build_tree_list (NULL, t);
- }
- t = build_special_member_call (NULL_TREE,
- complete_ctor_identifier,
- t, inner_type, LOOKUP_NORMAL,
- tf_warning_or_error);
-
- if (targetm.cxx.cdtor_returns_this () || errorcount)
- /* Because constructors and destructors return this,
- the call will have been cast to "void". Remove the
- cast here. We would like to use STRIP_NOPS, but it
- wouldn't work here because TYPE_MODE (t) and
- TYPE_MODE (TREE_OPERAND (t, 0)) are different.
- They are VOIDmode and Pmode, respectively. */
- if (TREE_CODE (t) == NOP_EXPR)
- t = TREE_OPERAND (t, 0);
-
- TREE_VEC_ELT (info, 0) = get_callee_fndecl (t);
- }
-
- if ((need_default_ctor || need_copy_ctor)
- && TYPE_HAS_NONTRIVIAL_DESTRUCTOR (inner_type))
- {
- t = build_int_cst (build_pointer_type (inner_type), 0);
- t = build1 (INDIRECT_REF, inner_type, t);
- t = build_special_member_call (t, complete_dtor_identifier,
- NULL, inner_type, LOOKUP_NORMAL,
- tf_warning_or_error);
-
- if (targetm.cxx.cdtor_returns_this () || errorcount)
- /* Because constructors and destructors return this,
- the call will have been cast to "void". Remove the
- cast here. We would like to use STRIP_NOPS, but it
- wouldn't work here because TYPE_MODE (t) and
- TYPE_MODE (TREE_OPERAND (t, 0)) are different.
- They are VOIDmode and Pmode, respectively. */
- if (TREE_CODE (t) == NOP_EXPR)
- t = TREE_OPERAND (t, 0);
-
- TREE_VEC_ELT (info, 1) = omp_clause_info_fndecl (t, inner_type);
- }
-
- if (need_copy_assignment
- && !TYPE_HAS_TRIVIAL_ASSIGN_REF (inner_type))
- {
- t = build_int_cst (build_pointer_type (inner_type), 0);
- t = build1 (INDIRECT_REF, inner_type, t);
- t = build_special_member_call (t, ansi_assopname (NOP_EXPR),
- build_tree_list (NULL, t),
- inner_type, LOOKUP_NORMAL,
- tf_warning_or_error);
-
- /* We'll have called convert_from_reference on the call, which
- may well have added an indirect_ref. It's unneeded here,
- and in the way, so kill it. */
- if (TREE_CODE (t) == INDIRECT_REF)
- t = TREE_OPERAND (t, 0);
-
- TREE_VEC_ELT (info, 2) = omp_clause_info_fndecl (t, inner_type);
- }
-
- if (errorcount != save_errorcount)
- remove = true;
- }
+ && !type_dependent_expression_p (t)
+ && cxx_omp_create_clause_info (c, inner_type, need_default_ctor,
+ need_copy_ctor, need_copy_assignment))
+ remove = true;
if (remove)
*pc = OMP_CLAUSE_CHAIN (c);
@@ -3787,9 +3794,10 @@ finish_omp_threadprivate (tree vars)
error ("automatic variable %qE cannot be %<threadprivate%>", v);
else if (! COMPLETE_TYPE_P (TREE_TYPE (v)))
error ("%<threadprivate%> %qE has incomplete type", v);
- else if (TREE_STATIC (v) && TYPE_P (CP_DECL_CONTEXT (v)))
- error ("%<threadprivate%> %qE is not file, namespace "
- "or block scope variable", v);
+ else if (TREE_STATIC (v) && TYPE_P (CP_DECL_CONTEXT (v))
+ && CP_DECL_CONTEXT (v) != current_class_type)
+ error ("%<threadprivate%> %qE directive not "
+ "in %qT definition", v, CP_DECL_CONTEXT (v));
else
{
/* Allocate a LANG_SPECIFIC structure for V, if needed. */
@@ -3855,6 +3863,252 @@ finish_omp_parallel (tree clauses, tree body)
return add_stmt (stmt);
}
+tree
+begin_omp_task (void)
+{
+ keep_next_level (true);
+ return begin_omp_structured_block ();
+}
+
+tree
+finish_omp_task (tree clauses, tree body)
+{
+ tree stmt;
+
+ body = finish_omp_structured_block (body);
+
+ stmt = make_node (OMP_TASK);
+ TREE_TYPE (stmt) = void_type_node;
+ OMP_TASK_CLAUSES (stmt) = clauses;
+ OMP_TASK_BODY (stmt) = body;
+
+ return add_stmt (stmt);
+}
+
+/* Helper function for finish_omp_for.  Convert the Ith random access
+ iterator into an integral iterator.  Return TRUE if an error has been
+ reported, FALSE on success. */
+
+static bool
+handle_omp_for_class_iterator (int i, location_t locus, tree declv, tree initv,
+ tree condv, tree incrv, tree *body,
+ tree *pre_body, tree clauses)
+{
+ tree diff, iter_init, iter_incr = NULL, last;
+ tree incr_var = NULL, orig_pre_body, orig_body, c;
+ tree decl = TREE_VEC_ELT (declv, i);
+ tree init = TREE_VEC_ELT (initv, i);
+ tree cond = TREE_VEC_ELT (condv, i);
+ tree incr = TREE_VEC_ELT (incrv, i);
+ tree iter = decl;
+ location_t elocus = locus;
+
+ if (init && EXPR_HAS_LOCATION (init))
+ elocus = EXPR_LOCATION (init);
+
+ switch (TREE_CODE (cond))
+ {
+ case GT_EXPR:
+ case GE_EXPR:
+ case LT_EXPR:
+ case LE_EXPR:
+ if (TREE_OPERAND (cond, 0) != iter)
+ cond = error_mark_node;
+ else
+ {
+ tree tem = build_x_binary_op (TREE_CODE (cond), iter, ERROR_MARK,
+ TREE_OPERAND (cond, 1), ERROR_MARK,
+ NULL, tf_warning_or_error);
+ if (error_operand_p (tem))
+ return true;
+ }
+ break;
+ default:
+ cond = error_mark_node;
+ break;
+ }
+ if (cond == error_mark_node)
+ {
+ error ("%Hinvalid controlling predicate", &elocus);
+ return true;
+ }
+ diff = build_x_binary_op (MINUS_EXPR, TREE_OPERAND (cond, 1),
+ ERROR_MARK, iter, ERROR_MARK, NULL,
+ tf_warning_or_error);
+ if (error_operand_p (diff))
+ return true;
+ if (TREE_CODE (TREE_TYPE (diff)) != INTEGER_TYPE)
+ {
+ error ("%Hdifference between %qE and %qD does not have integer type",
+ &elocus, TREE_OPERAND (cond, 1), iter);
+ return true;
+ }
+
+ switch (TREE_CODE (incr))
+ {
+ case PREINCREMENT_EXPR:
+ case PREDECREMENT_EXPR:
+ case POSTINCREMENT_EXPR:
+ case POSTDECREMENT_EXPR:
+ if (TREE_OPERAND (incr, 0) != iter)
+ {
+ incr = error_mark_node;
+ break;
+ }
+ iter_incr = build_x_unary_op (TREE_CODE (incr), iter,
+ tf_warning_or_error);
+ if (error_operand_p (iter_incr))
+ return true;
+ else if (TREE_CODE (incr) == PREINCREMENT_EXPR
+ || TREE_CODE (incr) == POSTINCREMENT_EXPR)
+ incr = integer_one_node;
+ else
+ incr = integer_minus_one_node;
+ break;
+ case MODIFY_EXPR:
+ if (TREE_OPERAND (incr, 0) != iter)
+ incr = error_mark_node;
+ else if (TREE_CODE (TREE_OPERAND (incr, 1)) == PLUS_EXPR
+ || TREE_CODE (TREE_OPERAND (incr, 1)) == MINUS_EXPR)
+ {
+ tree rhs = TREE_OPERAND (incr, 1);
+ if (TREE_OPERAND (rhs, 0) == iter)
+ {
+ if (TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 1)))
+ != INTEGER_TYPE)
+ incr = error_mark_node;
+ else
+ {
+ iter_incr = build_x_modify_expr (iter, TREE_CODE (rhs),
+ TREE_OPERAND (rhs, 1),
+ tf_warning_or_error);
+ if (error_operand_p (iter_incr))
+ return true;
+ incr = TREE_OPERAND (rhs, 1);
+ incr = cp_convert (TREE_TYPE (diff), incr);
+ if (TREE_CODE (rhs) == MINUS_EXPR)
+ {
+ incr = build1 (NEGATE_EXPR, TREE_TYPE (diff), incr);
+ incr = fold_if_not_in_template (incr);
+ }
+ if (TREE_CODE (incr) != INTEGER_CST
+ && (TREE_CODE (incr) != NOP_EXPR
+ || (TREE_CODE (TREE_OPERAND (incr, 0))
+ != INTEGER_CST)))
+ iter_incr = NULL;
+ }
+ }
+ else if (TREE_OPERAND (rhs, 1) == iter)
+ {
+ if (TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 0))) != INTEGER_TYPE
+ || TREE_CODE (rhs) != PLUS_EXPR)
+ incr = error_mark_node;
+ else
+ {
+ iter_incr = build_x_binary_op (PLUS_EXPR,
+ TREE_OPERAND (rhs, 0),
+ ERROR_MARK, iter,
+ ERROR_MARK, NULL,
+ tf_warning_or_error);
+ if (error_operand_p (iter_incr))
+ return true;
+ iter_incr = build_x_modify_expr (iter, NOP_EXPR,
+ iter_incr,
+ tf_warning_or_error);
+ if (error_operand_p (iter_incr))
+ return true;
+ incr = TREE_OPERAND (rhs, 0);
+ iter_incr = NULL;
+ }
+ }
+ else
+ incr = error_mark_node;
+ }
+ else
+ incr = error_mark_node;
+ break;
+ default:
+ incr = error_mark_node;
+ break;
+ }
+
+ if (incr == error_mark_node)
+ {
+ error ("%Hinvalid increment expression", &elocus);
+ return true;
+ }
+
+ incr = cp_convert (TREE_TYPE (diff), incr);
+ for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
+ if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
+ && OMP_CLAUSE_DECL (c) == iter)
+ break;
+
+ decl = create_temporary_var (TREE_TYPE (diff));
+ pushdecl (decl);
+ add_decl_expr (decl);
+ last = create_temporary_var (TREE_TYPE (diff));
+ pushdecl (last);
+ add_decl_expr (last);
+ if (c && iter_incr == NULL)
+ {
+ incr_var = create_temporary_var (TREE_TYPE (diff));
+ pushdecl (incr_var);
+ add_decl_expr (incr_var);
+ }
+ gcc_assert (stmts_are_full_exprs_p ());
+
+ orig_pre_body = *pre_body;
+ *pre_body = push_stmt_list ();
+ if (orig_pre_body)
+ add_stmt (orig_pre_body);
+ if (init != NULL)
+ finish_expr_stmt (build_x_modify_expr (iter, NOP_EXPR, init,
+ tf_warning_or_error));
+ init = build_int_cst (TREE_TYPE (diff), 0);
+ if (c && iter_incr == NULL)
+ {
+ finish_expr_stmt (build_x_modify_expr (incr_var, NOP_EXPR,
+ incr, tf_warning_or_error));
+ incr = incr_var;
+ iter_incr = build_x_modify_expr (iter, PLUS_EXPR, incr,
+ tf_warning_or_error);
+ }
+ finish_expr_stmt (build_x_modify_expr (last, NOP_EXPR, init,
+ tf_warning_or_error));
+ *pre_body = pop_stmt_list (*pre_body);
+
+ cond = cp_build_binary_op (TREE_CODE (cond), decl, diff,
+ tf_warning_or_error);
+ incr = build_modify_expr (decl, PLUS_EXPR, incr);
+
+ orig_body = *body;
+ *body = push_stmt_list ();
+ iter_init = build2 (MINUS_EXPR, TREE_TYPE (diff), decl, last);
+ iter_init = build_x_modify_expr (iter, PLUS_EXPR, iter_init,
+ tf_warning_or_error);
+ iter_init = build1 (NOP_EXPR, void_type_node, iter_init);
+ finish_expr_stmt (iter_init);
+ finish_expr_stmt (build_x_modify_expr (last, NOP_EXPR, decl,
+ tf_warning_or_error));
+ add_stmt (orig_body);
+ *body = pop_stmt_list (*body);
+
+ if (c)
+ {
+ OMP_CLAUSE_LASTPRIVATE_STMT (c) = push_stmt_list ();
+ finish_expr_stmt (iter_incr);
+ OMP_CLAUSE_LASTPRIVATE_STMT (c)
+ = pop_stmt_list (OMP_CLAUSE_LASTPRIVATE_STMT (c));
+ }
+
+ TREE_VEC_ELT (declv, i) = decl;
+ TREE_VEC_ELT (initv, i) = init;
+ TREE_VEC_ELT (condv, i) = cond;
+ TREE_VEC_ELT (incrv, i) = incr;
+
+ return false;
+}
+
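For illustration only (not part of the patch): the kind of loop the function above rewrites, with a random access iterator as the control variable; note the condition must use a relational operator rather than !=.

    #include <vector>

    void
    bump (std::vector<int> &v)
    {
    #pragma omp parallel for
      for (std::vector<int>::iterator it = v.begin (); it < v.end (); ++it)
        *it += 1;
    }
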
/* Build and validate an OMP_FOR statement. CLAUSES, BODY, COND, INCR
are directly for their associated operands in the statement. DECL
and INIT are a combo; if DECL is NULL then INIT ought to be a
@@ -3863,126 +4117,203 @@ finish_omp_parallel (tree clauses, tree body)
sk_omp scope. */
tree
-finish_omp_for (location_t locus, tree decl, tree init, tree cond,
- tree incr, tree body, tree pre_body)
+finish_omp_for (location_t locus, tree declv, tree initv, tree condv,
+ tree incrv, tree body, tree pre_body, tree clauses)
{
- tree omp_for = NULL;
+ tree omp_for = NULL, orig_incr = NULL;
+ tree decl, init, cond, incr;
+ location_t elocus;
+ int i;
- if (decl == NULL)
+ gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (initv));
+ gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (condv));
+ gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (incrv));
+ for (i = 0; i < TREE_VEC_LENGTH (declv); i++)
{
- if (init != NULL)
- switch (TREE_CODE (init))
- {
- case MODIFY_EXPR:
- decl = TREE_OPERAND (init, 0);
- init = TREE_OPERAND (init, 1);
- break;
- case MODOP_EXPR:
- if (TREE_CODE (TREE_OPERAND (init, 1)) == NOP_EXPR)
+ decl = TREE_VEC_ELT (declv, i);
+ init = TREE_VEC_ELT (initv, i);
+ cond = TREE_VEC_ELT (condv, i);
+ incr = TREE_VEC_ELT (incrv, i);
+ elocus = locus;
+
+ if (decl == NULL)
+ {
+ if (init != NULL)
+ switch (TREE_CODE (init))
{
+ case MODIFY_EXPR:
decl = TREE_OPERAND (init, 0);
- init = TREE_OPERAND (init, 2);
+ init = TREE_OPERAND (init, 1);
+ break;
+ case MODOP_EXPR:
+ if (TREE_CODE (TREE_OPERAND (init, 1)) == NOP_EXPR)
+ {
+ decl = TREE_OPERAND (init, 0);
+ init = TREE_OPERAND (init, 2);
+ }
+ break;
+ default:
+ break;
}
- break;
- default:
- break;
- }
- if (decl == NULL)
- {
- error ("expected iteration declaration or initialization");
- return NULL;
+ if (decl == NULL)
+ {
+ error ("%Hexpected iteration declaration or initialization",
+ &locus);
+ return NULL;
+ }
}
- }
- if (type_dependent_expression_p (decl)
- || type_dependent_expression_p (init)
- || (cond && type_dependent_expression_p (cond))
- || (incr && type_dependent_expression_p (incr)))
- {
- tree stmt;
+ if (init && EXPR_HAS_LOCATION (init))
+ elocus = EXPR_LOCATION (init);
if (cond == NULL)
{
- error ("%Hmissing controlling predicate", &locus);
+ error ("%Hmissing controlling predicate", &elocus);
return NULL;
}
if (incr == NULL)
{
- error ("%Hmissing increment expression", &locus);
+ error ("%Hmissing increment expression", &elocus);
return NULL;
}
+ TREE_VEC_ELT (declv, i) = decl;
+ TREE_VEC_ELT (initv, i) = init;
+ }
+
+ if (dependent_omp_for_p (declv, initv, condv, incrv))
+ {
+ tree stmt;
+
stmt = make_node (OMP_FOR);
- /* This is really just a place-holder. We'll be decomposing this
- again and going through the build_modify_expr path below when
- we instantiate the thing. */
- init = build2 (MODIFY_EXPR, void_type_node, decl, init);
+ for (i = 0; i < TREE_VEC_LENGTH (declv); i++)
+ {
+ /* This is really just a place-holder. We'll be decomposing this
+ again and going through the cp_build_modify_expr path below when
+ we instantiate the thing. */
+ TREE_VEC_ELT (initv, i)
+ = build2 (MODIFY_EXPR, void_type_node, TREE_VEC_ELT (declv, i),
+ TREE_VEC_ELT (initv, i));
+ }
TREE_TYPE (stmt) = void_type_node;
- OMP_FOR_INIT (stmt) = init;
- OMP_FOR_COND (stmt) = cond;
- OMP_FOR_INCR (stmt) = incr;
+ OMP_FOR_INIT (stmt) = initv;
+ OMP_FOR_COND (stmt) = condv;
+ OMP_FOR_INCR (stmt) = incrv;
OMP_FOR_BODY (stmt) = body;
OMP_FOR_PRE_BODY (stmt) = pre_body;
+ OMP_FOR_CLAUSES (stmt) = clauses;
SET_EXPR_LOCATION (stmt, locus);
return add_stmt (stmt);
}
- if (!DECL_P (decl))
- {
- error ("expected iteration declaration or initialization");
- return NULL;
- }
+ if (processing_template_decl)
+ orig_incr = make_tree_vec (TREE_VEC_LENGTH (incrv));
- if (!INTEGRAL_TYPE_P (TREE_TYPE (decl)))
+ for (i = 0; i < TREE_VEC_LENGTH (declv); )
{
- location_t elocus = locus;
-
- if (EXPR_HAS_LOCATION (init))
+ decl = TREE_VEC_ELT (declv, i);
+ init = TREE_VEC_ELT (initv, i);
+ cond = TREE_VEC_ELT (condv, i);
+ incr = TREE_VEC_ELT (incrv, i);
+ if (orig_incr)
+ TREE_VEC_ELT (orig_incr, i) = incr;
+ elocus = locus;
+
+ if (init && EXPR_HAS_LOCATION (init))
elocus = EXPR_LOCATION (init);
- error ("%Hinvalid type for iteration variable %qE", &elocus, decl);
- return NULL;
- }
- if (pre_body == NULL || IS_EMPTY_STMT (pre_body))
- pre_body = NULL;
- else if (! processing_template_decl)
- {
- add_stmt (pre_body);
- pre_body = NULL;
- }
+ if (!DECL_P (decl))
+ {
+ error ("%Hexpected iteration declaration or initialization",
+ &elocus);
+ return NULL;
+ }
- if (!processing_template_decl)
- init = fold_build_cleanup_point_expr (TREE_TYPE (init), init);
- init = cp_build_modify_expr (decl, NOP_EXPR, init, tf_warning_or_error);
- if (cond && TREE_SIDE_EFFECTS (cond) && COMPARISON_CLASS_P (cond))
- {
- int n = TREE_SIDE_EFFECTS (TREE_OPERAND (cond, 1)) != 0;
- tree t = TREE_OPERAND (cond, n);
+ if (incr && TREE_CODE (incr) == MODOP_EXPR)
+ {
+ if (orig_incr)
+ TREE_VEC_ELT (orig_incr, i) = incr;
+ incr = cp_build_modify_expr (TREE_OPERAND (incr, 0),
+ TREE_CODE (TREE_OPERAND (incr, 1)),
+ TREE_OPERAND (incr, 2),
+ tf_warning_or_error);
+ }
+
+ if (CLASS_TYPE_P (TREE_TYPE (decl)))
+ {
+ if (handle_omp_for_class_iterator (i, locus, declv, initv, condv,
+ incrv, &body, &pre_body, clauses))
+ return NULL;
+ continue;
+ }
+
+ if (!INTEGRAL_TYPE_P (TREE_TYPE (decl))
+ && TREE_CODE (TREE_TYPE (decl)) != POINTER_TYPE)
+ {
+ error ("%Hinvalid type for iteration variable %qE", &elocus, decl);
+ return NULL;
+ }
if (!processing_template_decl)
- TREE_OPERAND (cond, n)
- = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
+ init = fold_build_cleanup_point_expr (TREE_TYPE (init), init);
+ init = cp_build_modify_expr (decl, NOP_EXPR, init, tf_warning_or_error);
+ if (cond && TREE_SIDE_EFFECTS (cond) && COMPARISON_CLASS_P (cond))
+ {
+ int n = TREE_SIDE_EFFECTS (TREE_OPERAND (cond, 1)) != 0;
+ tree t = TREE_OPERAND (cond, n);
+
+ if (!processing_template_decl)
+ TREE_OPERAND (cond, n)
+ = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
+ }
+ if (decl == error_mark_node || init == error_mark_node)
+ return NULL;
+
+ TREE_VEC_ELT (declv, i) = decl;
+ TREE_VEC_ELT (initv, i) = init;
+ TREE_VEC_ELT (condv, i) = cond;
+ TREE_VEC_ELT (incrv, i) = incr;
+ i++;
}
- if (decl != error_mark_node && init != error_mark_node)
- omp_for = c_finish_omp_for (locus, decl, init, cond, incr, body, pre_body);
- if (omp_for != NULL
- && TREE_CODE (OMP_FOR_INCR (omp_for)) == MODIFY_EXPR
- && TREE_SIDE_EFFECTS (TREE_OPERAND (OMP_FOR_INCR (omp_for), 1))
- && BINARY_CLASS_P (TREE_OPERAND (OMP_FOR_INCR (omp_for), 1)))
+
+ if (IS_EMPTY_STMT (pre_body))
+ pre_body = NULL;
+
+ omp_for = c_finish_omp_for (locus, declv, initv, condv, incrv,
+ body, pre_body);
+
+ if (omp_for == NULL)
+ return NULL;
+
+ for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INCR (omp_for)); i++)
{
- tree t = TREE_OPERAND (OMP_FOR_INCR (omp_for), 1);
- int n = TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)) != 0;
+ tree incr = TREE_VEC_ELT (OMP_FOR_INCR (omp_for), i);
- if (!processing_template_decl)
- TREE_OPERAND (t, n)
- = fold_build_cleanup_point_expr (TREE_TYPE (TREE_OPERAND (t, n)),
- TREE_OPERAND (t, n));
+ if (TREE_CODE (incr) != MODIFY_EXPR)
+ continue;
+
+ if (TREE_SIDE_EFFECTS (TREE_OPERAND (incr, 1))
+ && BINARY_CLASS_P (TREE_OPERAND (incr, 1)))
+ {
+ tree t = TREE_OPERAND (incr, 1);
+ int n = TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)) != 0;
+
+ if (!processing_template_decl)
+ TREE_OPERAND (t, n)
+ = fold_build_cleanup_point_expr (TREE_TYPE (TREE_OPERAND (t, n)),
+ TREE_OPERAND (t, n));
+ }
+
+ if (orig_incr)
+ TREE_VEC_ELT (OMP_FOR_INCR (omp_for), i) = TREE_VEC_ELT (orig_incr, i);
}
+ if (omp_for != NULL)
+ OMP_FOR_CLAUSES (omp_for) = clauses;
return omp_for;
}
@@ -4039,26 +4370,12 @@ finish_omp_flush (void)
finish_expr_stmt (stmt);
}
-/* True if OpenMP sharing attribute of DECL is predetermined. */
-
-enum omp_clause_default_kind
-cxx_omp_predetermined_sharing (tree decl)
+void
+finish_omp_taskwait (void)
{
- enum omp_clause_default_kind kind;
-
- kind = c_omp_predetermined_sharing (decl);
- if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
- return kind;
-
- /* Static data members are predetermined as shared. */
- if (TREE_STATIC (decl))
- {
- tree ctx = CP_DECL_CONTEXT (decl);
- if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
- return OMP_CLAUSE_DEFAULT_SHARED;
- }
-
- return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
+ tree fn = built_in_decls[BUILT_IN_GOMP_TASKWAIT];
+ tree stmt = finish_call_expr (fn, NULL, false, false, tf_warning_or_error);
+ finish_expr_stmt (stmt);
}
void
diff --git a/gcc/fortran/ChangeLog b/gcc/fortran/ChangeLog
index 826b40972f9..5df5de5cf74 100644
--- a/gcc/fortran/ChangeLog
+++ b/gcc/fortran/ChangeLog
@@ -1,3 +1,87 @@
+2008-06-06 Jakub Jelinek <jakub@redhat.com>
+
+ * scanner.c (skip_free_comments, skip_fixed_comments): Handle tabs.
+ * parse.c (next_free): Allow tab after !$omp.
+ (decode_omp_directive): Handle !$omp task, !$omp taskwait
+ and !$omp end task.
+ (case_executable): Add ST_OMP_TASKWAIT.
+ (case_exec_markers): Add ST_OMP_TASK.
+ (gfc_ascii_statement): Handle ST_OMP_TASK, ST_OMP_END_TASK and
+ ST_OMP_TASKWAIT.
+ (parse_omp_structured_block, parse_executable): Handle ST_OMP_TASK.
+ * gfortran.h (gfc_find_sym_in_expr): New prototype.
+ (gfc_statement): Add ST_OMP_TASK, ST_OMP_END_TASK and ST_OMP_TASKWAIT.
+ (gfc_omp_clauses): Add OMP_SCHED_AUTO to sched_kind,
+ OMP_DEFAULT_FIRSTPRIVATE to default_sharing. Add collapse and
+ untied fields.
+ (gfc_exec_op): Add EXEC_OMP_TASK and EXEC_OMP_TASKWAIT.
+ * f95-lang.c (LANG_HOOKS_OMP_CLAUSE_COPY_CTOR,
+ LANG_HOOKS_OMP_CLAUSE_ASSIGN_OP, LANG_HOOKS_OMP_CLAUSE_DTOR,
+ LANG_HOOKS_OMP_PRIVATE_OUTER_REF): Define.
+ * trans.h (gfc_omp_clause_default_ctor): Add another argument.
+ (gfc_omp_clause_copy_ctor, gfc_omp_clause_assign_op,
+ gfc_omp_clause_dtor, gfc_omp_private_outer_ref): New prototypes.
+ * types.def (BT_ULONGLONG, BT_PTR_ULONGLONG,
+ BT_FN_BOOL_ULONGLONGPTR_ULONGLONGPTR,
+ BT_FN_BOOL_BOOL_ULL_ULL_ULL_ULLPTR_ULLPTR,
+ BT_FN_BOOL_BOOL_ULL_ULL_ULL_ULL_ULLPTR_ULLPTR,
+ BT_FN_VOID_PTR_PTR, BT_PTR_FN_VOID_PTR_PTR,
+ BT_FN_VOID_OMPFN_PTR_OMPCPYFN_LONG_LONG_BOOL_UINT): New.
+ (BT_BOOL): Use integer type with BOOL_TYPE_SIZE rather
+ than boolean_type_node.
+ * dump-parse-tree.c (gfc_show_omp_node): Handle EXEC_OMP_TASK,
+ EXEC_OMP_TASKWAIT, OMP_SCHED_AUTO, OMP_DEFAULT_FIRSTPRIVATE,
+ untied and collapse clauses.
+ (gfc_show_code_node): Handle EXEC_OMP_TASK and EXEC_OMP_TASKWAIT.
+ * trans.c (gfc_trans_code): Handle EXEC_OMP_TASK and
+ EXEC_OMP_TASKWAIT.
+ * st.c (gfc_free_statement): Likewise.
+ * resolve.c (gfc_resolve_blocks, resolve_code): Likewise.
+ (find_sym_in_expr): Rename to...
+ (gfc_find_sym_in_expr): ... this. No longer static.
+ (resolve_allocate_expr, resolve_ordinary_assign): Adjust caller.
+ * match.h (gfc_match_omp_task, gfc_match_omp_taskwait): New
+ prototypes.
+ * openmp.c (resolve_omp_clauses): Allow allocatable arrays in
+ firstprivate, lastprivate, reduction, copyprivate and copyin
+ clauses.
+ (omp_current_do_code): Made static.
+ (omp_current_do_collapse): New variable.
+ (gfc_resolve_omp_do_blocks): Compute omp_current_do_collapse,
+ clear omp_current_do_code and omp_current_do_collapse on return.
+ (gfc_resolve_do_iterator): Handle collapsed do loops.
+ (resolve_omp_do): Likewise, diagnose erroneous collapsed do loops.
+ (OMP_CLAUSE_COLLAPSE, OMP_CLAUSE_UNTIED): Define.
+ (gfc_match_omp_clauses): Handle default (firstprivate),
+ schedule (auto), untied and collapse (n) clauses.
+ (OMP_DO_CLAUSES): Add OMP_CLAUSE_COLLAPSE.
+ (OMP_TASK_CLAUSES): Define.
+ (gfc_match_omp_task, gfc_match_omp_taskwait): New functions.
+ * trans-openmp.c (gfc_omp_private_outer_ref): New function.
+ (gfc_omp_clause_default_ctor): Add outer argument. For allocatable
+ arrays allocate them with the bounds of the outer var if outer
+ var is allocated.
+ (gfc_omp_clause_copy_ctor, gfc_omp_clause_assign_op,
+ gfc_omp_clause_dtor): New functions.
+ (gfc_trans_omp_array_reduction): If decl is allocatable array,
+ allocate it with outer var's bounds in OMP_CLAUSE_REDUCTION_INIT
+ and deallocate it in OMP_CLAUSE_REDUCTION_MERGE.
+ (gfc_omp_predetermined_sharing): Return OMP_CLAUSE_DEFAULT_SHARED
+ for assumed-size arrays.
+ (gfc_trans_omp_do): Add par_clauses argument. If dovar is
+ present in lastprivate clause and do loop isn't simple,
+ set OMP_CLAUSE_LASTPRIVATE_STMT. If dovar is present in
+ parallel's lastprivate clause, change it to shared and add
+ lastprivate clause to OMP_FOR_CLAUSES. Handle collapsed do loops.
+ (gfc_trans_omp_directive): Adjust gfc_trans_omp_do callers.
+ (gfc_trans_omp_parallel_do): Likewise. Move collapse clause to
+ OMP_FOR from OMP_PARALLEL.
+ (gfc_trans_omp_clauses): Handle OMP_SCHED_AUTO,
+ OMP_DEFAULT_FIRSTPRIVATE, untied and collapse clauses.
+ (gfc_trans_omp_task, gfc_trans_omp_taskwait): New functions.
+ (gfc_trans_omp_directive): Handle EXEC_OMP_TASK and
+ EXEC_OMP_TASKWAIT.
+
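A minimal free-form sketch of the surface syntax the entry above adds support for (illustrative only, not taken from the patch or its testsuite; assumes an OpenMP-enabled gfortran invoked with -fopenmp, and the program and variable names are arbitrary):

  program task_sketch
    implicit none
    integer :: a(8), i
    a = 0
  !$omp parallel shared(a) private(i)
  !$omp master
    do i = 1, size(a)
  !$omp task firstprivate(i) shared(a) untied
      a(i) = i * i               ! each task fills one element
  !$omp end task
    end do
  !$omp taskwait                 ! wait for the tasks generated above
  !$omp end master
  !$omp end parallel
    print *, a
  end program task_sketch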
2008-06-04 Janus Weil <janus@gcc.gnu.org>
PR fortran/36322
diff --git a/gcc/fortran/dump-parse-tree.c b/gcc/fortran/dump-parse-tree.c
index 44a4941e7b4..80ff5bcecb7 100644
--- a/gcc/fortran/dump-parse-tree.c
+++ b/gcc/fortran/dump-parse-tree.c
@@ -848,6 +848,8 @@ show_omp_node (int level, gfc_code *c)
case EXEC_OMP_PARALLEL_WORKSHARE: name = "PARALLEL WORKSHARE"; break;
case EXEC_OMP_SECTIONS: name = "SECTIONS"; break;
case EXEC_OMP_SINGLE: name = "SINGLE"; break;
+ case EXEC_OMP_TASK: name = "TASK"; break;
+ case EXEC_OMP_TASKWAIT: name = "TASKWAIT"; break;
case EXEC_OMP_WORKSHARE: name = "WORKSHARE"; break;
default:
gcc_unreachable ();
@@ -863,6 +865,7 @@ show_omp_node (int level, gfc_code *c)
case EXEC_OMP_SINGLE:
case EXEC_OMP_WORKSHARE:
case EXEC_OMP_PARALLEL_WORKSHARE:
+ case EXEC_OMP_TASK:
omp_clauses = c->ext.omp_clauses;
break;
case EXEC_OMP_CRITICAL:
@@ -878,6 +881,7 @@ show_omp_node (int level, gfc_code *c)
}
return;
case EXEC_OMP_BARRIER:
+ case EXEC_OMP_TASKWAIT:
return;
default:
break;
@@ -907,6 +911,7 @@ show_omp_node (int level, gfc_code *c)
case OMP_SCHED_DYNAMIC: type = "DYNAMIC"; break;
case OMP_SCHED_GUIDED: type = "GUIDED"; break;
case OMP_SCHED_RUNTIME: type = "RUNTIME"; break;
+ case OMP_SCHED_AUTO: type = "AUTO"; break;
default:
gcc_unreachable ();
}
@@ -926,7 +931,7 @@ show_omp_node (int level, gfc_code *c)
case OMP_DEFAULT_NONE: type = "NONE"; break;
case OMP_DEFAULT_PRIVATE: type = "PRIVATE"; break;
case OMP_DEFAULT_SHARED: type = "SHARED"; break;
- case OMP_SCHED_RUNTIME: type = "RUNTIME"; break;
+ case OMP_DEFAULT_FIRSTPRIVATE: type = "FIRSTPRIVATE"; break;
default:
gcc_unreachable ();
}
@@ -934,6 +939,10 @@ show_omp_node (int level, gfc_code *c)
}
if (omp_clauses->ordered)
fputs (" ORDERED", dumpfile);
+ if (omp_clauses->untied)
+ fputs (" UNTIED", dumpfile);
+ if (omp_clauses->collapse)
+ fprintf (dumpfile, " COLLAPSE(%d)", omp_clauses->collapse);
for (list_type = 0; list_type < OMP_LIST_NUM; list_type++)
if (omp_clauses->lists[list_type] != NULL
&& list_type != OMP_LIST_COPYPRIVATE)
@@ -1806,6 +1815,8 @@ show_code_node (int level, gfc_code *c)
case EXEC_OMP_PARALLEL_WORKSHARE:
case EXEC_OMP_SECTIONS:
case EXEC_OMP_SINGLE:
+ case EXEC_OMP_TASK:
+ case EXEC_OMP_TASKWAIT:
case EXEC_OMP_WORKSHARE:
show_omp_node (level, c);
break;
diff --git a/gcc/fortran/f95-lang.c b/gcc/fortran/f95-lang.c
index 63c380b61ea..42ab57a9606 100644
--- a/gcc/fortran/f95-lang.c
+++ b/gcc/fortran/f95-lang.c
@@ -115,8 +115,12 @@ static alias_set_type gfc_get_alias_set (tree);
#undef LANG_HOOKS_OMP_PRIVATIZE_BY_REFERENCE
#undef LANG_HOOKS_OMP_PREDETERMINED_SHARING
#undef LANG_HOOKS_OMP_CLAUSE_DEFAULT_CTOR
+#undef LANG_HOOKS_OMP_CLAUSE_COPY_CTOR
+#undef LANG_HOOKS_OMP_CLAUSE_ASSIGN_OP
+#undef LANG_HOOKS_OMP_CLAUSE_DTOR
#undef LANG_HOOKS_OMP_DISREGARD_VALUE_EXPR
#undef LANG_HOOKS_OMP_PRIVATE_DEBUG_CLAUSE
+#undef LANG_HOOKS_OMP_PRIVATE_OUTER_REF
#undef LANG_HOOKS_OMP_FIRSTPRIVATIZE_TYPE_SIZES
#undef LANG_HOOKS_BUILTIN_FUNCTION
#undef LANG_HOOKS_GET_ARRAY_DESCR_INFO
@@ -137,8 +141,12 @@ static alias_set_type gfc_get_alias_set (tree);
#define LANG_HOOKS_OMP_PRIVATIZE_BY_REFERENCE gfc_omp_privatize_by_reference
#define LANG_HOOKS_OMP_PREDETERMINED_SHARING gfc_omp_predetermined_sharing
#define LANG_HOOKS_OMP_CLAUSE_DEFAULT_CTOR gfc_omp_clause_default_ctor
+#define LANG_HOOKS_OMP_CLAUSE_COPY_CTOR gfc_omp_clause_copy_ctor
+#define LANG_HOOKS_OMP_CLAUSE_ASSIGN_OP gfc_omp_clause_assign_op
+#define LANG_HOOKS_OMP_CLAUSE_DTOR gfc_omp_clause_dtor
#define LANG_HOOKS_OMP_DISREGARD_VALUE_EXPR gfc_omp_disregard_value_expr
#define LANG_HOOKS_OMP_PRIVATE_DEBUG_CLAUSE gfc_omp_private_debug_clause
+#define LANG_HOOKS_OMP_PRIVATE_OUTER_REF gfc_omp_private_outer_ref
#define LANG_HOOKS_OMP_FIRSTPRIVATIZE_TYPE_SIZES \
gfc_omp_firstprivatize_type_sizes
#define LANG_HOOKS_BUILTIN_FUNCTION gfc_builtin_function
diff --git a/gcc/fortran/gfortran.h b/gcc/fortran/gfortran.h
index d4f9771e610..8665a48c566 100644
--- a/gcc/fortran/gfortran.h
+++ b/gcc/fortran/gfortran.h
@@ -228,7 +228,8 @@ typedef enum
ST_OMP_END_WORKSHARE, ST_OMP_DO, ST_OMP_FLUSH, ST_OMP_MASTER, ST_OMP_ORDERED,
ST_OMP_PARALLEL, ST_OMP_PARALLEL_DO, ST_OMP_PARALLEL_SECTIONS,
ST_OMP_PARALLEL_WORKSHARE, ST_OMP_SECTIONS, ST_OMP_SECTION, ST_OMP_SINGLE,
- ST_OMP_THREADPRIVATE, ST_OMP_WORKSHARE, ST_PROCEDURE,
+ ST_OMP_THREADPRIVATE, ST_OMP_WORKSHARE, ST_OMP_TASK, ST_OMP_END_TASK,
+ ST_OMP_TASKWAIT, ST_PROCEDURE,
ST_GET_FCN_CHARACTERISTICS, ST_NONE
}
gfc_statement;
@@ -927,7 +928,8 @@ typedef struct gfc_omp_clauses
OMP_SCHED_STATIC,
OMP_SCHED_DYNAMIC,
OMP_SCHED_GUIDED,
- OMP_SCHED_RUNTIME
+ OMP_SCHED_RUNTIME,
+ OMP_SCHED_AUTO
} sched_kind;
struct gfc_expr *chunk_size;
enum
@@ -935,9 +937,11 @@ typedef struct gfc_omp_clauses
OMP_DEFAULT_UNKNOWN,
OMP_DEFAULT_NONE,
OMP_DEFAULT_PRIVATE,
- OMP_DEFAULT_SHARED
+ OMP_DEFAULT_SHARED,
+ OMP_DEFAULT_FIRSTPRIVATE
} default_sharing;
- bool nowait, ordered;
+ int collapse;
+ bool nowait, ordered, untied;
}
gfc_omp_clauses;
@@ -1760,7 +1764,7 @@ typedef enum
EXEC_OMP_PARALLEL_SECTIONS, EXEC_OMP_PARALLEL_WORKSHARE,
EXEC_OMP_SECTIONS, EXEC_OMP_SINGLE, EXEC_OMP_WORKSHARE,
EXEC_OMP_ATOMIC, EXEC_OMP_BARRIER, EXEC_OMP_END_NOWAIT,
- EXEC_OMP_END_SINGLE
+ EXEC_OMP_END_SINGLE, EXEC_OMP_TASK, EXEC_OMP_TASKWAIT
}
gfc_exec_op;
@@ -2040,6 +2044,7 @@ bool gfc_post_options (const char **);
/* iresolve.c */
const char * gfc_get_string (const char *, ...) ATTRIBUTE_PRINTF_1;
+bool gfc_find_sym_in_expr (gfc_symbol *, gfc_expr *);
/* error.c */
diff --git a/gcc/fortran/match.h b/gcc/fortran/match.h
index 3f8d31074e8..5ee91fb62de 100644
--- a/gcc/fortran/match.h
+++ b/gcc/fortran/match.h
@@ -119,6 +119,8 @@ match gfc_match_omp_parallel_sections (void);
match gfc_match_omp_parallel_workshare (void);
match gfc_match_omp_sections (void);
match gfc_match_omp_single (void);
+match gfc_match_omp_task (void);
+match gfc_match_omp_taskwait (void);
match gfc_match_omp_threadprivate (void);
match gfc_match_omp_workshare (void);
match gfc_match_omp_end_nowait (void);
diff --git a/gcc/fortran/openmp.c b/gcc/fortran/openmp.c
index 9c0bae497bf..28f1cc24dfd 100644
--- a/gcc/fortran/openmp.c
+++ b/gcc/fortran/openmp.c
@@ -182,6 +182,8 @@ cleanup:
#define OMP_CLAUSE_SCHEDULE (1 << 9)
#define OMP_CLAUSE_DEFAULT (1 << 10)
#define OMP_CLAUSE_ORDERED (1 << 11)
+#define OMP_CLAUSE_COLLAPSE (1 << 12)
+#define OMP_CLAUSE_UNTIED (1 << 13)
/* Match OpenMP directive clauses. MASK is a bitmask of
clauses that are allowed for a particular directive. */
@@ -335,6 +337,8 @@ gfc_match_omp_clauses (gfc_omp_clauses **cp, int mask)
c->default_sharing = OMP_DEFAULT_PRIVATE;
else if (gfc_match ("default ( none )") == MATCH_YES)
c->default_sharing = OMP_DEFAULT_NONE;
+ else if (gfc_match ("default ( firstprivate )") == MATCH_YES)
+ c->default_sharing = OMP_DEFAULT_FIRSTPRIVATE;
if (c->default_sharing != OMP_DEFAULT_UNKNOWN)
continue;
}
@@ -351,10 +355,13 @@ gfc_match_omp_clauses (gfc_omp_clauses **cp, int mask)
c->sched_kind = OMP_SCHED_GUIDED;
else if (gfc_match ("runtime") == MATCH_YES)
c->sched_kind = OMP_SCHED_RUNTIME;
+ else if (gfc_match ("auto") == MATCH_YES)
+ c->sched_kind = OMP_SCHED_AUTO;
if (c->sched_kind != OMP_SCHED_NONE)
{
match m = MATCH_NO;
- if (c->sched_kind != OMP_SCHED_RUNTIME)
+ if (c->sched_kind != OMP_SCHED_RUNTIME
+ && c->sched_kind != OMP_SCHED_AUTO)
m = gfc_match (" , %e )", &c->chunk_size);
if (m != MATCH_YES)
m = gfc_match_char (')');
@@ -372,6 +379,36 @@ gfc_match_omp_clauses (gfc_omp_clauses **cp, int mask)
c->ordered = needs_space = true;
continue;
}
+ if ((mask & OMP_CLAUSE_UNTIED) && !c->untied
+ && gfc_match ("untied") == MATCH_YES)
+ {
+ c->untied = needs_space = true;
+ continue;
+ }
+ if ((mask & OMP_CLAUSE_COLLAPSE) && !c->collapse)
+ {
+ gfc_expr *cexpr = NULL;
+ match m = gfc_match ("collapse ( %e )", &cexpr);
+
+ if (m == MATCH_YES)
+ {
+ int collapse;
+ const char *p = gfc_extract_int (cexpr, &collapse);
+ if (p)
+ {
+ gfc_error (p);
+ collapse = 1;
+ }
+ else if (collapse <= 0)
+ {
+ gfc_error ("COLLAPSE clause argument not constant positive integer at %C");
+ collapse = 1;
+ }
+ c->collapse = collapse;
+ gfc_free_expr (cexpr);
+ continue;
+ }
+ }
break;
}
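For reference, a small sketch using two of the clause spellings the matcher above now accepts, DEFAULT(FIRSTPRIVATE) and SCHEDULE(AUTO) (illustrative only; assumes gfortran with -fopenmp):

  program clause_sketch
    implicit none
    integer :: i, total
    total = 0
  !$omp parallel default(firstprivate) shared(total)
  !$omp do schedule(auto) reduction(+:total)
    do i = 1, 100
       total = total + i
    end do
  !$omp end do
  !$omp end parallel
    print *, total               ! 5050
  end program clause_sketch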
@@ -393,10 +430,13 @@ gfc_match_omp_clauses (gfc_omp_clauses **cp, int mask)
#define OMP_DO_CLAUSES \
(OMP_CLAUSE_PRIVATE | OMP_CLAUSE_FIRSTPRIVATE \
| OMP_CLAUSE_LASTPRIVATE | OMP_CLAUSE_REDUCTION \
- | OMP_CLAUSE_SCHEDULE | OMP_CLAUSE_ORDERED)
+ | OMP_CLAUSE_SCHEDULE | OMP_CLAUSE_ORDERED | OMP_CLAUSE_COLLAPSE)
#define OMP_SECTIONS_CLAUSES \
(OMP_CLAUSE_PRIVATE | OMP_CLAUSE_FIRSTPRIVATE \
| OMP_CLAUSE_LASTPRIVATE | OMP_CLAUSE_REDUCTION)
+#define OMP_TASK_CLAUSES \
+ (OMP_CLAUSE_PRIVATE | OMP_CLAUSE_FIRSTPRIVATE | OMP_CLAUSE_SHARED \
+ | OMP_CLAUSE_IF | OMP_CLAUSE_DEFAULT | OMP_CLAUSE_UNTIED)
match
gfc_match_omp_parallel (void)
@@ -411,6 +451,29 @@ gfc_match_omp_parallel (void)
match
+gfc_match_omp_task (void)
+{
+ gfc_omp_clauses *c;
+ if (gfc_match_omp_clauses (&c, OMP_TASK_CLAUSES) != MATCH_YES)
+ return MATCH_ERROR;
+ new_st.op = EXEC_OMP_TASK;
+ new_st.ext.omp_clauses = c;
+ return MATCH_YES;
+}
+
+
+match
+gfc_match_omp_taskwait (void)
+{
+ if (gfc_match_omp_eos () != MATCH_YES)
+ return MATCH_ERROR;
+ new_st.op = EXEC_OMP_TASKWAIT;
+ new_st.ext.omp_clauses = NULL;
+ return MATCH_YES;
+}
+
+
+match
gfc_match_omp_critical (void)
{
char n[GFC_MAX_SYMBOL_LEN+1];
@@ -809,9 +872,6 @@ resolve_omp_clauses (gfc_code *code)
if (!n->sym->attr.threadprivate)
gfc_error ("Non-THREADPRIVATE object '%s' in COPYIN clause"
" at %L", n->sym->name, &code->loc);
- if (n->sym->attr.allocatable)
- gfc_error ("COPYIN clause object '%s' is ALLOCATABLE at %L",
- n->sym->name, &code->loc);
if (n->sym->ts.type == BT_DERIVED && n->sym->ts.derived->attr.alloc_comp)
gfc_error ("COPYIN clause object '%s' at %L has ALLOCATABLE components",
n->sym->name, &code->loc);
@@ -823,9 +883,6 @@ resolve_omp_clauses (gfc_code *code)
if (n->sym->as && n->sym->as->type == AS_ASSUMED_SIZE)
gfc_error ("Assumed size array '%s' in COPYPRIVATE clause "
"at %L", n->sym->name, &code->loc);
- if (n->sym->attr.allocatable)
- gfc_error ("COPYPRIVATE clause object '%s' is ALLOCATABLE "
- "at %L", n->sym->name, &code->loc);
if (n->sym->ts.type == BT_DERIVED && n->sym->ts.derived->attr.alloc_comp)
gfc_error ("COPYPRIVATE clause object '%s' at %L has ALLOCATABLE components",
n->sym->name, &code->loc);
@@ -856,9 +913,6 @@ resolve_omp_clauses (gfc_code *code)
if (n->sym->attr.pointer)
gfc_error ("POINTER object '%s' in %s clause at %L",
n->sym->name, name, &code->loc);
- if (n->sym->attr.allocatable)
- gfc_error ("%s clause object '%s' is ALLOCATABLE at %L",
- name, n->sym->name, &code->loc);
/* Variables in REDUCTION-clauses must be of intrinsic type (flagged below). */
if ((list < OMP_LIST_REDUCTION_FIRST || list > OMP_LIST_REDUCTION_LAST) &&
n->sym->ts.type == BT_DERIVED && n->sym->ts.derived->attr.alloc_comp)
@@ -1246,15 +1300,34 @@ struct omp_context
struct pointer_set_t *private_iterators;
struct omp_context *previous;
} *omp_current_ctx;
-gfc_code *omp_current_do_code;
-
+static gfc_code *omp_current_do_code;
+static int omp_current_do_collapse;
void
gfc_resolve_omp_do_blocks (gfc_code *code, gfc_namespace *ns)
{
if (code->block->next && code->block->next->op == EXEC_DO)
- omp_current_do_code = code->block->next;
+ {
+ int i;
+ gfc_code *c;
+
+ omp_current_do_code = code->block->next;
+ omp_current_do_collapse = code->ext.omp_clauses->collapse;
+ for (i = 1, c = omp_current_do_code; i < omp_current_do_collapse; i++)
+ {
+ c = c->block;
+ if (c->op != EXEC_DO || c->next == NULL)
+ break;
+ c = c->next;
+ if (c->op != EXEC_DO)
+ break;
+ }
+ if (i < omp_current_do_collapse || omp_current_do_collapse <= 0)
+ omp_current_do_collapse = 1;
+ }
gfc_resolve_blocks (code->block, ns);
+ omp_current_do_collapse = 0;
+ omp_current_do_code = NULL;
}
@@ -1294,6 +1367,8 @@ void
gfc_resolve_do_iterator (gfc_code *code, gfc_symbol *sym)
{
struct omp_context *ctx;
+ int i = omp_current_do_collapse;
+ gfc_code *c = omp_current_do_code;
if (sym->attr.threadprivate)
return;
@@ -1301,8 +1376,14 @@ gfc_resolve_do_iterator (gfc_code *code, gfc_symbol *sym)
/* !$omp do and !$omp parallel do iteration variable is predetermined
private just in the !$omp do resp. !$omp parallel do construct,
with no implications for the outer parallel constructs. */
- if (code == omp_current_do_code)
- return;
+
+ while (i-- >= 1)
+ {
+ if (code == c)
+ return;
+
+ c = c->block->next;
+ }
for (ctx = omp_current_ctx; ctx; ctx = ctx->previous)
{
@@ -1326,8 +1407,8 @@ gfc_resolve_do_iterator (gfc_code *code, gfc_symbol *sym)
static void
resolve_omp_do (gfc_code *code)
{
- gfc_code *do_code;
- int list;
+ gfc_code *do_code, *c;
+ int list, i, collapse;
gfc_namelist *n;
gfc_symbol *dovar;
@@ -1335,11 +1416,17 @@ resolve_omp_do (gfc_code *code)
resolve_omp_clauses (code);
do_code = code->block->next;
- if (do_code->op == EXEC_DO_WHILE)
- gfc_error ("!$OMP DO cannot be a DO WHILE or DO without loop control "
- "at %L", &do_code->loc);
- else
+ collapse = code->ext.omp_clauses->collapse;
+ if (collapse <= 0)
+ collapse = 1;
+ for (i = 1; i <= collapse; i++)
{
+ if (do_code->op == EXEC_DO_WHILE)
+ {
+ gfc_error ("!$OMP DO cannot be a DO WHILE or DO without loop control "
+ "at %L", &do_code->loc);
+ break;
+ }
gcc_assert (do_code->op == EXEC_DO);
if (do_code->ext.iterator->var->ts.type != BT_INTEGER)
gfc_error ("!$OMP DO iteration variable must be of type integer at %L",
@@ -1359,6 +1446,53 @@ resolve_omp_do (gfc_code *code)
&do_code->loc);
break;
}
+ if (i > 1)
+ {
+ gfc_code *do_code2 = code->block->next;
+ int j;
+
+ for (j = 1; j < i; j++)
+ {
+ gfc_symbol *ivar = do_code2->ext.iterator->var->symtree->n.sym;
+ if (dovar == ivar
+ || gfc_find_sym_in_expr (ivar, do_code->ext.iterator->start)
+ || gfc_find_sym_in_expr (ivar, do_code->ext.iterator->end)
+ || gfc_find_sym_in_expr (ivar, do_code->ext.iterator->step))
+ {
+ gfc_error ("!$OMP DO collapsed loops don't form rectangular iteration space at %L",
+ &do_code->loc);
+ break;
+ }
+ if (j < i)
+ break;
+ do_code2 = do_code2->block->next;
+ }
+ }
+ if (i == collapse)
+ break;
+ for (c = do_code->next; c; c = c->next)
+ if (c->op != EXEC_NOP && c->op != EXEC_CONTINUE)
+ {
+ gfc_error ("collapsed !$OMP DO loops not perfectly nested at %L",
+ &c->loc);
+ break;
+ }
+ if (c)
+ break;
+ do_code = do_code->block;
+ if (do_code->op != EXEC_DO && do_code->op != EXEC_DO_WHILE)
+ {
+ gfc_error ("not enough DO loops for collapsed !$OMP DO at %L",
+ &code->loc);
+ break;
+ }
+ do_code = do_code->next;
+ if (do_code->op != EXEC_DO && do_code->op != EXEC_DO_WHILE)
+ {
+ gfc_error ("not enough DO loops for collapsed !$OMP DO at %L",
+ &code->loc);
+ break;
+ }
}
}
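The checks added above reject collapsed nests that are not perfectly nested or whose bounds depend on an outer loop's index. For contrast, a conforming nest (illustrative only; assumes gfortran with -fopenmp):

  program collapse_sketch
    implicit none
    integer, parameter :: n = 6
    integer :: i, j, total
    total = 0
    ! Both bounds are independent of the other loop's index and nothing
    ! sits between the two DO headers, so COLLAPSE(2) is accepted.
    ! Writing "do j = 1, i" (non-rectangular) or placing a statement
    ! between the loops (not perfectly nested) would hit the new errors.
  !$omp parallel do collapse(2) reduction(+:total)
    do i = 1, n
       do j = 1, n
          total = total + i * j
       end do
    end do
  !$omp end parallel do
    print *, total
  end program collapse_sketch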
diff --git a/gcc/fortran/parse.c b/gcc/fortran/parse.c
index 33f13c92200..c35db2d9cf6 100644
--- a/gcc/fortran/parse.c
+++ b/gcc/fortran/parse.c
@@ -515,6 +515,7 @@ decode_omp_directive (void)
match ("end parallel", gfc_match_omp_eos, ST_OMP_END_PARALLEL);
match ("end sections", gfc_match_omp_end_nowait, ST_OMP_END_SECTIONS);
match ("end single", gfc_match_omp_end_single, ST_OMP_END_SINGLE);
+ match ("end task", gfc_match_omp_eos, ST_OMP_END_TASK);
match ("end workshare", gfc_match_omp_end_nowait,
ST_OMP_END_WORKSHARE);
break;
@@ -541,6 +542,8 @@ decode_omp_directive (void)
match ("single", gfc_match_omp_single, ST_OMP_SINGLE);
break;
case 't':
+ match ("task", gfc_match_omp_task, ST_OMP_TASK);
+ match ("taskwait", gfc_match_omp_taskwait, ST_OMP_TASKWAIT);
match ("threadprivate", gfc_match_omp_threadprivate,
ST_OMP_THREADPRIVATE);
case 'w':
@@ -641,7 +644,7 @@ next_free (void)
for (i = 0; i < 5; i++, c = gfc_next_ascii_char ())
gcc_assert (c == "!$omp"[i]);
- gcc_assert (c == ' ');
+ gcc_assert (c == ' ' || c == '\t');
gfc_gobble_whitespace ();
return decode_omp_directive ();
}
@@ -870,7 +873,7 @@ next_statement (void)
case ST_POINTER_ASSIGNMENT: case ST_EXIT: case ST_CYCLE: \
case ST_ASSIGNMENT: case ST_ARITHMETIC_IF: case ST_WHERE: case ST_FORALL: \
case ST_LABEL_ASSIGNMENT: case ST_FLUSH: case ST_OMP_FLUSH: \
- case ST_OMP_BARRIER
+ case ST_OMP_BARRIER: case ST_OMP_TASKWAIT
/* Statements that mark other executable statements. */
@@ -879,7 +882,8 @@ next_statement (void)
case ST_OMP_PARALLEL_SECTIONS: case ST_OMP_SECTIONS: case ST_OMP_ORDERED: \
case ST_OMP_CRITICAL: case ST_OMP_MASTER: case ST_OMP_SINGLE: \
case ST_OMP_DO: case ST_OMP_PARALLEL_DO: case ST_OMP_ATOMIC: \
- case ST_OMP_WORKSHARE: case ST_OMP_PARALLEL_WORKSHARE
+ case ST_OMP_WORKSHARE: case ST_OMP_PARALLEL_WORKSHARE: \
+ case ST_OMP_TASK
/* Declaration statements */
@@ -1351,6 +1355,9 @@ gfc_ascii_statement (gfc_statement st)
case ST_OMP_END_SINGLE:
p = "!$OMP END SINGLE";
break;
+ case ST_OMP_END_TASK:
+ p = "!$OMP END TASK";
+ break;
case ST_OMP_END_WORKSHARE:
p = "!$OMP END WORKSHARE";
break;
@@ -1384,6 +1391,12 @@ gfc_ascii_statement (gfc_statement st)
case ST_OMP_SINGLE:
p = "!$OMP SINGLE";
break;
+ case ST_OMP_TASK:
+ p = "!$OMP TASK";
+ break;
+ case ST_OMP_TASKWAIT:
+ p = "!$OMP TASKWAIT";
+ break;
case ST_OMP_THREADPRIVATE:
p = "!$OMP THREADPRIVATE";
break;
@@ -2857,6 +2870,9 @@ parse_omp_structured_block (gfc_statement omp_st, bool workshare_stmts_only)
case ST_OMP_SINGLE:
omp_end_st = ST_OMP_END_SINGLE;
break;
+ case ST_OMP_TASK:
+ omp_end_st = ST_OMP_END_TASK;
+ break;
case ST_OMP_WORKSHARE:
omp_end_st = ST_OMP_END_WORKSHARE;
break;
@@ -3067,6 +3083,7 @@ parse_executable (gfc_statement st)
case ST_OMP_CRITICAL:
case ST_OMP_MASTER:
case ST_OMP_SINGLE:
+ case ST_OMP_TASK:
parse_omp_structured_block (st, false);
break;
diff --git a/gcc/fortran/resolve.c b/gcc/fortran/resolve.c
index b5b76b6f7a0..2787e293021 100644
--- a/gcc/fortran/resolve.c
+++ b/gcc/fortran/resolve.c
@@ -4670,8 +4670,8 @@ sym_in_expr (gfc_expr *e, gfc_symbol *sym, int *f ATTRIBUTE_UNUSED)
return false;
}
-static bool
-find_sym_in_expr (gfc_symbol *sym, gfc_expr *e)
+bool
+gfc_find_sym_in_expr (gfc_symbol *sym, gfc_expr *e)
{
return gfc_traverse_expr (e, sym, sym_in_expr, 0);
}
@@ -4868,8 +4868,10 @@ check_symbols:
if (sym->ts.type == BT_DERIVED)
continue;
- if ((ar->start[i] != NULL && find_sym_in_expr (sym, ar->start[i]))
- || (ar->end[i] != NULL && find_sym_in_expr (sym, ar->end[i])))
+ if ((ar->start[i] != NULL
+ && gfc_find_sym_in_expr (sym, ar->start[i]))
+ || (ar->end[i] != NULL
+ && gfc_find_sym_in_expr (sym, ar->end[i])))
{
gfc_error ("'%s' must not appear an the array specification at "
"%L in the same ALLOCATE statement where it is "
@@ -5982,6 +5984,8 @@ gfc_resolve_blocks (gfc_code *b, gfc_namespace *ns)
case EXEC_OMP_PARALLEL_WORKSHARE:
case EXEC_OMP_SECTIONS:
case EXEC_OMP_SINGLE:
+ case EXEC_OMP_TASK:
+ case EXEC_OMP_TASKWAIT:
case EXEC_OMP_WORKSHARE:
break;
@@ -6100,8 +6104,8 @@ resolve_ordinary_assign (gfc_code *code, gfc_namespace *ns)
{
for (n = 0; n < ref->u.ar.dimen; n++)
if (ref->u.ar.dimen_type[n] == DIMEN_VECTOR
- && find_sym_in_expr (lhs->symtree->n.sym,
- ref->u.ar.start[n]))
+ && gfc_find_sym_in_expr (lhs->symtree->n.sym,
+ ref->u.ar.start[n]))
ref->u.ar.start[n]
= gfc_get_parentheses (ref->u.ar.start[n]);
}
@@ -6176,6 +6180,7 @@ resolve_code (gfc_code *code, gfc_namespace *ns)
case EXEC_OMP_PARALLEL:
case EXEC_OMP_PARALLEL_DO:
case EXEC_OMP_PARALLEL_SECTIONS:
+ case EXEC_OMP_TASK:
omp_workshare_save = omp_workshare_flag;
omp_workshare_flag = 0;
gfc_resolve_omp_parallel_blocks (code, ns);
@@ -6418,6 +6423,7 @@ resolve_code (gfc_code *code, gfc_namespace *ns)
case EXEC_OMP_ORDERED:
case EXEC_OMP_SECTIONS:
case EXEC_OMP_SINGLE:
+ case EXEC_OMP_TASKWAIT:
case EXEC_OMP_WORKSHARE:
gfc_resolve_omp_directive (code, ns);
break;
@@ -6426,6 +6432,7 @@ resolve_code (gfc_code *code, gfc_namespace *ns)
case EXEC_OMP_PARALLEL_DO:
case EXEC_OMP_PARALLEL_SECTIONS:
case EXEC_OMP_PARALLEL_WORKSHARE:
+ case EXEC_OMP_TASK:
omp_workshare_save = omp_workshare_flag;
omp_workshare_flag = 0;
gfc_resolve_omp_directive (code, ns);
diff --git a/gcc/fortran/scanner.c b/gcc/fortran/scanner.c
index 02d87b4f4ce..1b0eeca1e65 100644
--- a/gcc/fortran/scanner.c
+++ b/gcc/fortran/scanner.c
@@ -702,7 +702,8 @@ skip_free_comments (void)
if (((c = next_char ()) == 'm' || c == 'M')
&& ((c = next_char ()) == 'p' || c == 'P'))
{
- if ((c = next_char ()) == ' ' || continue_flag)
+ if ((c = next_char ()) == ' ' || c == '\t'
+ || continue_flag)
{
while (gfc_is_whitespace (c))
c = next_char ();
@@ -724,7 +725,7 @@ skip_free_comments (void)
next_char ();
c = next_char ();
}
- if (continue_flag || c == ' ')
+ if (continue_flag || c == ' ' || c == '\t')
{
gfc_current_locus = old_loc;
next_char ();
@@ -820,11 +821,11 @@ skip_fixed_comments (void)
c = next_char ();
if (c != '\n'
&& ((openmp_flag && continue_flag)
- || c == ' ' || c == '0'))
+ || c == ' ' || c == '\t' || c == '0'))
{
- c = next_char ();
- while (gfc_is_whitespace (c))
+ do
c = next_char ();
+ while (gfc_is_whitespace (c));
if (c != '\n' && c != '!')
{
/* Canonicalize to *$omp. */
@@ -843,6 +844,11 @@ skip_fixed_comments (void)
for (col = 3; col < 6; col++, c = next_char ())
if (c == ' ')
continue;
+ else if (c == '\t')
+ {
+ col = 6;
+ break;
+ }
else if (c < '0' || c > '9')
break;
else
@@ -850,7 +856,7 @@ skip_fixed_comments (void)
if (col == 6 && c != '\n'
&& ((continue_flag && !digit_seen)
- || c == ' ' || c == '0'))
+ || c == ' ' || c == '\t' || c == '0'))
{
gfc_current_locus = start;
start.nextc[0] = ' ';
diff --git a/gcc/fortran/st.c b/gcc/fortran/st.c
index 0f0e4813d28..abe7b94865c 100644
--- a/gcc/fortran/st.c
+++ b/gcc/fortran/st.c
@@ -171,6 +171,7 @@ gfc_free_statement (gfc_code *p)
case EXEC_OMP_PARALLEL_SECTIONS:
case EXEC_OMP_SECTIONS:
case EXEC_OMP_SINGLE:
+ case EXEC_OMP_TASK:
case EXEC_OMP_WORKSHARE:
case EXEC_OMP_PARALLEL_WORKSHARE:
gfc_free_omp_clauses (p->ext.omp_clauses);
@@ -189,6 +190,7 @@ gfc_free_statement (gfc_code *p)
case EXEC_OMP_MASTER:
case EXEC_OMP_ORDERED:
case EXEC_OMP_END_NOWAIT:
+ case EXEC_OMP_TASKWAIT:
break;
default:
diff --git a/gcc/fortran/trans-openmp.c b/gcc/fortran/trans-openmp.c
index c6c4baeca63..6f99800a014 100644
--- a/gcc/fortran/trans-openmp.c
+++ b/gcc/fortran/trans-openmp.c
@@ -84,6 +84,17 @@ gfc_omp_predetermined_sharing (tree decl)
if (GFC_DECL_CRAY_POINTEE (decl))
return OMP_CLAUSE_DEFAULT_PRIVATE;
+ /* Assumed-size arrays are predetermined to inherit sharing
+ attributes of the associated actual argument, which is shared
+ for all we care. */
+ if (TREE_CODE (decl) == PARM_DECL
+ && GFC_ARRAY_TYPE_P (TREE_TYPE (decl))
+ && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_UNKNOWN
+ && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl),
+ GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl)) - 1)
+ == NULL)
+ return OMP_CLAUSE_DEFAULT_SHARED;
+
/* COMMON and EQUIVALENCE decls are shared. They
are only referenced through DECL_VALUE_EXPR of the variables
contained in them. If those are privatized, they will not be
@@ -98,27 +109,179 @@ gfc_omp_predetermined_sharing (tree decl)
}
+/* Return true if DECL in private clause needs
+ OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause. */
+bool
+gfc_omp_private_outer_ref (tree decl)
+{
+ tree type = TREE_TYPE (decl);
+
+ if (GFC_DESCRIPTOR_TYPE_P (type)
+ && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
+ return true;
+
+ return false;
+}
+
/* Return code to initialize DECL with its default constructor, or
NULL if there's nothing to do. */
tree
-gfc_omp_clause_default_ctor (tree clause ATTRIBUTE_UNUSED, tree decl)
+gfc_omp_clause_default_ctor (tree clause, tree decl, tree outer)
{
- tree type = TREE_TYPE (decl);
- stmtblock_t block;
+ tree type = TREE_TYPE (decl), rank, size, esize, ptr, cond, then_b, else_b;
+ stmtblock_t block, cond_block;
- if (! GFC_DESCRIPTOR_TYPE_P (type))
+ if (! GFC_DESCRIPTOR_TYPE_P (type)
+ || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
return NULL;
+ gcc_assert (outer != NULL);
+ gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_PRIVATE
+ || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LASTPRIVATE);
+
/* Allocatable arrays in PRIVATE clauses need to be set to
- "not currently allocated" allocation status. */
- gfc_init_block (&block);
+ "not currently allocated" allocation status if outer
+ array is "not currently allocated", otherwise should be allocated. */
+ gfc_start_block (&block);
+
+ gfc_init_block (&cond_block);
+
+ gfc_add_modify_expr (&cond_block, decl, outer);
+ rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
+ size = gfc_conv_descriptor_ubound (decl, rank);
+ size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
+ gfc_conv_descriptor_lbound (decl, rank));
+ size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
+ gfc_index_one_node);
+ if (GFC_TYPE_ARRAY_RANK (type) > 1)
+ size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
+ gfc_conv_descriptor_stride (decl, rank));
+ esize = fold_convert (gfc_array_index_type,
+ TYPE_SIZE_UNIT (gfc_get_element_type (type)));
+ size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
+ size = gfc_evaluate_now (fold_convert (size_type_node, size), &cond_block);
+ ptr = gfc_allocate_array_with_status (&cond_block,
+ build_int_cst (pvoid_type_node, 0),
+ size, NULL);
+ gfc_conv_descriptor_data_set_tuples (&cond_block, decl, ptr);
+ then_b = gfc_finish_block (&cond_block);
+
+ gfc_init_block (&cond_block);
+ gfc_conv_descriptor_data_set_tuples (&cond_block, decl, null_pointer_node);
+ else_b = gfc_finish_block (&cond_block);
+
+ cond = fold_build2 (NE_EXPR, boolean_type_node,
+ fold_convert (pvoid_type_node,
+ gfc_conv_descriptor_data_get (outer)),
+ null_pointer_node);
+ gfc_add_expr_to_block (&block, build3 (COND_EXPR, void_type_node,
+ cond, then_b, else_b));
- gfc_conv_descriptor_data_set_tuples (&block, decl, null_pointer_node);
+ return gfc_finish_block (&block);
+}
+
+/* Build and return code for a copy constructor from SRC to DEST. */
+
+tree
+gfc_omp_clause_copy_ctor (tree clause, tree dest, tree src)
+{
+ tree type = TREE_TYPE (dest), ptr, size, esize, rank, call;
+ stmtblock_t block;
+
+ if (! GFC_DESCRIPTOR_TYPE_P (type)
+ || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
+ return build_gimple_modify_stmt (dest, src);
+
+ gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_FIRSTPRIVATE);
+
+ /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
+ and copied from SRC. */
+ gfc_start_block (&block);
+
+ gfc_add_modify_expr (&block, dest, src);
+ rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
+ size = gfc_conv_descriptor_ubound (dest, rank);
+ size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
+ gfc_conv_descriptor_lbound (dest, rank));
+ size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
+ gfc_index_one_node);
+ if (GFC_TYPE_ARRAY_RANK (type) > 1)
+ size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
+ gfc_conv_descriptor_stride (dest, rank));
+ esize = fold_convert (gfc_array_index_type,
+ TYPE_SIZE_UNIT (gfc_get_element_type (type)));
+ size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
+ size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
+ ptr = gfc_allocate_array_with_status (&block,
+ build_int_cst (pvoid_type_node, 0),
+ size, NULL);
+ gfc_conv_descriptor_data_set_tuples (&block, dest, ptr);
+ call = build_call_expr (built_in_decls[BUILT_IN_MEMCPY], 3, ptr,
+ fold_convert (pvoid_type_node,
+ gfc_conv_descriptor_data_get (src)),
+ size);
+ gfc_add_expr_to_block (&block, fold_convert (void_type_node, call));
return gfc_finish_block (&block);
}
+/* Similarly, except use an assignment operator instead. */
+
+tree
+gfc_omp_clause_assign_op (tree clause ATTRIBUTE_UNUSED, tree dest, tree src)
+{
+ tree type = TREE_TYPE (dest), rank, size, esize, call;
+ stmtblock_t block;
+
+ if (! GFC_DESCRIPTOR_TYPE_P (type)
+ || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
+ return build_gimple_modify_stmt (dest, src);
+
+ /* Handle copying allocatable arrays. */
+ gfc_start_block (&block);
+
+ rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
+ size = gfc_conv_descriptor_ubound (dest, rank);
+ size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
+ gfc_conv_descriptor_lbound (dest, rank));
+ size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
+ gfc_index_one_node);
+ if (GFC_TYPE_ARRAY_RANK (type) > 1)
+ size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
+ gfc_conv_descriptor_stride (dest, rank));
+ esize = fold_convert (gfc_array_index_type,
+ TYPE_SIZE_UNIT (gfc_get_element_type (type)));
+ size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
+ size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
+ call = build_call_expr (built_in_decls[BUILT_IN_MEMCPY], 3,
+ fold_convert (pvoid_type_node,
+ gfc_conv_descriptor_data_get (dest)),
+ fold_convert (pvoid_type_node,
+ gfc_conv_descriptor_data_get (src)),
+ size);
+ gfc_add_expr_to_block (&block, fold_convert (void_type_node, call));
+
+ return gfc_finish_block (&block);
+}
+
+/* Build and return code destructing DECL. Return NULL if nothing
+ to be done. */
+
+tree
+gfc_omp_clause_dtor (tree clause ATTRIBUTE_UNUSED, tree decl)
+{
+ tree type = TREE_TYPE (decl);
+
+ if (! GFC_DESCRIPTOR_TYPE_P (type)
+ || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
+ return NULL;
+
+ /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
+ to be deallocated if they were allocated. */
+ return gfc_trans_dealloc_allocated (decl);
+}
+
/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
disregarded in OpenMP construct, because it is going to be
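The gfc_omp_clause_* hooks added above back the relaxed checks in resolve_omp_clauses: allocatable arrays may now appear in FIRSTPRIVATE, LASTPRIVATE, REDUCTION, COPYIN and COPYPRIVATE clauses. A sketch of the kind of code this enables (illustrative only; assumes gfortran with -fopenmp):

  program alloc_clause_sketch
    implicit none
    integer, allocatable :: w(:), acc(:)
    integer :: i
    allocate (w(4), acc(4))
    w = (/ 1, 2, 3, 4 /)
    acc = 0
    ! W is FIRSTPRIVATE: each thread gets its own copy, allocated with
    ! the bounds of the original and initialized from it.  ACC is an
    ! allocatable array in a REDUCTION clause, which is also newly allowed.
  !$omp parallel do firstprivate(w) reduction(+:acc)
    do i = 1, 1000
       acc = acc + w
    end do
  !$omp end parallel do
    print *, acc                 ! 1000 2000 3000 4000
    deallocate (w, acc)
  end program alloc_clause_sketch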
@@ -429,7 +592,39 @@ gfc_trans_omp_array_reduction (tree c, gfc_symbol *sym, locus where)
/* Create the init statement list. */
pushlevel (0);
- stmt = gfc_trans_assignment (e1, e2, false);
+ if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl))
+ && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_ALLOCATABLE)
+ {
+ /* If decl is an allocatable array, it needs to be allocated
+ with the same bounds as the outer var. */
+ tree type = TREE_TYPE (decl), rank, size, esize, ptr;
+ stmtblock_t block;
+
+ gfc_start_block (&block);
+
+ gfc_add_modify_expr (&block, decl, outer_sym.backend_decl);
+ rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
+ size = gfc_conv_descriptor_ubound (decl, rank);
+ size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
+ gfc_conv_descriptor_lbound (decl, rank));
+ size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
+ gfc_index_one_node);
+ if (GFC_TYPE_ARRAY_RANK (type) > 1)
+ size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
+ gfc_conv_descriptor_stride (decl, rank));
+ esize = fold_convert (gfc_array_index_type,
+ TYPE_SIZE_UNIT (gfc_get_element_type (type)));
+ size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
+ size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
+ ptr = gfc_allocate_array_with_status (&block,
+ build_int_cst (pvoid_type_node, 0),
+ size, NULL);
+ gfc_conv_descriptor_data_set_tuples (&block, decl, ptr);
+ gfc_add_expr_to_block (&block, gfc_trans_assignment (e1, e2, false));
+ stmt = gfc_finish_block (&block);
+ }
+ else
+ stmt = gfc_trans_assignment (e1, e2, false);
if (TREE_CODE (stmt) != BIND_EXPR)
stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
else
@@ -438,7 +633,20 @@ gfc_trans_omp_array_reduction (tree c, gfc_symbol *sym, locus where)
/* Create the merge statement list. */
pushlevel (0);
- stmt = gfc_trans_assignment (e3, e4, false);
+ if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl))
+ && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_ALLOCATABLE)
+ {
+ /* If decl is an allocatable array, it needs to be deallocated
+ afterwards. */
+ stmtblock_t block;
+
+ gfc_start_block (&block);
+ gfc_add_expr_to_block (&block, gfc_trans_assignment (e3, e4, false));
+ gfc_add_expr_to_block (&block, gfc_trans_dealloc_allocated (decl));
+ stmt = gfc_finish_block (&block);
+ }
+ else
+ stmt = gfc_trans_assignment (e3, e4, false);
if (TREE_CODE (stmt) != BIND_EXPR)
stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
else
@@ -639,6 +847,9 @@ gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
case OMP_SCHED_RUNTIME:
OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_RUNTIME;
break;
+ case OMP_SCHED_AUTO:
+ OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_AUTO;
+ break;
default:
gcc_unreachable ();
}
@@ -659,6 +870,9 @@ gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
case OMP_DEFAULT_PRIVATE:
OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_PRIVATE;
break;
+ case OMP_DEFAULT_FIRSTPRIVATE:
+ OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
+ break;
default:
gcc_unreachable ();
}
@@ -677,6 +891,19 @@ gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
omp_clauses = gfc_trans_add_clause (c, omp_clauses);
}
+ if (clauses->untied)
+ {
+ c = build_omp_clause (OMP_CLAUSE_UNTIED);
+ omp_clauses = gfc_trans_add_clause (c, omp_clauses);
+ }
+
+ if (clauses->collapse)
+ {
+ c = build_omp_clause (OMP_CLAUSE_COLLAPSE);
+ OMP_CLAUSE_COLLAPSE_EXPR (c) = build_int_cst (NULL, clauses->collapse);
+ omp_clauses = gfc_trans_add_clause (c, omp_clauses);
+ }
+
return omp_clauses;
}
@@ -893,20 +1120,28 @@ gfc_trans_omp_critical (gfc_code *code)
static tree
gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock,
- gfc_omp_clauses *do_clauses)
+ gfc_omp_clauses *do_clauses, tree par_clauses)
{
gfc_se se;
tree dovar, stmt, from, to, step, type, init, cond, incr;
tree count = NULL_TREE, cycle_label, tmp, omp_clauses;
stmtblock_t block;
stmtblock_t body;
- int simple = 0;
- bool dovar_found = false;
gfc_omp_clauses *clauses = code->ext.omp_clauses;
+ gfc_code *outermost;
+ int i, collapse = clauses->collapse;
+ tree dovar_init = NULL_TREE;
- code = code->block->next;
+ if (collapse <= 0)
+ collapse = 1;
+
+ outermost = code = code->block->next;
gcc_assert (code->op == EXEC_DO);
+ init = make_tree_vec (collapse);
+ cond = make_tree_vec (collapse);
+ incr = make_tree_vec (collapse);
+
if (pblock == NULL)
{
gfc_start_block (&block);
@@ -914,107 +1149,168 @@ gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock,
}
omp_clauses = gfc_trans_omp_clauses (pblock, do_clauses, code->loc);
- if (clauses)
- {
- gfc_namelist *n;
- for (n = clauses->lists[OMP_LIST_LASTPRIVATE]; n != NULL; n = n->next)
- if (code->ext.iterator->var->symtree->n.sym == n->sym)
- break;
- if (n == NULL)
- for (n = clauses->lists[OMP_LIST_PRIVATE]; n != NULL; n = n->next)
- if (code->ext.iterator->var->symtree->n.sym == n->sym)
- break;
- if (n != NULL)
- dovar_found = true;
- }
- /* Evaluate all the expressions in the iterator. */
- gfc_init_se (&se, NULL);
- gfc_conv_expr_lhs (&se, code->ext.iterator->var);
- gfc_add_block_to_block (pblock, &se.pre);
- dovar = se.expr;
- type = TREE_TYPE (dovar);
- gcc_assert (TREE_CODE (type) == INTEGER_TYPE);
-
- gfc_init_se (&se, NULL);
- gfc_conv_expr_val (&se, code->ext.iterator->start);
- gfc_add_block_to_block (pblock, &se.pre);
- from = gfc_evaluate_now (se.expr, pblock);
-
- gfc_init_se (&se, NULL);
- gfc_conv_expr_val (&se, code->ext.iterator->end);
- gfc_add_block_to_block (pblock, &se.pre);
- to = gfc_evaluate_now (se.expr, pblock);
-
- gfc_init_se (&se, NULL);
- gfc_conv_expr_val (&se, code->ext.iterator->step);
- gfc_add_block_to_block (pblock, &se.pre);
- step = gfc_evaluate_now (se.expr, pblock);
-
- /* Special case simple loops. */
- if (integer_onep (step))
- simple = 1;
- else if (tree_int_cst_equal (step, integer_minus_one_node))
- simple = -1;
-
- /* Loop body. */
- if (simple)
+ for (i = 0; i < collapse; i++)
{
- init = build2_v (GIMPLE_MODIFY_STMT, dovar, from);
- cond = fold_build2 (simple > 0 ? LE_EXPR : GE_EXPR, boolean_type_node,
- dovar, to);
- incr = fold_build2 (PLUS_EXPR, type, dovar, step);
- incr = fold_build2 (GIMPLE_MODIFY_STMT, type, dovar, incr);
- if (pblock != &block)
+ int simple = 0;
+ int dovar_found = 0;
+
+ if (clauses)
{
- pushlevel (0);
- gfc_start_block (&block);
+ gfc_namelist *n;
+ for (n = clauses->lists[OMP_LIST_LASTPRIVATE]; n != NULL;
+ n = n->next)
+ if (code->ext.iterator->var->symtree->n.sym == n->sym)
+ break;
+ if (n != NULL)
+ dovar_found = 1;
+ else if (n == NULL)
+ for (n = clauses->lists[OMP_LIST_PRIVATE]; n != NULL; n = n->next)
+ if (code->ext.iterator->var->symtree->n.sym == n->sym)
+ break;
+ if (n != NULL)
+ dovar_found++;
}
- gfc_start_block (&body);
- }
- else
- {
- /* STEP is not 1 or -1. Use:
- for (count = 0; count < (to + step - from) / step; count++)
- {
- dovar = from + count * step;
- body;
- cycle_label:;
- } */
- tmp = fold_build2 (MINUS_EXPR, type, step, from);
- tmp = fold_build2 (PLUS_EXPR, type, to, tmp);
- tmp = fold_build2 (TRUNC_DIV_EXPR, type, tmp, step);
- tmp = gfc_evaluate_now (tmp, pblock);
- count = gfc_create_var (type, "count");
- init = build2_v (GIMPLE_MODIFY_STMT, count, build_int_cst (type, 0));
- cond = fold_build2 (LT_EXPR, boolean_type_node, count, tmp);
- incr = fold_build2 (PLUS_EXPR, type, count, build_int_cst (type, 1));
- incr = fold_build2 (GIMPLE_MODIFY_STMT, type, count, incr);
-
- if (pblock != &block)
+
+ /* Evaluate all the expressions in the iterator. */
+ gfc_init_se (&se, NULL);
+ gfc_conv_expr_lhs (&se, code->ext.iterator->var);
+ gfc_add_block_to_block (pblock, &se.pre);
+ dovar = se.expr;
+ type = TREE_TYPE (dovar);
+ gcc_assert (TREE_CODE (type) == INTEGER_TYPE);
+
+ gfc_init_se (&se, NULL);
+ gfc_conv_expr_val (&se, code->ext.iterator->start);
+ gfc_add_block_to_block (pblock, &se.pre);
+ from = gfc_evaluate_now (se.expr, pblock);
+
+ gfc_init_se (&se, NULL);
+ gfc_conv_expr_val (&se, code->ext.iterator->end);
+ gfc_add_block_to_block (pblock, &se.pre);
+ to = gfc_evaluate_now (se.expr, pblock);
+
+ gfc_init_se (&se, NULL);
+ gfc_conv_expr_val (&se, code->ext.iterator->step);
+ gfc_add_block_to_block (pblock, &se.pre);
+ step = gfc_evaluate_now (se.expr, pblock);
+
+ /* Special case simple loops. */
+ if (integer_onep (step))
+ simple = 1;
+ else if (tree_int_cst_equal (step, integer_minus_one_node))
+ simple = -1;
+
+ /* Loop body. */
+ if (simple)
{
- pushlevel (0);
- gfc_start_block (&block);
+ TREE_VEC_ELT (init, i) = build2_v (GIMPLE_MODIFY_STMT, dovar, from);
+ TREE_VEC_ELT (cond, i) = fold_build2 (simple > 0 ? LE_EXPR : GE_EXPR,
+ boolean_type_node, dovar, to);
+ TREE_VEC_ELT (incr, i) = fold_build2 (PLUS_EXPR, type, dovar, step);
+ TREE_VEC_ELT (incr, i) = fold_build2 (GIMPLE_MODIFY_STMT, type, dovar,
+ TREE_VEC_ELT (incr, i));
+ }
+ else
+ {
+ /* STEP is not 1 or -1. Use:
+ for (count = 0; count < (to + step - from) / step; count++)
+ {
+ dovar = from + count * step;
+ body;
+ cycle_label:;
+ } */
+ tmp = fold_build2 (MINUS_EXPR, type, step, from);
+ tmp = fold_build2 (PLUS_EXPR, type, to, tmp);
+ tmp = fold_build2 (TRUNC_DIV_EXPR, type, tmp, step);
+ tmp = gfc_evaluate_now (tmp, pblock);
+ count = gfc_create_var (type, "count");
+ TREE_VEC_ELT (init, i) = build2_v (GIMPLE_MODIFY_STMT, count,
+ build_int_cst (type, 0));
+ TREE_VEC_ELT (cond, i) = fold_build2 (LT_EXPR, boolean_type_node,
+ count, tmp);
+ TREE_VEC_ELT (incr, i) = fold_build2 (PLUS_EXPR, type, count,
+ build_int_cst (type, 1));
+ TREE_VEC_ELT (incr, i) = fold_build2 (GIMPLE_MODIFY_STMT, type,
+ count, TREE_VEC_ELT (incr, i));
+
+ /* Initialize DOVAR. */
+ tmp = fold_build2 (MULT_EXPR, type, count, step);
+ tmp = fold_build2 (PLUS_EXPR, type, from, tmp);
+ dovar_init = tree_cons (dovar, tmp, dovar_init);
}
- gfc_start_block (&body);
- /* Initialize DOVAR. */
- tmp = fold_build2 (MULT_EXPR, type, count, step);
- tmp = fold_build2 (PLUS_EXPR, type, from, tmp);
- gfc_add_modify_stmt (&body, dovar, tmp);
+ if (!dovar_found)
+ {
+ tmp = build_omp_clause (OMP_CLAUSE_PRIVATE);
+ OMP_CLAUSE_DECL (tmp) = dovar;
+ omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
+ }
+ else if (dovar_found == 2)
+ {
+ tree c = NULL;
+
+ tmp = NULL;
+ if (!simple)
+ {
+ /* If dovar is lastprivate, but a different counter is used,
+ dovar += step needs to be added to
+ OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
+ will have the value on entry to the last iteration, rather
+ than the value after the final increment. */
+ tmp = gfc_evaluate_now (step, pblock);
+ tmp = fold_build2 (PLUS_EXPR, type, dovar, tmp);
+ tmp = fold_build2 (GIMPLE_MODIFY_STMT, type, dovar, tmp);
+ for (c = omp_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
+ if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
+ && OMP_CLAUSE_DECL (c) == dovar)
+ {
+ OMP_CLAUSE_LASTPRIVATE_STMT (c) = tmp;
+ break;
+ }
+ }
+ if (c == NULL && par_clauses != NULL)
+ {
+ for (c = par_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
+ if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
+ && OMP_CLAUSE_DECL (c) == dovar)
+ {
+ tree l = build_omp_clause (OMP_CLAUSE_LASTPRIVATE);
+ OMP_CLAUSE_DECL (l) = dovar;
+ OMP_CLAUSE_CHAIN (l) = omp_clauses;
+ OMP_CLAUSE_LASTPRIVATE_STMT (l) = tmp;
+ omp_clauses = l;
+ OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_SHARED);
+ break;
+ }
+ }
+ gcc_assert (simple || c != NULL);
+ }
+ if (!simple)
+ {
+ tmp = build_omp_clause (OMP_CLAUSE_PRIVATE);
+ OMP_CLAUSE_DECL (tmp) = count;
+ omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
+ }
+
+ if (i + 1 < collapse)
+ code = code->block->next;
}
- if (!dovar_found)
+ if (pblock != &block)
{
- tmp = build_omp_clause (OMP_CLAUSE_PRIVATE);
- OMP_CLAUSE_DECL (tmp) = dovar;
- omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
+ pushlevel (0);
+ gfc_start_block (&block);
}
- if (!simple)
+
+ gfc_start_block (&body);
+
+ dovar_init = nreverse (dovar_init);
+ while (dovar_init)
{
- tmp = build_omp_clause (OMP_CLAUSE_PRIVATE);
- OMP_CLAUSE_DECL (tmp) = count;
- omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
+ gfc_add_modify_stmt (&body, TREE_PURPOSE (dovar_init),
+ TREE_VALUE (dovar_init));
+ dovar_init = TREE_CHAIN (dovar_init);
}
/* Cycle statement is implemented with a goto. Exit statement must not be
@@ -1107,9 +1403,11 @@ gfc_trans_omp_parallel_do (gfc_code *code)
do_clauses.sched_kind = parallel_clauses.sched_kind;
do_clauses.chunk_size = parallel_clauses.chunk_size;
do_clauses.ordered = parallel_clauses.ordered;
+ do_clauses.collapse = parallel_clauses.collapse;
parallel_clauses.sched_kind = OMP_SCHED_NONE;
parallel_clauses.chunk_size = NULL;
parallel_clauses.ordered = false;
+ parallel_clauses.collapse = 0;
omp_clauses = gfc_trans_omp_clauses (&block, &parallel_clauses,
code->loc);
}
@@ -1118,7 +1416,7 @@ gfc_trans_omp_parallel_do (gfc_code *code)
pblock = &block;
else
pushlevel (0);
- stmt = gfc_trans_omp_do (code, pblock, &do_clauses);
+ stmt = gfc_trans_omp_do (code, pblock, &do_clauses, omp_clauses);
if (TREE_CODE (stmt) != BIND_EXPR)
stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
else
@@ -1221,6 +1519,31 @@ gfc_trans_omp_single (gfc_code *code, gfc_omp_clauses *clauses)
}
static tree
+gfc_trans_omp_task (gfc_code *code)
+{
+ stmtblock_t block;
+ tree stmt, body_stmt, omp_clauses;
+
+ gfc_start_block (&block);
+ omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
+ code->loc);
+ body_stmt = gfc_trans_omp_code (code->block->next, true);
+ stmt = make_node (OMP_TASK);
+ TREE_TYPE (stmt) = void_type_node;
+ OMP_TASK_CLAUSES (stmt) = omp_clauses;
+ OMP_TASK_BODY (stmt) = body_stmt;
+ gfc_add_expr_to_block (&block, stmt);
+ return gfc_finish_block (&block);
+}
+
+static tree
+gfc_trans_omp_taskwait (void)
+{
+ tree decl = built_in_decls [BUILT_IN_GOMP_TASKWAIT];
+ return build_call_expr (decl, 0);
+}
+
+static tree
gfc_trans_omp_workshare (gfc_code *code, gfc_omp_clauses *clauses)
{
/* XXX */
@@ -1239,7 +1562,7 @@ gfc_trans_omp_directive (gfc_code *code)
case EXEC_OMP_CRITICAL:
return gfc_trans_omp_critical (code);
case EXEC_OMP_DO:
- return gfc_trans_omp_do (code, NULL, code->ext.omp_clauses);
+ return gfc_trans_omp_do (code, NULL, code->ext.omp_clauses, NULL);
case EXEC_OMP_FLUSH:
return gfc_trans_omp_flush ();
case EXEC_OMP_MASTER:
@@ -1258,6 +1581,10 @@ gfc_trans_omp_directive (gfc_code *code)
return gfc_trans_omp_sections (code, code->ext.omp_clauses);
case EXEC_OMP_SINGLE:
return gfc_trans_omp_single (code, code->ext.omp_clauses);
+ case EXEC_OMP_TASK:
+ return gfc_trans_omp_task (code);
+ case EXEC_OMP_TASKWAIT:
+ return gfc_trans_omp_taskwait ();
case EXEC_OMP_WORKSHARE:
return gfc_trans_omp_workshare (code, code->ext.omp_clauses);
default:
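A sketch of the LASTPRIVATE situation gfc_trans_omp_do now handles: the DO variable sits in the combined construct's LASTPRIVATE clause and the stride is not 1 or -1, so an internal counter drives the loop and the extra increment stored in OMP_CLAUSE_LASTPRIVATE_STMT keeps the copied-back value consistent with sequential DO semantics (illustrative only; assumes gfortran with -fopenmp):

  program lastprivate_sketch
    implicit none
    integer :: i
    real :: x(100)
    x = 0.0
  !$omp parallel do lastprivate(i) schedule(static)
    do i = 1, 100, 3
       x(i) = real(i)
    end do
  !$omp end parallel do
    print *, i                   ! 103, as after a sequential DO
  end program lastprivate_sketch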
diff --git a/gcc/fortran/trans.c b/gcc/fortran/trans.c
index f303128a28d..51e0cdd6aad 100644
--- a/gcc/fortran/trans.c
+++ b/gcc/fortran/trans.c
@@ -1135,6 +1135,8 @@ gfc_trans_code (gfc_code * code)
case EXEC_OMP_PARALLEL_WORKSHARE:
case EXEC_OMP_SECTIONS:
case EXEC_OMP_SINGLE:
+ case EXEC_OMP_TASK:
+ case EXEC_OMP_TASKWAIT:
case EXEC_OMP_WORKSHARE:
res = gfc_trans_omp_directive (code);
break;
diff --git a/gcc/fortran/trans.h b/gcc/fortran/trans.h
index d0ce2354120..3a07d712791 100644
--- a/gcc/fortran/trans.h
+++ b/gcc/fortran/trans.h
@@ -493,9 +493,13 @@ bool gfc_get_array_descr_info (const_tree, struct array_descr_info *);
/* In trans-openmp.c */
bool gfc_omp_privatize_by_reference (const_tree);
enum omp_clause_default_kind gfc_omp_predetermined_sharing (tree);
-tree gfc_omp_clause_default_ctor (tree, tree);
+tree gfc_omp_clause_default_ctor (tree, tree, tree);
+tree gfc_omp_clause_copy_ctor (tree, tree, tree);
+tree gfc_omp_clause_assign_op (tree, tree, tree);
+tree gfc_omp_clause_dtor (tree, tree);
bool gfc_omp_disregard_value_expr (tree, bool);
bool gfc_omp_private_debug_clause (tree, bool);
+bool gfc_omp_private_outer_ref (tree);
struct gimplify_omp_ctx;
void gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *, tree);
diff --git a/gcc/fortran/types.def b/gcc/fortran/types.def
index 22dcafd2397..5bcdb5261d9 100644
--- a/gcc/fortran/types.def
+++ b/gcc/fortran/types.def
@@ -50,10 +50,12 @@ along with GCC; see the file COPYING3. If not see
the type pointed to. */
DEF_PRIMITIVE_TYPE (BT_VOID, void_type_node)
-DEF_PRIMITIVE_TYPE (BT_BOOL, boolean_type_node)
+DEF_PRIMITIVE_TYPE (BT_BOOL,
+ (*lang_hooks.types.type_for_size) (BOOL_TYPE_SIZE, 1))
DEF_PRIMITIVE_TYPE (BT_INT, integer_type_node)
DEF_PRIMITIVE_TYPE (BT_UINT, unsigned_type_node)
DEF_PRIMITIVE_TYPE (BT_LONG, long_integer_type_node)
+DEF_PRIMITIVE_TYPE (BT_ULONGLONG, long_long_unsigned_type_node)
DEF_PRIMITIVE_TYPE (BT_WORD, (*lang_hooks.types.type_for_mode) (word_mode, 1))
DEF_PRIMITIVE_TYPE (BT_I1, builtin_type_for_size (BITS_PER_UNIT*1, 1))
@@ -70,6 +72,7 @@ DEF_PRIMITIVE_TYPE (BT_VOLATILE_PTR,
TYPE_QUAL_VOLATILE)))
DEF_POINTER_TYPE (BT_PTR_LONG, BT_LONG)
+DEF_POINTER_TYPE (BT_PTR_ULONGLONG, BT_ULONGLONG)
DEF_POINTER_TYPE (BT_PTR_PTR, BT_PTR)
DEF_FUNCTION_TYPE_0 (BT_FN_BOOL, BT_BOOL)
DEF_FUNCTION_TYPE_0 (BT_FN_PTR, BT_PTR)
@@ -87,11 +90,16 @@ DEF_POINTER_TYPE (BT_PTR_FN_VOID_PTR, BT_FN_VOID_PTR)
DEF_FUNCTION_TYPE_2 (BT_FN_BOOL_LONGPTR_LONGPTR,
BT_BOOL, BT_PTR_LONG, BT_PTR_LONG)
+DEF_FUNCTION_TYPE_2 (BT_FN_BOOL_ULONGLONGPTR_ULONGLONGPTR,
+ BT_BOOL, BT_PTR_ULONGLONG, BT_PTR_ULONGLONG)
DEF_FUNCTION_TYPE_2 (BT_FN_I1_VPTR_I1, BT_I1, BT_VOLATILE_PTR, BT_I1)
DEF_FUNCTION_TYPE_2 (BT_FN_I2_VPTR_I2, BT_I2, BT_VOLATILE_PTR, BT_I2)
DEF_FUNCTION_TYPE_2 (BT_FN_I4_VPTR_I4, BT_I4, BT_VOLATILE_PTR, BT_I4)
DEF_FUNCTION_TYPE_2 (BT_FN_I8_VPTR_I8, BT_I8, BT_VOLATILE_PTR, BT_I8)
DEF_FUNCTION_TYPE_2 (BT_FN_I16_VPTR_I16, BT_I16, BT_VOLATILE_PTR, BT_I16)
+DEF_FUNCTION_TYPE_2 (BT_FN_VOID_PTR_PTR, BT_VOID, BT_PTR, BT_PTR)
+
+DEF_POINTER_TYPE (BT_PTR_FN_VOID_PTR_PTR, BT_FN_VOID_PTR_PTR)
DEF_FUNCTION_TYPE_3 (BT_FN_BOOL_VPTR_I1_I1, BT_BOOL, BT_VOLATILE_PTR,
BT_I1, BT_I1)
@@ -127,9 +135,20 @@ DEF_FUNCTION_TYPE_6 (BT_FN_BOOL_LONG_LONG_LONG_LONG_LONGPTR_LONGPTR,
DEF_FUNCTION_TYPE_6 (BT_FN_VOID_OMPFN_PTR_UINT_LONG_LONG_LONG,
BT_VOID, BT_PTR_FN_VOID_PTR, BT_PTR, BT_UINT,
BT_LONG, BT_LONG, BT_LONG)
+DEF_FUNCTION_TYPE_6 (BT_FN_BOOL_BOOL_ULL_ULL_ULL_ULLPTR_ULLPTR,
+ BT_BOOL, BT_BOOL, BT_ULONGLONG, BT_ULONGLONG,
+ BT_ULONGLONG, BT_PTR_ULONGLONG, BT_PTR_ULONGLONG)
DEF_FUNCTION_TYPE_7 (BT_FN_VOID_OMPFN_PTR_UINT_LONG_LONG_LONG_LONG,
BT_VOID, BT_PTR_FN_VOID_PTR, BT_PTR, BT_UINT,
BT_LONG, BT_LONG, BT_LONG, BT_LONG)
+DEF_FUNCTION_TYPE_7 (BT_FN_VOID_OMPFN_PTR_OMPCPYFN_LONG_LONG_BOOL_UINT,
+ BT_VOID, BT_PTR_FN_VOID_PTR, BT_PTR,
+ BT_PTR_FN_VOID_PTR_PTR, BT_LONG, BT_LONG,
+ BT_BOOL, BT_UINT)
+DEF_FUNCTION_TYPE_7 (BT_FN_BOOL_BOOL_ULL_ULL_ULL_ULL_ULLPTR_ULLPTR,
+ BT_BOOL, BT_BOOL, BT_ULONGLONG, BT_ULONGLONG,
+ BT_ULONGLONG, BT_ULONGLONG,
+ BT_PTR_ULONGLONG, BT_PTR_ULONGLONG)
DEF_FUNCTION_TYPE_VAR_0 (BT_FN_VOID_VAR, BT_VOID)
diff --git a/gcc/gimple-low.c b/gcc/gimple-low.c
index fd1a19dfd32..99175d5b2f1 100644
--- a/gcc/gimple-low.c
+++ b/gcc/gimple-low.c
@@ -277,6 +277,7 @@ lower_stmt (tree_stmt_iterator *tsi, struct lower_data *data)
break;
case OMP_PARALLEL:
+ case OMP_TASK:
lower_omp_directive (tsi, data);
return;
diff --git a/gcc/gimplify.c b/gcc/gimplify.c
index 462e4a2cb85..0c2e5e24405 100644
--- a/gcc/gimplify.c
+++ b/gcc/gimplify.c
@@ -62,10 +62,19 @@ enum gimplify_omp_var_data
GOVD_REDUCTION = 64,
GOVD_LOCAL = 128,
GOVD_DEBUG_PRIVATE = 256,
+ GOVD_PRIVATE_OUTER_REF = 512,
GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
};
+enum omp_region_type
+{
+ ORT_WORKSHARE = 0,
+ ORT_TASK = 1,
+ ORT_PARALLEL = 2,
+ ORT_COMBINED_PARALLEL = 3
+};
+
struct gimplify_omp_ctx
{
struct gimplify_omp_ctx *outer_context;
@@ -73,8 +82,7 @@ struct gimplify_omp_ctx
struct pointer_set_t *privatized_types;
location_t location;
enum omp_clause_default_kind default_kind;
- bool is_parallel;
- bool is_combined_parallel;
+ enum omp_region_type region_type;
};
struct gimplify_ctx
@@ -270,7 +278,7 @@ splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
/* Create a new omp construct that deals with variable remapping. */
static struct gimplify_omp_ctx *
-new_omp_context (bool is_parallel, bool is_combined_parallel)
+new_omp_context (enum omp_region_type region_type)
{
struct gimplify_omp_ctx *c;
@@ -279,9 +287,11 @@ new_omp_context (bool is_parallel, bool is_combined_parallel)
c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
c->privatized_types = pointer_set_create ();
c->location = input_location;
- c->is_parallel = is_parallel;
- c->is_combined_parallel = is_combined_parallel;
- c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
+ c->region_type = region_type;
+ if (region_type != ORT_TASK)
+ c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
+ else
+ c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
return c;
}
@@ -756,7 +766,7 @@ gimple_add_tmp_var (tree tmp)
if (gimplify_omp_ctxp)
{
struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
- while (ctx && !ctx->is_parallel)
+ while (ctx && ctx->region_type == ORT_WORKSHARE)
ctx = ctx->outer_context;
if (ctx)
omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
@@ -4711,7 +4721,7 @@ omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
else
return;
}
- else if (ctx->is_parallel)
+ else if (ctx->region_type != ORT_WORKSHARE)
omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
ctx = ctx->outer_context;
@@ -4904,8 +4914,9 @@ omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
if (n == NULL)
{
enum omp_clause_default_kind default_kind, kind;
+ struct gimplify_omp_ctx *octx;
- if (!ctx->is_parallel)
+ if (ctx->region_type == ORT_WORKSHARE)
goto do_outer;
/* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
@@ -4929,10 +4940,47 @@ omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
case OMP_CLAUSE_DEFAULT_PRIVATE:
flags |= GOVD_PRIVATE;
break;
+ case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
+ flags |= GOVD_FIRSTPRIVATE;
+ break;
+ case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
+ /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
+ gcc_assert (ctx->region_type == ORT_TASK);
+ if (ctx->outer_context)
+ omp_notice_variable (ctx->outer_context, decl, in_code);
+ for (octx = ctx->outer_context; octx; octx = octx->outer_context)
+ {
+ splay_tree_node n2;
+
+ n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
+ if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
+ {
+ flags |= GOVD_FIRSTPRIVATE;
+ break;
+ }
+ if ((octx->region_type & ORT_PARALLEL) != 0)
+ break;
+ }
+ if (flags & GOVD_FIRSTPRIVATE)
+ break;
+ if (octx == NULL
+ && (TREE_CODE (decl) == PARM_DECL
+ || (!is_global_var (decl)
+ && DECL_CONTEXT (decl) == current_function_decl)))
+ {
+ flags |= GOVD_FIRSTPRIVATE;
+ break;
+ }
+ flags |= GOVD_SHARED;
+ break;
default:
gcc_unreachable ();
}
+ if ((flags & GOVD_PRIVATE)
+ && lang_hooks.decls.omp_private_outer_ref (decl))
+ flags |= GOVD_PRIVATE_OUTER_REF;
+
omp_add_variable (ctx, decl, flags);
shared = (flags & GOVD_SHARED) != 0;
@@ -4952,7 +5000,7 @@ omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
do_outer:
/* If the variable is private in the current context, then we don't
need to propagate anything to an outer context. */
- if (flags & GOVD_PRIVATE)
+ if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
return ret;
if (ctx->outer_context
&& omp_notice_variable (ctx->outer_context, decl, in_code))
@@ -4985,7 +5033,7 @@ omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
}
else if ((n->value & GOVD_EXPLICIT) != 0
&& (ctx == gimplify_omp_ctxp
- || (ctx->is_combined_parallel
+ || (ctx->region_type == ORT_COMBINED_PARALLEL
&& gimplify_omp_ctxp->outer_context == ctx)))
{
if ((n->value & GOVD_FIRSTPRIVATE) != 0)
@@ -4998,7 +5046,7 @@ omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
return true;
}
- if (ctx->is_parallel)
+ if (ctx->region_type != ORT_WORKSHARE)
return false;
else if (ctx->outer_context)
return omp_is_private (ctx->outer_context, decl);
@@ -5027,7 +5075,7 @@ omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
if (n != NULL)
return (n->value & GOVD_SHARED) == 0;
}
- while (!ctx->is_parallel);
+ while (ctx->region_type == ORT_WORKSHARE);
return false;
}
@@ -5035,13 +5083,13 @@ omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
and previous omp contexts. */
static void
-gimplify_scan_omp_clauses (tree *list_p, tree *pre_p, bool in_parallel,
- bool in_combined_parallel)
+gimplify_scan_omp_clauses (tree *list_p, tree *pre_p,
+ enum omp_region_type region_type)
{
struct gimplify_omp_ctx *ctx, *outer_ctx;
tree c;
- ctx = new_omp_context (in_parallel, in_combined_parallel);
+ ctx = new_omp_context (region_type);
outer_ctx = ctx->outer_context;
while ((c = *list_p) != NULL)
@@ -5057,7 +5105,13 @@ gimplify_scan_omp_clauses (tree *list_p, tree *pre_p, bool in_parallel,
{
case OMP_CLAUSE_PRIVATE:
flags = GOVD_PRIVATE | GOVD_EXPLICIT;
- notice_outer = false;
+ if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
+ {
+ flags |= GOVD_PRIVATE_OUTER_REF;
+ OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
+ }
+ else
+ notice_outer = false;
goto do_add;
case OMP_CLAUSE_SHARED:
flags = GOVD_SHARED | GOVD_EXPLICIT;
@@ -5097,6 +5151,23 @@ gimplify_scan_omp_clauses (tree *list_p, tree *pre_p, bool in_parallel,
pop_gimplify_context (OMP_CLAUSE_REDUCTION_MERGE (c));
gimplify_omp_ctxp = outer_ctx;
}
+ else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
+ && OMP_CLAUSE_LASTPRIVATE_STMT (c))
+ {
+ gimplify_omp_ctxp = ctx;
+ push_gimplify_context ();
+ if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
+ {
+ tree bind = build3 (BIND_EXPR, void_type_node, NULL,
+ NULL, NULL);
+ TREE_SIDE_EFFECTS (bind) = 1;
+ BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
+ OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
+ }
+ gimplify_stmt (&OMP_CLAUSE_LASTPRIVATE_STMT (c));
+ pop_gimplify_context (OMP_CLAUSE_LASTPRIVATE_STMT (c));
+ gimplify_omp_ctxp = outer_ctx;
+ }
if (notice_outer)
goto do_notice;
break;
@@ -5113,7 +5184,7 @@ gimplify_scan_omp_clauses (tree *list_p, tree *pre_p, bool in_parallel,
if (outer_ctx)
omp_notice_variable (outer_ctx, decl, true);
if (check_non_private
- && !in_parallel
+ && region_type == ORT_WORKSHARE
&& omp_check_private (ctx, decl))
{
error ("%s variable %qs is private in outer context",
@@ -5137,6 +5208,8 @@ gimplify_scan_omp_clauses (tree *list_p, tree *pre_p, bool in_parallel,
case OMP_CLAUSE_NOWAIT:
case OMP_CLAUSE_ORDERED:
+ case OMP_CLAUSE_UNTIED:
+ case OMP_CLAUSE_COLLAPSE:
break;
case OMP_CLAUSE_DEFAULT:
@@ -5215,7 +5288,10 @@ gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
OMP_CLAUSE_CHAIN (clause) = *list_p;
if (private_debug)
OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
+ else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
+ OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
*list_p = clause;
+ lang_hooks.decls.omp_finish_clause (clause);
return 0;
}
@@ -5272,6 +5348,8 @@ gimplify_adjust_omp_clauses (tree *list_p)
case OMP_CLAUSE_NOWAIT:
case OMP_CLAUSE_ORDERED:
case OMP_CLAUSE_DEFAULT:
+ case OMP_CLAUSE_UNTIED:
+ case OMP_CLAUSE_COLLAPSE:
break;
default:
@@ -5301,8 +5379,10 @@ gimplify_omp_parallel (tree *expr_p, tree *pre_p)
{
tree expr = *expr_p;
- gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p, true,
- OMP_PARALLEL_COMBINED (expr));
+ gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
+ OMP_PARALLEL_COMBINED (expr)
+ ? ORT_COMBINED_PARALLEL
+ : ORT_PARALLEL);
push_gimplify_context ();
@@ -5318,124 +5398,187 @@ gimplify_omp_parallel (tree *expr_p, tree *pre_p)
return GS_ALL_DONE;
}
-/* Gimplify the gross structure of an OMP_FOR statement. */
+/* Gimplify the contents of an OMP_TASK statement. This involves
+ gimplification of the body, as well as scanning the body for used
+ variables. We need to do this scan now, because variable-sized
+ decls will be decomposed during gimplification. */
static enum gimplify_status
-gimplify_omp_for (tree *expr_p, tree *pre_p)
+gimplify_omp_task (tree *expr_p, tree *pre_p)
{
- tree for_stmt, decl, var, t;
- enum gimplify_status ret = GS_OK;
- tree body, init_decl = NULL_TREE;
+ tree expr = *expr_p;
- for_stmt = *expr_p;
+ gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p, ORT_TASK);
- gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, false, false);
+ push_gimplify_context ();
- t = OMP_FOR_INIT (for_stmt);
- gcc_assert (TREE_CODE (t) == MODIFY_EXPR
- || TREE_CODE (t) == GIMPLE_MODIFY_STMT);
- decl = GENERIC_TREE_OPERAND (t, 0);
- gcc_assert (DECL_P (decl));
- gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl)));
+ gimplify_stmt (&OMP_TASK_BODY (expr));
- /* Make sure the iteration variable is private. */
- if (omp_is_private (gimplify_omp_ctxp, decl))
- omp_notice_variable (gimplify_omp_ctxp, decl, true);
+ if (TREE_CODE (OMP_TASK_BODY (expr)) == BIND_EXPR)
+ pop_gimplify_context (OMP_TASK_BODY (expr));
else
- omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
+ pop_gimplify_context (NULL_TREE);
- /* If DECL is not a gimple register, create a temporary variable to act as an
- iteration counter. This is valid, since DECL cannot be modified in the
- body of the loop. */
- if (!is_gimple_reg (decl))
- {
- var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
- GENERIC_TREE_OPERAND (t, 0) = var;
+ gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
- init_decl = build_gimple_modify_stmt (decl, var);
- omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
- }
- else
- var = decl;
+ return GS_ALL_DONE;
+}
+
+/* Gimplify the gross structure of an OMP_FOR statement. */
+
+static enum gimplify_status
+gimplify_omp_for (tree *expr_p, tree *pre_p)
+{
+ tree for_stmt, decl, var, t, bodylist;
+ enum gimplify_status ret = GS_OK;
+ tree body, init_decl = NULL_TREE;
+ int i;
+
+ for_stmt = *expr_p;
+
+ gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
+ ORT_WORKSHARE);
/* If OMP_FOR is re-gimplified, ensure all variables in pre-body
are noticed. */
gimplify_stmt (&OMP_FOR_PRE_BODY (for_stmt));
- ret |= gimplify_expr (&GENERIC_TREE_OPERAND (t, 1),
- &OMP_FOR_PRE_BODY (for_stmt),
- NULL, is_gimple_val, fb_rvalue);
+ bodylist = alloc_stmt_list ();
+
+ gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
+ == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
+ gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
+ == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
+ for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
+ {
+ t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
+ gcc_assert (TREE_CODE (t) == MODIFY_EXPR
+ || TREE_CODE (t) == GIMPLE_MODIFY_STMT);
+ decl = GENERIC_TREE_OPERAND (t, 0);
+ gcc_assert (DECL_P (decl));
+ gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
+ || POINTER_TYPE_P (TREE_TYPE (decl)));
+
+ /* Make sure the iteration variable is private. */
+ if (omp_is_private (gimplify_omp_ctxp, decl))
+ omp_notice_variable (gimplify_omp_ctxp, decl, true);
+ else
+ omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
+
+ /* If DECL is not a gimple register, create a temporary variable to act
+ as an iteration counter. This is valid, since DECL cannot be
+ modified in the body of the loop. */
+ if (!is_gimple_reg (decl))
+ {
+ var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
+ GENERIC_TREE_OPERAND (t, 0) = var;
- tree_to_gimple_tuple (&OMP_FOR_INIT (for_stmt));
+ init_decl = build_gimple_modify_stmt (decl, var);
+ omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
+ }
+ else
+ var = decl;
- t = OMP_FOR_COND (for_stmt);
- gcc_assert (COMPARISON_CLASS_P (t));
- gcc_assert (GENERIC_TREE_OPERAND (t, 0) == decl);
- TREE_OPERAND (t, 0) = var;
+ ret |= gimplify_expr (&GENERIC_TREE_OPERAND (t, 1),
+ &OMP_FOR_PRE_BODY (for_stmt),
+ NULL, is_gimple_val, fb_rvalue);
- ret |= gimplify_expr (&GENERIC_TREE_OPERAND (t, 1),
- &OMP_FOR_PRE_BODY (for_stmt),
- NULL, is_gimple_val, fb_rvalue);
+ tree_to_gimple_tuple (&TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i));
- tree_to_gimple_tuple (&OMP_FOR_INCR (for_stmt));
- t = OMP_FOR_INCR (for_stmt);
- switch (TREE_CODE (t))
- {
- case PREINCREMENT_EXPR:
- case POSTINCREMENT_EXPR:
- t = build_int_cst (TREE_TYPE (decl), 1);
- t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
- t = build_gimple_modify_stmt (var, t);
- OMP_FOR_INCR (for_stmt) = t;
- break;
+ t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
+ gcc_assert (COMPARISON_CLASS_P (t));
+ gcc_assert (GENERIC_TREE_OPERAND (t, 0) == decl);
+ TREE_OPERAND (t, 0) = var;
- case PREDECREMENT_EXPR:
- case POSTDECREMENT_EXPR:
- t = build_int_cst (TREE_TYPE (decl), -1);
- t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
- t = build_gimple_modify_stmt (var, t);
- OMP_FOR_INCR (for_stmt) = t;
- break;
-
- case GIMPLE_MODIFY_STMT:
- gcc_assert (GIMPLE_STMT_OPERAND (t, 0) == decl);
- GIMPLE_STMT_OPERAND (t, 0) = var;
+ ret |= gimplify_expr (&GENERIC_TREE_OPERAND (t, 1),
+ &OMP_FOR_PRE_BODY (for_stmt),
+ NULL, is_gimple_val, fb_rvalue);
- t = GIMPLE_STMT_OPERAND (t, 1);
+ tree_to_gimple_tuple (&TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i));
+ t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
switch (TREE_CODE (t))
{
- case PLUS_EXPR:
- if (TREE_OPERAND (t, 1) == decl)
+ case PREINCREMENT_EXPR:
+ case POSTINCREMENT_EXPR:
+ t = build_int_cst (TREE_TYPE (decl), 1);
+ t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
+ t = build_gimple_modify_stmt (var, t);
+ TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
+ break;
+
+ case PREDECREMENT_EXPR:
+ case POSTDECREMENT_EXPR:
+ t = build_int_cst (TREE_TYPE (decl), -1);
+ t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
+ t = build_gimple_modify_stmt (var, t);
+ TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
+ break;
+
+ case GIMPLE_MODIFY_STMT:
+ gcc_assert (GIMPLE_STMT_OPERAND (t, 0) == decl);
+ GIMPLE_STMT_OPERAND (t, 0) = var;
+
+ t = GIMPLE_STMT_OPERAND (t, 1);
+ switch (TREE_CODE (t))
{
- TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
+ case PLUS_EXPR:
+ if (TREE_OPERAND (t, 1) == decl)
+ {
+ TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
+ TREE_OPERAND (t, 0) = var;
+ break;
+ }
+
+ /* Fallthru. */
+ case MINUS_EXPR:
+ case POINTER_PLUS_EXPR:
+ gcc_assert (TREE_OPERAND (t, 0) == decl);
TREE_OPERAND (t, 0) = var;
break;
+ default:
+ gcc_unreachable ();
}
- /* Fallthru. */
- case MINUS_EXPR:
- gcc_assert (TREE_OPERAND (t, 0) == decl);
- TREE_OPERAND (t, 0) = var;
+ ret |= gimplify_expr (&TREE_OPERAND (t, 1),
+ &OMP_FOR_PRE_BODY (for_stmt),
+ NULL, is_gimple_val, fb_rvalue);
break;
+
default:
gcc_unreachable ();
}
- ret |= gimplify_expr (&TREE_OPERAND (t, 1), &OMP_FOR_PRE_BODY (for_stmt),
- NULL, is_gimple_val, fb_rvalue);
- break;
+ if (init_decl)
+ append_to_statement_list (init_decl, &bodylist);
- default:
- gcc_unreachable ();
+ if (var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
+ {
+ tree c;
+ for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
+ if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
+ && OMP_CLAUSE_DECL (c) == decl
+ && OMP_CLAUSE_LASTPRIVATE_STMT (c) == NULL)
+ {
+ t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
+ gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
+ gcc_assert (GIMPLE_STMT_OPERAND (t, 0) == var);
+ t = GIMPLE_STMT_OPERAND (t, 1);
+ gcc_assert (TREE_CODE (t) == PLUS_EXPR
+ || TREE_CODE (t) == MINUS_EXPR
+ || TREE_CODE (t) == POINTER_PLUS_EXPR);
+ gcc_assert (TREE_OPERAND (t, 0) == var);
+ t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
+ TREE_OPERAND (t, 1));
+ OMP_CLAUSE_LASTPRIVATE_STMT (c)
+ = build_gimple_modify_stmt (decl, t);
+ }
+ }
}
body = OMP_FOR_BODY (for_stmt);
gimplify_to_stmt_list (&body);
- t = alloc_stmt_list ();
- if (init_decl)
- append_to_statement_list (init_decl, &t);
- append_to_statement_list (body, &t);
- OMP_FOR_BODY (for_stmt) = t;
+ append_to_statement_list (body, &bodylist);
+ OMP_FOR_BODY (for_stmt) = bodylist;
gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));
return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
@@ -5449,7 +5592,7 @@ gimplify_omp_workshare (tree *expr_p, tree *pre_p)
{
tree stmt = *expr_p;
- gimplify_scan_omp_clauses (&OMP_CLAUSES (stmt), pre_p, false, false);
+ gimplify_scan_omp_clauses (&OMP_CLAUSES (stmt), pre_p, ORT_WORKSHARE);
gimplify_to_stmt_list (&OMP_BODY (stmt));
gimplify_adjust_omp_clauses (&OMP_CLAUSES (stmt));
@@ -6025,6 +6168,10 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
ret = gimplify_omp_parallel (expr_p, pre_p);
break;
+ case OMP_TASK:
+ ret = gimplify_omp_task (expr_p, pre_p);
+ break;
+
case OMP_FOR:
ret = gimplify_omp_for (expr_p, pre_p);
break;
@@ -6048,6 +6195,7 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
case OMP_RETURN:
case OMP_CONTINUE:
case OMP_ATOMIC_STORE:
+ case OMP_SECTIONS_SWITCH:
ret = GS_ALL_DONE;
break;
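The ORT_TASK region type and the OMP_CLAUSE_DEFAULT_UNSPECIFIED handling added to omp_notice_variable above implement the OpenMP 3.0 implicit data-sharing rules for task regions: a variable that is shared in the enclosing parallel (or is a global no enclosing context privatizes) stays shared, everything else becomes firstprivate. A minimal, hypothetical C illustration of what the code above decides (not part of the patch):

    int g;                            /* global, stays shared in the task */

    void
    task_sharing_example (int n)
    {
      int i = n;
    #pragma omp parallel firstprivate (i)
    #pragma omp single
      {
    #pragma omp task                  /* no default clause */
        g += i;                       /* i: firstprivate on the enclosing parallel,
                                         so implicitly firstprivate here;
                                         g: global and shared, so stays shared */
    #pragma omp taskwait
      }
    }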
diff --git a/gcc/hooks.c b/gcc/hooks.c
index 289275e8441..50761b69ca5 100644
--- a/gcc/hooks.c
+++ b/gcc/hooks.c
@@ -291,6 +291,14 @@ hook_tree_tree_tree_null (tree t0 ATTRIBUTE_UNUSED, tree t1 ATTRIBUTE_UNUSED)
return NULL;
}
+tree
+hook_tree_tree_tree_tree_null (tree t0 ATTRIBUTE_UNUSED,
+ tree t1 ATTRIBUTE_UNUSED,
+ tree t2 ATTRIBUTE_UNUSED)
+{
+ return NULL;
+}
+
/* Generic hook that takes a rtx and returns a NULL string. */
const char *
hook_constcharptr_const_rtx_null (const_rtx r ATTRIBUTE_UNUSED)
diff --git a/gcc/hooks.h b/gcc/hooks.h
index 838a4223fe3..d6bbc4c2f07 100644
--- a/gcc/hooks.h
+++ b/gcc/hooks.h
@@ -63,6 +63,7 @@ extern int hook_int_size_t_constcharptr_int_0 (size_t, const char *, int);
extern int hook_int_void_no_regs (void);
extern tree hook_tree_tree_tree_null (tree, tree);
+extern tree hook_tree_tree_tree_tree_null (tree, tree, tree);
extern tree hook_tree_tree_tree_tree_3rd_identity (tree, tree, tree);
extern tree hook_tree_tree_tree_bool_null (tree, tree, bool);
diff --git a/gcc/ipa-struct-reorg.c b/gcc/ipa-struct-reorg.c
index 908681dc12f..cce9b3f6fc7 100644
--- a/gcc/ipa-struct-reorg.c
+++ b/gcc/ipa-struct-reorg.c
@@ -3727,6 +3727,7 @@ do_reorg_1 (void)
}
set_cfun (NULL);
+ bitmap_obstack_release (NULL);
}
/* This function creates new global struct variables.
diff --git a/gcc/langhooks-def.h b/gcc/langhooks-def.h
index aae46406515..dd4916caff6 100644
--- a/gcc/langhooks-def.h
+++ b/gcc/langhooks-def.h
@@ -199,10 +199,12 @@ extern tree lhd_make_node (enum tree_code);
#define LANG_HOOKS_OMP_PREDETERMINED_SHARING lhd_omp_predetermined_sharing
#define LANG_HOOKS_OMP_DISREGARD_VALUE_EXPR hook_bool_tree_bool_false
#define LANG_HOOKS_OMP_PRIVATE_DEBUG_CLAUSE hook_bool_tree_bool_false
-#define LANG_HOOKS_OMP_CLAUSE_DEFAULT_CTOR hook_tree_tree_tree_null
+#define LANG_HOOKS_OMP_PRIVATE_OUTER_REF hook_bool_tree_false
+#define LANG_HOOKS_OMP_CLAUSE_DEFAULT_CTOR hook_tree_tree_tree_tree_null
#define LANG_HOOKS_OMP_CLAUSE_COPY_CTOR lhd_omp_assignment
#define LANG_HOOKS_OMP_CLAUSE_ASSIGN_OP lhd_omp_assignment
#define LANG_HOOKS_OMP_CLAUSE_DTOR hook_tree_tree_tree_null
+#define LANG_HOOKS_OMP_FINISH_CLAUSE hook_void_tree
#define LANG_HOOKS_DECLS { \
LANG_HOOKS_GLOBAL_BINDINGS_P, \
@@ -216,10 +218,12 @@ extern tree lhd_make_node (enum tree_code);
LANG_HOOKS_OMP_PREDETERMINED_SHARING, \
LANG_HOOKS_OMP_DISREGARD_VALUE_EXPR, \
LANG_HOOKS_OMP_PRIVATE_DEBUG_CLAUSE, \
+ LANG_HOOKS_OMP_PRIVATE_OUTER_REF, \
LANG_HOOKS_OMP_CLAUSE_DEFAULT_CTOR, \
LANG_HOOKS_OMP_CLAUSE_COPY_CTOR, \
LANG_HOOKS_OMP_CLAUSE_ASSIGN_OP, \
- LANG_HOOKS_OMP_CLAUSE_DTOR \
+ LANG_HOOKS_OMP_CLAUSE_DTOR, \
+ LANG_HOOKS_OMP_FINISH_CLAUSE \
}
/* The whole thing. The structure is defined in langhooks.h. */
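A front end that wants non-default behaviour for the two new hooks overrides these macros before assembling its struct lang_hooks, in the same way the existing LANG_HOOKS_OMP_* hooks are overridden. A hypothetical sketch (the my_lang_* names are placeholders, not from this patch):

    /* In the front end's lang-hooks header, after including langhooks-def.h: */
    #undef LANG_HOOKS_OMP_PRIVATE_OUTER_REF
    #define LANG_HOOKS_OMP_PRIVATE_OUTER_REF my_lang_omp_private_outer_ref
    #undef LANG_HOOKS_OMP_FINISH_CLAUSE
    #define LANG_HOOKS_OMP_FINISH_CLAUSE my_lang_omp_finish_clause

    /* ... with matching definitions elsewhere in the front end: */
    bool
    my_lang_omp_private_outer_ref (tree decl ATTRIBUTE_UNUSED)
    {
      return false;       /* placeholder: no outer reference needed */
    }

    void
    my_lang_omp_finish_clause (tree clause ATTRIBUTE_UNUSED)
    {
      /* placeholder: no language-specific checking */
    }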
diff --git a/gcc/langhooks.h b/gcc/langhooks.h
index 6a54b01f060..1f64cf18d52 100644
--- a/gcc/langhooks.h
+++ b/gcc/langhooks.h
@@ -1,5 +1,5 @@
/* The lang_hooks data structure.
- Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007
+ Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
Free Software Foundation, Inc.
This file is part of GCC.
@@ -197,9 +197,14 @@ struct lang_hooks_for_decls
be put into OMP_CLAUSE_PRIVATE_DEBUG. */
bool (*omp_private_debug_clause) (tree, bool);
+ /* Return true if DECL in private clause needs
+ OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause. */
+ bool (*omp_private_outer_ref) (tree);
+
/* Build and return code for a default constructor for DECL in
- response to CLAUSE. Return NULL if nothing to be done. */
- tree (*omp_clause_default_ctor) (tree clause, tree decl);
+ response to CLAUSE. OUTER is the corresponding outer region's
+ variable, if needed. Return NULL if nothing to be done. */
+ tree (*omp_clause_default_ctor) (tree clause, tree decl, tree outer);
/* Build and return code for a copy constructor from SRC to DST. */
tree (*omp_clause_copy_ctor) (tree clause, tree dst, tree src);
@@ -210,6 +215,9 @@ struct lang_hooks_for_decls
/* Build and return code destructing DECL. Return NULL if nothing
to be done. */
tree (*omp_clause_dtor) (tree clause, tree decl);
+
+ /* Do language specific checking on an implicitly determined clause. */
+ void (*omp_finish_clause) (tree clause);
};
/* Language-specific hooks. See langhooks-def.h for defaults. */
diff --git a/gcc/matrix-reorg.c b/gcc/matrix-reorg.c
index eee4ddf322f..50ac7e87b0b 100644
--- a/gcc/matrix-reorg.c
+++ b/gcc/matrix-reorg.c
@@ -2235,6 +2235,7 @@ matrix_reorg (void)
free_dominance_info (CDI_POST_DOMINATORS);
pop_cfun ();
current_function_decl = temp_fn;
+ bitmap_obstack_release (NULL);
return 0;
}
@@ -2249,6 +2250,7 @@ matrix_reorg (void)
free_dominance_info (CDI_POST_DOMINATORS);
pop_cfun ();
current_function_decl = temp_fn;
+ bitmap_obstack_release (NULL);
return 0;
}
@@ -2279,6 +2281,7 @@ matrix_reorg (void)
free_dominance_info (CDI_POST_DOMINATORS);
pop_cfun ();
current_function_decl = temp_fn;
+ bitmap_obstack_release (NULL);
}
htab_traverse (matrices_to_reorg, transform_allocation_sites, NULL);
/* Now transform the accesses. */
@@ -2299,6 +2302,7 @@ matrix_reorg (void)
free_dominance_info (CDI_POST_DOMINATORS);
pop_cfun ();
current_function_decl = temp_fn;
+ bitmap_obstack_release (NULL);
}
htab_traverse (matrices_to_reorg, dump_matrix_reorg_analysis, NULL);
diff --git a/gcc/omp-builtins.def b/gcc/omp-builtins.def
index cc450f6d4d6..5fd4f9aea75 100644
--- a/gcc/omp-builtins.def
+++ b/gcc/omp-builtins.def
@@ -1,6 +1,6 @@
/* This file contains the definitions and documentation for the
OpenMP builtins used in the GNU compiler.
- Copyright (C) 2005, 2007 Free Software Foundation, Inc.
+ Copyright (C) 2005, 2007, 2008 Free Software Foundation, Inc.
This file is part of GCC.
@@ -35,6 +35,8 @@ DEF_GOMP_BUILTIN (BUILT_IN_GOMP_ATOMIC_END, "GOMP_atomic_end",
BT_FN_VOID, ATTR_NOTHROW_LIST)
DEF_GOMP_BUILTIN (BUILT_IN_GOMP_BARRIER, "GOMP_barrier",
BT_FN_VOID, ATTR_NOTHROW_LIST)
+DEF_GOMP_BUILTIN (BUILT_IN_GOMP_TASKWAIT, "GOMP_taskwait",
+ BT_FN_VOID, ATTR_NOTHROW_LIST)
DEF_GOMP_BUILTIN (BUILT_IN_GOMP_CRITICAL_START, "GOMP_critical_start",
BT_FN_VOID, ATTR_NOTHROW_LIST)
DEF_GOMP_BUILTIN (BUILT_IN_GOMP_CRITICAL_END, "GOMP_critical_end",
@@ -100,6 +102,58 @@ DEF_GOMP_BUILTIN (BUILT_IN_GOMP_LOOP_ORDERED_GUIDED_NEXT,
DEF_GOMP_BUILTIN (BUILT_IN_GOMP_LOOP_ORDERED_RUNTIME_NEXT,
"GOMP_loop_ordered_runtime_next",
BT_FN_BOOL_LONGPTR_LONGPTR, ATTR_NOTHROW_LIST)
+DEF_GOMP_BUILTIN (BUILT_IN_GOMP_LOOP_ULL_STATIC_START,
+ "GOMP_loop_ull_static_start",
+ BT_FN_BOOL_BOOL_ULL_ULL_ULL_ULL_ULLPTR_ULLPTR,
+ ATTR_NOTHROW_LIST)
+DEF_GOMP_BUILTIN (BUILT_IN_GOMP_LOOP_ULL_DYNAMIC_START,
+ "GOMP_loop_ull_dynamic_start",
+ BT_FN_BOOL_BOOL_ULL_ULL_ULL_ULL_ULLPTR_ULLPTR,
+ ATTR_NOTHROW_LIST)
+DEF_GOMP_BUILTIN (BUILT_IN_GOMP_LOOP_ULL_GUIDED_START,
+ "GOMP_loop_ull_guided_start",
+ BT_FN_BOOL_BOOL_ULL_ULL_ULL_ULL_ULLPTR_ULLPTR,
+ ATTR_NOTHROW_LIST)
+DEF_GOMP_BUILTIN (BUILT_IN_GOMP_LOOP_ULL_RUNTIME_START,
+ "GOMP_loop_ull_runtime_start",
+ BT_FN_BOOL_BOOL_ULL_ULL_ULL_ULLPTR_ULLPTR,
+ ATTR_NOTHROW_LIST)
+DEF_GOMP_BUILTIN (BUILT_IN_GOMP_LOOP_ULL_ORDERED_STATIC_START,
+ "GOMP_loop_ull_ordered_static_start",
+ BT_FN_BOOL_BOOL_ULL_ULL_ULL_ULL_ULLPTR_ULLPTR,
+ ATTR_NOTHROW_LIST)
+DEF_GOMP_BUILTIN (BUILT_IN_GOMP_LOOP_ULL_ORDERED_DYNAMIC_START,
+ "GOMP_loop_ull_ordered_dynamic_start",
+ BT_FN_BOOL_BOOL_ULL_ULL_ULL_ULL_ULLPTR_ULLPTR,
+ ATTR_NOTHROW_LIST)
+DEF_GOMP_BUILTIN (BUILT_IN_GOMP_LOOP_ULL_ORDERED_GUIDED_START,
+ "GOMP_loop_ull_ordered_guided_start",
+ BT_FN_BOOL_BOOL_ULL_ULL_ULL_ULL_ULLPTR_ULLPTR,
+ ATTR_NOTHROW_LIST)
+DEF_GOMP_BUILTIN (BUILT_IN_GOMP_LOOP_ULL_ORDERED_RUNTIME_START,
+ "GOMP_loop_ull_ordered_runtime_start",
+ BT_FN_BOOL_BOOL_ULL_ULL_ULL_ULLPTR_ULLPTR,
+ ATTR_NOTHROW_LIST)
+DEF_GOMP_BUILTIN (BUILT_IN_GOMP_LOOP_ULL_STATIC_NEXT, "GOMP_loop_ull_static_next",
+ BT_FN_BOOL_ULONGLONGPTR_ULONGLONGPTR, ATTR_NOTHROW_LIST)
+DEF_GOMP_BUILTIN (BUILT_IN_GOMP_LOOP_ULL_DYNAMIC_NEXT, "GOMP_loop_ull_dynamic_next",
+ BT_FN_BOOL_ULONGLONGPTR_ULONGLONGPTR, ATTR_NOTHROW_LIST)
+DEF_GOMP_BUILTIN (BUILT_IN_GOMP_LOOP_ULL_GUIDED_NEXT, "GOMP_loop_ull_guided_next",
+ BT_FN_BOOL_ULONGLONGPTR_ULONGLONGPTR, ATTR_NOTHROW_LIST)
+DEF_GOMP_BUILTIN (BUILT_IN_GOMP_LOOP_ULL_RUNTIME_NEXT, "GOMP_loop_ull_runtime_next",
+ BT_FN_BOOL_ULONGLONGPTR_ULONGLONGPTR, ATTR_NOTHROW_LIST)
+DEF_GOMP_BUILTIN (BUILT_IN_GOMP_LOOP_ULL_ORDERED_STATIC_NEXT,
+ "GOMP_loop_ull_ordered_static_next",
+ BT_FN_BOOL_ULONGLONGPTR_ULONGLONGPTR, ATTR_NOTHROW_LIST)
+DEF_GOMP_BUILTIN (BUILT_IN_GOMP_LOOP_ULL_ORDERED_DYNAMIC_NEXT,
+ "GOMP_loop_ull_ordered_dynamic_next",
+ BT_FN_BOOL_ULONGLONGPTR_ULONGLONGPTR, ATTR_NOTHROW_LIST)
+DEF_GOMP_BUILTIN (BUILT_IN_GOMP_LOOP_ULL_ORDERED_GUIDED_NEXT,
+ "GOMP_loop_ull_ordered_guided_next",
+ BT_FN_BOOL_ULONGLONGPTR_ULONGLONGPTR, ATTR_NOTHROW_LIST)
+DEF_GOMP_BUILTIN (BUILT_IN_GOMP_LOOP_ULL_ORDERED_RUNTIME_NEXT,
+ "GOMP_loop_ull_ordered_runtime_next",
+ BT_FN_BOOL_ULONGLONGPTR_ULONGLONGPTR, ATTR_NOTHROW_LIST)
/* NOTE: Do not change the order of BUILT_IN_GOMP_PARALLEL_LOOP_*_START.
They are used in index arithmetic with enum omp_clause_schedule_kind
in omp-low.c. */
@@ -131,6 +185,9 @@ DEF_GOMP_BUILTIN (BUILT_IN_GOMP_PARALLEL_START, "GOMP_parallel_start",
BT_FN_VOID_OMPFN_PTR_UINT, ATTR_NOTHROW_LIST)
DEF_GOMP_BUILTIN (BUILT_IN_GOMP_PARALLEL_END, "GOMP_parallel_end",
BT_FN_VOID, ATTR_NOTHROW_LIST)
+DEF_GOMP_BUILTIN (BUILT_IN_GOMP_TASK, "GOMP_task",
+ BT_FN_VOID_OMPFN_PTR_OMPCPYFN_LONG_LONG_BOOL_UINT,
+ ATTR_NOTHROW_LIST)
DEF_GOMP_BUILTIN (BUILT_IN_GOMP_SECTIONS_START, "GOMP_sections_start",
BT_FN_UINT_UINT, ATTR_NOTHROW_LIST)
DEF_GOMP_BUILTIN (BUILT_IN_GOMP_SECTIONS_NEXT, "GOMP_sections_next",
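Translating the builtin type codes back into C, the libgomp entry points that the new builtins refer to would have prototypes along these lines (a sketch reconstructed from the DEF_FUNCTION_TYPE_* definitions earlier in this patch, not copied from libgomp headers):

    #include <stdbool.h>

    /* BT_FN_VOID_OMPFN_PTR_OMPCPYFN_LONG_LONG_BOOL_UINT */
    void GOMP_task (void (*fn) (void *), void *data,
                    void (*cpyfn) (void *, void *),
                    long arg_size, long arg_align,
                    bool if_clause, unsigned flags);

    /* BT_FN_VOID */
    void GOMP_taskwait (void);

    /* BT_FN_BOOL_BOOL_ULL_ULL_ULL_ULL_ULLPTR_ULLPTR */
    bool GOMP_loop_ull_static_start (bool up, unsigned long long start,
                                     unsigned long long end,
                                     unsigned long long incr,
                                     unsigned long long chunk_size,
                                     unsigned long long *istart,
                                     unsigned long long *iend);

    /* BT_FN_BOOL_ULONGLONGPTR_ULONGLONGPTR */
    bool GOMP_loop_ull_static_next (unsigned long long *istart,
                                    unsigned long long *iend);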
diff --git a/gcc/omp-low.c b/gcc/omp-low.c
index c608ae4521e..e9223b2afb2 100644
--- a/gcc/omp-low.c
+++ b/gcc/omp-low.c
@@ -77,6 +77,14 @@ typedef struct omp_context
tree sender_decl;
tree receiver_decl;
+ /* These are used just by task contexts, if task firstprivate fn is
+ needed. srecord_type is used to communicate from the thread
+ that encountered the task construct to task firstprivate fn,
+ record_type is allocated by GOMP_task, initialized by task firstprivate
+ fn and passed to the task body fn. */
+ splay_tree sfield_map;
+ tree srecord_type;
+
/* A chain of variables to add to the top-level block surrounding the
construct. In the case of a parallel, this is in the child function. */
tree block_vars;
@@ -95,21 +103,30 @@ typedef struct omp_context
} omp_context;
+struct omp_for_data_loop
+{
+ tree v, n1, n2, step;
+ enum tree_code cond_code;
+};
+
/* A structure describing the main elements of a parallel loop. */
struct omp_for_data
{
- tree v, n1, n2, step, chunk_size, for_stmt;
- enum tree_code cond_code;
- tree pre;
+ struct omp_for_data_loop loop;
+ tree chunk_size, for_stmt;
+ tree pre, iter_type;
+ int collapse;
bool have_nowait, have_ordered;
enum omp_clause_schedule_kind sched_kind;
+ struct omp_for_data_loop *loops;
};
static splay_tree all_contexts;
-static int parallel_nesting_level;
+static int taskreg_nesting_level;
struct omp_region *root_omp_region;
+static bitmap task_shared_vars;
static void scan_omp (tree *, omp_context *);
static void lower_omp (tree *, omp_context *);
@@ -137,6 +154,25 @@ is_parallel_ctx (omp_context *ctx)
}
+/* Return true if CTX is for an omp task. */
+
+static inline bool
+is_task_ctx (omp_context *ctx)
+{
+ return TREE_CODE (ctx->stmt) == OMP_TASK;
+}
+
+
+/* Return true if CTX is for an omp parallel or omp task. */
+
+static inline bool
+is_taskreg_ctx (omp_context *ctx)
+{
+ return TREE_CODE (ctx->stmt) == OMP_PARALLEL
+ || TREE_CODE (ctx->stmt) == OMP_TASK;
+}
+
+
/* Return true if REGION is a combined parallel+workshare region. */
static inline bool
@@ -150,65 +186,28 @@ is_combined_parallel (struct omp_region *region)
them into *FD. */
static void
-extract_omp_for_data (tree for_stmt, struct omp_for_data *fd)
+extract_omp_for_data (tree for_stmt, struct omp_for_data *fd,
+ struct omp_for_data_loop *loops)
{
- tree t, var;
+ tree t, var, *collapse_iter, *collapse_count;
+ tree count = NULL_TREE, iter_type = long_integer_type_node;
+ struct omp_for_data_loop *loop;
+ int i;
+ struct omp_for_data_loop dummy_loop;
fd->for_stmt = for_stmt;
fd->pre = NULL;
-
- t = OMP_FOR_INIT (for_stmt);
- gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
- fd->v = GIMPLE_STMT_OPERAND (t, 0);
- gcc_assert (SSA_VAR_P (fd->v));
- gcc_assert (TREE_CODE (TREE_TYPE (fd->v)) == INTEGER_TYPE);
- var = TREE_CODE (fd->v) == SSA_NAME ? SSA_NAME_VAR (fd->v) : fd->v;
- fd->n1 = GIMPLE_STMT_OPERAND (t, 1);
-
- t = OMP_FOR_COND (for_stmt);
- fd->cond_code = TREE_CODE (t);
- gcc_assert (TREE_OPERAND (t, 0) == var);
- fd->n2 = TREE_OPERAND (t, 1);
- switch (fd->cond_code)
- {
- case LT_EXPR:
- case GT_EXPR:
- break;
- case LE_EXPR:
- fd->n2 = fold_build2 (PLUS_EXPR, TREE_TYPE (fd->n2), fd->n2,
- build_int_cst (TREE_TYPE (fd->n2), 1));
- fd->cond_code = LT_EXPR;
- break;
- case GE_EXPR:
- fd->n2 = fold_build2 (MINUS_EXPR, TREE_TYPE (fd->n2), fd->n2,
- build_int_cst (TREE_TYPE (fd->n2), 1));
- fd->cond_code = GT_EXPR;
- break;
- default:
- gcc_unreachable ();
- }
-
- t = OMP_FOR_INCR (fd->for_stmt);
- gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
- gcc_assert (GIMPLE_STMT_OPERAND (t, 0) == var);
- t = GIMPLE_STMT_OPERAND (t, 1);
- gcc_assert (TREE_OPERAND (t, 0) == var);
- switch (TREE_CODE (t))
- {
- case PLUS_EXPR:
- fd->step = TREE_OPERAND (t, 1);
- break;
- case MINUS_EXPR:
- fd->step = TREE_OPERAND (t, 1);
- fd->step = fold_build1 (NEGATE_EXPR, TREE_TYPE (fd->step), fd->step);
- break;
- default:
- gcc_unreachable ();
- }
+ fd->collapse = TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt));
+ if (fd->collapse > 1)
+ fd->loops = loops;
+ else
+ fd->loops = &fd->loop;
fd->have_nowait = fd->have_ordered = false;
fd->sched_kind = OMP_CLAUSE_SCHEDULE_STATIC;
fd->chunk_size = NULL_TREE;
+ collapse_iter = NULL;
+ collapse_count = NULL;
for (t = OMP_FOR_CLAUSES (for_stmt); t ; t = OMP_CLAUSE_CHAIN (t))
switch (OMP_CLAUSE_CODE (t))
@@ -223,20 +222,223 @@ extract_omp_for_data (tree for_stmt, struct omp_for_data *fd)
fd->sched_kind = OMP_CLAUSE_SCHEDULE_KIND (t);
fd->chunk_size = OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (t);
break;
+ case OMP_CLAUSE_COLLAPSE:
+ if (fd->collapse > 1)
+ {
+ collapse_iter = &OMP_CLAUSE_COLLAPSE_ITERVAR (t);
+ collapse_count = &OMP_CLAUSE_COLLAPSE_COUNT (t);
+ }
default:
break;
}
+ /* FIXME: for now map schedule(auto) to schedule(static).
+ There should be analysis to determine whether all iterations
+ are approximately the same amount of work (then schedule(static)
+ is best) or if it varies (then schedule(dynamic,N) is better). */
+ if (fd->sched_kind == OMP_CLAUSE_SCHEDULE_AUTO)
+ {
+ fd->sched_kind = OMP_CLAUSE_SCHEDULE_STATIC;
+ gcc_assert (fd->chunk_size == NULL);
+ }
+ gcc_assert (fd->collapse == 1 || collapse_iter != NULL);
if (fd->sched_kind == OMP_CLAUSE_SCHEDULE_RUNTIME)
gcc_assert (fd->chunk_size == NULL);
else if (fd->chunk_size == NULL)
{
/* We only need to compute a default chunk size for ordered
static loops and dynamic loops. */
- if (fd->sched_kind != OMP_CLAUSE_SCHEDULE_STATIC || fd->have_ordered)
+ if (fd->sched_kind != OMP_CLAUSE_SCHEDULE_STATIC
+ || fd->have_ordered
+ || fd->collapse > 1)
fd->chunk_size = (fd->sched_kind == OMP_CLAUSE_SCHEDULE_STATIC)
? integer_zero_node : integer_one_node;
}
+
+ for (i = 0; i < fd->collapse; i++)
+ {
+ if (fd->collapse == 1)
+ loop = &fd->loop;
+ else if (loops != NULL)
+ loop = loops + i;
+ else
+ loop = &dummy_loop;
+
+ t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
+ gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
+ loop->v = GIMPLE_STMT_OPERAND (t, 0);
+ gcc_assert (SSA_VAR_P (loop->v));
+ gcc_assert (TREE_CODE (TREE_TYPE (loop->v)) == INTEGER_TYPE
+ || TREE_CODE (TREE_TYPE (loop->v)) == POINTER_TYPE);
+ var = TREE_CODE (loop->v) == SSA_NAME ? SSA_NAME_VAR (loop->v) : loop->v;
+ loop->n1 = GIMPLE_STMT_OPERAND (t, 1);
+
+ t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
+ loop->cond_code = TREE_CODE (t);
+ gcc_assert (TREE_OPERAND (t, 0) == var);
+ loop->n2 = TREE_OPERAND (t, 1);
+ switch (loop->cond_code)
+ {
+ case LT_EXPR:
+ case GT_EXPR:
+ break;
+ case LE_EXPR:
+ if (POINTER_TYPE_P (TREE_TYPE (loop->n2)))
+ loop->n2 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (loop->n2),
+ loop->n2, size_one_node);
+ else
+ loop->n2 = fold_build2 (PLUS_EXPR, TREE_TYPE (loop->n2), loop->n2,
+ build_int_cst (TREE_TYPE (loop->n2), 1));
+ loop->cond_code = LT_EXPR;
+ break;
+ case GE_EXPR:
+ if (POINTER_TYPE_P (TREE_TYPE (loop->n2)))
+ loop->n2 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (loop->n2),
+ loop->n2, size_int (-1));
+ else
+ loop->n2 = fold_build2 (MINUS_EXPR, TREE_TYPE (loop->n2), loop->n2,
+ build_int_cst (TREE_TYPE (loop->n2), 1));
+ loop->cond_code = GT_EXPR;
+ break;
+ default:
+ gcc_unreachable ();
+ }
+
+ t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
+ gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
+ gcc_assert (GIMPLE_STMT_OPERAND (t, 0) == var);
+ t = GIMPLE_STMT_OPERAND (t, 1);
+ gcc_assert (TREE_OPERAND (t, 0) == var);
+ switch (TREE_CODE (t))
+ {
+ case PLUS_EXPR:
+ case POINTER_PLUS_EXPR:
+ loop->step = TREE_OPERAND (t, 1);
+ break;
+ case MINUS_EXPR:
+ loop->step = TREE_OPERAND (t, 1);
+ loop->step = fold_build1 (NEGATE_EXPR, TREE_TYPE (loop->step),
+ loop->step);
+ break;
+ default:
+ gcc_unreachable ();
+ }
+
+ if (iter_type != long_long_unsigned_type_node)
+ {
+ if (POINTER_TYPE_P (TREE_TYPE (loop->v)))
+ iter_type = long_long_unsigned_type_node;
+ else if (TYPE_UNSIGNED (TREE_TYPE (loop->v))
+ && TYPE_PRECISION (TREE_TYPE (loop->v))
+ >= TYPE_PRECISION (iter_type))
+ {
+ tree n;
+
+ if (loop->cond_code == LT_EXPR)
+ n = fold_build2 (PLUS_EXPR, TREE_TYPE (loop->v),
+ loop->n2, loop->step);
+ else
+ n = loop->n1;
+ if (TREE_CODE (n) != INTEGER_CST
+ || tree_int_cst_lt (TYPE_MAX_VALUE (iter_type), n))
+ iter_type = long_long_unsigned_type_node;
+ }
+ else if (TYPE_PRECISION (TREE_TYPE (loop->v))
+ > TYPE_PRECISION (iter_type))
+ {
+ tree n1, n2;
+
+ if (loop->cond_code == LT_EXPR)
+ {
+ n1 = loop->n1;
+ n2 = fold_build2 (PLUS_EXPR, TREE_TYPE (loop->v),
+ loop->n2, loop->step);
+ }
+ else
+ {
+ n1 = fold_build2 (MINUS_EXPR, TREE_TYPE (loop->v),
+ loop->n2, loop->step);
+ n2 = loop->n1;
+ }
+ if (TREE_CODE (n1) != INTEGER_CST
+ || TREE_CODE (n2) != INTEGER_CST
+ || !tree_int_cst_lt (TYPE_MIN_VALUE (iter_type), n1)
+ || !tree_int_cst_lt (n2, TYPE_MAX_VALUE (iter_type)))
+ iter_type = long_long_unsigned_type_node;
+ }
+ }
+
+ if (collapse_count && *collapse_count == NULL)
+ {
+ if ((i == 0 || count != NULL_TREE)
+ && TREE_CODE (TREE_TYPE (loop->v)) == INTEGER_TYPE
+ && TREE_CONSTANT (loop->n1)
+ && TREE_CONSTANT (loop->n2)
+ && TREE_CODE (loop->step) == INTEGER_CST)
+ {
+ tree itype = TREE_TYPE (loop->v);
+
+ if (POINTER_TYPE_P (itype))
+ itype
+ = lang_hooks.types.type_for_size (TYPE_PRECISION (itype), 0);
+ t = build_int_cst (itype, (loop->cond_code == LT_EXPR ? -1 : 1));
+ t = fold_build2 (PLUS_EXPR, itype,
+ fold_convert (itype, loop->step), t);
+ t = fold_build2 (PLUS_EXPR, itype, t,
+ fold_convert (itype, loop->n2));
+ t = fold_build2 (MINUS_EXPR, itype, t,
+ fold_convert (itype, loop->n1));
+ if (TYPE_UNSIGNED (itype) && loop->cond_code == GT_EXPR)
+ t = fold_build2 (TRUNC_DIV_EXPR, itype,
+ fold_build1 (NEGATE_EXPR, itype, t),
+ fold_build1 (NEGATE_EXPR, itype,
+ fold_convert (itype,
+ loop->step)));
+ else
+ t = fold_build2 (TRUNC_DIV_EXPR, itype, t,
+ fold_convert (itype, loop->step));
+ t = fold_convert (long_long_unsigned_type_node, t);
+ if (count != NULL_TREE)
+ count = fold_build2 (MULT_EXPR, long_long_unsigned_type_node,
+ count, t);
+ else
+ count = t;
+ if (TREE_CODE (count) != INTEGER_CST)
+ count = NULL_TREE;
+ }
+ else
+ count = NULL_TREE;
+ }
+ }
+
+ if (count)
+ {
+ if (!tree_int_cst_lt (count, TYPE_MAX_VALUE (long_integer_type_node)))
+ iter_type = long_long_unsigned_type_node;
+ else
+ iter_type = long_integer_type_node;
+ }
+ else if (collapse_iter && *collapse_iter != NULL)
+ iter_type = TREE_TYPE (*collapse_iter);
+ fd->iter_type = iter_type;
+ if (collapse_iter && *collapse_iter == NULL)
+ *collapse_iter = create_tmp_var (iter_type, ".iter");
+ if (collapse_count && *collapse_count == NULL)
+ {
+ if (count)
+ *collapse_count = fold_convert (iter_type, count);
+ else
+ *collapse_count = create_tmp_var (iter_type, ".count");
+ }
+
+ if (fd->collapse > 1)
+ {
+ fd->loop.v = *collapse_iter;
+ fd->loop.n1 = build_int_cst (TREE_TYPE (fd->loop.v), 0);
+ fd->loop.n2 = *collapse_count;
+ fd->loop.step = build_int_cst (TREE_TYPE (fd->loop.v), 1);
+ fd->loop.cond_code = LT_EXPR;
+ }
}
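With collapse, each loop's trip count is computed above as (step + (cond_code == LT_EXPR ? -1 : 1) + n2 - n1) / step, the counts of the collapsed loops are multiplied, and iter_type is widened to unsigned long long only when the product might not fit in long. A hypothetical example of the arithmetic (not from the patch):

    extern void work (int, int);

    void
    collapse_example (void)
    {
    #pragma omp parallel for collapse (2)
      for (int i = 0; i < 4; i++)        /* (1 + -1 + 4 - 0) / 1  = 4 iterations */
        for (int j = 10; j > 0; j -= 2)  /* (-2 + 1 + 0 - 10) / -2 = 5 iterations */
          work (i, j);
      /* count = 4 * 5 = 20 fits in long, so fd->iter_type stays
         long_integer_type_node; the whole nest is then driven by one logical
         iterator .iter running over [0, .count) with step 1, as set up in
         fd->loop at the end of the function above.  */
    }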
@@ -296,16 +498,21 @@ workshare_safe_to_combine_p (basic_block par_entry_bb, basic_block ws_entry_bb)
gcc_assert (TREE_CODE (ws_stmt) == OMP_FOR);
- extract_omp_for_data (ws_stmt, &fd);
+ extract_omp_for_data (ws_stmt, &fd, NULL);
+
+ if (fd.collapse > 1 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
+ return false;
+ if (fd.iter_type != long_integer_type_node)
+ return false;
/* FIXME. We give up too easily here. If any of these arguments
are not constants, they will likely involve variables that have
been mapped into fields of .omp_data_s for sharing with the child
function. With appropriate data flow, it would be possible to
see through this. */
- if (!is_gimple_min_invariant (fd.n1)
- || !is_gimple_min_invariant (fd.n2)
- || !is_gimple_min_invariant (fd.step)
+ if (!is_gimple_min_invariant (fd.loop.n1)
+ || !is_gimple_min_invariant (fd.loop.n2)
+ || !is_gimple_min_invariant (fd.loop.step)
|| (fd.chunk_size && !is_gimple_min_invariant (fd.chunk_size)))
return false;
@@ -327,7 +534,7 @@ get_ws_args_for (tree ws_stmt)
struct omp_for_data fd;
tree ws_args;
- extract_omp_for_data (ws_stmt, &fd);
+ extract_omp_for_data (ws_stmt, &fd, NULL);
ws_args = NULL_TREE;
if (fd.chunk_size)
@@ -336,13 +543,13 @@ get_ws_args_for (tree ws_stmt)
ws_args = tree_cons (NULL, t, ws_args);
}
- t = fold_convert (long_integer_type_node, fd.step);
+ t = fold_convert (long_integer_type_node, fd.loop.step);
ws_args = tree_cons (NULL, t, ws_args);
- t = fold_convert (long_integer_type_node, fd.n2);
+ t = fold_convert (long_integer_type_node, fd.loop.n2);
ws_args = tree_cons (NULL, t, ws_args);
- t = fold_convert (long_integer_type_node, fd.n1);
+ t = fold_convert (long_integer_type_node, fd.loop.n1);
ws_args = tree_cons (NULL, t, ws_args);
return ws_args;
@@ -472,6 +679,16 @@ lookup_field (tree var, omp_context *ctx)
}
static inline tree
+lookup_sfield (tree var, omp_context *ctx)
+{
+ splay_tree_node n;
+ n = splay_tree_lookup (ctx->sfield_map
+ ? ctx->sfield_map : ctx->field_map,
+ (splay_tree_key) var);
+ return (tree) n->value;
+}
+
+static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
splay_tree_node n;
@@ -483,7 +700,7 @@ maybe_lookup_field (tree var, omp_context *ctx)
the parallel context if DECL is to be shared. */
static bool
-use_pointer_for_field (const_tree decl, omp_context *shared_ctx)
+use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
if (AGGREGATE_TYPE_P (TREE_TYPE (decl)))
return true;
@@ -524,11 +741,11 @@ use_pointer_for_field (const_tree decl, omp_context *shared_ctx)
if (maybe_lookup_decl (decl, up))
break;
- if (up && is_parallel_ctx (up))
+ if (up && is_taskreg_ctx (up))
{
tree c;
- for (c = OMP_PARALLEL_CLAUSES (up->stmt);
+ for (c = OMP_TASKREG_CLAUSES (up->stmt);
c; c = OMP_CLAUSE_CHAIN (c))
if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
&& OMP_CLAUSE_DECL (c) == decl)
@@ -538,6 +755,26 @@ use_pointer_for_field (const_tree decl, omp_context *shared_ctx)
return true;
}
}
+
+ /* For tasks, avoid using copy-in/out unless the variable is readonly
+ (in which case just copy-in is used). As tasks can be
+ deferred or executed in a different thread, when GOMP_task
+ returns, the task hasn't necessarily terminated. */
+ if (!TREE_READONLY (decl) && is_task_ctx (shared_ctx))
+ {
+ tree outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
+ if (is_gimple_reg (outer))
+ {
+ /* Taking address of OUTER in lower_send_shared_vars
+ might need regimplification of everything that uses the
+ variable. */
+ if (!task_shared_vars)
+ task_shared_vars = BITMAP_ALLOC (NULL);
+ bitmap_set_bit (task_shared_vars, DECL_UID (outer));
+ TREE_ADDRESSABLE (outer) = 1;
+ }
+ return true;
+ }
}
return false;
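The new task branch above always answers "pass by reference" for anything a task may modify: GOMP_task can defer the task, so when the call returns there is no point at which a copied-out value could still be written back, leaving copy-in (for TREE_READONLY decls) as the only safe copying scheme and forcing the parent's variable addressable. A hypothetical C sketch of the situation:

    void
    task_shared_write (void)
    {
      int x = 0;
    #pragma omp parallel
    #pragma omp single
      {
    #pragma omp task shared (x)
        x = 42;            /* may execute long after GOMP_task has returned, so
                              the child function must store through a pointer to
                              the parent's x instead of copying a field back out */
    #pragma omp taskwait   /* only past this point is x known to be 42 */
      }
    }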
@@ -622,7 +859,7 @@ build_outer_var_ref (tree var, omp_context *ctx)
x = build_outer_var_ref (x, ctx);
x = build_fold_indirect_ref (x);
}
- else if (is_parallel_ctx (ctx))
+ else if (is_taskreg_ctx (ctx))
{
bool by_ref = use_pointer_for_field (var, NULL);
x = build_receiver_ref (var, by_ref, ctx);
@@ -647,7 +884,7 @@ build_outer_var_ref (tree var, omp_context *ctx)
static tree
build_sender_ref (tree var, omp_context *ctx)
{
- tree field = lookup_field (var, ctx);
+ tree field = lookup_sfield (var, ctx);
return build3 (COMPONENT_REF, TREE_TYPE (field),
ctx->sender_decl, field, NULL);
}
@@ -655,15 +892,20 @@ build_sender_ref (tree var, omp_context *ctx)
/* Add a new field for VAR inside the structure CTX->SENDER_DECL. */
static void
-install_var_field (tree var, bool by_ref, omp_context *ctx)
+install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
- tree field, type;
+ tree field, type, sfield = NULL_TREE;
- gcc_assert (!splay_tree_lookup (ctx->field_map, (splay_tree_key) var));
+ gcc_assert ((mask & 1) == 0
+ || !splay_tree_lookup (ctx->field_map, (splay_tree_key) var));
+ gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
+ || !splay_tree_lookup (ctx->sfield_map, (splay_tree_key) var));
type = TREE_TYPE (var);
if (by_ref)
type = build_pointer_type (type);
+ else if ((mask & 3) == 1 && is_reference (var))
+ type = TREE_TYPE (type);
field = build_decl (FIELD_DECL, DECL_NAME (var), type);
@@ -671,11 +913,57 @@ install_var_field (tree var, bool by_ref, omp_context *ctx)
side effect of making dwarf2out ignore this member, so for helpful
debugging we clear it later in delete_omp_context. */
DECL_ABSTRACT_ORIGIN (field) = var;
+ if (type == TREE_TYPE (var))
+ {
+ DECL_ALIGN (field) = DECL_ALIGN (var);
+ DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
+ TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
+ }
+ else
+ DECL_ALIGN (field) = TYPE_ALIGN (type);
- insert_field_into_struct (ctx->record_type, field);
+ if ((mask & 3) == 3)
+ {
+ insert_field_into_struct (ctx->record_type, field);
+ if (ctx->srecord_type)
+ {
+ sfield = build_decl (FIELD_DECL, DECL_NAME (var), type);
+ DECL_ABSTRACT_ORIGIN (sfield) = var;
+ DECL_ALIGN (sfield) = DECL_ALIGN (field);
+ DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
+ TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
+ insert_field_into_struct (ctx->srecord_type, sfield);
+ }
+ }
+ else
+ {
+ if (ctx->srecord_type == NULL_TREE)
+ {
+ tree t;
+
+ ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
+ ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
+ for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
+ {
+ sfield = build_decl (FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
+ DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
+ insert_field_into_struct (ctx->srecord_type, sfield);
+ splay_tree_insert (ctx->sfield_map,
+ (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
+ (splay_tree_value) sfield);
+ }
+ }
+ sfield = field;
+ insert_field_into_struct ((mask & 1) ? ctx->record_type
+ : ctx->srecord_type, field);
+ }
- splay_tree_insert (ctx->field_map, (splay_tree_key) var,
- (splay_tree_value) field);
+ if (mask & 1)
+ splay_tree_insert (ctx->field_map, (splay_tree_key) var,
+ (splay_tree_value) field);
+ if ((mask & 2) && ctx->sfield_map)
+ splay_tree_insert (ctx->sfield_map, (splay_tree_key) var,
+ (splay_tree_value) sfield);
}
static tree
@@ -740,7 +1028,7 @@ omp_copy_decl (tree var, copy_body_data *cb)
return new_var;
}
- while (!is_parallel_ctx (ctx))
+ while (!is_taskreg_ctx (ctx))
{
ctx = ctx->outer;
if (ctx == NULL)
@@ -912,6 +1200,8 @@ delete_omp_context (splay_tree_value value)
if (ctx->field_map)
splay_tree_delete (ctx->field_map);
+ if (ctx->sfield_map)
+ splay_tree_delete (ctx->sfield_map);
/* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
it produces corrupt debug information. */
@@ -921,6 +1211,12 @@ delete_omp_context (splay_tree_value value)
for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
DECL_ABSTRACT_ORIGIN (t) = NULL;
}
+ if (ctx->srecord_type)
+ {
+ tree t;
+ for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = TREE_CHAIN (t))
+ DECL_ABSTRACT_ORIGIN (t) = NULL;
+ }
XDELETE (ctx);
}
@@ -955,6 +1251,9 @@ fixup_child_record_type (omp_context *ctx)
DECL_CONTEXT (new_f) = type;
TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
TREE_CHAIN (new_f) = new_fields;
+ walk_tree (&DECL_SIZE (new_f), copy_body_r, &ctx->cb, NULL);
+ walk_tree (&DECL_SIZE_UNIT (new_f), copy_body_r, &ctx->cb, NULL);
+ walk_tree (&DECL_FIELD_OFFSET (new_f), copy_body_r, &ctx->cb, NULL);
new_fields = new_f;
/* Arrange to be able to look up the receiver field
@@ -986,26 +1285,28 @@ scan_sharing_clauses (tree clauses, omp_context *ctx)
{
case OMP_CLAUSE_PRIVATE:
decl = OMP_CLAUSE_DECL (c);
- if (!is_variable_sized (decl))
+ if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
+ goto do_private;
+ else if (!is_variable_sized (decl))
install_var_local (decl, ctx);
break;
case OMP_CLAUSE_SHARED:
- gcc_assert (is_parallel_ctx (ctx));
+ gcc_assert (is_taskreg_ctx (ctx));
decl = OMP_CLAUSE_DECL (c);
gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
|| !is_variable_sized (decl));
- by_ref = use_pointer_for_field (decl, ctx);
/* Global variables don't need to be copied,
the receiver side will use them directly. */
if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
break;
+ by_ref = use_pointer_for_field (decl, ctx);
if (! TREE_READONLY (decl)
|| TREE_ADDRESSABLE (decl)
|| by_ref
|| is_reference (decl))
{
- install_var_field (decl, by_ref, ctx);
+ install_var_field (decl, by_ref, 3, ctx);
install_var_local (decl, ctx);
break;
}
@@ -1025,13 +1326,26 @@ scan_sharing_clauses (tree clauses, omp_context *ctx)
decl = OMP_CLAUSE_DECL (c);
do_private:
if (is_variable_sized (decl))
- break;
- else if (is_parallel_ctx (ctx)
- && ! is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
- ctx)))
{
+ if (is_task_ctx (ctx))
+ install_var_field (decl, false, 1, ctx);
+ break;
+ }
+ else if (is_taskreg_ctx (ctx))
+ {
+ bool global
+ = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
by_ref = use_pointer_for_field (decl, NULL);
- install_var_field (decl, by_ref, ctx);
+
+ if (is_task_ctx (ctx)
+ && (global || by_ref || is_reference (decl)))
+ {
+ install_var_field (decl, false, 1, ctx);
+ if (!global)
+ install_var_field (decl, by_ref, 2, ctx);
+ }
+ else if (!global)
+ install_var_field (decl, by_ref, 3, ctx);
}
install_var_local (decl, ctx);
break;
@@ -1044,7 +1358,7 @@ scan_sharing_clauses (tree clauses, omp_context *ctx)
case OMP_CLAUSE_COPYIN:
decl = OMP_CLAUSE_DECL (c);
by_ref = use_pointer_for_field (decl, NULL);
- install_var_field (decl, by_ref, ctx);
+ install_var_field (decl, by_ref, 3, ctx);
break;
case OMP_CLAUSE_DEFAULT:
@@ -1060,6 +1374,8 @@ scan_sharing_clauses (tree clauses, omp_context *ctx)
case OMP_CLAUSE_NOWAIT:
case OMP_CLAUSE_ORDERED:
+ case OMP_CLAUSE_COLLAPSE:
+ case OMP_CLAUSE_UNTIED:
break;
default:
@@ -1074,6 +1390,8 @@ scan_sharing_clauses (tree clauses, omp_context *ctx)
case OMP_CLAUSE_LASTPRIVATE:
/* Let the corresponding firstprivate clause create
the variable. */
+ if (OMP_CLAUSE_LASTPRIVATE_STMT (c))
+ scan_array_reductions = true;
if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
break;
/* FALLTHRU */
@@ -1106,6 +1424,8 @@ scan_sharing_clauses (tree clauses, omp_context *ctx)
case OMP_CLAUSE_SCHEDULE:
case OMP_CLAUSE_NOWAIT:
case OMP_CLAUSE_ORDERED:
+ case OMP_CLAUSE_COLLAPSE:
+ case OMP_CLAUSE_UNTIED:
break;
default:
@@ -1121,6 +1441,9 @@ scan_sharing_clauses (tree clauses, omp_context *ctx)
scan_omp (&OMP_CLAUSE_REDUCTION_INIT (c), ctx);
scan_omp (&OMP_CLAUSE_REDUCTION_MERGE (c), ctx);
}
+ else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
+ && OMP_CLAUSE_LASTPRIVATE_STMT (c))
+ scan_omp (&OMP_CLAUSE_LASTPRIVATE_STMT (c), ctx);
}
/* Create a new name for omp child function. Returns an identifier. */
@@ -1128,15 +1451,17 @@ scan_sharing_clauses (tree clauses, omp_context *ctx)
static GTY(()) unsigned int tmp_ompfn_id_num;
static tree
-create_omp_child_function_name (void)
+create_omp_child_function_name (bool task_copy)
{
tree name = DECL_ASSEMBLER_NAME (current_function_decl);
size_t len = IDENTIFIER_LENGTH (name);
char *tmp_name, *prefix;
+ const char *suffix;
- prefix = alloca (len + sizeof ("_omp_fn"));
+ suffix = task_copy ? "_omp_cpyfn" : "_omp_fn";
+ prefix = alloca (len + strlen (suffix) + 1);
memcpy (prefix, IDENTIFIER_POINTER (name), len);
- strcpy (prefix + len, "_omp_fn");
+ strcpy (prefix + len, suffix);
#ifndef NO_DOT_IN_LABEL
prefix[len] = '.';
#elif !defined NO_DOLLAR_IN_LABEL
@@ -1150,17 +1475,24 @@ create_omp_child_function_name (void)
yet, just the bare decl. */
static void
-create_omp_child_function (omp_context *ctx)
+create_omp_child_function (omp_context *ctx, bool task_copy)
{
tree decl, type, name, t;
- name = create_omp_child_function_name ();
- type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
+ name = create_omp_child_function_name (task_copy);
+ if (task_copy)
+ type = build_function_type_list (void_type_node, ptr_type_node,
+ ptr_type_node, NULL_TREE);
+ else
+ type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
decl = build_decl (FUNCTION_DECL, name, type);
decl = lang_hooks.decls.pushdecl (decl);
- ctx->cb.dst_fn = decl;
+ if (!task_copy)
+ ctx->cb.dst_fn = decl;
+ else
+ OMP_TASK_COPYFN (ctx->stmt) = decl;
TREE_STATIC (decl) = 1;
TREE_USED (decl) = 1;
@@ -1183,7 +1515,19 @@ create_omp_child_function (omp_context *ctx)
DECL_CONTEXT (t) = current_function_decl;
TREE_USED (t) = 1;
DECL_ARGUMENTS (decl) = t;
- ctx->receiver_decl = t;
+ if (!task_copy)
+ ctx->receiver_decl = t;
+ else
+ {
+ t = build_decl (PARM_DECL, get_identifier (".omp_data_o"),
+ ptr_type_node);
+ DECL_ARTIFICIAL (t) = 1;
+ DECL_ARG_TYPE (t) = ptr_type_node;
+ DECL_CONTEXT (t) = current_function_decl;
+ TREE_USED (t) = 1;
+ TREE_CHAIN (t) = DECL_ARGUMENTS (decl);
+ DECL_ARGUMENTS (decl) = t;
+ }
/* Allocate memory for the function structure. The call to
allocate_struct_function clobbers CFUN, so we need to restore
@@ -1214,7 +1558,7 @@ scan_omp_parallel (tree *stmt_p, omp_context *outer_ctx)
}
ctx = new_omp_context (*stmt_p, outer_ctx);
- if (parallel_nesting_level > 1)
+ if (taskreg_nesting_level > 1)
ctx->is_nested = true;
ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
@@ -1222,7 +1566,7 @@ scan_omp_parallel (tree *stmt_p, omp_context *outer_ctx)
name = create_tmp_var_name (".omp_data_s");
name = build_decl (TYPE_DECL, name, ctx->record_type);
TYPE_NAME (ctx->record_type) = name;
- create_omp_child_function (ctx);
+ create_omp_child_function (ctx, false);
OMP_PARALLEL_FN (*stmt_p) = ctx->cb.dst_fn;
scan_sharing_clauses (OMP_PARALLEL_CLAUSES (*stmt_p), ctx);
@@ -1237,6 +1581,84 @@ scan_omp_parallel (tree *stmt_p, omp_context *outer_ctx)
}
}
+/* Scan an OpenMP task directive. */
+
+static void
+scan_omp_task (tree *stmt_p, omp_context *outer_ctx)
+{
+ omp_context *ctx;
+ tree name;
+
+ /* Ignore task directives with empty bodies. */
+ if (optimize > 0
+ && empty_body_p (OMP_TASK_BODY (*stmt_p)))
+ {
+ *stmt_p = build_empty_stmt ();
+ return;
+ }
+
+ ctx = new_omp_context (*stmt_p, outer_ctx);
+ if (taskreg_nesting_level > 1)
+ ctx->is_nested = true;
+ ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
+ ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
+ ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
+ name = create_tmp_var_name (".omp_data_s");
+ name = build_decl (TYPE_DECL, name, ctx->record_type);
+ TYPE_NAME (ctx->record_type) = name;
+ create_omp_child_function (ctx, false);
+ OMP_TASK_FN (*stmt_p) = ctx->cb.dst_fn;
+
+ scan_sharing_clauses (OMP_TASK_CLAUSES (*stmt_p), ctx);
+
+ if (ctx->srecord_type)
+ {
+ name = create_tmp_var_name (".omp_data_a");
+ name = build_decl (TYPE_DECL, name, ctx->srecord_type);
+ TYPE_NAME (ctx->srecord_type) = name;
+ create_omp_child_function (ctx, true);
+ }
+
+ scan_omp (&OMP_TASK_BODY (*stmt_p), ctx);
+
+ if (TYPE_FIELDS (ctx->record_type) == NULL)
+ {
+ ctx->record_type = ctx->receiver_decl = NULL;
+ OMP_TASK_ARG_SIZE (*stmt_p)
+ = build_int_cst (long_integer_type_node, 0);
+ OMP_TASK_ARG_ALIGN (*stmt_p)
+ = build_int_cst (long_integer_type_node, 1);
+ }
+ else
+ {
+ tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
+ /* Move VLA fields to the end. */
+ p = &TYPE_FIELDS (ctx->record_type);
+ while (*p)
+ if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
+ || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
+ {
+ *q = *p;
+ *p = TREE_CHAIN (*p);
+ TREE_CHAIN (*q) = NULL_TREE;
+ q = &TREE_CHAIN (*q);
+ }
+ else
+ p = &TREE_CHAIN (*p);
+ *p = vla_fields;
+ layout_type (ctx->record_type);
+ fixup_child_record_type (ctx);
+ if (ctx->srecord_type)
+ layout_type (ctx->srecord_type);
+ OMP_TASK_ARG_SIZE (*stmt_p)
+ = fold_convert (long_integer_type_node,
+ TYPE_SIZE_UNIT (ctx->record_type));
+ OMP_TASK_ARG_ALIGN (*stmt_p)
+ = build_int_cst (long_integer_type_node,
+ TYPE_ALIGN_UNIT (ctx->record_type));
+ }
+}
+
/* Scan an OpenMP loop directive. */
@@ -1245,6 +1667,7 @@ scan_omp_for (tree *stmt_p, omp_context *outer_ctx)
{
omp_context *ctx;
tree stmt;
+ int i;
stmt = *stmt_p;
ctx = new_omp_context (stmt, outer_ctx);
@@ -1252,9 +1675,12 @@ scan_omp_for (tree *stmt_p, omp_context *outer_ctx)
scan_sharing_clauses (OMP_FOR_CLAUSES (stmt), ctx);
scan_omp (&OMP_FOR_PRE_BODY (stmt), ctx);
- scan_omp (&OMP_FOR_INIT (stmt), ctx);
- scan_omp (&OMP_FOR_COND (stmt), ctx);
- scan_omp (&OMP_FOR_INCR (stmt), ctx);
+ for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (stmt)); i++)
+ {
+ scan_omp (&TREE_VEC_ELT (OMP_FOR_INIT (stmt), i), ctx);
+ scan_omp (&TREE_VEC_ELT (OMP_FOR_COND (stmt), i), ctx);
+ scan_omp (&TREE_VEC_ELT (OMP_FOR_INCR (stmt), i), ctx);
+ }
scan_omp (&OMP_FOR_BODY (stmt), ctx);
}
@@ -1307,6 +1733,7 @@ check_omp_nesting_restrictions (tree t, omp_context *ctx)
case OMP_FOR:
case OMP_SECTIONS:
case OMP_SINGLE:
+ case CALL_EXPR:
for (; ctx != NULL; ctx = ctx->outer)
switch (TREE_CODE (ctx->stmt))
{
@@ -1315,8 +1742,17 @@ check_omp_nesting_restrictions (tree t, omp_context *ctx)
case OMP_SINGLE:
case OMP_ORDERED:
case OMP_MASTER:
+ case OMP_TASK:
+ if (TREE_CODE (t) == CALL_EXPR)
+ {
+ warning (0, "barrier region may not be closely nested inside "
+ "of work-sharing, critical, ordered, master or "
+ "explicit task region");
+ return;
+ }
warning (0, "work-sharing region may not be closely nested inside "
- "of work-sharing, critical, ordered or master region");
+ "of work-sharing, critical, ordered, master or explicit "
+ "task region");
return;
case OMP_PARALLEL:
return;
@@ -1331,8 +1767,9 @@ check_omp_nesting_restrictions (tree t, omp_context *ctx)
case OMP_FOR:
case OMP_SECTIONS:
case OMP_SINGLE:
+ case OMP_TASK:
warning (0, "master region may not be closely nested inside "
- "of work-sharing region");
+ "of work-sharing or explicit task region");
return;
case OMP_PARALLEL:
return;
@@ -1345,8 +1782,9 @@ check_omp_nesting_restrictions (tree t, omp_context *ctx)
switch (TREE_CODE (ctx->stmt))
{
case OMP_CRITICAL:
+ case OMP_TASK:
warning (0, "ordered region may not be closely nested inside "
- "of critical region");
+ "of critical or explicit task region");
return;
case OMP_FOR:
if (find_omp_clause (OMP_CLAUSES (ctx->stmt),
@@ -1389,16 +1827,32 @@ scan_omp_1 (tree *tp, int *walk_subtrees, void *data)
input_location = EXPR_LOCATION (t);
/* Check the OpenMP nesting restrictions. */
- if (OMP_DIRECTIVE_P (t) && ctx != NULL)
- check_omp_nesting_restrictions (t, ctx);
+ if (ctx != NULL)
+ {
+ if (OMP_DIRECTIVE_P (t))
+ check_omp_nesting_restrictions (t, ctx);
+ else if (TREE_CODE (t) == CALL_EXPR)
+ {
+ tree fndecl = get_callee_fndecl (t);
+ if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
+ && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
+ check_omp_nesting_restrictions (t, ctx);
+ }
+ }
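A sketch of the kind of code the new CALL_EXPR case is meant to catch; #pragma omp barrier is represented as a call to the GOMP_barrier builtin, which is why a plain CALL_EXPR check suffices:

    void
    bar (void)
    {
    #pragma omp parallel
      {
    #pragma omp task
        {
    #pragma omp barrier   /* now warns: barrier region may not be closely
                             nested inside of ... explicit task region  */
        }
      }
    }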
*walk_subtrees = 0;
switch (TREE_CODE (t))
{
case OMP_PARALLEL:
- parallel_nesting_level++;
+ taskreg_nesting_level++;
scan_omp_parallel (tp, ctx);
- parallel_nesting_level--;
+ taskreg_nesting_level--;
+ break;
+
+ case OMP_TASK:
+ taskreg_nesting_level++;
+ scan_omp_task (tp, ctx);
+ taskreg_nesting_level--;
break;
case OMP_FOR:
@@ -1715,16 +2169,18 @@ lower_rec_input_clauses (tree clauses, tree *ilist, tree *dlist,
if (pass == 0)
continue;
- ptr = DECL_VALUE_EXPR (new_var);
- gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
- ptr = TREE_OPERAND (ptr, 0);
- gcc_assert (DECL_P (ptr));
-
- x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
- x = build_call_expr (built_in_decls[BUILT_IN_ALLOCA], 1, x);
- x = fold_convert (TREE_TYPE (ptr), x);
- x = build_gimple_modify_stmt (ptr, x);
- gimplify_and_add (x, ilist);
+ if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
+ {
+ ptr = DECL_VALUE_EXPR (new_var);
+ gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
+ ptr = TREE_OPERAND (ptr, 0);
+ gcc_assert (DECL_P (ptr));
+ x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
+ x = build_call_expr (built_in_decls[BUILT_IN_ALLOCA], 1, x);
+ x = fold_convert (TREE_TYPE (ptr), x);
+ x = build_gimple_modify_stmt (ptr, x);
+ gimplify_and_add (x, ilist);
+ }
}
else if (is_reference (var))
{
@@ -1740,7 +2196,12 @@ lower_rec_input_clauses (tree clauses, tree *ilist, tree *dlist,
continue;
x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
- if (TREE_CONSTANT (x))
+ if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
+ {
+ x = build_receiver_ref (var, false, ctx);
+ x = build_fold_addr_expr (x);
+ }
+ else if (TREE_CONSTANT (x))
{
const char *name = NULL;
if (DECL_NAME (var))
@@ -1800,7 +2261,18 @@ lower_rec_input_clauses (tree clauses, tree *ilist, tree *dlist,
/* FALLTHRU */
case OMP_CLAUSE_PRIVATE:
- x = lang_hooks.decls.omp_clause_default_ctor (c, new_var);
+ if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
+ x = build_outer_var_ref (var, ctx);
+ else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
+ {
+ if (is_task_ctx (ctx))
+ x = build_receiver_ref (var, false, ctx);
+ else
+ x = build_outer_var_ref (var, ctx);
+ }
+ else
+ x = NULL;
+ x = lang_hooks.decls.omp_clause_default_ctor (c, new_var, x);
if (x)
gimplify_and_add (x, ilist);
/* FALLTHRU */
@@ -1816,6 +2288,20 @@ lower_rec_input_clauses (tree clauses, tree *ilist, tree *dlist,
break;
case OMP_CLAUSE_FIRSTPRIVATE:
+ if (is_task_ctx (ctx))
+ {
+ if (is_reference (var) || is_variable_sized (var))
+ goto do_dtor;
+ else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
+ ctx))
+ || use_pointer_for_field (var, NULL))
+ {
+ x = build_receiver_ref (var, false, ctx);
+ SET_DECL_VALUE_EXPR (new_var, x);
+ DECL_HAS_VALUE_EXPR_P (new_var) = 1;
+ goto do_dtor;
+ }
+ }
x = build_outer_var_ref (var, ctx);
x = lang_hooks.decls.omp_clause_copy_ctor (c, new_var, x);
gimplify_and_add (x, ilist);
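A rough sketch of the task-context firstprivate path above (function and field names are illustrative): for a global or by-pointer firstprivate, no second copy is made in the outlined body; DECL_VALUE_EXPR redirects uses to the field of the incoming argument block, which was already filled when the task was created.

    extern void use (int);
    int g;

    void
    baz (void)
    {
    #pragma omp task firstprivate (g)
      use (g);
    }

    /* Outlined body, roughly:
         void baz._omp_fn.0 (struct .omp_data_s *.omp_data_i)
         {
           use (.omp_data_i->g);   -- g was snapshotted at GOMP_task time  */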
@@ -1833,8 +2319,16 @@ lower_rec_input_clauses (tree clauses, tree *ilist, tree *dlist,
case OMP_CLAUSE_REDUCTION:
if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
{
+ tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
+ x = build_outer_var_ref (var, ctx);
+
+ if (is_reference (var))
+ x = build_fold_addr_expr (x);
+ SET_DECL_VALUE_EXPR (placeholder, x);
+ DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c), ilist);
OMP_CLAUSE_REDUCTION_INIT (c) = NULL;
+ DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
}
else
{
@@ -1879,9 +2373,10 @@ lower_rec_input_clauses (tree clauses, tree *ilist, tree *dlist,
static void
lower_lastprivate_clauses (tree clauses, tree predicate, tree *stmt_list,
- omp_context *ctx)
+ omp_context *ctx)
{
tree sub_list, x, c;
+ bool par_clauses = false;
/* Early exit if there are no lastprivate clauses. */
clauses = find_omp_clause (clauses, OMP_CLAUSE_LASTPRIVATE);
@@ -1901,25 +2396,47 @@ lower_lastprivate_clauses (tree clauses, tree predicate, tree *stmt_list,
OMP_CLAUSE_LASTPRIVATE);
if (clauses == NULL)
return;
+ par_clauses = true;
}
sub_list = alloc_stmt_list ();
- for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
+ for (c = clauses; c ;)
{
tree var, new_var;
- if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LASTPRIVATE)
- continue;
+ if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
+ {
+ var = OMP_CLAUSE_DECL (c);
+ new_var = lookup_decl (var, ctx);
- var = OMP_CLAUSE_DECL (c);
- new_var = lookup_decl (var, ctx);
+ if (OMP_CLAUSE_LASTPRIVATE_STMT (c))
+ gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c), &sub_list);
+ OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL;
- x = build_outer_var_ref (var, ctx);
- if (is_reference (var))
- new_var = build_fold_indirect_ref (new_var);
- x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
- append_to_statement_list (x, &sub_list);
+ x = build_outer_var_ref (var, ctx);
+ if (is_reference (var))
+ new_var = build_fold_indirect_ref (new_var);
+ x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
+ append_to_statement_list (x, &sub_list);
+ }
+ c = OMP_CLAUSE_CHAIN (c);
+ if (c == NULL && !par_clauses)
+ {
+ /* If this was a workshare directive, see if its clauses had been
+ combined with those of its enclosing parallel.  In that case,
+ continue looking for lastprivate clauses on the parallel
+ statement itself.  */
+ if (is_parallel_ctx (ctx))
+ break;
+
+ ctx = ctx->outer;
+ if (ctx == NULL || !is_parallel_ctx (ctx))
+ break;
+
+ c = find_omp_clause (OMP_PARALLEL_CLAUSES (ctx->stmt),
+ OMP_CLAUSE_LASTPRIVATE);
+ par_clauses = true;
+ }
}
if (predicate)
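As an illustration of the combined case handled by the par_clauses walk above, source such as the following is split into a parallel plus a workshare, and the lastprivate clause may then have to be found on the enclosing OMP_PARALLEL rather than on the OMP_FOR itself:

    int last;

    void
    qux (int n)
    {
      int i;
    #pragma omp parallel for lastprivate (last)
      for (i = 0; i < n; i++)
        last = i;
    }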
@@ -2071,6 +2588,10 @@ lower_send_clauses (tree clauses, tree *ilist, tree *olist, omp_context *ctx)
switch (OMP_CLAUSE_CODE (c))
{
+ case OMP_CLAUSE_PRIVATE:
+ if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
+ break;
+ continue;
case OMP_CLAUSE_FIRSTPRIVATE:
case OMP_CLAUSE_COPYIN:
case OMP_CLAUSE_LASTPRIVATE:
@@ -2092,6 +2613,7 @@ lower_send_clauses (tree clauses, tree *ilist, tree *olist, omp_context *ctx)
switch (OMP_CLAUSE_CODE (c))
{
+ case OMP_CLAUSE_PRIVATE:
case OMP_CLAUSE_FIRSTPRIVATE:
case OMP_CLAUSE_COPYIN:
do_in = true;
@@ -2105,7 +2627,11 @@ lower_send_clauses (tree clauses, tree *ilist, tree *olist, omp_context *ctx)
do_in = true;
}
else
- do_out = true;
+ {
+ do_out = true;
+ if (lang_hooks.decls.omp_private_outer_ref (val))
+ do_in = true;
+ }
break;
case OMP_CLAUSE_REDUCTION:
@@ -2123,6 +2649,8 @@ lower_send_clauses (tree clauses, tree *ilist, tree *olist, omp_context *ctx)
x = by_ref ? build_fold_addr_expr (var) : var;
x = build_gimple_modify_stmt (ref, x);
gimplify_and_add (x, ilist);
+ if (is_task_ctx (ctx))
+ DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
}
if (do_out)
@@ -2141,12 +2669,13 @@ lower_send_clauses (tree clauses, tree *ilist, tree *olist, omp_context *ctx)
static void
lower_send_shared_vars (tree *ilist, tree *olist, omp_context *ctx)
{
- tree var, ovar, nvar, f, x;
+ tree var, ovar, nvar, f, x, record_type;
if (ctx->record_type == NULL)
return;
- for (f = TYPE_FIELDS (ctx->record_type); f ; f = TREE_CHAIN (f))
+ record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
+ for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
{
ovar = DECL_ABSTRACT_ORIGIN (f);
nvar = maybe_lookup_decl (ovar, ctx);
@@ -2171,9 +2700,12 @@ lower_send_shared_vars (tree *ilist, tree *olist, omp_context *ctx)
x = build_gimple_modify_stmt (x, var);
gimplify_and_add (x, ilist);
- x = build_sender_ref (ovar, ctx);
- x = build_gimple_modify_stmt (var, x);
- gimplify_and_add (x, olist);
+ if (!TREE_READONLY (var))
+ {
+ x = build_sender_ref (ovar, ctx);
+ x = build_gimple_modify_stmt (var, x);
+ gimplify_and_add (x, olist);
+ }
}
}
}
@@ -2203,8 +2735,11 @@ expand_parallel_call (struct omp_region *region, basic_block bb,
switch (region->inner->type)
{
case OMP_FOR:
+ gcc_assert (region->inner->sched_kind != OMP_CLAUSE_SCHEDULE_AUTO);
start_ix = BUILT_IN_GOMP_PARALLEL_LOOP_STATIC_START
- + region->inner->sched_kind;
+ + (region->inner->sched_kind
+ == OMP_CLAUSE_SCHEDULE_RUNTIME
+ ? 3 : region->inner->sched_kind);
break;
case OMP_SECTIONS:
start_ix = BUILT_IN_GOMP_PARALLEL_SECTIONS_START;
@@ -2347,6 +2882,80 @@ expand_parallel_call (struct omp_region *region, basic_block bb,
}
+static void maybe_catch_exception (tree *stmt_p);
+
+
+/* Finalize task copyfn. */
+
+static void
+expand_task_copyfn (tree task_stmt)
+{
+ struct function *child_cfun;
+ tree child_fn, old_fn;
+
+ child_fn = OMP_TASK_COPYFN (task_stmt);
+ child_cfun = DECL_STRUCT_FUNCTION (child_fn);
+
+ /* Inform the callgraph about the new function. */
+ DECL_STRUCT_FUNCTION (child_fn)->curr_properties
+ = cfun->curr_properties;
+
+ old_fn = current_function_decl;
+ push_cfun (child_cfun);
+ current_function_decl = child_fn;
+ gimplify_body (&DECL_SAVED_TREE (child_fn), child_fn, false);
+ maybe_catch_exception (&BIND_EXPR_BODY (DECL_SAVED_TREE (child_fn)));
+ pop_cfun ();
+ current_function_decl = old_fn;
+
+ cgraph_add_new_function (child_fn, false);
+}
+
+/* Build the call to GOMP_task that actually launches the task.
+   BB is the block where the call should be inserted.  */
+
+static void
+expand_task_call (basic_block bb, tree entry_stmt)
+{
+ tree t, t1, t2, t3, flags, cond, c, clauses;
+ block_stmt_iterator si;
+
+ clauses = OMP_TASK_CLAUSES (entry_stmt);
+
+ if (OMP_TASK_COPYFN (entry_stmt))
+ expand_task_copyfn (entry_stmt);
+
+ c = find_omp_clause (clauses, OMP_CLAUSE_IF);
+ if (c)
+ cond = gimple_boolify (OMP_CLAUSE_IF_EXPR (c));
+ else
+ cond = boolean_true_node;
+
+ c = find_omp_clause (clauses, OMP_CLAUSE_UNTIED);
+ flags = build_int_cst (unsigned_type_node, (c ? 1 : 0));
+
+ si = bsi_last (bb);
+ t = OMP_TASK_DATA_ARG (entry_stmt);
+ if (t == NULL)
+ t2 = null_pointer_node;
+ else
+ t2 = build_fold_addr_expr (t);
+ t1 = build_fold_addr_expr (OMP_TASK_FN (entry_stmt));
+ t = OMP_TASK_COPYFN (entry_stmt);
+ if (t == NULL)
+ t3 = null_pointer_node;
+ else
+ t3 = build_fold_addr_expr (t);
+
+ t = build_call_expr (built_in_decls[BUILT_IN_GOMP_TASK], 7, t1, t2, t3,
+ OMP_TASK_ARG_SIZE (entry_stmt),
+ OMP_TASK_ARG_ALIGN (entry_stmt), cond, flags);
+
+ force_gimple_operand_bsi (&si, t, true, NULL_TREE,
+ false, BSI_CONTINUE_LINKING);
+}
+
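A minimal sketch of the call built above: for `#pragma omp task if (cnd) untied` with a non-empty data block and no copy function, the emitted code is roughly the following (function and variable names are illustrative; flag bit 0 encodes untied):

    GOMP_task (foo._omp_fn.1, &.omp_data_o, 0B,
               sizeof (struct .omp_data_s),
               __alignof__ (struct .omp_data_s),
               cnd != 0, 1);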
+
/* If exceptions are enabled, wrap *STMT_P in a MUST_NOT_THROW catch
handler. This prevents programs from violating the structured
block semantics with throws. */
@@ -2460,10 +3069,12 @@ remove_exit_barriers (struct omp_region *region)
calls. These can't be declared as const functions, but
within one parallel body they are constant, so they can be
transformed there into __builtin_omp_get_{thread_num,num_threads} ()
- which are declared const. */
+ which are declared const.  Similarly for a task body, except
+ that in an untied task omp_get_thread_num () can change at any task
+ scheduling point.  */
static void
-optimize_omp_library_calls (void)
+optimize_omp_library_calls (tree entry_stmt)
{
basic_block bb;
block_stmt_iterator bsi;
@@ -2471,6 +3082,9 @@ optimize_omp_library_calls (void)
= DECL_ASSEMBLER_NAME (built_in_decls [BUILT_IN_OMP_GET_THREAD_NUM]);
tree num_thr_id
= DECL_ASSEMBLER_NAME (built_in_decls [BUILT_IN_OMP_GET_NUM_THREADS]);
+ bool untied_task = (TREE_CODE (entry_stmt) == OMP_TASK
+ && find_omp_clause (OMP_TASK_CLAUSES (entry_stmt),
+ OMP_CLAUSE_UNTIED) != NULL);
FOR_EACH_BB (bb)
for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
@@ -2488,7 +3102,13 @@ optimize_omp_library_calls (void)
tree built_in;
if (DECL_NAME (decl) == thr_num_id)
- built_in = built_in_decls [BUILT_IN_OMP_GET_THREAD_NUM];
+ {
+ /* In an untied task (#pragma omp task untied), omp_get_thread_num ()
+ can change during the execution of the task region.  */
+ if (untied_task)
+ continue;
+ built_in = built_in_decls [BUILT_IN_OMP_GET_THREAD_NUM];
+ }
else if (DECL_NAME (decl) == num_thr_id)
built_in = built_in_decls [BUILT_IN_OMP_GET_NUM_THREADS];
else
@@ -2511,10 +3131,10 @@ optimize_omp_library_calls (void)
}
}
-/* Expand the OpenMP parallel directive starting at REGION. */
+/* Expand the OpenMP parallel or task directive starting at REGION. */
static void
-expand_omp_parallel (struct omp_region *region)
+expand_omp_taskreg (struct omp_region *region)
{
basic_block entry_bb, exit_bb, new_bb;
struct function *child_cfun;
@@ -2524,7 +3144,7 @@ expand_omp_parallel (struct omp_region *region)
edge e;
entry_stmt = last_stmt (region->entry);
- child_fn = OMP_PARALLEL_FN (entry_stmt);
+ child_fn = OMP_TASKREG_FN (entry_stmt);
child_cfun = DECL_STRUCT_FUNCTION (child_fn);
/* If this function has been already instrumented, make sure
the child function isn't instrumented again. */
@@ -2549,7 +3169,8 @@ expand_omp_parallel (struct omp_region *region)
entry_succ_e = single_succ_edge (entry_bb);
si = bsi_last (entry_bb);
- gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_PARALLEL);
+ gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_PARALLEL
+ || TREE_CODE (bsi_stmt (si)) == OMP_TASK);
bsi_remove (&si, true);
new_bb = entry_bb;
@@ -2575,7 +3196,7 @@ expand_omp_parallel (struct omp_region *region)
a function call that has been inlined, the original PARM_DECL
.OMP_DATA_I may have been converted into a different local
variable. In which case, we need to keep the assignment. */
- if (OMP_PARALLEL_DATA_ARG (entry_stmt))
+ if (OMP_TASKREG_DATA_ARG (entry_stmt))
{
basic_block entry_succ_bb = single_succ (entry_bb);
block_stmt_iterator si;
@@ -2594,7 +3215,7 @@ expand_omp_parallel (struct omp_region *region)
STRIP_NOPS (arg);
if (TREE_CODE (arg) == ADDR_EXPR
&& TREE_OPERAND (arg, 0)
- == OMP_PARALLEL_DATA_ARG (entry_stmt))
+ == OMP_TASKREG_DATA_ARG (entry_stmt))
{
parcopy_stmt = stmt;
break;
@@ -2633,11 +3254,12 @@ expand_omp_parallel (struct omp_region *region)
for (t = DECL_ARGUMENTS (child_fn); t; t = TREE_CHAIN (t))
DECL_CONTEXT (t) = child_fn;
- /* Split ENTRY_BB at OMP_PARALLEL so that it can be moved to the
- child function. */
+ /* Split ENTRY_BB at OMP_PARALLEL or OMP_TASK, so that it can be
+ moved to the child function. */
si = bsi_last (entry_bb);
t = bsi_stmt (si);
- gcc_assert (t && TREE_CODE (t) == OMP_PARALLEL);
+ gcc_assert (t && (TREE_CODE (t) == OMP_PARALLEL
+ || TREE_CODE (t) == OMP_TASK));
bsi_remove (&si, true);
e = split_block (entry_bb, t);
entry_bb = e->dest;
@@ -2677,7 +3299,7 @@ expand_omp_parallel (struct omp_region *region)
fixed in a following pass. */
push_cfun (child_cfun);
if (optimize)
- optimize_omp_library_calls ();
+ optimize_omp_library_calls (entry_stmt);
rebuild_cgraph_edges ();
/* Some EH regions might become dead, see PR34608. If
@@ -2701,7 +3323,10 @@ expand_omp_parallel (struct omp_region *region)
}
/* Emit a library call to launch the children threads. */
- expand_parallel_call (region, new_bb, entry_stmt, ws_args);
+ if (TREE_CODE (entry_stmt) == OMP_PARALLEL)
+ expand_parallel_call (region, new_bb, entry_stmt, ws_args);
+ else
+ expand_task_call (new_bb, entry_stmt);
update_ssa (TODO_update_ssa_only_virtuals);
}
@@ -2727,7 +3352,64 @@ expand_omp_parallel (struct omp_region *region)
L3:
If this is a combined omp parallel loop, instead of the call to
- GOMP_loop_foo_start, we call GOMP_loop_foo_next. */
+ GOMP_loop_foo_start, we call GOMP_loop_foo_next.
+
+ For collapsed loops, given parameters:
+ collapse(3)
+ for (V1 = N11; V1 cond1 N12; V1 += STEP1)
+ for (V2 = N21; V2 cond2 N22; V2 += STEP2)
+ for (V3 = N31; V3 cond3 N32; V3 += STEP3)
+ BODY;
+
+ we generate pseudocode
+
+ if (cond3 is <)
+ adj = STEP3 - 1;
+ else
+ adj = STEP3 + 1;
+ count3 = (adj + N32 - N31) / STEP3;
+ if (cond2 is <)
+ adj = STEP2 - 1;
+ else
+ adj = STEP2 + 1;
+ count2 = (adj + N22 - N21) / STEP2;
+ if (cond1 is <)
+ adj = STEP1 - 1;
+ else
+ adj = STEP1 + 1;
+ count1 = (adj + N12 - N11) / STEP1;
+ count = count1 * count2 * count3;
+ more = GOMP_loop_foo_start (0, count, 1, CHUNK, &istart0, &iend0);
+ if (more) goto L0; else goto L3;
+ L0:
+ V = istart0;
+ T = V;
+ V3 = N31 + (T % count3) * STEP3;
+ T = T / count3;
+ V2 = N21 + (T % count2) * STEP2;
+ T = T / count2;
+ V1 = N11 + T * STEP1;
+ iend = iend0;
+ L1:
+ BODY;
+ V += 1;
+ if (V < iend) goto L10; else goto L2;
+ L10:
+ V3 += STEP3;
+ if (V3 cond3 N32) goto L1; else goto L11;
+ L11:
+ V3 = N31;
+ V2 += STEP2;
+ if (V2 cond2 N22) goto L1; else goto L12;
+ L12:
+ V2 = N21;
+ V1 += STEP1;
+ goto L1;
+ L2:
+ if (GOMP_loop_foo_next (&istart0, &iend0)) goto L0; else goto L3;
+ L3:
+
+ */
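A small worked instance of the decode step in the pseudocode above, with collapse(2), V1 in [0,3) step 1 and V2 in [0,4) step 1:

    /* count1 = 3, count2 = 4, count = 12.
       Logical iteration T = 7:
         V2 = 0 + (7 % 4) * 1 = 3;   T = 7 / 4 = 1;
         V1 = 0 + 1 * 1 = 1;
       i.e. T = 7 executes BODY with (V1, V2) = (1, 3).  */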
static void
expand_omp_for_generic (struct omp_region *region,
@@ -2736,20 +3418,23 @@ expand_omp_for_generic (struct omp_region *region,
enum built_in_function next_fn)
{
tree type, istart0, iend0, iend, phi;
- tree t, vmain, vback;
- basic_block entry_bb, cont_bb, exit_bb, l0_bb, l1_bb;
+ tree t, vmain, vback, bias = NULL_TREE;
+ basic_block entry_bb, cont_bb, exit_bb, l0_bb, l1_bb, collapse_bb;
basic_block l2_bb = NULL, l3_bb = NULL;
block_stmt_iterator si;
bool in_combined_parallel = is_combined_parallel (region);
bool broken_loop = region->cont == NULL;
edge e, ne;
+ tree *counts = NULL;
+ int i;
gcc_assert (!broken_loop || !in_combined_parallel);
+ gcc_assert (fd->iter_type == long_integer_type_node
+ || !in_combined_parallel);
- type = TREE_TYPE (fd->v);
-
- istart0 = create_tmp_var (long_integer_type_node, ".istart0");
- iend0 = create_tmp_var (long_integer_type_node, ".iend0");
+ type = TREE_TYPE (fd->loop.v);
+ istart0 = create_tmp_var (fd->iter_type, ".istart0");
+ iend0 = create_tmp_var (fd->iter_type, ".iend0");
TREE_ADDRESSABLE (istart0) = 1;
TREE_ADDRESSABLE (iend0) = 1;
if (gimple_in_ssa_p (cfun))
@@ -2758,8 +3443,32 @@ expand_omp_for_generic (struct omp_region *region,
add_referenced_var (iend0);
}
+ /* See if we need to bias by LLONG_MIN. */
+ if (fd->iter_type == long_long_unsigned_type_node
+ && TREE_CODE (type) == INTEGER_TYPE
+ && !TYPE_UNSIGNED (type))
+ {
+ tree n1, n2;
+
+ if (fd->loop.cond_code == LT_EXPR)
+ {
+ n1 = fd->loop.n1;
+ n2 = fold_build2 (PLUS_EXPR, type, fd->loop.n2, fd->loop.step);
+ }
+ else
+ {
+ n1 = fold_build2 (MINUS_EXPR, type, fd->loop.n2, fd->loop.step);
+ n2 = fd->loop.n1;
+ }
+ if (TREE_CODE (n1) != INTEGER_CST
+ || TREE_CODE (n2) != INTEGER_CST
+ || ((tree_int_cst_sgn (n1) < 0) ^ (tree_int_cst_sgn (n2) < 0)))
+ bias = fold_convert (fd->iter_type, TYPE_MIN_VALUE (type));
+ }
+
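The GOMP_loop_ull_* interface works on unsigned 64-bit values, so when the loop variable is signed and its bounds are not known at compile time to have the same sign, both bounds are shifted by TYPE_MIN_VALUE; unsigned order of the shifted values then matches signed order of the originals, and the bias is subtracted again when istart0/iend0 are copied back (see the MINUS_EXPR folds further down). A worked instance:

    /* signed long long, n1 = -4, n2 = 5, bias = 0x8000000000000000:
         n1 + bias = 0x7ffffffffffffffc
         n2 + bias = 0x8000000000000005
       0x7ffffffffffffffc < 0x8000000000000005 (unsigned), matching -4 < 5.  */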
entry_bb = region->entry;
cont_bb = region->cont;
+ collapse_bb = NULL;
gcc_assert (EDGE_COUNT (entry_bb->succs) == 2);
gcc_assert (broken_loop
|| BRANCH_EDGE (entry_bb)->dest == FALLTHRU_EDGE (cont_bb)->dest);
@@ -2777,7 +3486,60 @@ expand_omp_for_generic (struct omp_region *region,
exit_bb = region->exit;
si = bsi_last (entry_bb);
+
gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_FOR);
+ if (fd->collapse > 1)
+ {
+ /* Collapsed loops still need work before they can be expanded in SSA form.  */
+ gcc_assert (!gimple_in_ssa_p (cfun));
+ counts = (tree *) alloca (fd->collapse * sizeof (tree));
+ for (i = 0; i < fd->collapse; i++)
+ {
+ tree itype = TREE_TYPE (fd->loops[i].v);
+
+ if (POINTER_TYPE_P (itype))
+ itype = lang_hooks.types.type_for_size (TYPE_PRECISION (itype), 0);
+ t = build_int_cst (itype, (fd->loops[i].cond_code == LT_EXPR
+ ? -1 : 1));
+ t = fold_build2 (PLUS_EXPR, itype,
+ fold_convert (itype, fd->loops[i].step), t);
+ t = fold_build2 (PLUS_EXPR, itype, t,
+ fold_convert (itype, fd->loops[i].n2));
+ t = fold_build2 (MINUS_EXPR, itype, t,
+ fold_convert (itype, fd->loops[i].n1));
+ if (TYPE_UNSIGNED (itype) && fd->loops[i].cond_code == GT_EXPR)
+ t = fold_build2 (TRUNC_DIV_EXPR, itype,
+ fold_build1 (NEGATE_EXPR, itype, t),
+ fold_build1 (NEGATE_EXPR, itype,
+ fold_convert (itype,
+ fd->loops[i].step)));
+ else
+ t = fold_build2 (TRUNC_DIV_EXPR, itype, t,
+ fold_convert (itype, fd->loops[i].step));
+ t = fold_convert (type, t);
+ if (TREE_CODE (t) == INTEGER_CST)
+ counts[i] = t;
+ else
+ {
+ counts[i] = create_tmp_var (type, ".count");
+ t = build_gimple_modify_stmt (counts[i], t);
+ force_gimple_operand_bsi (&si, t, true, NULL_TREE,
+ true, BSI_SAME_STMT);
+ }
+ if (SSA_VAR_P (fd->loop.n2))
+ {
+ if (i == 0)
+ t = build_gimple_modify_stmt (fd->loop.n2, counts[0]);
+ else
+ {
+ t = fold_build2 (MULT_EXPR, type, fd->loop.n2, counts[i]);
+ t = build_gimple_modify_stmt (fd->loop.n2, t);
+ }
+ force_gimple_operand_bsi (&si, t, true, NULL_TREE,
+ true, BSI_SAME_STMT);
+ }
+ }
+ }
if (in_combined_parallel)
{
/* In a combined parallel loop, emit a call to
@@ -2793,19 +3555,52 @@ expand_omp_for_generic (struct omp_region *region,
GOMP_loop_foo_start in ENTRY_BB. */
t4 = build_fold_addr_expr (iend0);
t3 = build_fold_addr_expr (istart0);
- t2 = fold_convert (long_integer_type_node, fd->step);
- t1 = fold_convert (long_integer_type_node, fd->n2);
- t0 = fold_convert (long_integer_type_node, fd->n1);
- if (fd->chunk_size)
+ t2 = fold_convert (fd->iter_type, fd->loop.step);
+ t1 = fold_convert (fd->iter_type, fd->loop.n2);
+ t0 = fold_convert (fd->iter_type, fd->loop.n1);
+ if (bias)
{
- t = fold_convert (long_integer_type_node, fd->chunk_size);
- t = build_call_expr (built_in_decls[start_fn], 6,
- t0, t1, t2, t, t3, t4);
+ t1 = fold_build2 (PLUS_EXPR, fd->iter_type, t1, bias);
+ t0 = fold_build2 (PLUS_EXPR, fd->iter_type, t0, bias);
+ }
+ if (fd->iter_type == long_integer_type_node)
+ {
+ if (fd->chunk_size)
+ {
+ t = fold_convert (fd->iter_type, fd->chunk_size);
+ t = build_call_expr (built_in_decls[start_fn], 6,
+ t0, t1, t2, t, t3, t4);
+ }
+ else
+ t = build_call_expr (built_in_decls[start_fn], 5,
+ t0, t1, t2, t3, t4);
}
else
- t = build_call_expr (built_in_decls[start_fn], 5,
- t0, t1, t2, t3, t4);
+ {
+ tree t5;
+ tree c_bool_type;
+
+ /* The GOMP_loop_ull_*start functions have an additional boolean
+ argument, true for < loops and false for > loops.
+ In Fortran, the C bool type can be different from
+ boolean_type_node. */
+ c_bool_type = TREE_TYPE (TREE_TYPE (built_in_decls[start_fn]));
+ t5 = build_int_cst (c_bool_type,
+ fd->loop.cond_code == LT_EXPR ? 1 : 0);
+ if (fd->chunk_size)
+ {
+ t = fold_convert (fd->iter_type, fd->chunk_size);
+ t = build_call_expr (built_in_decls[start_fn], 7,
+ t5, t0, t1, t2, t, t3, t4);
+ }
+ else
+ t = build_call_expr (built_in_decls[start_fn], 6,
+ t5, t0, t1, t2, t3, t4);
+ }
}
+ if (TREE_TYPE (t) != boolean_type_node)
+ t = fold_build2 (NE_EXPR, boolean_type_node,
+ t, build_int_cst (TREE_TYPE (t), 0));
t = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
true, BSI_SAME_STMT);
t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
@@ -2816,17 +3611,57 @@ expand_omp_for_generic (struct omp_region *region,
/* Iteration setup for sequential loop goes in L0_BB. */
si = bsi_start (l0_bb);
- t = fold_convert (type, istart0);
+ if (bias)
+ t = fold_convert (type, fold_build2 (MINUS_EXPR, fd->iter_type,
+ istart0, bias));
+ else
+ t = fold_convert (type, istart0);
t = force_gimple_operand_bsi (&si, t, false, NULL_TREE,
false, BSI_CONTINUE_LINKING);
- t = build_gimple_modify_stmt (fd->v, t);
+ t = build_gimple_modify_stmt (fd->loop.v, t);
bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
if (gimple_in_ssa_p (cfun))
- SSA_NAME_DEF_STMT (fd->v) = t;
+ SSA_NAME_DEF_STMT (fd->loop.v) = t;
- t = fold_convert (type, iend0);
+ if (bias)
+ t = fold_convert (type, fold_build2 (MINUS_EXPR, fd->iter_type,
+ iend0, bias));
+ else
+ t = fold_convert (type, iend0);
iend = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
false, BSI_CONTINUE_LINKING);
+ if (fd->collapse > 1)
+ {
+ tree tem = create_tmp_var (type, ".tem");
+
+ t = build_gimple_modify_stmt (tem, fd->loop.v);
+ bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
+ for (i = fd->collapse - 1; i >= 0; i--)
+ {
+ tree vtype = TREE_TYPE (fd->loops[i].v), itype;
+ itype = vtype;
+ if (POINTER_TYPE_P (vtype))
+ itype = lang_hooks.types.type_for_size (TYPE_PRECISION (vtype), 0);
+ t = fold_build2 (TRUNC_MOD_EXPR, type, tem, counts[i]);
+ t = fold_convert (itype, t);
+ t = fold_build2 (MULT_EXPR, itype, t, fd->loops[i].step);
+ if (POINTER_TYPE_P (vtype))
+ t = fold_build2 (POINTER_PLUS_EXPR, vtype,
+ fd->loops[i].n1, fold_convert (sizetype, t));
+ else
+ t = fold_build2 (PLUS_EXPR, itype, fd->loops[i].n1, t);
+ t = build_gimple_modify_stmt (fd->loops[i].v, t);
+ force_gimple_operand_bsi (&si, t, true, NULL_TREE,
+ false, BSI_CONTINUE_LINKING);
+ if (i != 0)
+ {
+ t = fold_build2 (TRUNC_DIV_EXPR, type, tem, counts[i]);
+ t = build_gimple_modify_stmt (tem, t);
+ force_gimple_operand_bsi (&si, t, true, NULL_TREE,
+ false, BSI_CONTINUE_LINKING);
+ }
+ }
+ }
if (!broken_loop)
{
@@ -2838,7 +3673,11 @@ expand_omp_for_generic (struct omp_region *region,
vmain = TREE_OPERAND (t, 1);
vback = TREE_OPERAND (t, 0);
- t = fold_build2 (PLUS_EXPR, type, vmain, fd->step);
+ if (POINTER_TYPE_P (type))
+ t = fold_build2 (POINTER_PLUS_EXPR, type, vmain,
+ fold_convert (sizetype, fd->loop.step));
+ else
+ t = fold_build2 (PLUS_EXPR, type, vmain, fd->loop.step);
t = force_gimple_operand_bsi (&si, t, false, NULL_TREE,
true, BSI_SAME_STMT);
t = build_gimple_modify_stmt (vback, t);
@@ -2846,19 +3685,78 @@ expand_omp_for_generic (struct omp_region *region,
if (gimple_in_ssa_p (cfun))
SSA_NAME_DEF_STMT (vback) = t;
- t = build2 (fd->cond_code, boolean_type_node, vback, iend);
+ t = build2 (fd->loop.cond_code, boolean_type_node, vback, iend);
t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
bsi_insert_before (&si, t, BSI_SAME_STMT);
/* Remove OMP_CONTINUE. */
bsi_remove (&si, true);
+ if (fd->collapse > 1)
+ {
+ basic_block last_bb, bb;
+
+ last_bb = cont_bb;
+ for (i = fd->collapse - 1; i >= 0; i--)
+ {
+ tree vtype = TREE_TYPE (fd->loops[i].v);
+
+ bb = create_empty_bb (last_bb);
+ si = bsi_start (bb);
+
+ if (i < fd->collapse - 1)
+ {
+ e = make_edge (last_bb, bb, EDGE_FALSE_VALUE);
+ e->probability = REG_BR_PROB_BASE / 8;
+
+ t = build_gimple_modify_stmt (fd->loops[i + 1].v,
+ fd->loops[i + 1].n1);
+ force_gimple_operand_bsi (&si, t, true, NULL_TREE,
+ false, BSI_CONTINUE_LINKING);
+ }
+ else
+ collapse_bb = bb;
+
+ set_immediate_dominator (CDI_DOMINATORS, bb, last_bb);
+
+ if (POINTER_TYPE_P (vtype))
+ t = fold_build2 (POINTER_PLUS_EXPR, vtype,
+ fd->loops[i].v,
+ fold_convert (sizetype, fd->loops[i].step));
+ else
+ t = fold_build2 (PLUS_EXPR, vtype, fd->loops[i].v,
+ fd->loops[i].step);
+ t = build_gimple_modify_stmt (fd->loops[i].v, t);
+ force_gimple_operand_bsi (&si, t, true, NULL_TREE,
+ false, BSI_CONTINUE_LINKING);
+
+ if (i > 0)
+ {
+ t = fold_build2 (fd->loops[i].cond_code, boolean_type_node,
+ fd->loops[i].v, fd->loops[i].n2);
+ t = force_gimple_operand_bsi (&si, t, false, NULL_TREE,
+ false, BSI_CONTINUE_LINKING);
+ t = build3 (COND_EXPR, void_type_node, t,
+ NULL_TREE, NULL_TREE);
+ bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
+ e = make_edge (bb, l1_bb, EDGE_TRUE_VALUE);
+ e->probability = REG_BR_PROB_BASE * 7 / 8;
+ }
+ else
+ make_edge (bb, l1_bb, EDGE_FALLTHRU);
+ last_bb = bb;
+ }
+ }
+
/* Emit code to get the next parallel iteration in L2_BB. */
si = bsi_start (l2_bb);
t = build_call_expr (built_in_decls[next_fn], 2,
build_fold_addr_expr (istart0),
build_fold_addr_expr (iend0));
+ if (TREE_TYPE (t) != boolean_type_node)
+ t = fold_build2 (NE_EXPR, boolean_type_node,
+ t, build_int_cst (TREE_TYPE (t), 0));
t = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
false, BSI_CONTINUE_LINKING);
t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
@@ -2889,8 +3787,20 @@ expand_omp_for_generic (struct omp_region *region,
PHI_ARG_DEF_FROM_EDGE (phi, e));
remove_edge (e);
- find_edge (cont_bb, l1_bb)->flags = EDGE_TRUE_VALUE;
make_edge (cont_bb, l2_bb, EDGE_FALSE_VALUE);
+ if (fd->collapse > 1)
+ {
+ e = find_edge (cont_bb, l1_bb);
+ remove_edge (e);
+ e = make_edge (cont_bb, collapse_bb, EDGE_TRUE_VALUE);
+ }
+ else
+ {
+ e = find_edge (cont_bb, l1_bb);
+ e->flags = EDGE_TRUE_VALUE;
+ }
+ e->probability = REG_BR_PROB_BASE * 7 / 8;
+ find_edge (cont_bb, l2_bb)->probability = REG_BR_PROB_BASE / 8;
make_edge (l2_bb, l0_bb, EDGE_TRUE_VALUE);
set_immediate_dominator (CDI_DOMINATORS, l2_bb,
@@ -2917,7 +3827,10 @@ expand_omp_for_generic (struct omp_region *region,
adj = STEP - 1;
else
adj = STEP + 1;
- n = (adj + N2 - N1) / STEP;
+ if ((__typeof (V)) -1 > 0 && cond is >)
+ n = -(adj + N2 - N1) / -STEP;
+ else
+ n = (adj + N2 - N1) / STEP;
q = n / nthreads;
q += (q * nthreads != n);
s0 = q * threadid;
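A small worked instance of the partitioning sketched above, for n = 10 iterations on nthreads = 4:

    /* q = 10 / 4 = 2;  q += (2 * 4 != 10)  =>  q = 3
       thread 0: s0 = 0, e0 = min (3, 10)  = 3   -> iterations [0, 3)
       thread 1: s0 = 3, e0 = min (6, 10)  = 6   -> iterations [3, 6)
       thread 2: s0 = 6, e0 = min (9, 10)  = 9   -> iterations [6, 9)
       thread 3: s0 = 9, e0 = min (12, 10) = 10  -> iterations [9, 10)
       A thread with s0 >= e0 (e.g. thread 5 if nthreads were 6) skips
       the loop body entirely.  */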
@@ -2938,12 +3851,14 @@ expand_omp_for_static_nochunk (struct omp_region *region,
struct omp_for_data *fd)
{
tree n, q, s0, e0, e, t, nthreads, threadid;
- tree type, vmain, vback;
+ tree type, itype, vmain, vback;
basic_block entry_bb, exit_bb, seq_start_bb, body_bb, cont_bb;
basic_block fin_bb;
block_stmt_iterator si;
- type = TREE_TYPE (fd->v);
+ itype = type = TREE_TYPE (fd->loop.v);
+ if (POINTER_TYPE_P (type))
+ itype = lang_hooks.types.type_for_size (TYPE_PRECISION (type), 0);
entry_bb = region->entry;
cont_bb = region->cont;
@@ -2961,51 +3876,51 @@ expand_omp_for_static_nochunk (struct omp_region *region,
gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_FOR);
t = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_NUM_THREADS], 0);
- t = fold_convert (type, t);
+ t = fold_convert (itype, t);
nthreads = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
true, BSI_SAME_STMT);
t = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM], 0);
- t = fold_convert (type, t);
+ t = fold_convert (itype, t);
threadid = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
true, BSI_SAME_STMT);
- fd->n1 = force_gimple_operand_bsi (&si,
- fold_convert (type, fd->n1),
- true, NULL_TREE,
- true, BSI_SAME_STMT);
-
- fd->n2 = force_gimple_operand_bsi (&si,
- fold_convert (type, fd->n2),
- true, NULL_TREE,
- true, BSI_SAME_STMT);
-
- fd->step = force_gimple_operand_bsi (&si,
- fold_convert (type, fd->step),
- true, NULL_TREE,
- true, BSI_SAME_STMT);
-
- t = build_int_cst (type, (fd->cond_code == LT_EXPR ? -1 : 1));
- t = fold_build2 (PLUS_EXPR, type, fd->step, t);
- t = fold_build2 (PLUS_EXPR, type, t, fd->n2);
- t = fold_build2 (MINUS_EXPR, type, t, fd->n1);
- t = fold_build2 (TRUNC_DIV_EXPR, type, t, fd->step);
- t = fold_convert (type, t);
+ fd->loop.n1
+ = force_gimple_operand_bsi (&si, fold_convert (type, fd->loop.n1),
+ true, NULL_TREE, true, BSI_SAME_STMT);
+ fd->loop.n2
+ = force_gimple_operand_bsi (&si, fold_convert (itype, fd->loop.n2),
+ true, NULL_TREE, true, BSI_SAME_STMT);
+ fd->loop.step
+ = force_gimple_operand_bsi (&si, fold_convert (itype, fd->loop.step),
+ true, NULL_TREE, true, BSI_SAME_STMT);
+
+ t = build_int_cst (itype, (fd->loop.cond_code == LT_EXPR ? -1 : 1));
+ t = fold_build2 (PLUS_EXPR, itype, fd->loop.step, t);
+ t = fold_build2 (PLUS_EXPR, itype, t, fd->loop.n2);
+ t = fold_build2 (MINUS_EXPR, itype, t, fold_convert (itype, fd->loop.n1));
+ if (TYPE_UNSIGNED (itype) && fd->loop.cond_code == GT_EXPR)
+ t = fold_build2 (TRUNC_DIV_EXPR, itype,
+ fold_build1 (NEGATE_EXPR, itype, t),
+ fold_build1 (NEGATE_EXPR, itype, fd->loop.step));
+ else
+ t = fold_build2 (TRUNC_DIV_EXPR, itype, t, fd->loop.step);
+ t = fold_convert (itype, t);
n = force_gimple_operand_bsi (&si, t, true, NULL_TREE, true, BSI_SAME_STMT);
- t = fold_build2 (TRUNC_DIV_EXPR, type, n, nthreads);
+ t = fold_build2 (TRUNC_DIV_EXPR, itype, n, nthreads);
q = force_gimple_operand_bsi (&si, t, true, NULL_TREE, true, BSI_SAME_STMT);
- t = fold_build2 (MULT_EXPR, type, q, nthreads);
- t = fold_build2 (NE_EXPR, type, t, n);
- t = fold_build2 (PLUS_EXPR, type, q, t);
+ t = fold_build2 (MULT_EXPR, itype, q, nthreads);
+ t = fold_build2 (NE_EXPR, itype, t, n);
+ t = fold_build2 (PLUS_EXPR, itype, q, t);
q = force_gimple_operand_bsi (&si, t, true, NULL_TREE, true, BSI_SAME_STMT);
- t = build2 (MULT_EXPR, type, q, threadid);
+ t = build2 (MULT_EXPR, itype, q, threadid);
s0 = force_gimple_operand_bsi (&si, t, true, NULL_TREE, true, BSI_SAME_STMT);
- t = fold_build2 (PLUS_EXPR, type, s0, q);
- t = fold_build2 (MIN_EXPR, type, t, n);
+ t = fold_build2 (PLUS_EXPR, itype, s0, q);
+ t = fold_build2 (MIN_EXPR, itype, t, n);
e0 = force_gimple_operand_bsi (&si, t, true, NULL_TREE, true, BSI_SAME_STMT);
t = build2 (GE_EXPR, boolean_type_node, s0, e0);
@@ -3018,19 +3933,27 @@ expand_omp_for_static_nochunk (struct omp_region *region,
/* Setup code for sequential iteration goes in SEQ_START_BB. */
si = bsi_start (seq_start_bb);
- t = fold_convert (type, s0);
- t = fold_build2 (MULT_EXPR, type, t, fd->step);
- t = fold_build2 (PLUS_EXPR, type, t, fd->n1);
+ t = fold_convert (itype, s0);
+ t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
+ if (POINTER_TYPE_P (type))
+ t = fold_build2 (POINTER_PLUS_EXPR, type, fd->loop.n1,
+ fold_convert (sizetype, t));
+ else
+ t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
t = force_gimple_operand_bsi (&si, t, false, NULL_TREE,
false, BSI_CONTINUE_LINKING);
- t = build_gimple_modify_stmt (fd->v, t);
+ t = build_gimple_modify_stmt (fd->loop.v, t);
bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
if (gimple_in_ssa_p (cfun))
- SSA_NAME_DEF_STMT (fd->v) = t;
+ SSA_NAME_DEF_STMT (fd->loop.v) = t;
- t = fold_convert (type, e0);
- t = fold_build2 (MULT_EXPR, type, t, fd->step);
- t = fold_build2 (PLUS_EXPR, type, t, fd->n1);
+ t = fold_convert (itype, e0);
+ t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
+ if (POINTER_TYPE_P (type))
+ t = fold_build2 (POINTER_PLUS_EXPR, type, fd->loop.n1,
+ fold_convert (sizetype, t));
+ else
+ t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
e = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
false, BSI_CONTINUE_LINKING);
@@ -3041,7 +3964,11 @@ expand_omp_for_static_nochunk (struct omp_region *region,
vmain = TREE_OPERAND (t, 1);
vback = TREE_OPERAND (t, 0);
- t = fold_build2 (PLUS_EXPR, type, vmain, fd->step);
+ if (POINTER_TYPE_P (type))
+ t = fold_build2 (POINTER_PLUS_EXPR, type, vmain,
+ fold_convert (sizetype, fd->loop.step));
+ else
+ t = fold_build2 (PLUS_EXPR, type, vmain, fd->loop.step);
t = force_gimple_operand_bsi (&si, t, false, NULL_TREE,
true, BSI_SAME_STMT);
t = build_gimple_modify_stmt (vback, t);
@@ -3049,7 +3976,7 @@ expand_omp_for_static_nochunk (struct omp_region *region,
if (gimple_in_ssa_p (cfun))
SSA_NAME_DEF_STMT (vback) = t;
- t = build2 (fd->cond_code, boolean_type_node, vback, e);
+ t = build2 (fd->loop.cond_code, boolean_type_node, vback, e);
t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
bsi_insert_before (&si, t, BSI_SAME_STMT);
@@ -3090,7 +4017,10 @@ expand_omp_for_static_nochunk (struct omp_region *region,
adj = STEP - 1;
else
adj = STEP + 1;
- n = (adj + N2 - N1) / STEP;
+ if ((__typeof (V)) -1 > 0 && cond is >)
+ n = -(adj + N2 - N1) / -STEP;
+ else
+ n = (adj + N2 - N1) / STEP;
trip = 0;
V = threadid * CHUNK * STEP + N1; -- this extra definition of V is
here so that V is defined
@@ -3113,17 +4043,20 @@ expand_omp_for_static_nochunk (struct omp_region *region,
*/
static void
-expand_omp_for_static_chunk (struct omp_region *region, struct omp_for_data *fd)
+expand_omp_for_static_chunk (struct omp_region *region,
+ struct omp_for_data *fd)
{
tree n, s0, e0, e, t, phi, nphi, args;
tree trip_var, trip_init, trip_main, trip_back, nthreads, threadid;
- tree type, cont, v_main, v_back, v_extra;
+ tree type, itype, cont, v_main, v_back, v_extra;
basic_block entry_bb, exit_bb, body_bb, seq_start_bb, iter_part_bb;
basic_block trip_update_bb, cont_bb, fin_bb;
block_stmt_iterator si;
edge se, re, ene;
- type = TREE_TYPE (fd->v);
+ itype = type = TREE_TYPE (fd->loop.v);
+ if (POINTER_TYPE_P (type))
+ itype = lang_hooks.types.type_for_size (TYPE_PRECISION (type), 0);
entry_bb = region->entry;
se = split_block (entry_bb, last_stmt (entry_bb));
@@ -3146,40 +4079,43 @@ expand_omp_for_static_chunk (struct omp_region *region, struct omp_for_data *fd)
gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_FOR);
t = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_NUM_THREADS], 0);
- t = fold_convert (type, t);
+ t = fold_convert (itype, t);
nthreads = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
true, BSI_SAME_STMT);
t = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM], 0);
- t = fold_convert (type, t);
+ t = fold_convert (itype, t);
threadid = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
true, BSI_SAME_STMT);
- fd->n1 = force_gimple_operand_bsi (&si, fold_convert (type, fd->n1),
- true, NULL_TREE,
- true, BSI_SAME_STMT);
- fd->n2 = force_gimple_operand_bsi (&si, fold_convert (type, fd->n2),
- true, NULL_TREE,
- true, BSI_SAME_STMT);
- fd->step = force_gimple_operand_bsi (&si, fold_convert (type, fd->step),
- true, NULL_TREE,
- true, BSI_SAME_STMT);
+ fd->loop.n1
+ = force_gimple_operand_bsi (&si, fold_convert (type, fd->loop.n1),
+ true, NULL_TREE, true, BSI_SAME_STMT);
+ fd->loop.n2
+ = force_gimple_operand_bsi (&si, fold_convert (itype, fd->loop.n2),
+ true, NULL_TREE, true, BSI_SAME_STMT);
+ fd->loop.step
+ = force_gimple_operand_bsi (&si, fold_convert (itype, fd->loop.step),
+ true, NULL_TREE, true, BSI_SAME_STMT);
fd->chunk_size
- = force_gimple_operand_bsi (&si, fold_convert (type,
- fd->chunk_size),
- true, NULL_TREE,
- true, BSI_SAME_STMT);
-
- t = build_int_cst (type, (fd->cond_code == LT_EXPR ? -1 : 1));
- t = fold_build2 (PLUS_EXPR, type, fd->step, t);
- t = fold_build2 (PLUS_EXPR, type, t, fd->n2);
- t = fold_build2 (MINUS_EXPR, type, t, fd->n1);
- t = fold_build2 (TRUNC_DIV_EXPR, type, t, fd->step);
- t = fold_convert (type, t);
+ = force_gimple_operand_bsi (&si, fold_convert (itype, fd->chunk_size),
+ true, NULL_TREE, true, BSI_SAME_STMT);
+
+ t = build_int_cst (itype, (fd->loop.cond_code == LT_EXPR ? -1 : 1));
+ t = fold_build2 (PLUS_EXPR, itype, fd->loop.step, t);
+ t = fold_build2 (PLUS_EXPR, itype, t, fd->loop.n2);
+ t = fold_build2 (MINUS_EXPR, itype, t, fold_convert (itype, fd->loop.n1));
+ if (TYPE_UNSIGNED (itype) && fd->loop.cond_code == GT_EXPR)
+ t = fold_build2 (TRUNC_DIV_EXPR, itype,
+ fold_build1 (NEGATE_EXPR, itype, t),
+ fold_build1 (NEGATE_EXPR, itype, fd->loop.step));
+ else
+ t = fold_build2 (TRUNC_DIV_EXPR, itype, t, fd->loop.step);
+ t = fold_convert (itype, t);
n = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
true, BSI_SAME_STMT);
- trip_var = create_tmp_var (type, ".trip");
+ trip_var = create_tmp_var (itype, ".trip");
if (gimple_in_ssa_p (cfun))
{
add_referenced_var (trip_var);
@@ -3194,14 +4130,18 @@ expand_omp_for_static_chunk (struct omp_region *region, struct omp_for_data *fd)
trip_back = trip_var;
}
- t = build_gimple_modify_stmt (trip_init, build_int_cst (type, 0));
+ t = build_gimple_modify_stmt (trip_init, build_int_cst (itype, 0));
bsi_insert_before (&si, t, BSI_SAME_STMT);
if (gimple_in_ssa_p (cfun))
SSA_NAME_DEF_STMT (trip_init) = t;
- t = fold_build2 (MULT_EXPR, type, threadid, fd->chunk_size);
- t = fold_build2 (MULT_EXPR, type, t, fd->step);
- t = fold_build2 (PLUS_EXPR, type, t, fd->n1);
+ t = fold_build2 (MULT_EXPR, itype, threadid, fd->chunk_size);
+ t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
+ if (POINTER_TYPE_P (type))
+ t = fold_build2 (POINTER_PLUS_EXPR, type, fd->loop.n1,
+ fold_convert (sizetype, t));
+ else
+ t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
v_extra = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
true, BSI_SAME_STMT);
@@ -3211,14 +4151,14 @@ expand_omp_for_static_chunk (struct omp_region *region, struct omp_for_data *fd)
/* Iteration space partitioning goes in ITER_PART_BB. */
si = bsi_last (iter_part_bb);
- t = fold_build2 (MULT_EXPR, type, trip_main, nthreads);
- t = fold_build2 (PLUS_EXPR, type, t, threadid);
- t = fold_build2 (MULT_EXPR, type, t, fd->chunk_size);
+ t = fold_build2 (MULT_EXPR, itype, trip_main, nthreads);
+ t = fold_build2 (PLUS_EXPR, itype, t, threadid);
+ t = fold_build2 (MULT_EXPR, itype, t, fd->chunk_size);
s0 = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
false, BSI_CONTINUE_LINKING);
- t = fold_build2 (PLUS_EXPR, type, s0, fd->chunk_size);
- t = fold_build2 (MIN_EXPR, type, t, n);
+ t = fold_build2 (PLUS_EXPR, itype, s0, fd->chunk_size);
+ t = fold_build2 (MIN_EXPR, itype, t, n);
e0 = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
false, BSI_CONTINUE_LINKING);
@@ -3229,19 +4169,27 @@ expand_omp_for_static_chunk (struct omp_region *region, struct omp_for_data *fd)
/* Setup code for sequential iteration goes in SEQ_START_BB. */
si = bsi_start (seq_start_bb);
- t = fold_convert (type, s0);
- t = fold_build2 (MULT_EXPR, type, t, fd->step);
- t = fold_build2 (PLUS_EXPR, type, t, fd->n1);
+ t = fold_convert (itype, s0);
+ t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
+ if (POINTER_TYPE_P (type))
+ t = fold_build2 (POINTER_PLUS_EXPR, type, fd->loop.n1,
+ fold_convert (sizetype, t));
+ else
+ t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
t = force_gimple_operand_bsi (&si, t, false, NULL_TREE,
false, BSI_CONTINUE_LINKING);
- t = build_gimple_modify_stmt (fd->v, t);
+ t = build_gimple_modify_stmt (fd->loop.v, t);
bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
if (gimple_in_ssa_p (cfun))
- SSA_NAME_DEF_STMT (fd->v) = t;
+ SSA_NAME_DEF_STMT (fd->loop.v) = t;
- t = fold_convert (type, e0);
- t = fold_build2 (MULT_EXPR, type, t, fd->step);
- t = fold_build2 (PLUS_EXPR, type, t, fd->n1);
+ t = fold_convert (itype, e0);
+ t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
+ if (POINTER_TYPE_P (type))
+ t = fold_build2 (POINTER_PLUS_EXPR, type, fd->loop.n1,
+ fold_convert (sizetype, t));
+ else
+ t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
e = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
false, BSI_CONTINUE_LINKING);
@@ -3253,13 +4201,17 @@ expand_omp_for_static_chunk (struct omp_region *region, struct omp_for_data *fd)
v_main = TREE_OPERAND (cont, 1);
v_back = TREE_OPERAND (cont, 0);
- t = build2 (PLUS_EXPR, type, v_main, fd->step);
+ if (POINTER_TYPE_P (type))
+ t = fold_build2 (POINTER_PLUS_EXPR, type, v_main,
+ fold_convert (sizetype, fd->loop.step));
+ else
+ t = build2 (PLUS_EXPR, type, v_main, fd->loop.step);
t = build_gimple_modify_stmt (v_back, t);
bsi_insert_before (&si, t, BSI_SAME_STMT);
if (gimple_in_ssa_p (cfun))
SSA_NAME_DEF_STMT (v_back) = t;
- t = build2 (fd->cond_code, boolean_type_node, v_back, e);
+ t = build2 (fd->loop.cond_code, boolean_type_node, v_back, e);
t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
bsi_insert_before (&si, t, BSI_SAME_STMT);
@@ -3269,8 +4221,8 @@ expand_omp_for_static_chunk (struct omp_region *region, struct omp_for_data *fd)
/* Trip update code goes into TRIP_UPDATE_BB. */
si = bsi_start (trip_update_bb);
- t = build_int_cst (type, 1);
- t = build2 (PLUS_EXPR, type, trip_main, t);
+ t = build_int_cst (itype, 1);
+ t = build2 (PLUS_EXPR, itype, trip_main, t);
t = build_gimple_modify_stmt (trip_back, t);
bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
if (gimple_in_ssa_p (cfun))
@@ -3313,9 +4265,9 @@ expand_omp_for_static_chunk (struct omp_region *region, struct omp_for_data *fd)
SSA_NAME_DEF_STMT (t) = nphi;
t = PHI_ARG_DEF_FROM_EDGE (phi, se);
- /* A special case -- fd->v is not yet computed in iter_part_bb, we
- need to use v_extra instead. */
- if (t == fd->v)
+ /* A special case -- fd->loop.v is not yet computed in
+ iter_part_bb, we need to use v_extra instead. */
+ if (t == fd->loop.v)
t = v_extra;
add_phi_arg (nphi, t, ene);
add_phi_arg (nphi, TREE_VALUE (args), re);
@@ -3349,8 +4301,14 @@ static void
expand_omp_for (struct omp_region *region)
{
struct omp_for_data fd;
+ struct omp_for_data_loop *loops;
- extract_omp_for_data (last_stmt (region->entry), &fd);
+ loops
+ = (struct omp_for_data_loop *)
+ alloca (TREE_VEC_LENGTH (OMP_FOR_INIT (last_stmt (region->entry)))
+ * sizeof (struct omp_for_data_loop));
+
+ extract_omp_for_data (last_stmt (region->entry), &fd, loops);
region->sched_kind = fd.sched_kind;
gcc_assert (EDGE_COUNT (region->entry->succs) == 2);
@@ -3365,6 +4323,7 @@ expand_omp_for (struct omp_region *region)
if (fd.sched_kind == OMP_CLAUSE_SCHEDULE_STATIC
&& !fd.have_ordered
+ && fd.collapse == 1
&& region->cont != NULL)
{
if (fd.chunk_size == NULL)
@@ -3374,9 +4333,21 @@ expand_omp_for (struct omp_region *region)
}
else
{
- int fn_index = fd.sched_kind + fd.have_ordered * 4;
- int start_ix = BUILT_IN_GOMP_LOOP_STATIC_START + fn_index;
- int next_ix = BUILT_IN_GOMP_LOOP_STATIC_NEXT + fn_index;
+ int fn_index, start_ix, next_ix;
+
+ gcc_assert (fd.sched_kind != OMP_CLAUSE_SCHEDULE_AUTO);
+ fn_index = (fd.sched_kind == OMP_CLAUSE_SCHEDULE_RUNTIME)
+ ? 3 : fd.sched_kind;
+ fn_index += fd.have_ordered * 4;
+ start_ix = BUILT_IN_GOMP_LOOP_STATIC_START + fn_index;
+ next_ix = BUILT_IN_GOMP_LOOP_STATIC_NEXT + fn_index;
+ if (fd.iter_type == long_long_unsigned_type_node)
+ {
+ start_ix += BUILT_IN_GOMP_LOOP_ULL_STATIC_START
+ - BUILT_IN_GOMP_LOOP_STATIC_START;
+ next_ix += BUILT_IN_GOMP_LOOP_ULL_STATIC_NEXT
+ - BUILT_IN_GOMP_LOOP_STATIC_NEXT;
+ }
expand_omp_for_generic (region, &fd, start_ix, next_ix);
}
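A sketch of the index arithmetic above: the GOMP_loop_*_start builtins are laid out so that static/dynamic/guided/runtime are consecutive, the ordered variants sit four entries further on, and the unsigned-long-long family is a fixed offset away. For example:

    /* schedule(dynamic)           -> fn_index 1     -> GOMP_loop_dynamic_start
       schedule(runtime)           -> fn_index 3     -> GOMP_loop_runtime_start
       schedule(guided) + ordered  -> fn_index 2 + 4 -> GOMP_loop_ordered_guided_start
       unsigned long long iterator -> same index within the GOMP_loop_ull_* block.
       schedule(auto) never reaches this point (see the assert above); the
       RUNTIME remap to 3 accounts for AUTO now sitting between GUIDED and
       RUNTIME in the clause enumeration.  */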
@@ -4037,7 +5008,11 @@ expand_omp (struct omp_region *region)
switch (region->type)
{
case OMP_PARALLEL:
- expand_omp_parallel (region);
+ expand_omp_taskreg (region);
+ break;
+
+ case OMP_TASK:
+ expand_omp_taskreg (region);
break;
case OMP_FOR:
@@ -4355,6 +5330,9 @@ lower_omp_single_simple (tree single_stmt, tree *pre_p)
tree t;
t = build_call_expr (built_in_decls[BUILT_IN_GOMP_SINGLE_START], 0);
+ if (TREE_TYPE (t) != boolean_type_node)
+ t = fold_build2 (NE_EXPR, boolean_type_node,
+ t, build_int_cst (TREE_TYPE (t), 0));
t = build3 (COND_EXPR, void_type_node, t,
OMP_SINGLE_BODY (single_stmt), NULL);
gimplify_and_add (t, pre_p);
@@ -4661,37 +5639,38 @@ lower_omp_for_lastprivate (struct omp_for_data *fd, tree *body_p,
tree clauses, cond, stmts, vinit, t;
enum tree_code cond_code;
- cond_code = fd->cond_code;
+ cond_code = fd->loop.cond_code;
cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
/* When possible, use a strict equality expression. This can let VRP
type optimizations deduce the value and remove a copy. */
- if (host_integerp (fd->step, 0))
+ if (host_integerp (fd->loop.step, 0))
{
- HOST_WIDE_INT step = TREE_INT_CST_LOW (fd->step);
+ HOST_WIDE_INT step = TREE_INT_CST_LOW (fd->loop.step);
if (step == 1 || step == -1)
cond_code = EQ_EXPR;
}
- cond = build2 (cond_code, boolean_type_node, fd->v, fd->n2);
+ cond = build2 (cond_code, boolean_type_node, fd->loop.v, fd->loop.n2);
clauses = OMP_FOR_CLAUSES (fd->for_stmt);
stmts = NULL;
lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
if (stmts != NULL)
{
- append_to_statement_list (stmts, dlist);
+ append_to_statement_list (*dlist, &stmts);
+ *dlist = stmts;
/* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
- vinit = fd->n1;
+ vinit = fd->loop.n1;
if (cond_code == EQ_EXPR
- && host_integerp (fd->n2, 0)
- && ! integer_zerop (fd->n2))
- vinit = build_int_cst (TREE_TYPE (fd->v), 0);
+ && host_integerp (fd->loop.n2, 0)
+ && ! integer_zerop (fd->loop.n2))
+ vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
/* Initialize the iterator variable, so that threads that don't execute
any iterations don't execute the lastprivate clauses by accident. */
- t = build_gimple_modify_stmt (fd->v, vinit);
+ t = build_gimple_modify_stmt (fd->loop.v, vinit);
gimplify_and_add (t, body_p);
}
}
@@ -4704,6 +5683,7 @@ lower_omp_for (tree *stmt_p, omp_context *ctx)
{
tree t, stmt, ilist, dlist, new_stmt, *body_p, *rhs_p;
struct omp_for_data fd;
+ int i;
stmt = *stmt_p;
@@ -4724,8 +5704,8 @@ lower_omp_for (tree *stmt_p, omp_context *ctx)
/* The pre-body and input clauses go before the lowered OMP_FOR. */
ilist = NULL;
dlist = NULL;
- append_to_statement_list (OMP_FOR_PRE_BODY (stmt), body_p);
lower_rec_input_clauses (OMP_FOR_CLAUSES (stmt), body_p, &dlist, ctx);
+ append_to_statement_list (OMP_FOR_PRE_BODY (stmt), body_p);
/* Lower the header expressions. At this point, we can assume that
the header is of the form:
@@ -4734,20 +5714,24 @@ lower_omp_for (tree *stmt_p, omp_context *ctx)
We just need to make sure that VAL1, VAL2 and VAL3 are lowered
using the .omp_data_s mapping, if needed. */
- rhs_p = &GIMPLE_STMT_OPERAND (OMP_FOR_INIT (stmt), 1);
- if (!is_gimple_min_invariant (*rhs_p))
- *rhs_p = get_formal_tmp_var (*rhs_p, body_p);
-
- rhs_p = &TREE_OPERAND (OMP_FOR_COND (stmt), 1);
- if (!is_gimple_min_invariant (*rhs_p))
- *rhs_p = get_formal_tmp_var (*rhs_p, body_p);
-
- rhs_p = &TREE_OPERAND (GIMPLE_STMT_OPERAND (OMP_FOR_INCR (stmt), 1), 1);
- if (!is_gimple_min_invariant (*rhs_p))
- *rhs_p = get_formal_tmp_var (*rhs_p, body_p);
+ for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (stmt)); i++)
+ {
+ rhs_p = &GIMPLE_STMT_OPERAND (TREE_VEC_ELT (OMP_FOR_INIT (stmt), i), 1);
+ if (!is_gimple_min_invariant (*rhs_p))
+ *rhs_p = get_formal_tmp_var (*rhs_p, body_p);
+
+ rhs_p = &TREE_OPERAND (TREE_VEC_ELT (OMP_FOR_COND (stmt), i), 1);
+ if (!is_gimple_min_invariant (*rhs_p))
+ *rhs_p = get_formal_tmp_var (*rhs_p, body_p);
+
+ rhs_p = &TREE_OPERAND (GIMPLE_STMT_OPERAND
+ (TREE_VEC_ELT (OMP_FOR_INCR (stmt), i), 1), 1);
+ if (!is_gimple_min_invariant (*rhs_p))
+ *rhs_p = get_formal_tmp_var (*rhs_p, body_p);
+ }
/* Once lowered, extract the bounds and clauses. */
- extract_omp_for_data (stmt, &fd);
+ extract_omp_for_data (stmt, &fd, NULL);
lower_omp_for_lastprivate (&fd, body_p, &dlist, ctx);
@@ -4755,7 +5739,7 @@ lower_omp_for (tree *stmt_p, omp_context *ctx)
append_to_statement_list (OMP_FOR_BODY (stmt), body_p);
- t = build2 (OMP_CONTINUE, void_type_node, fd.v, fd.v);
+ t = build2 (OMP_CONTINUE, void_type_node, fd.loop.v, fd.loop.v);
append_to_statement_list (t, body_p);
/* After the loop, add exit clauses. */
@@ -4800,11 +5784,290 @@ check_combined_parallel (tree *tp, int *walk_subtrees, void *data)
return NULL;
}
-/* Lower the OpenMP parallel directive in *STMT_P. CTX holds context
+struct omp_taskcopy_context
+{
+ /* This field must be at the beginning, as we do "inheritance": Some
+ callback functions for tree-inline.c (e.g., omp_copy_decl)
+ receive a copy_body_data pointer that is up-cast to an
+ omp_taskcopy_context pointer.  */
+ copy_body_data cb;
+ omp_context *ctx;
+};
+
+static tree
+task_copyfn_copy_decl (tree var, copy_body_data *cb)
+{
+ struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
+
+ if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
+ return create_tmp_var (TREE_TYPE (var), NULL);
+
+ return var;
+}
+
+static tree
+task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
+{
+ tree name, new_fields = NULL, type, f;
+
+ type = lang_hooks.types.make_type (RECORD_TYPE);
+ name = DECL_NAME (TYPE_NAME (orig_type));
+ name = build_decl (TYPE_DECL, name, type);
+ TYPE_NAME (type) = name;
+
+ for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
+ {
+ tree new_f = copy_node (f);
+ DECL_CONTEXT (new_f) = type;
+ TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
+ TREE_CHAIN (new_f) = new_fields;
+ walk_tree (&DECL_SIZE (new_f), copy_body_r, &tcctx->cb, NULL);
+ walk_tree (&DECL_SIZE_UNIT (new_f), copy_body_r, &tcctx->cb, NULL);
+ walk_tree (&DECL_FIELD_OFFSET (new_f), copy_body_r, &tcctx->cb, NULL);
+ new_fields = new_f;
+ *pointer_map_insert (tcctx->cb.decl_map, f) = new_f;
+ }
+ TYPE_FIELDS (type) = nreverse (new_fields);
+ layout_type (type);
+ return type;
+}
+
+/* Create task copyfn. */
+
+static void
+create_task_copyfn (tree task_stmt, omp_context *ctx)
+{
+ struct function *child_cfun;
+ tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
+ tree record_type, srecord_type, bind, list;
+ bool record_needs_remap = false, srecord_needs_remap = false;
+ splay_tree_node n;
+ struct omp_taskcopy_context tcctx;
+
+ child_fn = OMP_TASK_COPYFN (task_stmt);
+ child_cfun = DECL_STRUCT_FUNCTION (child_fn);
+ gcc_assert (child_cfun->cfg == NULL);
+ child_cfun->dont_save_pending_sizes_p = 1;
+ DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
+
+ /* Reset DECL_CONTEXT on function arguments. */
+ for (t = DECL_ARGUMENTS (child_fn); t; t = TREE_CHAIN (t))
+ DECL_CONTEXT (t) = child_fn;
+
+ /* Populate the function. */
+ push_gimplify_context ();
+ current_function_decl = child_fn;
+
+ bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
+ TREE_SIDE_EFFECTS (bind) = 1;
+ list = NULL;
+ DECL_SAVED_TREE (child_fn) = bind;
+ DECL_SOURCE_LOCATION (child_fn) = EXPR_LOCATION (task_stmt);
+
+ /* Remap src and dst argument types if needed. */
+ record_type = ctx->record_type;
+ srecord_type = ctx->srecord_type;
+ for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
+ if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
+ {
+ record_needs_remap = true;
+ break;
+ }
+ for (f = TYPE_FIELDS (srecord_type); f ; f = TREE_CHAIN (f))
+ if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
+ {
+ srecord_needs_remap = true;
+ break;
+ }
+
+ if (record_needs_remap || srecord_needs_remap)
+ {
+ memset (&tcctx, '\0', sizeof (tcctx));
+ tcctx.cb.src_fn = ctx->cb.src_fn;
+ tcctx.cb.dst_fn = child_fn;
+ tcctx.cb.src_node = cgraph_node (tcctx.cb.src_fn);
+ tcctx.cb.dst_node = tcctx.cb.src_node;
+ tcctx.cb.src_cfun = ctx->cb.src_cfun;
+ tcctx.cb.copy_decl = task_copyfn_copy_decl;
+ tcctx.cb.eh_region = -1;
+ tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
+ tcctx.cb.decl_map = pointer_map_create ();
+ tcctx.ctx = ctx;
+
+ if (record_needs_remap)
+ record_type = task_copyfn_remap_type (&tcctx, record_type);
+ if (srecord_needs_remap)
+ srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
+ }
+ else
+ tcctx.cb.decl_map = NULL;
+
+ push_cfun (child_cfun);
+
+ arg = DECL_ARGUMENTS (child_fn);
+ TREE_TYPE (arg) = build_pointer_type (record_type);
+ sarg = TREE_CHAIN (arg);
+ TREE_TYPE (sarg) = build_pointer_type (srecord_type);
+
+ /* First pass: initialize temporaries used in record_type and srecord_type
+ sizes and field offsets. */
+ if (tcctx.cb.decl_map)
+ for (c = OMP_TASK_CLAUSES (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
+ if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
+ {
+ tree *p;
+
+ decl = OMP_CLAUSE_DECL (c);
+ p = (tree *) pointer_map_contains (tcctx.cb.decl_map, decl);
+ if (p == NULL)
+ continue;
+ n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
+ sf = (tree) n->value;
+ sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
+ src = build_fold_indirect_ref (sarg);
+ src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
+ t = build_gimple_modify_stmt (*p, src);
+ append_to_statement_list (t, &list);
+ }
+
+ /* Second pass: copy shared var pointers and copy construct non-VLA
+ firstprivate vars. */
+ for (c = OMP_TASK_CLAUSES (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
+ switch (OMP_CLAUSE_CODE (c))
+ {
+ case OMP_CLAUSE_SHARED:
+ decl = OMP_CLAUSE_DECL (c);
+ n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
+ if (n == NULL)
+ break;
+ f = (tree) n->value;
+ if (tcctx.cb.decl_map)
+ f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
+ n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
+ sf = (tree) n->value;
+ if (tcctx.cb.decl_map)
+ sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
+ src = build_fold_indirect_ref (sarg);
+ src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
+ dst = build_fold_indirect_ref (arg);
+ dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
+ t = build_gimple_modify_stmt (dst, src);
+ append_to_statement_list (t, &list);
+ break;
+ case OMP_CLAUSE_FIRSTPRIVATE:
+ decl = OMP_CLAUSE_DECL (c);
+ if (is_variable_sized (decl))
+ break;
+ n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
+ if (n == NULL)
+ break;
+ f = (tree) n->value;
+ if (tcctx.cb.decl_map)
+ f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
+ n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
+ if (n != NULL)
+ {
+ sf = (tree) n->value;
+ if (tcctx.cb.decl_map)
+ sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
+ src = build_fold_indirect_ref (sarg);
+ src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
+ if (use_pointer_for_field (decl, NULL) || is_reference (decl))
+ src = build_fold_indirect_ref (src);
+ }
+ else
+ src = decl;
+ dst = build_fold_indirect_ref (arg);
+ dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
+ t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
+ append_to_statement_list (t, &list);
+ break;
+ case OMP_CLAUSE_PRIVATE:
+ if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
+ break;
+ decl = OMP_CLAUSE_DECL (c);
+ n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
+ f = (tree) n->value;
+ if (tcctx.cb.decl_map)
+ f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
+ n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
+ if (n != NULL)
+ {
+ sf = (tree) n->value;
+ if (tcctx.cb.decl_map)
+ sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
+ src = build_fold_indirect_ref (sarg);
+ src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
+ if (use_pointer_for_field (decl, NULL))
+ src = build_fold_indirect_ref (src);
+ }
+ else
+ src = decl;
+ dst = build_fold_indirect_ref (arg);
+ dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
+ t = build_gimple_modify_stmt (dst, src);
+ append_to_statement_list (t, &list);
+ break;
+ default:
+ break;
+ }
+
+ /* Last pass: handle VLA firstprivates. */
+ if (tcctx.cb.decl_map)
+ for (c = OMP_TASK_CLAUSES (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
+ if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
+ {
+ tree ind, ptr, df;
+
+ decl = OMP_CLAUSE_DECL (c);
+ if (!is_variable_sized (decl))
+ continue;
+ n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
+ if (n == NULL)
+ continue;
+ f = (tree) n->value;
+ f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
+ gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
+ ind = DECL_VALUE_EXPR (decl);
+ gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
+ gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
+ n = splay_tree_lookup (ctx->sfield_map,
+ (splay_tree_key) TREE_OPERAND (ind, 0));
+ sf = (tree) n->value;
+ sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
+ src = build_fold_indirect_ref (sarg);
+ src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
+ src = build_fold_indirect_ref (src);
+ dst = build_fold_indirect_ref (arg);
+ dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
+ t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
+ append_to_statement_list (t, &list);
+ n = splay_tree_lookup (ctx->field_map,
+ (splay_tree_key) TREE_OPERAND (ind, 0));
+ df = (tree) n->value;
+ df = *(tree *) pointer_map_contains (tcctx.cb.decl_map, df);
+ ptr = build_fold_indirect_ref (arg);
+ ptr = build3 (COMPONENT_REF, TREE_TYPE (df), ptr, df, NULL);
+ t = build_gimple_modify_stmt (ptr, build_fold_addr_expr (dst));
+ append_to_statement_list (t, &list);
+ }
+
+ t = build1 (RETURN_EXPR, void_type_node, NULL);
+ append_to_statement_list (t, &list);
+
+ if (tcctx.cb.decl_map)
+ pointer_map_destroy (tcctx.cb.decl_map);
+ pop_gimplify_context (NULL);
+ BIND_EXPR_BODY (bind) = list;
+ pop_cfun ();
+ current_function_decl = ctx->cb.src_fn;
+}
+
+/* Lower the OpenMP parallel or task directive in *STMT_P. CTX holds context
information for the directive. */
static void
-lower_omp_parallel (tree *stmt_p, omp_context *ctx)
+lower_omp_taskreg (tree *stmt_p, omp_context *ctx)
{
tree clauses, par_bind, par_body, new_body, bind;
tree olist, ilist, par_olist, par_ilist;
@@ -4812,11 +6075,11 @@ lower_omp_parallel (tree *stmt_p, omp_context *ctx)
stmt = *stmt_p;
- clauses = OMP_PARALLEL_CLAUSES (stmt);
- par_bind = OMP_PARALLEL_BODY (stmt);
+ clauses = OMP_TASKREG_CLAUSES (stmt);
+ par_bind = OMP_TASKREG_BODY (stmt);
par_body = BIND_EXPR_BODY (par_bind);
child_fn = ctx->cb.dst_fn;
- if (!OMP_PARALLEL_COMBINED (stmt))
+ if (TREE_CODE (stmt) == OMP_PARALLEL && !OMP_PARALLEL_COMBINED (stmt))
{
struct walk_stmt_info wi;
int ws_num = 0;
@@ -4829,6 +6092,8 @@ lower_omp_parallel (tree *stmt_p, omp_context *ctx)
if (ws_num == 1)
OMP_PARALLEL_COMBINED (stmt) = 1;
}
+ if (ctx->srecord_type)
+ create_task_copyfn (stmt, ctx);
push_gimplify_context ();
@@ -4836,7 +6101,8 @@ lower_omp_parallel (tree *stmt_p, omp_context *ctx)
par_ilist = NULL_TREE;
lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx);
lower_omp (&par_body, ctx);
- lower_reduction_clauses (clauses, &par_olist, ctx);
+ if (TREE_CODE (stmt) == OMP_PARALLEL)
+ lower_reduction_clauses (clauses, &par_olist, ctx);
/* Declare all the variables created by mapping and the variables
declared in the scope of the parallel body. */
@@ -4845,8 +6111,10 @@ lower_omp_parallel (tree *stmt_p, omp_context *ctx)
if (ctx->record_type)
{
- ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_data_o");
- OMP_PARALLEL_DATA_ARG (stmt) = ctx->sender_decl;
+ ctx->sender_decl
+ = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
+ : ctx->record_type, ".omp_data_o");
+ OMP_TASKREG_DATA_ARG (stmt) = ctx->sender_decl;
}
olist = NULL_TREE;
@@ -4855,7 +6123,7 @@ lower_omp_parallel (tree *stmt_p, omp_context *ctx)
lower_send_shared_vars (&ilist, &olist, ctx);
/* Once all the expansions are done, sequence all the different
- fragments inside OMP_PARALLEL_BODY. */
+ fragments inside OMP_TASKREG_BODY. */
bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
append_to_statement_list (ilist, &BIND_EXPR_BODY (bind));
@@ -4876,7 +6144,7 @@ lower_omp_parallel (tree *stmt_p, omp_context *ctx)
maybe_catch_exception (&new_body);
t = make_node (OMP_RETURN);
append_to_statement_list (t, &new_body);
- OMP_PARALLEL_BODY (stmt) = new_body;
+ OMP_TASKREG_BODY (stmt) = new_body;
append_to_statement_list (stmt, &BIND_EXPR_BODY (bind));
append_to_statement_list (olist, &BIND_EXPR_BODY (bind));
@@ -4890,17 +6158,21 @@ lower_omp_parallel (tree *stmt_p, omp_context *ctx)
regimplified. */
static tree
-lower_omp_2 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
+lower_omp_2 (tree *tp, int *walk_subtrees, void *data)
{
tree t = *tp;
+ omp_context *ctx = data;
/* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
- if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
+ if (TREE_CODE (t) == VAR_DECL
+ && ((ctx && DECL_HAS_VALUE_EXPR_P (t))
+ || (task_shared_vars
+ && bitmap_bit_p (task_shared_vars, DECL_UID (t)))))
return t;
/* If a global variable has been privatized, TREE_CONSTANT on
ADDR_EXPR might be wrong. */
- if (TREE_CODE (t) == ADDR_EXPR)
+ if (ctx && TREE_CODE (t) == ADDR_EXPR)
recompute_tree_invariant_for_addr_expr (t);
*walk_subtrees = !TYPE_P (t) && !DECL_P (t);
@@ -4940,7 +6212,7 @@ lower_omp_1 (tree *tp, omp_context *ctx, tree_stmt_iterator *tsi)
case COND_EXPR:
lower_omp_1 (&COND_EXPR_THEN (t), ctx, NULL);
lower_omp_1 (&COND_EXPR_ELSE (t), ctx, NULL);
- if (ctx
+ if ((ctx || task_shared_vars)
&& walk_tree (&COND_EXPR_COND (t), lower_omp_2, ctx, NULL))
{
tree pre = NULL;
@@ -4977,8 +6249,9 @@ lower_omp_1 (tree *tp, omp_context *ctx, tree_stmt_iterator *tsi)
break;
case OMP_PARALLEL:
+ case OMP_TASK:
ctx = maybe_lookup_ctx (t);
- lower_omp_parallel (tp, ctx);
+ lower_omp_taskreg (tp, ctx);
break;
case OMP_FOR:
ctx = maybe_lookup_ctx (t);
@@ -5012,7 +6285,8 @@ lower_omp_1 (tree *tp, omp_context *ctx, tree_stmt_iterator *tsi)
break;
default:
- if (ctx && walk_tree (tp, lower_omp_2, ctx, NULL))
+ if ((ctx || task_shared_vars)
+ && walk_tree (tp, lower_omp_2, ctx, NULL))
{
/* The gimplifier doesn't gimplify CALL_EXPR_STATIC_CHAIN.
Handle that here. */
@@ -5068,16 +6342,23 @@ execute_lower_omp (void)
delete_omp_context);
scan_omp (&DECL_SAVED_TREE (current_function_decl), NULL);
- gcc_assert (parallel_nesting_level == 0);
+ gcc_assert (taskreg_nesting_level == 0);
if (all_contexts->root)
- lower_omp (&DECL_SAVED_TREE (current_function_decl), NULL);
+ {
+ if (task_shared_vars)
+ push_gimplify_context ();
+ lower_omp (&DECL_SAVED_TREE (current_function_decl), NULL);
+ if (task_shared_vars)
+ pop_gimplify_context (NULL);
+ }
if (all_contexts)
{
splay_tree_delete (all_contexts);
all_contexts = NULL;
}
+ BITMAP_FREE (task_shared_vars);
return 0;
}
@@ -5160,11 +6441,13 @@ diagnose_sb_1 (tree *tp, int *walk_subtrees, void *data)
tree context = (tree) wi->info;
tree inner_context;
tree t = *tp;
+ int i;
*walk_subtrees = 0;
switch (TREE_CODE (t))
{
case OMP_PARALLEL:
+ case OMP_TASK:
case OMP_SECTIONS:
case OMP_SINGLE:
walk_tree (&OMP_CLAUSES (t), diagnose_sb_1, wi, NULL);
@@ -5184,9 +6467,15 @@ diagnose_sb_1 (tree *tp, int *walk_subtrees, void *data)
walk_tree (&OMP_FOR_CLAUSES (t), diagnose_sb_1, wi, NULL);
inner_context = tree_cons (NULL, t, context);
wi->info = inner_context;
- walk_tree (&OMP_FOR_INIT (t), diagnose_sb_1, wi, NULL);
- walk_tree (&OMP_FOR_COND (t), diagnose_sb_1, wi, NULL);
- walk_tree (&OMP_FOR_INCR (t), diagnose_sb_1, wi, NULL);
+ for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (t)); i++)
+ {
+ walk_tree (&TREE_VEC_ELT (OMP_FOR_INIT (t), i), diagnose_sb_1,
+ wi, NULL);
+ walk_tree (&TREE_VEC_ELT (OMP_FOR_COND (t), i), diagnose_sb_1,
+ wi, NULL);
+ walk_tree (&TREE_VEC_ELT (OMP_FOR_INCR (t), i), diagnose_sb_1,
+ wi, NULL);
+ }
walk_stmts (wi, &OMP_FOR_PRE_BODY (t));
walk_stmts (wi, &OMP_FOR_BODY (t));
wi->info = context;
@@ -5214,11 +6503,13 @@ diagnose_sb_2 (tree *tp, int *walk_subtrees, void *data)
tree context = (tree) wi->info;
splay_tree_node n;
tree t = *tp;
+ int i;
*walk_subtrees = 0;
switch (TREE_CODE (t))
{
case OMP_PARALLEL:
+ case OMP_TASK:
case OMP_SECTIONS:
case OMP_SINGLE:
walk_tree (&OMP_CLAUSES (t), diagnose_sb_2, wi, NULL);
@@ -5235,9 +6526,15 @@ diagnose_sb_2 (tree *tp, int *walk_subtrees, void *data)
case OMP_FOR:
walk_tree (&OMP_FOR_CLAUSES (t), diagnose_sb_2, wi, NULL);
wi->info = t;
- walk_tree (&OMP_FOR_INIT (t), diagnose_sb_2, wi, NULL);
- walk_tree (&OMP_FOR_COND (t), diagnose_sb_2, wi, NULL);
- walk_tree (&OMP_FOR_INCR (t), diagnose_sb_2, wi, NULL);
+ for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (t)); i++)
+ {
+ walk_tree (&TREE_VEC_ELT (OMP_FOR_INIT (t), i), diagnose_sb_2,
+ wi, NULL);
+ walk_tree (&TREE_VEC_ELT (OMP_FOR_COND (t), i), diagnose_sb_2,
+ wi, NULL);
+ walk_tree (&TREE_VEC_ELT (OMP_FOR_INCR (t), i), diagnose_sb_2,
+ wi, NULL);
+ }
walk_stmts (wi, &OMP_FOR_PRE_BODY (t));
walk_stmts (wi, &OMP_FOR_BODY (t));
wi->info = context;
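
The new create_task_copyfn above builds, for each #pragma omp task with firstprivate or copy-constructed data, a helper that fills the task's argument block in three passes: first the temporaries for record/srecord sizes and field offsets, then shared pointers and non-VLA firstprivate copies, and finally VLA firstprivates. A minimal user-level sketch of source that exercises the VLA path (illustrative only; produce and consume are assumed helpers, not from the patch):

/* A task whose firstprivate set includes a variable-length array: the
   array contents are captured at task creation time by the generated
   copy function, because the task may not execute immediately.  */
void produce (int *, int);
void consume (const int *, int);

void
task_vla_example (int n)
{
  int vla[n];
  produce (vla, n);
#pragma omp task firstprivate (vla, n)
  consume (vla, n);
#pragma omp taskwait
}
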
diff --git a/gcc/testsuite/ChangeLog b/gcc/testsuite/ChangeLog
index fb43d809e49..73e4a5e6c2a 100644
--- a/gcc/testsuite/ChangeLog
+++ b/gcc/testsuite/ChangeLog
@@ -1,3 +1,27 @@
+2008-06-06 Jakub Jelinek <jakub@redhat.com>
+
+ * gcc.dg/gomp/collapse-1.c: New test.
+ * gcc.dg/gomp/nesting-1.c: New test.
+ * g++.dg/gomp/task-1.C: New test.
+ * g++.dg/gomp/predetermined-1.C: New test.
+ * g++.dg/gomp/tls-4.C: New test.
+ * gfortran.dg/gomp/collapse1.f90: New test.
+ * gfortran.dg/gomp/sharing-3.f90: New test.
+ * gcc.dg/gomp/pr27499.c (foo): Remove "is unsigned" dg-warning.
+ * g++.dg/gomp/pr27499.C (foo): Likewise.
+ * g++.dg/gomp/for-16.C (foo): Likewise.
+ * g++.dg/gomp/tls-3.C: Remove dg-error, add S::s definition.
+ * g++.dg/gomp/pr34607.C: Adjust dg-error location.
+ * g++.dg/gomp/for-16.C (foo): Add a new dg-error.
+ * gcc.dg/gomp/appendix-a/a.35.4.c: Add dg-warning.
+ * gcc.dg/gomp/appendix-a/a.35.6.c: Likewise.
+ * gfortran.dg/gomp/appendix-a/a.35.4.f90: Likewise.
+ * gfortran.dg/gomp/appendix-a/a.35.6.f90: Likewise.
+ * gfortran.dg/gomp/omp_parse1.f90: Remove !$omp tab test.
+ * gfortran.dg/gomp/appendix-a/a.33.4.f90: Remove dg-error
+ about allocatable array.
+ * gfortran.dg/gomp/reduction1.f90: Likewise.
+
2008-06-06 Richard Guenther <rguenther@suse.de>
* gcc.dg/tree-ssa/alias-18.c: XFAIL some sub-tests.
diff --git a/gcc/testsuite/g++.dg/gomp/for-16.C b/gcc/testsuite/g++.dg/gomp/for-16.C
index 76231751f70..dbbed8fe505 100644
--- a/gcc/testsuite/g++.dg/gomp/for-16.C
+++ b/gcc/testsuite/g++.dg/gomp/for-16.C
@@ -4,7 +4,7 @@ template<typename T>
void foo ()
{
#pragma omp for
- for (unsigned int i = 0; i < 10; i++); // { dg-warning "is unsigned" }
+ for (unsigned int i = 0; i < 10; i++);
#pragma omp for
for (int j = 0; ; j++); // { dg-error "missing controlling predicate" }
#pragma omp for
@@ -12,8 +12,7 @@ void foo ()
#pragma omp for
for (int l = 0; l < 10; ); // { dg-error "missing increment expression" }
#pragma omp for
- for (int m = 0; m < 10; m *= 3); // Error here is emitted only during
- // instantiation
+ for (int m = 0; m < 10; m *= 3); // { dg-error "invalid increment expression" }
#pragma omp for
for (T n = 0; ; n++); // { dg-error "missing controlling predicate" }
#pragma omp for
diff --git a/gcc/testsuite/g++.dg/gomp/pr27499.C b/gcc/testsuite/g++.dg/gomp/pr27499.C
index 293ef8fb08e..4e0d5b1a5b8 100644
--- a/gcc/testsuite/g++.dg/gomp/pr27499.C
+++ b/gcc/testsuite/g++.dg/gomp/pr27499.C
@@ -8,6 +8,6 @@ foo (void)
{
unsigned int i;
#pragma omp for
- for (i = 0; i < 64; ++i) // { dg-warning "is unsigned" }
+ for (i = 0; i < 64; ++i)
bar (i);
}
diff --git a/gcc/testsuite/g++.dg/gomp/pr34607.C b/gcc/testsuite/g++.dg/gomp/pr34607.C
index 1dbba4a7414..f032aa45d50 100644
--- a/gcc/testsuite/g++.dg/gomp/pr34607.C
+++ b/gcc/testsuite/g++.dg/gomp/pr34607.C
@@ -13,6 +13,6 @@ foo ()
;
T j; // { dg-error "was not declared|expected" }
#pragma omp for
- for (j = 1; j < 3; j++) // { dg-error "was not declared" }
- ; // { dg-error "expected" }
+ for (j = 1; j < 3; j++) // { dg-error "was not declared|expected" }
+ ;
}
diff --git a/gcc/testsuite/g++.dg/gomp/predetermined-1.C b/gcc/testsuite/g++.dg/gomp/predetermined-1.C
new file mode 100644
index 00000000000..dd09855de97
--- /dev/null
+++ b/gcc/testsuite/g++.dg/gomp/predetermined-1.C
@@ -0,0 +1,33 @@
+// { dg-do compile }
+// { dg-options "-fopenmp" }
+
+struct A { int i; A (); ~A (); };
+struct B { int i; };
+struct C { int i; mutable int j; C (); ~C (); };
+
+template <typename T> void bar (const T *);
+
+const A a;
+const C c;
+
+const A foo (const A d, const C e)
+{
+ const A f;
+ const B b = { 4 };
+ A g;
+ #pragma omp parallel default (none)
+ bar (&a);
+ #pragma omp parallel default (none)
+ bar (&b);
+ #pragma omp parallel default (none) // { dg-error "enclosing parallel" }
+ bar (&c); // { dg-error "not specified" }
+ #pragma omp parallel default (none)
+ bar (&d);
+ #pragma omp parallel default (none) // { dg-error "enclosing parallel" }
+ bar (&e); // { dg-error "not specified" }
+ #pragma omp parallel default (none)
+ bar (&f);
+ #pragma omp parallel default (none) // { dg-error "enclosing parallel" }
+ bar (&g); // { dg-error "not specified" }
+ return f;
+}
diff --git a/gcc/testsuite/g++.dg/gomp/task-1.C b/gcc/testsuite/g++.dg/gomp/task-1.C
new file mode 100644
index 00000000000..0000e6f1fa9
--- /dev/null
+++ b/gcc/testsuite/g++.dg/gomp/task-1.C
@@ -0,0 +1,17 @@
+// { dg-do compile }
+// { dg-options "-fopenmp" }
+
+struct A { A (); ~A (); int i; };
+
+template <typename T> void bar (T &);
+
+const A a;
+
+void foo (A &p)
+{
+ const A &q = a;
+#pragma omp task // { dg-error "has reference type" }
+ bar (p);
+#pragma omp task // { dg-error "has reference type" }
+ bar (q);
+}
diff --git a/gcc/testsuite/g++.dg/gomp/tls-3.C b/gcc/testsuite/g++.dg/gomp/tls-3.C
index 96baec9453e..04f6bbea408 100644
--- a/gcc/testsuite/g++.dg/gomp/tls-3.C
+++ b/gcc/testsuite/g++.dg/gomp/tls-3.C
@@ -13,9 +13,11 @@ namespace N
struct S
{
static int s;
-#pragma omp thr (s) // { dg-error "is not file, namespace or block scope" }
+#pragma omp thr (s)
};
+int S::s = 5;
+
int
foo ()
{
diff --git a/gcc/testsuite/g++.dg/gomp/tls-4.C b/gcc/testsuite/g++.dg/gomp/tls-4.C
new file mode 100644
index 00000000000..e4377c5e4c0
--- /dev/null
+++ b/gcc/testsuite/g++.dg/gomp/tls-4.C
@@ -0,0 +1,16 @@
+// { dg-do compile }
+// { dg-require-effective-target tls_native }
+
+#define thr threadprivate
+
+struct S
+{
+ static int s;
+};
+struct T : public S
+{
+ static int t;
+#pragma omp thr (s) // { dg-error "directive not in" }
+};
+
+#pragma omp thr (T::t) // { dg-error "directive not in" }
diff --git a/gcc/testsuite/gcc.dg/gomp/appendix-a/a.35.4.c b/gcc/testsuite/gcc.dg/gomp/appendix-a/a.35.4.c
index 88824031cc2..d7579e6e735 100644
--- a/gcc/testsuite/gcc.dg/gomp/appendix-a/a.35.4.c
+++ b/gcc/testsuite/gcc.dg/gomp/appendix-a/a.35.4.c
@@ -11,7 +11,7 @@ wrong4 (int n)
{
work (i, 0);
/* incorrect nesting of barrier region in a loop region */
-#pragma omp barrier
+#pragma omp barrier /* { dg-warning "may not be closely nested" } */
work (i, 1);
}
}
diff --git a/gcc/testsuite/gcc.dg/gomp/appendix-a/a.35.6.c b/gcc/testsuite/gcc.dg/gomp/appendix-a/a.35.6.c
index 6385db30897..ac850e5410a 100644
--- a/gcc/testsuite/gcc.dg/gomp/appendix-a/a.35.6.c
+++ b/gcc/testsuite/gcc.dg/gomp/appendix-a/a.35.6.c
@@ -9,7 +9,7 @@ wrong6 (int n)
{
work (n, 0);
/* incorrect nesting of barrier region in a single region */
-#pragma omp barrier
+#pragma omp barrier /* { dg-warning "may not be closely nested" } */
work (n, 1);
}
}
diff --git a/gcc/testsuite/gcc.dg/gomp/collapse-1.c b/gcc/testsuite/gcc.dg/gomp/collapse-1.c
new file mode 100644
index 00000000000..89b76bb669c
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/gomp/collapse-1.c
@@ -0,0 +1,92 @@
+/* { dg-do compile } */
+/* { dg-options "-fopenmp" } */
+
+int i, j, k;
+extern int foo (void);
+
+void
+f1 (void)
+{
+ #pragma omp for collapse (2)
+ for (i = 0; i < 5; i++)
+ ; /* { dg-error "not enough perfectly nested" } */
+ {
+ for (j = 0; j < 5; j++)
+ ;
+ }
+}
+
+void
+f2 (void)
+{
+ #pragma omp for collapse (2)
+ for (i = 0; i < 5; i++)
+ {
+ {
+ {
+ for (j = 0; j < 5; j++)
+ {
+ }
+ }
+ }
+ }
+}
+
+void
+f3 (void)
+{
+ #pragma omp for collapse (2)
+ for (i = 0; i < 5; i++)
+ {
+ int k = foo (); /* { dg-error "not enough perfectly nested" } */
+ {
+ {
+ for (j = 0; j < 5; j++)
+ {
+ }
+ }
+ }
+ }
+}
+
+void
+f4 (void)
+{
+ #pragma omp for collapse (2)
+ for (i = 0; i < 5; i++)
+ {
+ {
+ for (j = 0; j < 5; j++)
+ ;
+ foo (); /* { dg-error "collapsed loops not perfectly nested before" } */
+ }
+ }
+}
+
+void
+f5 (void)
+{
+ #pragma omp for collapse (2)
+ for (i = 0; i < 5; i++)
+ {
+ {
+ for (j = 0; j < 5; j++)
+ ;
+ }
+ foo (); /* { dg-error "collapsed loops not perfectly nested before" } */
+ }
+}
+
+void
+f6 (void)
+{
+ #pragma omp for collapse (2)
+ for (i = 0; i < 5; i++)
+ {
+ {
+ for (j = 0; j < 5; j++)
+ ;
+ }
+ }
+ foo ();
+}
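
collapse-1.c checks the diagnostics for loops that are not perfectly nested; for contrast, a conforming collapse(2) nest (illustrative, not part of the test) that the new support accepts and flattens into a single 25-iteration space looks like this:

int b[5][5];

void
collapse_ok (void)
{
  int i, j;
  #pragma omp parallel for collapse (2)
  for (i = 0; i < 5; i++)
    for (j = 0; j < 5; j++)        /* nothing between or after the loops */
      b[i][j] = i * 5 + j;
}
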
diff --git a/gcc/testsuite/gcc.dg/gomp/nesting-1.c b/gcc/testsuite/gcc.dg/gomp/nesting-1.c
new file mode 100644
index 00000000000..6f27b907e6b
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/gomp/nesting-1.c
@@ -0,0 +1,198 @@
+/* { dg-do compile } */
+/* { dg-options "-fopenmp" } */
+
+void
+f1 (void)
+{
+ int i, j;
+ #pragma omp for
+ for (i = 0; i < 3; i++)
+ {
+ #pragma omp for /* { dg-warning "may not be closely nested" } */
+ for (j = 0; j < 3; j++)
+ ;
+ #pragma omp sections /* { dg-warning "may not be closely nested" } */
+ {
+ ;
+ #pragma omp section
+ ;
+ }
+ #pragma omp single /* { dg-warning "may not be closely nested" } */
+ ;
+ #pragma omp master /* { dg-warning "may not be closely nested" } */
+ ;
+ #pragma omp barrier /* { dg-warning "may not be closely nested" } */
+ }
+ #pragma omp sections
+ {
+ #pragma omp for /* { dg-warning "may not be closely nested" } */
+ for (j = 0; j < 3; j++)
+ ;
+ #pragma omp sections /* { dg-warning "may not be closely nested" } */
+ {
+ ;
+ #pragma omp section
+ ;
+ }
+ #pragma omp single /* { dg-warning "may not be closely nested" } */
+ ;
+ #pragma omp master /* { dg-warning "may not be closely nested" } */
+ ;
+ #pragma omp section
+ ;
+ }
+ #pragma omp single
+ {
+ #pragma omp for /* { dg-warning "may not be closely nested" } */
+ for (j = 0; j < 3; j++)
+ ;
+ #pragma omp sections /* { dg-warning "may not be closely nested" } */
+ {
+ ;
+ #pragma omp section
+ ;
+ }
+ #pragma omp single /* { dg-warning "may not be closely nested" } */
+ ;
+ #pragma omp master /* { dg-warning "may not be closely nested" } */
+ ;
+ #pragma omp barrier /* { dg-warning "may not be closely nested" } */
+ }
+ #pragma omp master
+ {
+ #pragma omp for /* { dg-warning "may not be closely nested" } */
+ for (j = 0; j < 3; j++)
+ ;
+ #pragma omp sections /* { dg-warning "may not be closely nested" } */
+ {
+ ;
+ #pragma omp section
+ ;
+ }
+ #pragma omp single /* { dg-warning "may not be closely nested" } */
+ ;
+ #pragma omp master
+ ;
+ #pragma omp barrier /* { dg-warning "may not be closely nested" } */
+ }
+ #pragma omp task
+ {
+ #pragma omp for /* { dg-warning "may not be closely nested" } */
+ for (j = 0; j < 3; j++)
+ ;
+ #pragma omp sections /* { dg-warning "may not be closely nested" } */
+ {
+ ;
+ #pragma omp section
+ ;
+ }
+ #pragma omp single /* { dg-warning "may not be closely nested" } */
+ ;
+ #pragma omp master /* { dg-warning "may not be closely nested" } */
+ ;
+ #pragma omp barrier /* { dg-warning "may not be closely nested" } */
+ }
+ #pragma omp parallel
+ {
+ #pragma omp for
+ for (j = 0; j < 3; j++)
+ ;
+ #pragma omp sections
+ {
+ ;
+ #pragma omp section
+ ;
+ }
+ #pragma omp single
+ ;
+ #pragma omp master
+ ;
+ #pragma omp barrier
+ }
+}
+
+void
+f2 (void)
+{
+ int i, j;
+ #pragma omp ordered
+ {
+ #pragma omp for /* { dg-warning "may not be closely nested" } */
+ for (j = 0; j < 3; j++)
+ ;
+ #pragma omp sections /* { dg-warning "may not be closely nested" } */
+ {
+ ;
+ #pragma omp section
+ ;
+ }
+ #pragma omp single /* { dg-warning "may not be closely nested" } */
+ ;
+ #pragma omp master
+ ;
+ #pragma omp barrier /* { dg-warning "may not be closely nested" } */
+ }
+}
+
+void
+f3 (void)
+{
+ #pragma omp critical
+ {
+ #pragma omp ordered /* { dg-warning "may not be closely nested" } */
+ ;
+ }
+}
+
+void
+f4 (void)
+{
+ #pragma omp task
+ {
+ #pragma omp ordered /* { dg-warning "may not be closely nested" } */
+ ;
+ }
+}
+
+void
+f5 (void)
+{
+ int i;
+ #pragma omp for
+ for (i = 0; i < 10; i++)
+ {
+ #pragma omp ordered /* { dg-warning "must be closely nested" } */
+ ;
+ }
+ #pragma omp for ordered
+ for (i = 0; i < 10; i++)
+ {
+ #pragma omp ordered
+ ;
+ }
+}
+
+void
+f6 (void)
+{
+ #pragma omp critical (foo)
+ #pragma omp critical (bar)
+ ;
+ #pragma omp critical
+ #pragma omp critical (baz)
+ ;
+}
+
+void
+f7 (void)
+{
+ #pragma omp critical (foo2)
+ #pragma omp critical
+ ;
+ #pragma omp critical (bar)
+ #pragma omp critical (bar) /* { dg-warning "may not be nested" } */
+ ;
+ #pragma omp critical
+ #pragma omp critical /* { dg-warning "may not be nested" } */
+ ;
+}
diff --git a/gcc/testsuite/gcc.dg/gomp/pr27499.c b/gcc/testsuite/gcc.dg/gomp/pr27499.c
index e8c1db496d7..0de2e0686f1 100644
--- a/gcc/testsuite/gcc.dg/gomp/pr27499.c
+++ b/gcc/testsuite/gcc.dg/gomp/pr27499.c
@@ -8,6 +8,6 @@ foo (void)
{
unsigned int i;
#pragma omp parallel for
- for (i = 0; i < 64; ++i) /* { dg-warning "is unsigned" } */
+ for (i = 0; i < 64; ++i)
bar (i);
}
diff --git a/gcc/testsuite/gfortran.dg/gomp/appendix-a/a.33.4.f90 b/gcc/testsuite/gfortran.dg/gomp/appendix-a/a.33.4.f90
index 9685b5939c8..7a9e1840b24 100644
--- a/gcc/testsuite/gfortran.dg/gomp/appendix-a/a.33.4.f90
+++ b/gcc/testsuite/gfortran.dg/gomp/appendix-a/a.33.4.f90
@@ -4,7 +4,7 @@
REAL, DIMENSION(:), ALLOCATABLE :: A
REAL, DIMENSION(:), POINTER :: B
ALLOCATE (A(N))
-!$OMP SINGLE ! { dg-error "COPYPRIVATE clause object 'a'" }
+!$OMP SINGLE
ALLOCATE (B(N))
READ (11) A,B
!$OMP END SINGLE COPYPRIVATE(A,B)
diff --git a/gcc/testsuite/gfortran.dg/gomp/appendix-a/a.35.4.f90 b/gcc/testsuite/gfortran.dg/gomp/appendix-a/a.35.4.f90
index e44952263f1..f130dd5f480 100644
--- a/gcc/testsuite/gfortran.dg/gomp/appendix-a/a.35.4.f90
+++ b/gcc/testsuite/gfortran.dg/gomp/appendix-a/a.35.4.f90
@@ -8,7 +8,7 @@
DO I = 1, N
CALL WORK(I, 1)
! incorrect nesting of barrier region in a loop region
-!$OMP BARRIER
+!$OMP BARRIER ! { dg-warning "may not be closely nested" }
CALL WORK(I, 2)
END DO
!$OMP END PARALLEL
diff --git a/gcc/testsuite/gfortran.dg/gomp/appendix-a/a.35.6.f90 b/gcc/testsuite/gfortran.dg/gomp/appendix-a/a.35.6.f90
index 0488537dd10..62ba245236b 100644
--- a/gcc/testsuite/gfortran.dg/gomp/appendix-a/a.35.6.f90
+++ b/gcc/testsuite/gfortran.dg/gomp/appendix-a/a.35.6.f90
@@ -6,7 +6,7 @@
!$OMP SINGLE
CALL WORK(N,1)
! incorrect nesting of barrier region in a single region
-!$OMP BARRIER
+!$OMP BARRIER ! { dg-warning "may not be closely nested" }
CALL WORK(N,2)
!$OMP END SINGLE
!$OMP END PARALLEL
diff --git a/gcc/testsuite/gfortran.dg/gomp/collapse1.f90 b/gcc/testsuite/gfortran.dg/gomp/collapse1.f90
new file mode 100644
index 00000000000..f16a780ad99
--- /dev/null
+++ b/gcc/testsuite/gfortran.dg/gomp/collapse1.f90
@@ -0,0 +1,57 @@
+! { dg-do compile }
+! { dg-options "-fopenmp" }
+
+subroutine collapse1
+ integer :: i, j, k, a(1:3, 4:6, 5:7)
+ real :: r
+ logical :: l
+ integer, save :: thr
+ !$omp threadprivate (thr)
+ l = .false.
+ a(:, :, :) = 0
+ !$omp parallel do collapse(4) schedule(static, 4) ! { dg-error "not enough DO loops for collapsed" }
+ do i = 1, 3
+ do j = 4, 6
+ do k = 5, 7
+ a(i, j, k) = i + j + k
+ end do
+ end do
+ end do
+ !$omp parallel do collapse(2)
+ do i = 1, 5, 2
+ do j = i + 1, 7, i ! { dg-error "collapsed loops don.t form rectangular iteration space" }
+ end do
+ end do
+ !$omp parallel do collapse(2) shared(j)
+ do i = 1, 3
+ do j = 4, 6 ! { dg-error "iteration variable present on clause other than PRIVATE or LASTPRIVATE" }
+ end do
+ end do
+ !$omp parallel do collapse(2)
+ do i = 1, 3
+ do j = 4, 6
+ end do
+ k = 4
+ end do
+ !$omp parallel do collapse(2)
+ do i = 1, 3
+ do ! { dg-error "cannot be a DO WHILE or DO without loop control" }
+ end do
+ end do
+ !$omp parallel do collapse(2)
+ do i = 1, 3
+ do r = 4, 6 ! { dg-warning "must be integer" }
+ end do
+ end do
+end subroutine collapse1
+
+subroutine collapse1_2
+ integer :: i
+ !$omp parallel do collapse(2)
+ do i = -6, 6 ! { dg-error "cannot be redefined inside loop beginning" }
+ do i = 4, 6 ! { dg-error "collapsed loops don.t form rectangular iteration space|cannot be redefined" }
+ end do
+ end do
+end subroutine collapse1_2
+
+! { dg-error "iteration variable must be of type integer" "integer" { target *-*-* } 43 }
diff --git a/gcc/testsuite/gfortran.dg/gomp/omp_parse1.f90 b/gcc/testsuite/gfortran.dg/gomp/omp_parse1.f90
index d4137cd11ec..3ab43670762 100644
--- a/gcc/testsuite/gfortran.dg/gomp/omp_parse1.f90
+++ b/gcc/testsuite/gfortran.dg/gomp/omp_parse1.f90
@@ -14,10 +14,6 @@ call bar
!$omp rallel
call bar
!$omp end parallel
-! Non-continuation !$omp must be followed by space, and my reading
-! doesn't seem to allow tab there. So such lines should be completely
-! ignored.
-!$omp strange ! { dg-warning "starts a commented line" }
end
! { dg-final { scan-tree-dump-times "pragma omp parallel" 3 "omplower" } }
diff --git a/gcc/testsuite/gfortran.dg/gomp/reduction1.f90 b/gcc/testsuite/gfortran.dg/gomp/reduction1.f90
index 108e5dc4155..9c55d173c11 100644
--- a/gcc/testsuite/gfortran.dg/gomp/reduction1.f90
+++ b/gcc/testsuite/gfortran.dg/gomp/reduction1.f90
@@ -56,7 +56,7 @@ common /blk/ i1
!$omp end parallel
!$omp parallel reduction (*:p1) ! { dg-error "POINTER object" }
!$omp end parallel
-!$omp parallel reduction (-:aa1) ! { dg-error "is ALLOCATABLE" }
+!$omp parallel reduction (-:aa1)
!$omp end parallel
!$omp parallel reduction (*:ia1) ! { dg-error "Assumed size" }
!$omp end parallel
diff --git a/gcc/testsuite/gfortran.dg/gomp/sharing-3.f90 b/gcc/testsuite/gfortran.dg/gomp/sharing-3.f90
new file mode 100644
index 00000000000..5c1581454b2
--- /dev/null
+++ b/gcc/testsuite/gfortran.dg/gomp/sharing-3.f90
@@ -0,0 +1,37 @@
+! { dg-do compile }
+! { dg-options "-fopenmp" }
+
+subroutine foo (vara, varb, varc, vard, n)
+ integer :: n, vara(n), varb(*), varc(:), vard(6), vare(6)
+ vare(:) = 0
+ !$omp parallel default(none) shared(vara, varb, varc, vard, vare)
+ !$omp master
+ vara(1) = 1
+ varb(1) = 1
+ varc(1) = 1
+ vard(1) = 1
+ vare(1) = 1
+ !$omp end master
+ !$omp end parallel
+ !$omp parallel default(none) private(vara, varc, vard, vare)
+ vara(1) = 1
+ varc(1) = 1
+ vard(1) = 1
+ vare(1) = 1
+ !$omp end parallel
+ !$omp parallel default(none) firstprivate(vara, varc, vard, vare)
+ vara(1) = 1
+ varc(1) = 1
+ vard(1) = 1
+ vare(1) = 1
+ !$omp end parallel
+ !$omp parallel default(none) ! { dg-error "enclosing parallel" }
+ !$omp master
+ vara(1) = 1 ! { dg-error "not specified" }
+ varb(1) = 1 ! Assumed-size is predetermined
+ varc(1) = 1 ! { dg-error "not specified" "" { xfail *-*-* } }
+ vard(1) = 1 ! { dg-error "not specified" }
+ vare(1) = 1 ! { dg-error "not specified" }
+ !$omp end master
+ !$omp end parallel
+end subroutine foo
diff --git a/gcc/tree-cfg.c b/gcc/tree-cfg.c
index c0cb7b87488..3034ba339e1 100644
--- a/gcc/tree-cfg.c
+++ b/gcc/tree-cfg.c
@@ -523,6 +523,7 @@ make_edges (void)
break;
case OMP_PARALLEL:
+ case OMP_TASK:
case OMP_FOR:
case OMP_SINGLE:
case OMP_MASTER:
@@ -1936,16 +1937,17 @@ remove_useless_stmts_1 (tree *tp, struct rus_data *data)
break;
case OMP_PARALLEL:
+ case OMP_TASK:
/* Make sure the outermost BIND_EXPR in OMP_BODY isn't removed
as useless. */
- remove_useless_stmts_1 (&BIND_EXPR_BODY (OMP_BODY (*tp)), data);
+ remove_useless_stmts_1 (&BIND_EXPR_BODY (OMP_TASKREG_BODY (*tp)), data);
data->last_goto = NULL;
break;
case OMP_SECTIONS:
case OMP_SINGLE:
case OMP_SECTION:
- case OMP_MASTER :
+ case OMP_MASTER:
case OMP_ORDERED:
case OMP_CRITICAL:
remove_useless_stmts_1 (&OMP_BODY (*tp), data);
diff --git a/gcc/tree-gimple.c b/gcc/tree-gimple.c
index 2334e126343..8b05f93d505 100644
--- a/gcc/tree-gimple.c
+++ b/gcc/tree-gimple.c
@@ -322,6 +322,7 @@ is_gimple_stmt (tree t)
case OMP_CRITICAL:
case OMP_RETURN:
case OMP_CONTINUE:
+ case OMP_TASK:
case OMP_ATOMIC_LOAD:
case OMP_ATOMIC_STORE:
/* These are always void. */
diff --git a/gcc/tree-inline.c b/gcc/tree-inline.c
index 48a6a2366ee..a9ca33b14d4 100644
--- a/gcc/tree-inline.c
+++ b/gcc/tree-inline.c
@@ -2478,6 +2478,7 @@ estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
}
case OMP_PARALLEL:
+ case OMP_TASK:
case OMP_FOR:
case OMP_SECTIONS:
case OMP_SINGLE:
diff --git a/gcc/tree-nested.c b/gcc/tree-nested.c
index 562e39a173a..be5e87ca316 100644
--- a/gcc/tree-nested.c
+++ b/gcc/tree-nested.c
@@ -677,6 +677,7 @@ walk_omp_for (walk_tree_fn callback, struct nesting_info *info, tree for_stmt)
{
struct walk_stmt_info wi;
tree t, list = NULL, empty;
+ int i;
walk_body (callback, info, &OMP_FOR_PRE_BODY (for_stmt));
@@ -687,36 +688,39 @@ walk_omp_for (walk_tree_fn callback, struct nesting_info *info, tree for_stmt)
wi.info = info;
wi.tsi = tsi_last (list);
- t = OMP_FOR_INIT (for_stmt);
- gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
- SET_EXPR_LOCUS (empty, EXPR_LOCUS (t));
- wi.val_only = false;
- walk_tree (&GIMPLE_STMT_OPERAND (t, 0), callback, &wi, NULL);
- wi.val_only = true;
- wi.is_lhs = false;
- walk_tree (&GIMPLE_STMT_OPERAND (t, 1), callback, &wi, NULL);
-
- t = OMP_FOR_COND (for_stmt);
- gcc_assert (COMPARISON_CLASS_P (t));
- SET_EXPR_LOCUS (empty, EXPR_LOCUS (t));
- wi.val_only = false;
- walk_tree (&TREE_OPERAND (t, 0), callback, &wi, NULL);
- wi.val_only = true;
- wi.is_lhs = false;
- walk_tree (&TREE_OPERAND (t, 1), callback, &wi, NULL);
-
- t = OMP_FOR_INCR (for_stmt);
- gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
- SET_EXPR_LOCUS (empty, EXPR_LOCUS (t));
- wi.val_only = false;
- walk_tree (&GIMPLE_STMT_OPERAND (t, 0), callback, &wi, NULL);
- t = GIMPLE_STMT_OPERAND (t, 1);
- gcc_assert (BINARY_CLASS_P (t));
- wi.val_only = false;
- walk_tree (&TREE_OPERAND (t, 0), callback, &wi, NULL);
- wi.val_only = true;
- wi.is_lhs = false;
- walk_tree (&TREE_OPERAND (t, 1), callback, &wi, NULL);
+ for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
+ {
+ t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
+ gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
+ SET_EXPR_LOCUS (empty, EXPR_LOCUS (t));
+ wi.val_only = false;
+ walk_tree (&GIMPLE_STMT_OPERAND (t, 0), callback, &wi, NULL);
+ wi.val_only = true;
+ wi.is_lhs = false;
+ walk_tree (&GIMPLE_STMT_OPERAND (t, 1), callback, &wi, NULL);
+
+ t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
+ gcc_assert (COMPARISON_CLASS_P (t));
+ SET_EXPR_LOCUS (empty, EXPR_LOCUS (t));
+ wi.val_only = false;
+ walk_tree (&TREE_OPERAND (t, 0), callback, &wi, NULL);
+ wi.val_only = true;
+ wi.is_lhs = false;
+ walk_tree (&TREE_OPERAND (t, 1), callback, &wi, NULL);
+
+ t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
+ gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
+ SET_EXPR_LOCUS (empty, EXPR_LOCUS (t));
+ wi.val_only = false;
+ walk_tree (&GIMPLE_STMT_OPERAND (t, 0), callback, &wi, NULL);
+ t = GIMPLE_STMT_OPERAND (t, 1);
+ gcc_assert (BINARY_CLASS_P (t));
+ wi.val_only = false;
+ walk_tree (&TREE_OPERAND (t, 0), callback, &wi, NULL);
+ wi.val_only = true;
+ wi.is_lhs = false;
+ walk_tree (&TREE_OPERAND (t, 1), callback, &wi, NULL);
+ }
/* Remove empty statement added above from the end of statement list. */
tsi_delink (&wi.tsi);
@@ -1100,24 +1104,25 @@ convert_nonlocal_reference (tree *tp, int *walk_subtrees, void *data)
break;
case OMP_PARALLEL:
+ case OMP_TASK:
save_suppress = info->suppress_expansion;
- if (convert_nonlocal_omp_clauses (&OMP_PARALLEL_CLAUSES (t), wi))
+ if (convert_nonlocal_omp_clauses (&OMP_TASKREG_CLAUSES (t), wi))
{
tree c, decl;
decl = get_chain_decl (info);
c = build_omp_clause (OMP_CLAUSE_FIRSTPRIVATE);
OMP_CLAUSE_DECL (c) = decl;
- OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (t);
- OMP_PARALLEL_CLAUSES (t) = c;
+ OMP_CLAUSE_CHAIN (c) = OMP_TASKREG_CLAUSES (t);
+ OMP_TASKREG_CLAUSES (t) = c;
}
save_local_var_chain = info->new_local_var_chain;
info->new_local_var_chain = NULL;
- walk_body (convert_nonlocal_reference, info, &OMP_PARALLEL_BODY (t));
+ walk_body (convert_nonlocal_reference, info, &OMP_TASKREG_BODY (t));
if (info->new_local_var_chain)
- declare_vars (info->new_local_var_chain, OMP_PARALLEL_BODY (t), false);
+ declare_vars (info->new_local_var_chain, OMP_TASKREG_BODY (t), false);
info->new_local_var_chain = save_local_var_chain;
info->suppress_expansion = save_suppress;
break;
@@ -1161,7 +1166,7 @@ static bool
convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
struct nesting_info *info = wi->info;
- bool need_chain = false;
+ bool need_chain = false, need_stmts = false;
tree clause, decl;
int dummy;
bitmap new_suppress;
@@ -1173,13 +1178,25 @@ convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
switch (OMP_CLAUSE_CODE (clause))
{
+ case OMP_CLAUSE_REDUCTION:
+ if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
+ need_stmts = true;
+ goto do_decl_clause;
+
+ case OMP_CLAUSE_LASTPRIVATE:
+ if (OMP_CLAUSE_LASTPRIVATE_STMT (clause))
+ need_stmts = true;
+ goto do_decl_clause;
+
case OMP_CLAUSE_PRIVATE:
case OMP_CLAUSE_FIRSTPRIVATE:
- case OMP_CLAUSE_LASTPRIVATE:
- case OMP_CLAUSE_REDUCTION:
case OMP_CLAUSE_COPYPRIVATE:
case OMP_CLAUSE_SHARED:
+ do_decl_clause:
decl = OMP_CLAUSE_DECL (clause);
+ if (TREE_CODE (decl) == VAR_DECL
+ && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
+ break;
if (decl_function_context (decl) != info->context)
{
bitmap_set_bit (new_suppress, DECL_UID (decl));
@@ -1204,6 +1221,8 @@ convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
case OMP_CLAUSE_ORDERED:
case OMP_CLAUSE_DEFAULT:
case OMP_CLAUSE_COPYIN:
+ case OMP_CLAUSE_COLLAPSE:
+ case OMP_CLAUSE_UNTIED:
break;
default:
@@ -1213,6 +1232,35 @@ convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
info->suppress_expansion = new_suppress;
+ if (need_stmts)
+ for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
+ switch (OMP_CLAUSE_CODE (clause))
+ {
+ case OMP_CLAUSE_REDUCTION:
+ if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
+ {
+ tree old_context
+ = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
+ DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
+ = info->context;
+ walk_body (convert_nonlocal_reference, info,
+ &OMP_CLAUSE_REDUCTION_INIT (clause));
+ walk_body (convert_nonlocal_reference, info,
+ &OMP_CLAUSE_REDUCTION_MERGE (clause));
+ DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
+ = old_context;
+ }
+ break;
+
+ case OMP_CLAUSE_LASTPRIVATE:
+ walk_body (convert_nonlocal_reference, info,
+ &OMP_CLAUSE_LASTPRIVATE_STMT (clause));
+ break;
+
+ default:
+ break;
+ }
+
return need_chain;
}
@@ -1392,24 +1440,25 @@ convert_local_reference (tree *tp, int *walk_subtrees, void *data)
break;
case OMP_PARALLEL:
+ case OMP_TASK:
save_suppress = info->suppress_expansion;
- if (convert_local_omp_clauses (&OMP_PARALLEL_CLAUSES (t), wi))
+ if (convert_local_omp_clauses (&OMP_TASKREG_CLAUSES (t), wi))
{
tree c;
(void) get_frame_type (info);
c = build_omp_clause (OMP_CLAUSE_SHARED);
OMP_CLAUSE_DECL (c) = info->frame_decl;
- OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (t);
- OMP_PARALLEL_CLAUSES (t) = c;
+ OMP_CLAUSE_CHAIN (c) = OMP_TASKREG_CLAUSES (t);
+ OMP_TASKREG_CLAUSES (t) = c;
}
save_local_var_chain = info->new_local_var_chain;
info->new_local_var_chain = NULL;
- walk_body (convert_local_reference, info, &OMP_PARALLEL_BODY (t));
+ walk_body (convert_local_reference, info, &OMP_TASKREG_BODY (t));
if (info->new_local_var_chain)
- declare_vars (info->new_local_var_chain, OMP_PARALLEL_BODY (t), false);
+ declare_vars (info->new_local_var_chain, OMP_TASKREG_BODY (t), false);
info->new_local_var_chain = save_local_var_chain;
info->suppress_expansion = save_suppress;
break;
@@ -1453,7 +1502,7 @@ static bool
convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
struct nesting_info *info = wi->info;
- bool need_frame = false;
+ bool need_frame = false, need_stmts = false;
tree clause, decl;
int dummy;
bitmap new_suppress;
@@ -1465,13 +1514,25 @@ convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
switch (OMP_CLAUSE_CODE (clause))
{
+ case OMP_CLAUSE_REDUCTION:
+ if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
+ need_stmts = true;
+ goto do_decl_clause;
+
+ case OMP_CLAUSE_LASTPRIVATE:
+ if (OMP_CLAUSE_LASTPRIVATE_STMT (clause))
+ need_stmts = true;
+ goto do_decl_clause;
+
case OMP_CLAUSE_PRIVATE:
case OMP_CLAUSE_FIRSTPRIVATE:
- case OMP_CLAUSE_LASTPRIVATE:
- case OMP_CLAUSE_REDUCTION:
case OMP_CLAUSE_COPYPRIVATE:
case OMP_CLAUSE_SHARED:
+ do_decl_clause:
decl = OMP_CLAUSE_DECL (clause);
+ if (TREE_CODE (decl) == VAR_DECL
+ && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
+ break;
if (decl_function_context (decl) == info->context
&& !use_pointer_in_frame (decl))
{
@@ -1501,6 +1562,8 @@ convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
case OMP_CLAUSE_ORDERED:
case OMP_CLAUSE_DEFAULT:
case OMP_CLAUSE_COPYIN:
+ case OMP_CLAUSE_COLLAPSE:
+ case OMP_CLAUSE_UNTIED:
break;
default:
@@ -1510,6 +1573,35 @@ convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
info->suppress_expansion = new_suppress;
+ if (need_stmts)
+ for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
+ switch (OMP_CLAUSE_CODE (clause))
+ {
+ case OMP_CLAUSE_REDUCTION:
+ if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
+ {
+ tree old_context
+ = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
+ DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
+ = info->context;
+ walk_body (convert_local_reference, info,
+ &OMP_CLAUSE_REDUCTION_INIT (clause));
+ walk_body (convert_local_reference, info,
+ &OMP_CLAUSE_REDUCTION_MERGE (clause));
+ DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
+ = old_context;
+ }
+ break;
+
+ case OMP_CLAUSE_LASTPRIVATE:
+ walk_body (convert_local_reference, info,
+ &OMP_CLAUSE_LASTPRIVATE_STMT (clause));
+ break;
+
+ default:
+ break;
+ }
+
return need_frame;
}
@@ -1731,9 +1823,10 @@ convert_call_expr (tree *tp, int *walk_subtrees, void *data)
break;
case OMP_PARALLEL:
+ case OMP_TASK:
save_static_chain_added = info->static_chain_added;
info->static_chain_added = 0;
- walk_body (convert_call_expr, info, &OMP_PARALLEL_BODY (t));
+ walk_body (convert_call_expr, info, &OMP_TASKREG_BODY (t));
for (i = 0; i < 2; i++)
{
tree c, decl;
@@ -1741,7 +1834,7 @@ convert_call_expr (tree *tp, int *walk_subtrees, void *data)
continue;
decl = i ? get_chain_decl (info) : info->frame_decl;
/* Don't add CHAIN.* or FRAME.* twice. */
- for (c = OMP_PARALLEL_CLAUSES (t); c; c = OMP_CLAUSE_CHAIN (c))
+ for (c = OMP_TASKREG_CLAUSES (t); c; c = OMP_CLAUSE_CHAIN (c))
if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
|| OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
&& OMP_CLAUSE_DECL (c) == decl)
@@ -1751,8 +1844,8 @@ convert_call_expr (tree *tp, int *walk_subtrees, void *data)
c = build_omp_clause (i ? OMP_CLAUSE_FIRSTPRIVATE
: OMP_CLAUSE_SHARED);
OMP_CLAUSE_DECL (c) = decl;
- OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (t);
- OMP_PARALLEL_CLAUSES (t) = c;
+ OMP_CLAUSE_CHAIN (c) = OMP_TASKREG_CLAUSES (t);
+ OMP_TASKREG_CLAUSES (t) = c;
}
}
info->static_chain_added |= save_static_chain_added;
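
The tree-nested.c changes matter when an OpenMP region sits inside a GNU C nested function: clause decls owned by the enclosing function, and the new REDUCTION_INIT/MERGE and LASTPRIVATE_STMT bodies, must be rewritten to go through the static chain or frame object. A hedged sketch of the kind of input involved (illustrative GNU C, not taken from the testsuite):

/* `last' and `n' belong to outer, so references to them from the
   OpenMP region inside inner are non-local and are rewritten by
   convert_nonlocal_reference / convert_nonlocal_omp_clauses.  */
int
outer (int n)
{
  int last = 0;
  void inner (void)
  {
    int i;
#pragma omp parallel for lastprivate (last)
    for (i = 0; i < n; i++)
      last = i;
  }
  inner ();
  return last;
}
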
diff --git a/gcc/tree-parloops.c b/gcc/tree-parloops.c
index 8754432eb30..8344093ba6b 100644
--- a/gcc/tree-parloops.c
+++ b/gcc/tree-parloops.c
@@ -1617,13 +1617,16 @@ create_parallel_loop (struct loop *loop, tree loop_fn, tree data,
for_stmt = make_node (OMP_FOR);
TREE_TYPE (for_stmt) = void_type_node;
OMP_FOR_CLAUSES (for_stmt) = t;
- OMP_FOR_INIT (for_stmt) = build_gimple_modify_stmt (initvar, cvar_init);
- OMP_FOR_COND (for_stmt) = cond;
- OMP_FOR_INCR (for_stmt) = build_gimple_modify_stmt (cvar_base,
- build2 (PLUS_EXPR, type,
- cvar_base,
- build_int_cst
- (type, 1)));
+ OMP_FOR_INIT (for_stmt) = make_tree_vec (1);
+ TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0)
+ = build_gimple_modify_stmt (initvar, cvar_init);
+ OMP_FOR_COND (for_stmt) = make_tree_vec (1);
+ TREE_VEC_ELT (OMP_FOR_COND (for_stmt), 0) = cond;
+ OMP_FOR_INCR (for_stmt) = make_tree_vec (2);
+ TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), 0)
+ = build_gimple_modify_stmt (cvar_base,
+ build2 (PLUS_EXPR, type, cvar_base,
+ build_int_cst (type, 1)));
OMP_FOR_BODY (for_stmt) = NULL_TREE;
OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
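
With collapse support, OMP_FOR_INIT, OMP_FOR_COND and OMP_FOR_INCR are now TREE_VECs holding one control statement per collapsed loop, which is why tree-parloops.c wraps its single init/cond/incr statements in length-one vectors above, and why every consumer in this patch indexes the three vectors in lockstep. A minimal sketch of that iteration pattern (illustrative helper only, using the accessors that appear in the patch):

/* Walk the per-loop control statements of an OMP_FOR the same way the
   updated diagnose_sb_1/diagnose_sb_2 and get_expr_operands do.  */
static void
visit_omp_for_controls (tree for_stmt, void (*visit) (tree))
{
  int i;
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      visit (TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i));
      visit (TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i));
      visit (TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i));
    }
}
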
diff --git a/gcc/tree-pretty-print.c b/gcc/tree-pretty-print.c
index a15e49b5759..2f7de8f1c7d 100644
--- a/gcc/tree-pretty-print.c
+++ b/gcc/tree-pretty-print.c
@@ -334,19 +334,22 @@ dump_omp_clause (pretty_printer *buffer, tree clause, int spc, int flags)
pp_string (buffer, "default(");
switch (OMP_CLAUSE_DEFAULT_KIND (clause))
{
- case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
- break;
- case OMP_CLAUSE_DEFAULT_SHARED:
- pp_string (buffer, "shared");
- break;
- case OMP_CLAUSE_DEFAULT_NONE:
- pp_string (buffer, "none");
- break;
- case OMP_CLAUSE_DEFAULT_PRIVATE:
- pp_string (buffer, "private");
- break;
- default:
- gcc_unreachable ();
+ case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
+ break;
+ case OMP_CLAUSE_DEFAULT_SHARED:
+ pp_string (buffer, "shared");
+ break;
+ case OMP_CLAUSE_DEFAULT_NONE:
+ pp_string (buffer, "none");
+ break;
+ case OMP_CLAUSE_DEFAULT_PRIVATE:
+ pp_string (buffer, "private");
+ break;
+ case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
+ pp_string (buffer, "firstprivate");
+ break;
+ default:
+ gcc_unreachable ();
}
pp_character (buffer, ')');
break;
@@ -367,6 +370,9 @@ dump_omp_clause (pretty_printer *buffer, tree clause, int spc, int flags)
case OMP_CLAUSE_SCHEDULE_RUNTIME:
pp_string (buffer, "runtime");
break;
+ case OMP_CLAUSE_SCHEDULE_AUTO:
+ pp_string (buffer, "auto");
+ break;
default:
gcc_unreachable ();
}
@@ -380,6 +386,18 @@ dump_omp_clause (pretty_printer *buffer, tree clause, int spc, int flags)
pp_character (buffer, ')');
break;
+ case OMP_CLAUSE_UNTIED:
+ pp_string (buffer, "untied");
+ break;
+
+ case OMP_CLAUSE_COLLAPSE:
+ pp_string (buffer, "collapse(");
+ dump_generic_node (buffer,
+ OMP_CLAUSE_COLLAPSE_EXPR (clause),
+ spc, flags, false);
+ pp_character (buffer, ')');
+ break;
+
default:
/* Should never happen. */
dump_generic_node (buffer, clause, spc, flags, false);
@@ -1863,12 +1881,41 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
is_expr = false;
break;
+ case OMP_TASK:
+ pp_string (buffer, "#pragma omp task");
+ dump_omp_clauses (buffer, OMP_TASK_CLAUSES (node), spc, flags);
+ if (OMP_TASK_FN (node))
+ {
+ pp_string (buffer, " [child fn: ");
+ dump_generic_node (buffer, OMP_TASK_FN (node), spc, flags, false);
+
+ pp_string (buffer, " (");
+
+ if (OMP_TASK_DATA_ARG (node))
+ dump_generic_node (buffer, OMP_TASK_DATA_ARG (node), spc, flags,
+ false);
+ else
+ pp_string (buffer, "???");
+
+ pp_character (buffer, ')');
+ if (OMP_TASK_COPYFN (node))
+ {
+ pp_string (buffer, ", copy fn: ");
+ dump_generic_node (buffer, OMP_TASK_COPYFN (node), spc,
+ flags, false);
+ }
+ pp_character (buffer, ']');
+ }
+ goto dump_omp_body;
+
case OMP_FOR:
pp_string (buffer, "#pragma omp for");
dump_omp_clauses (buffer, OMP_FOR_CLAUSES (node), spc, flags);
if (!(flags & TDF_SLIM))
{
+ int i;
+
if (OMP_FOR_PRE_BODY (node))
{
newline_and_indent (buffer, spc + 2);
@@ -1878,14 +1925,22 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
dump_generic_node (buffer, OMP_FOR_PRE_BODY (node),
spc, flags, false);
}
- newline_and_indent (buffer, spc);
- pp_string (buffer, "for (");
- dump_generic_node (buffer, OMP_FOR_INIT (node), spc, flags, false);
- pp_string (buffer, "; ");
- dump_generic_node (buffer, OMP_FOR_COND (node), spc, flags, false);
- pp_string (buffer, "; ");
- dump_generic_node (buffer, OMP_FOR_INCR (node), spc, flags, false);
- pp_string (buffer, ")");
+ spc -= 2;
+ for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (node)); i++)
+ {
+ spc += 2;
+ newline_and_indent (buffer, spc);
+ pp_string (buffer, "for (");
+ dump_generic_node (buffer, TREE_VEC_ELT (OMP_FOR_INIT (node), i),
+ spc, flags, false);
+ pp_string (buffer, "; ");
+ dump_generic_node (buffer, TREE_VEC_ELT (OMP_FOR_COND (node), i),
+ spc, flags, false);
+ pp_string (buffer, "; ");
+ dump_generic_node (buffer, TREE_VEC_ELT (OMP_FOR_INCR (node), i),
+ spc, flags, false);
+ pp_string (buffer, ")");
+ }
if (OMP_FOR_BODY (node))
{
newline_and_indent (buffer, spc + 2);
@@ -1896,6 +1951,7 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
newline_and_indent (buffer, spc + 2);
pp_character (buffer, '}');
}
+ spc -= 2 * TREE_VEC_LENGTH (OMP_FOR_INIT (node)) - 2;
if (OMP_FOR_PRE_BODY (node))
{
spc -= 4;
diff --git a/gcc/tree-ssa-operands.c b/gcc/tree-ssa-operands.c
index 102ed5af3d4..71fb883f84a 100644
--- a/gcc/tree-ssa-operands.c
+++ b/gcc/tree-ssa-operands.c
@@ -2093,17 +2093,22 @@ get_expr_operands (tree stmt, tree *expr_p, int flags)
case OMP_FOR:
{
- tree init = OMP_FOR_INIT (expr);
- tree cond = OMP_FOR_COND (expr);
- tree incr = OMP_FOR_INCR (expr);
tree c, clauses = OMP_FOR_CLAUSES (stmt);
+ int i;
- get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (init, 0), opf_def);
- get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (init, 1), opf_use);
- get_expr_operands (stmt, &TREE_OPERAND (cond, 1), opf_use);
- get_expr_operands (stmt,
- &TREE_OPERAND (GIMPLE_STMT_OPERAND (incr, 1), 1),
- opf_use);
+ for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (expr)); i++)
+ {
+ tree init = TREE_VEC_ELT (OMP_FOR_INIT (expr), i);
+ tree cond = TREE_VEC_ELT (OMP_FOR_COND (expr), i);
+ tree incr = TREE_VEC_ELT (OMP_FOR_INCR (expr), i);
+
+ get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (init, 0), opf_def);
+ get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (init, 1), opf_use);
+ get_expr_operands (stmt, &TREE_OPERAND (cond, 1), opf_use);
+ get_expr_operands (stmt,
+ &TREE_OPERAND (GIMPLE_STMT_OPERAND (incr, 1),
+ 1), opf_use);
+ }
c = find_omp_clause (clauses, OMP_CLAUSE_SCHEDULE);
if (c)
diff --git a/gcc/tree.c b/gcc/tree.c
index 2d0ddc33073..8bf2b7718af 100644
--- a/gcc/tree.c
+++ b/gcc/tree.c
@@ -177,7 +177,7 @@ unsigned const char omp_clause_num_ops[] =
1, /* OMP_CLAUSE_PRIVATE */
1, /* OMP_CLAUSE_SHARED */
1, /* OMP_CLAUSE_FIRSTPRIVATE */
- 1, /* OMP_CLAUSE_LASTPRIVATE */
+ 2, /* OMP_CLAUSE_LASTPRIVATE */
4, /* OMP_CLAUSE_REDUCTION */
1, /* OMP_CLAUSE_COPYIN */
1, /* OMP_CLAUSE_COPYPRIVATE */
@@ -186,7 +186,9 @@ unsigned const char omp_clause_num_ops[] =
1, /* OMP_CLAUSE_SCHEDULE */
0, /* OMP_CLAUSE_NOWAIT */
0, /* OMP_CLAUSE_ORDERED */
- 0 /* OMP_CLAUSE_DEFAULT */
+ 0, /* OMP_CLAUSE_DEFAULT */
+ 3, /* OMP_CLAUSE_COLLAPSE */
+ 0 /* OMP_CLAUSE_UNTIED */
};
const char * const omp_clause_code_name[] =
@@ -204,7 +206,9 @@ const char * const omp_clause_code_name[] =
"schedule",
"nowait",
"ordered",
- "default"
+ "default",
+ "collapse",
+ "untied"
};
/* Init tree.c. */
@@ -8452,7 +8456,6 @@ walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
case OMP_CLAUSE_PRIVATE:
case OMP_CLAUSE_SHARED:
case OMP_CLAUSE_FIRSTPRIVATE:
- case OMP_CLAUSE_LASTPRIVATE:
case OMP_CLAUSE_COPYIN:
case OMP_CLAUSE_COPYPRIVATE:
case OMP_CLAUSE_IF:
@@ -8464,8 +8467,22 @@ walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
case OMP_CLAUSE_NOWAIT:
case OMP_CLAUSE_ORDERED:
case OMP_CLAUSE_DEFAULT:
+ case OMP_CLAUSE_UNTIED:
WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
+ case OMP_CLAUSE_LASTPRIVATE:
+ WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
+ WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
+ WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
+
+ case OMP_CLAUSE_COLLAPSE:
+ {
+ int i;
+ for (i = 0; i < 3; i++)
+ WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
+ WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
+ }
+
case OMP_CLAUSE_REDUCTION:
{
int i;
diff --git a/gcc/tree.def b/gcc/tree.def
index 2c0e9f63d5b..7474f2e68f1 100644
--- a/gcc/tree.def
+++ b/gcc/tree.def
@@ -1001,6 +1001,23 @@ DEFTREECODE (TARGET_MEM_REF, "target_mem_ref", tcc_reference, 7)
DEFTREECODE (OMP_PARALLEL, "omp_parallel", tcc_statement, 4)
+/* OpenMP - #pragma omp task [clause1 ... clauseN]
+ Operand 0: OMP_TASK_BODY: Code to be executed by the task.
+ Operand 1: OMP_TASK_CLAUSES: List of clauses.
+ Operand 2: OMP_TASK_FN: FUNCTION_DECL used when outlining the
+ body of the task region. Only valid after
+ pass_lower_omp.
+ Operand 3: OMP_TASK_DATA_ARG: Local variable in the parent
+ function containing data to be shared with the child
+ function.
+ Operand 4: OMP_TASK_COPYFN: FUNCTION_DECL used for constructing
+ firstprivate variables.
+ Operand 5: OMP_TASK_ARG_SIZE: Length of the task argument block.
+ Operand 6: OMP_TASK_ARG_ALIGN: Required alignment of the task
+ argument block. */
+
+DEFTREECODE (OMP_TASK, "omp_task", tcc_statement, 7)
+
/* OpenMP - #pragma omp for [clause1 ... clauseN]
Operand 0: OMP_FOR_BODY: Loop body.
Operand 1: OMP_FOR_CLAUSES: List of clauses.
diff --git a/gcc/tree.h b/gcc/tree.h
index c10cc0380e7..c3d2abe6978 100644
--- a/gcc/tree.h
+++ b/gcc/tree.h
@@ -186,6 +186,7 @@ extern const enum tree_code_class tree_code_type[];
#define OMP_DIRECTIVE_P(NODE) \
(TREE_CODE (NODE) == OMP_PARALLEL \
+ || TREE_CODE (NODE) == OMP_TASK \
|| TREE_CODE (NODE) == OMP_FOR \
|| TREE_CODE (NODE) == OMP_SECTIONS \
|| TREE_CODE (NODE) == OMP_SECTIONS_SWITCH \
@@ -315,7 +316,7 @@ enum omp_clause_code
Operand 2: OMP_CLAUSE_REDUCTION_MERGE: Stmt-list to merge private var
into the shared one.
Operand 3: OMP_CLAUSE_REDUCTION_PLACEHOLDER: A dummy VAR_DECL
- placeholder used in OMP_CLAUSE_REDUCTION_MERGE. */
+ placeholder used in OMP_CLAUSE_REDUCTION_{INIT,MERGE}. */
OMP_CLAUSE_REDUCTION,
/* OpenMP clause: copyin (variable_list). */
@@ -340,7 +341,13 @@ enum omp_clause_code
OMP_CLAUSE_ORDERED,
/* OpenMP clause: default. */
- OMP_CLAUSE_DEFAULT
+ OMP_CLAUSE_DEFAULT,
+
+ /* OpenMP clause: collapse (constant-integer-expression). */
+ OMP_CLAUSE_COLLAPSE,
+
+ /* OpenMP clause: untied. */
+ OMP_CLAUSE_UNTIED
};
/* The definition of tree nodes fills the next several pages. */
@@ -524,6 +531,8 @@ struct gimple_stmt GTY(())
OMP_PARALLEL_COMBINED in
OMP_PARALLEL
+ OMP_CLAUSE_PRIVATE_OUTER_REF in
+ OMP_CLAUSE_PRIVATE
protected_flag:
@@ -1796,6 +1805,20 @@ struct tree_constructor GTY(())
#define OMP_PARALLEL_FN(NODE) TREE_OPERAND (OMP_PARALLEL_CHECK (NODE), 2)
#define OMP_PARALLEL_DATA_ARG(NODE) TREE_OPERAND (OMP_PARALLEL_CHECK (NODE), 3)
+#define OMP_TASK_BODY(NODE) TREE_OPERAND (OMP_TASK_CHECK (NODE), 0)
+#define OMP_TASK_CLAUSES(NODE) TREE_OPERAND (OMP_TASK_CHECK (NODE), 1)
+#define OMP_TASK_FN(NODE) TREE_OPERAND (OMP_TASK_CHECK (NODE), 2)
+#define OMP_TASK_DATA_ARG(NODE) TREE_OPERAND (OMP_TASK_CHECK (NODE), 3)
+#define OMP_TASK_COPYFN(NODE) TREE_OPERAND (OMP_TASK_CHECK (NODE), 4)
+#define OMP_TASK_ARG_SIZE(NODE) TREE_OPERAND (OMP_TASK_CHECK (NODE), 5)
+#define OMP_TASK_ARG_ALIGN(NODE) TREE_OPERAND (OMP_TASK_CHECK (NODE), 6)
+
+#define OMP_TASKREG_CHECK(NODE) TREE_RANGE_CHECK (NODE, OMP_PARALLEL, OMP_TASK)
+#define OMP_TASKREG_BODY(NODE) TREE_OPERAND (OMP_TASKREG_CHECK (NODE), 0)
+#define OMP_TASKREG_CLAUSES(NODE) TREE_OPERAND (OMP_TASKREG_CHECK (NODE), 1)
+#define OMP_TASKREG_FN(NODE) TREE_OPERAND (OMP_TASKREG_CHECK (NODE), 2)
+#define OMP_TASKREG_DATA_ARG(NODE) TREE_OPERAND (OMP_TASKREG_CHECK (NODE), 3)
+
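
Since OMP_TASK is defined right after OMP_PARALLEL in tree.def, the TREE_RANGE_CHECK above lets the OMP_TASKREG_* accessors serve both tree codes; that is what lower_omp_taskreg and the tree-nested.c changes rely on. A small illustrative sketch of the usual pattern (not part of the patch):

/* Shared handling of parallel and task regions.  */
static tree
taskreg_data_arg (tree stmt)
{
  if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
    return OMP_TASKREG_DATA_ARG (stmt);
  return NULL_TREE;
}
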
#define OMP_FOR_BODY(NODE) TREE_OPERAND (OMP_FOR_CHECK (NODE), 0)
#define OMP_FOR_CLAUSES(NODE) TREE_OPERAND (OMP_FOR_CHECK (NODE), 1)
#define OMP_FOR_INIT(NODE) TREE_OPERAND (OMP_FOR_CHECK (NODE), 2)
@@ -1848,10 +1871,19 @@ struct tree_constructor GTY(())
#define OMP_CLAUSE_PRIVATE_DEBUG(NODE) \
(OMP_CLAUSE_SUBCODE_CHECK (NODE, OMP_CLAUSE_PRIVATE)->base.public_flag)
+/* True on a PRIVATE clause if ctor needs access to outer region's
+ variable. */
+#define OMP_CLAUSE_PRIVATE_OUTER_REF(NODE) \
+ TREE_PRIVATE (OMP_CLAUSE_SUBCODE_CHECK (NODE, OMP_CLAUSE_PRIVATE))
+
/* True on a LASTPRIVATE clause if a FIRSTPRIVATE clause for the same
decl is present in the chain. */
#define OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE(NODE) \
(OMP_CLAUSE_SUBCODE_CHECK (NODE, OMP_CLAUSE_LASTPRIVATE)->base.public_flag)
+#define OMP_CLAUSE_LASTPRIVATE_STMT(NODE) \
+ OMP_CLAUSE_OPERAND (OMP_CLAUSE_SUBCODE_CHECK (NODE, \
+ OMP_CLAUSE_LASTPRIVATE),\
+ 1)
#define OMP_CLAUSE_IF_EXPR(NODE) \
OMP_CLAUSE_OPERAND (OMP_CLAUSE_SUBCODE_CHECK (NODE, OMP_CLAUSE_IF), 0)
@@ -1860,6 +1892,13 @@ struct tree_constructor GTY(())
#define OMP_CLAUSE_SCHEDULE_CHUNK_EXPR(NODE) \
OMP_CLAUSE_OPERAND (OMP_CLAUSE_SUBCODE_CHECK (NODE, OMP_CLAUSE_SCHEDULE), 0)
+#define OMP_CLAUSE_COLLAPSE_EXPR(NODE) \
+ OMP_CLAUSE_OPERAND (OMP_CLAUSE_SUBCODE_CHECK (NODE, OMP_CLAUSE_COLLAPSE), 0)
+#define OMP_CLAUSE_COLLAPSE_ITERVAR(NODE) \
+ OMP_CLAUSE_OPERAND (OMP_CLAUSE_SUBCODE_CHECK (NODE, OMP_CLAUSE_COLLAPSE), 1)
+#define OMP_CLAUSE_COLLAPSE_COUNT(NODE) \
+ OMP_CLAUSE_OPERAND (OMP_CLAUSE_SUBCODE_CHECK (NODE, OMP_CLAUSE_COLLAPSE), 2)
+
#define OMP_CLAUSE_REDUCTION_CODE(NODE) \
(OMP_CLAUSE_SUBCODE_CHECK (NODE, OMP_CLAUSE_REDUCTION)->omp_clause.subcode.reduction_code)
#define OMP_CLAUSE_REDUCTION_INIT(NODE) \
@@ -1874,6 +1913,7 @@ enum omp_clause_schedule_kind
OMP_CLAUSE_SCHEDULE_STATIC,
OMP_CLAUSE_SCHEDULE_DYNAMIC,
OMP_CLAUSE_SCHEDULE_GUIDED,
+ OMP_CLAUSE_SCHEDULE_AUTO,
OMP_CLAUSE_SCHEDULE_RUNTIME
};
@@ -1885,7 +1925,8 @@ enum omp_clause_default_kind
OMP_CLAUSE_DEFAULT_UNSPECIFIED,
OMP_CLAUSE_DEFAULT_SHARED,
OMP_CLAUSE_DEFAULT_NONE,
- OMP_CLAUSE_DEFAULT_PRIVATE
+ OMP_CLAUSE_DEFAULT_PRIVATE,
+ OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
};
#define OMP_CLAUSE_DEFAULT_KIND(NODE) \