From: Marek Olšák <[email protected]>

---
 src/gallium/drivers/radeon/r600_pipe_common.h | 5 -----
 src/gallium/drivers/radeon/r600_query.c       | 2 +-
 src/gallium/drivers/radeonsi/si_pipe.c        | 2 --
 3 files changed, 1 insertion(+), 8 deletions(-)

diff --git a/src/gallium/drivers/radeon/r600_pipe_common.h b/src/gallium/drivers/radeon/r600_pipe_common.h
index 236b3bc..e9b6d46 100644
--- a/src/gallium/drivers/radeon/r600_pipe_common.h
+++ b/src/gallium/drivers/radeon/r600_pipe_common.h
@@ -421,25 +421,20 @@ struct r600_common_screen {
        struct {
                /* Context flags to set so that all writes from earlier jobs
                 * in the CP are seen by L2 clients.
                 */
                unsigned cp_to_L2;
 
                /* Context flags to set so that all writes from earlier jobs
                 * that end in L2 are seen by CP.
                 */
                unsigned L2_to_cp;
-
-               /* Context flags to set so that all writes from earlier
-                * compute jobs are seen by L2 clients.
-                */
-               unsigned compute_to_L2;
        } barrier_flags;
 };
 
 /* This encapsulates a state or an operation which can emitted into the GPU
  * command stream. */
 struct r600_atom {
        void (*emit)(struct r600_common_context *ctx, struct r600_atom *state);
        unsigned short          id;
 };
 
diff --git a/src/gallium/drivers/radeon/r600_query.c b/src/gallium/drivers/radeon/r600_query.c
index e1239ae..de52167 100644
--- a/src/gallium/drivers/radeon/r600_query.c
+++ b/src/gallium/drivers/radeon/r600_query.c
@@ -1761,21 +1761,21 @@ static void r600_query_hw_get_result_resource(struct r600_common_context *rctx,
                         * of the last entry, since the fence writes should be
                         * serialized in the CP.
                         */
                        va = qbuf->buf->gpu_address + qbuf->results_end - query->result_size;
                        va += params.fence_offset;
 
                        si_gfx_wait_fence(rctx, va, 0x80000000, 0x80000000);
                }
 
                rctx->b.launch_grid(&rctx->b, &grid);
-               rctx->flags |= rctx->screen->barrier_flags.compute_to_L2;
+               rctx->flags |= SI_CONTEXT_CS_PARTIAL_FLUSH;
        }
 
        r600_restore_qbo_state(rctx, &saved_state);
        pipe_resource_reference(&tmp_buffer, NULL);
 }
 
 static void r600_render_condition(struct pipe_context *ctx,
                                  struct pipe_query *query,
                                  boolean condition,
                                  enum pipe_render_cond_flag mode)
diff --git a/src/gallium/drivers/radeonsi/si_pipe.c b/src/gallium/drivers/radeonsi/si_pipe.c
index 141662a..6c4e183 100644
--- a/src/gallium/drivers/radeonsi/si_pipe.c
+++ b/src/gallium/drivers/radeonsi/si_pipe.c
@@ -835,22 +835,20 @@ struct pipe_screen *radeonsi_screen_create(struct radeon_winsys *ws,
        sscreen->use_monolithic_shaders =
                (sscreen->b.debug_flags & DBG(MONOLITHIC_SHADERS)) != 0;
 
        sscreen->b.barrier_flags.cp_to_L2 = SI_CONTEXT_INV_SMEM_L1 |
                                            SI_CONTEXT_INV_VMEM_L1;
        if (sscreen->b.chip_class <= VI) {
                sscreen->b.barrier_flags.cp_to_L2 |= SI_CONTEXT_INV_GLOBAL_L2;
                sscreen->b.barrier_flags.L2_to_cp |= SI_CONTEXT_WRITEBACK_GLOBAL_L2;
        }
 
-       sscreen->b.barrier_flags.compute_to_L2 = SI_CONTEXT_CS_PARTIAL_FLUSH;
-
        if (debug_get_bool_option("RADEON_DUMP_SHADERS", false))
                sscreen->b.debug_flags |= DBG_ALL_SHADERS;
 
        for (i = 0; i < num_compiler_threads; i++)
                sscreen->tm[i] = si_create_llvm_target_machine(sscreen);
        for (i = 0; i < num_compiler_threads_lowprio; i++)
                sscreen->tm_low_priority[i] = si_create_llvm_target_machine(sscreen);
 
        /* Create the auxiliary context. This must be done last. */
        sscreen->b.aux_context = si_create_context(&sscreen->b.b, 0);
-- 
2.7.4

_______________________________________________
mesa-dev mailing list
[email protected]
https://lists.freedesktop.org/mailman/listinfo/mesa-dev

Reply via email to