The clock gets enabled early on in init, since it's required in order
to read registers.  Convert the error returns after that point to jump
to a common error label, so the clock also gets disabled again on
failure.  If only devm_clk_prepare_enable() were a thing!

Signed-off-by: Eric Anholt <e...@anholt.net>
---

If you like, I would slip this fixup in before patch 1 of your series.
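
For what it's worth, a devm-style clock cleanup can be approximated
with devm_add_action_or_reset().  A rough, untested sketch (the helper
name is made up, and I'm assuming the usual ldev->pixel_clk field)
would look something like:

static void ltdc_pixel_clk_disable(void *data)
{
	clk_disable_unprepare(data);
}

	/* ... in ltdc_load(), right where the clock gets enabled ... */
	ret = clk_prepare_enable(ldev->pixel_clk);
	if (ret) {
		DRM_ERROR("Unable to prepare pixel clock\n");
		return ret;
	}

	/* Disable the clock automatically on unbind or probe failure. */
	ret = devm_add_action_or_reset(dev, ltdc_pixel_clk_disable,
				       ldev->pixel_clk);
	if (ret)
		return ret;

With that in place the later error paths could keep their plain
returns; not proposing it for this series, just noting the pattern.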

 drivers/gpu/drm/stm/ltdc.c | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/drivers/gpu/drm/stm/ltdc.c b/drivers/gpu/drm/stm/ltdc.c
index 8aa05860029f..7d7e889f09c3 100644
--- a/drivers/gpu/drm/stm/ltdc.c
+++ b/drivers/gpu/drm/stm/ltdc.c
@@ -908,13 +908,15 @@ int ltdc_load(struct drm_device *ddev)
 
        if (of_address_to_resource(np, 0, &res)) {
                DRM_ERROR("Unable to get resource\n");
-               return -ENODEV;
+               ret = -ENODEV;
+               goto err;
        }
 
        ldev->regs = devm_ioremap_resource(dev, &res);
        if (IS_ERR(ldev->regs)) {
                DRM_ERROR("Unable to get ltdc registers\n");
-               return PTR_ERR(ldev->regs);
+               ret = PTR_ERR(ldev->regs);
+               goto err;
        }
 
        for (i = 0; i < MAX_IRQ; i++) {
@@ -927,7 +929,7 @@ int ltdc_load(struct drm_device *ddev)
                                                dev_name(dev), ddev);
                if (ret) {
                        DRM_ERROR("Failed to register LTDC interrupt\n");
-                       return ret;
+                       goto err;
                }
        }
 
@@ -942,7 +944,7 @@ int ltdc_load(struct drm_device *ddev)
        if (ret) {
                DRM_ERROR("hardware identifier (0x%08x) not supported!\n",
                          ldev->caps.hw_version);
-               return ret;
+               goto err;
        }
 
        DRM_INFO("ltdc hw version 0x%08x - ready\n", ldev->caps.hw_version);
-- 
2.11.0
