tools/source/fsys/urlobj.cxx   |    2 +-
 tools/source/inet/inetmime.cxx |   16 ++++++----------
 2 files changed, 7 insertions(+), 11 deletions(-)

New commits:
commit 4742808bf277cd33ef95029b987aac904149afd9
Author:     Caolán McNamara <caolan.mcnam...@collabora.com>
AuthorDate: Tue Apr 30 19:47:06 2024 +0100
Commit:     Caolán McNamara <caolan.mcnam...@collabora.com>
CommitDate: Wed May 1 09:16:57 2024 +0200

    WaE: C6011 Dereferencing NULL pointer warnings
    
    Change-Id: I665bb37e1d45ec87489b039b1d1d41529f027328
    Reviewed-on: https://gerrit.libreoffice.org/c/core/+/166939
    Tested-by: Caolán McNamara <caolan.mcnam...@collabora.com>
    Reviewed-by: Caolán McNamara <caolan.mcnam...@collabora.com>

diff --git a/tools/source/fsys/urlobj.cxx b/tools/source/fsys/urlobj.cxx
index b1e3073f39b4..aeb85c1c9f1d 100644
--- a/tools/source/fsys/urlobj.cxx
+++ b/tools/source/fsys/urlobj.cxx
@@ -2274,7 +2274,7 @@ INetURLObject::PrefixInfo const * INetURLObject::getPrefix(sal_Unicode const *&
     sal_Int32 i = 0;
     for (; pFirst < pLast; ++i)
     {
-        if (pFirst->m_pPrefix[i] == '\0')
+        if (pFirst->m_pPrefix && pFirst->m_pPrefix[i] == '\0')
         {
             pMatch = pFirst++;
             pMatched = p;
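
    [Editor's note: C6011 is the MSVC static-analyzer warning about dereferencing a
    pointer that may be null. The hunk above short-circuits on the pointer before
    indexing it, so the analyzer can see the dereference only happens when the
    pointer is non-null. A minimal standalone sketch of the same pattern, using
    hypothetical names rather than the actual LibreOffice declarations:

        // Hypothetical stand-in for a prefix-table entry whose m_pPrefix
        // member might be null; this is what warning C6011 flags.
        struct PrefixInfo
        {
            char const * m_pPrefix;
        };

        bool matchesAt(PrefixInfo const & rInfo, int i, char c)
        {
            // Test the pointer first; && short-circuits, so the index
            // expression is only evaluated when m_pPrefix is non-null.
            return rInfo.m_pPrefix != nullptr && rInfo.m_pPrefix[i] == c;
        }
    ]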
diff --git a/tools/source/inet/inetmime.cxx b/tools/source/inet/inetmime.cxx
index 6694dc398669..28572483951f 100644
--- a/tools/source/inet/inetmime.cxx
+++ b/tools/source/inet/inetmime.cxx
@@ -70,8 +70,7 @@ int getBase64Weight(sal_uInt32 nChar)
 bool startsWithLineFolding(const sal_Unicode * pBegin,
                                             const sal_Unicode * pEnd)
 {
-    DBG_ASSERT(pBegin && pBegin <= pEnd,
-               "startsWithLineFolding(): Bad sequence");
+    assert(pBegin && pBegin <= pEnd && "startsWithLineFolding(): Bad sequence");
 
     return pEnd - pBegin >= 3 && pBegin[0] == 0x0D && pBegin[1] == 0x0A
            && isWhiteSpace(pBegin[2]); // CR, LF
@@ -395,8 +394,7 @@ bool isTokenChar(sal_uInt32 nChar)
 const sal_Unicode * skipComment(const sal_Unicode * pBegin,
                                           const sal_Unicode * pEnd)
 {
-    DBG_ASSERT(pBegin && pBegin <= pEnd,
-               "skipComment(): Bad sequence");
+    assert(pBegin && pBegin <= pEnd && "skipComment(): Bad sequence");
 
     if (pBegin != pEnd && *pBegin == '(')
     {
@@ -427,8 +425,7 @@ const sal_Unicode * skipLinearWhiteSpaceComment(const sal_Unicode *
                                                           const sal_Unicode *
                                                               pEnd)
 {
-    DBG_ASSERT(pBegin && pBegin <= pEnd,
-               "skipLinearWhiteSpaceComment(): Bad sequence");
+    assert(pBegin && pBegin <= pEnd && "skipLinearWhiteSpaceComment(): Bad sequence");
 
     while (pBegin != pEnd)
         switch (*pBegin)
@@ -463,8 +460,7 @@ const sal_Unicode * skipLinearWhiteSpaceComment(const sal_Unicode *
 const sal_Unicode * skipQuotedString(const sal_Unicode * pBegin,
                                                const sal_Unicode * pEnd)
 {
-    DBG_ASSERT(pBegin && pBegin <= pEnd,
-               "skipQuotedString(): Bad sequence");
+    assert(pBegin && pBegin <= pEnd && "skipQuotedString(): Bad sequence");
 
     if (pBegin != pEnd && *pBegin == '"')
         for (const sal_Unicode * p = pBegin + 1; p != pEnd;)
@@ -707,7 +703,7 @@ bool equalIgnoreCase(const char * pBegin1,
                                const char * pEnd1,
                                const char * pString2)
 {
-    DBG_ASSERT(pBegin1 && pBegin1 <= pEnd1 && pString2,
+    assert(pBegin1 && pBegin1 <= pEnd1 && pString2 &&
                "equalIgnoreCase(): Bad sequences");
 
     while (*pString2 != 0)
@@ -971,7 +967,7 @@ bool INetMIME::equalIgnoreCase(const sal_Unicode * pBegin1,
                                const sal_Unicode * pEnd1,
                                const char * pString2)
 {
-    DBG_ASSERT(pBegin1 && pBegin1 <= pEnd1 && pString2,
+    assert(pBegin1 && pBegin1 <= pEnd1 && pString2 &&
                "INetMIME::equalIgnoreCase(): Bad sequences");
 
     while (*pString2 != 0)

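[Editor's note: the inetmime.cxx hunks replace DBG_ASSERT, which takes the
message as a separate argument, with the standard assert macro, where the usual
way to attach a message is to && a string literal onto the condition; a
non-empty literal is always true, so the check itself is unchanged. A minimal
sketch of the idiom with a hypothetical function name, not code from the patch:

    #include <cassert>

    void checkSequence(const char * pBegin, const char * pEnd)
    {
        // The string literal only documents the failure; it does not
        // change the truth value of the asserted condition.
        assert(pBegin && pBegin <= pEnd && "checkSequence(): Bad sequence");
        (void)pBegin; (void)pEnd; // avoid unused warnings in NDEBUG builds
    }
]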