desktop/source/lib/init.cxx     |   43 ++++++++++++++++++++++++++--------------
 sc/source/ui/docshell/docsh.cxx |   16 ++++++++++++--
 2 files changed, 42 insertions(+), 17 deletions(-)

New commits:
commit 04493a0597f4e65c208db009972508ecb305ed45
Author:     Andras Timar <[email protected]>
AuthorDate: Mon Feb 9 14:11:16 2026 +0100
Commit:     Miklos Vajna <[email protected]>
CommitDate: Thu Feb 12 09:54:39 2026 +0100

    fix CSV sheet number export via LOKit convert-to API
    
    The 12th token of CSV filter options (nSheetToExport) was ignored when
    exporting via the /cool/convert-to endpoint. Two bugs:
    
    1. doc_saveAs() in init.cxx processed filter options through
       comphelper::string::convertCommaSeparated() which drops empty tokens
       and adds spaces after commas. This mangled "46,39,0,1,,0,..." (12
       tokens) into "46, 39, 0, 1, 0, ..." (11 tokens with spaces), so
       ScImportOptions never saw the 12th token. Fix: iterate tokens
       manually with o3tl::getToken() preserving empty tokens and spacing.
    
    2. ConvertTo() in docsh.cxx for single-sheet export (nSheetToExport > 0)
       created a separate file named basename-SheetName.csv instead of
       writing to the provided output stream. The convert-to endpoint only
       reads the original filename, so it never found the result. Fix: write
       directly to rMed.GetOutStream() for single-sheet export.
    
    Change-Id: Ib7c693c39ca01b5779aabf799e12db9de1870ddc
    Reviewed-on: https://gerrit.libreoffice.org/c/core/+/199232
    Tested-by: Jenkins CollaboraOffice <[email protected]>
    Reviewed-by: Miklos Vajna <[email protected]>

diff --git a/desktop/source/lib/init.cxx b/desktop/source/lib/init.cxx
index edcd91ba0cd8..6ec3cd5688b9 100644
--- a/desktop/source/lib/init.cxx
+++ b/desktop/source/lib/init.cxx
@@ -76,6 +76,7 @@
 #include <osl/thread.h>
 #include <rtl/bootstrap.hxx>
 #include <rtl/strbuf.hxx>
+#include <rtl/ustrbuf.hxx>
 #include <rtl/uri.hxx>
 #include <svl/cryptosign.hxx>
 #include <linguistic/misc.hxx>
@@ -3869,21 +3870,37 @@ static int doc_saveAs(LibreOfficeKitDocument* pThis, const char* sUrl, const cha
         // saveAs() is more like save-a-copy, which allows saving to any
         // random format like PDF or PNG.
         // It is not a real filter option, so we have to filter it out.
-        const uno::Sequence<OUString> aOptionSeq = comphelper::string::convertCommaSeparated(aFilterOptions);
-        std::vector<OUString> aFilteredOptionVec;
+        // Preserve CSV filter options structure: don't drop empty tokens or add spaces.
+        // Just scan for and remove special non-filter keywords.
+        // (Using convertCommaSeparated() would drop empty tokens and add spaces,
+        // which breaks CSV filter options where token position matters.)
         bool bTakeOwnership = false;
         bool bCreateFromTemplate = false;
         MediaDescriptor aSaveMediaDescriptor;
-        for (const auto& rOption : aOptionSeq)
-        {
-            if (rOption == "TakeOwnership")
-                bTakeOwnership = true;
-            else if (rOption == "NoFileSync")
-                aSaveMediaDescriptor[u"NoFileSync"_ustr] <<= true;
-            else if (rOption == "FromTemplate")
-                bCreateFromTemplate = true;
-            else
-                aFilteredOptionVec.push_back(rOption);
+        {
+            OUStringBuffer aFilteredBuf;
+            sal_Int32 nIndex = 0;
+            bool bFirst = true;
+            do
+            {
+                const OUString aToken(o3tl::getToken(aFilterOptions, 0, ',', nIndex));
+                const OUString aTrimmed = aToken.trim();
+
+                if (aTrimmed == "TakeOwnership")
+                    bTakeOwnership = true;
+                else if (aTrimmed == "NoFileSync")
+                    aSaveMediaDescriptor[u"NoFileSync"_ustr] <<= true;
+                else if (aTrimmed == "FromTemplate")
+                    bCreateFromTemplate = true;
+                else
+                {
+                    if (!bFirst)
+                        aFilteredBuf.append(u',');
+                    aFilteredBuf.append(aToken);
+                    bFirst = false;
+                }
+            } while (nIndex >= 0);
+            aFilterOptions = aFilteredBuf.makeStringAndClear();
         }
 
         if (bCreateFromTemplate && bTakeOwnership)
@@ -3898,8 +3915,6 @@ static int doc_saveAs(LibreOfficeKitDocument* pThis, const char* sUrl, const cha
         aSaveMediaDescriptor[u"Overwrite"_ustr] <<= true;
         aSaveMediaDescriptor[u"FilterName"_ustr] <<= aFilterName;
 
-        auto aFilteredOptionSeq = comphelper::containerToSequence<OUString>(aFilteredOptionVec);
-        aFilterOptions = comphelper::string::convertCommaSeparated(aFilteredOptionSeq);
         aSaveMediaDescriptor[MediaDescriptor::PROP_FILTEROPTIONS] <<= 
aFilterOptions;
 
         comphelper::SequenceAsHashMap aFilterDataMap;
diff --git a/sc/source/ui/docshell/docsh.cxx b/sc/source/ui/docshell/docsh.cxx
index eee54aacb342..3eaeaad31b06 100644
--- a/sc/source/ui/docshell/docsh.cxx
+++ b/sc/source/ui/docshell/docsh.cxx
@@ -2567,9 +2567,19 @@ bool ScDocShell::ConvertTo( SfxMedium &rMed )
             }
             else if (0 < aOptions.nSheetToExport && aOptions.nSheetToExport <= nCount)
             {
-                // One sheet, 1-based.
-                nCount = aOptions.nSheetToExport;
-                nStartTab = nCount - 1;
+                // Single sheet export, 1-based.
+                // Write to the provided output stream so that LOKit convert-to
+                // finds the result at the expected URL (instead of a separate
+                // file named with the sheet name).
+                SvStream* pStream = rMed.GetOutStream();
+                if (pStream)
+                {
+                    AsciiSave(*pStream, aOptions, aOptions.nSheetToExport - 1);
+                    bRet = true;
+                }
+                // Skip the separate-files loop below.
+                nStartTab = 0;
+                nCount = 0;
             }
             else
             {

Reply via email to