<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>topic Re: Not able to run task while DatabricksDelta connector as sink in Qlik Replicate</title>
    <link>https://community.qlik.com/t5/Qlik-Replicate/Not-able-to-run-task-while-DatabricksDelta-connector-as-sink/m-p/2096372#M6799</link>
    <description>&lt;P&gt;Hello&amp;nbsp;&lt;a href="https://community.qlik.com/t5/user/viewprofilepage/user-id/238493"&gt;@AnujGupta&lt;/a&gt;&amp;nbsp;,&lt;/P&gt;
&lt;P&gt;There is an article&amp;nbsp;&lt;A title="Ideation Guidelines: Getting Started with Ideation" href="https://community.qlik.com/t5/Get-Started/Ideation-Guidelines-Getting-Started-with-Ideation/ta-p/1960217" target="_blank" rel="noopener"&gt;Ideation Guidelines: Getting Started with Ideation&lt;/A&gt;&amp;nbsp;.&lt;/P&gt;
&lt;P&gt;Hope it helps.&lt;/P&gt;
&lt;P&gt;Regards,&lt;/P&gt;
&lt;P&gt;John.&lt;/P&gt;</description>
    <pubDate>Sun, 23 Jul 2023 14:59:50 GMT</pubDate>
    <dc:creator>john_wang</dc:creator>
    <dc:date>2023-07-23T14:59:50Z</dc:date>
    <item>
      <title>Not able to run task while DatabricksDelta connector as sink</title>
      <link>https://community.qlik.com/t5/Qlik-Replicate/Not-able-to-run-task-while-DatabricksDelta-connector-as-sink/m-p/2096202#M6790</link>
      <description>&lt;P&gt;Hi Team,&amp;nbsp;&lt;/P&gt;
&lt;P&gt;This is very strange behavior from Qlik replicate, same highlighted command is working fine when I executed from databricks. But it is not working when I run task from Qlik.&amp;nbsp;&lt;/P&gt;
&lt;P&gt;Source - Oracle&lt;/P&gt;
&lt;P&gt;Destination - ADLS GEN2 unmanaged table.&lt;/P&gt;
&lt;P&gt;Qlik Version -&amp;nbsp;November 2022 (2022.11.0.289)&lt;/P&gt;
&lt;P&gt;Oracle edition - 19.0.0.0&lt;/P&gt;
&lt;P&gt;Error log -&amp;nbsp;&lt;/P&gt;
&lt;P&gt;00011668: 2023-07-21T16:39:22 [METADATA_MANAGE ]I: Going to connect to server adb-2446663257414103.3.azuredatabricks.net database wms3check (cloud_imp.c:3965)&lt;BR /&gt;00011668: 2023-07-21T16:39:22 [METADATA_MANAGE ]I: Target endpoint 'Databricks Lakehouse (Delta)' is using provider syntax 'DatabricksDelta' (provider_syntax_manager.c:947)&lt;BR /&gt;00011668: 2023-07-21T16:39:23 [METADATA_MANAGE ]I: ODBC driver version: '2.6.22.1037' (ar_odbc_conn.c:633)&lt;BR /&gt;00011668: 2023-07-21T16:39:23 [METADATA_MANAGE ]I: Connected to server adb-2446663257414103.3.azuredatabricks.net database wms3check successfully. (cloud_imp.c:4004)&lt;BR /&gt;00011668: 2023-07-21T16:39:23 [TASK_MANAGER ]I: Creating threads for all components (replicationtask.c:2401)&lt;BR /&gt;00011668: 2023-07-21T16:39:23 [TASK_MANAGER ]I: Threads for all components were created (replicationtask.c:2560)&lt;BR /&gt;00011668: 2023-07-21T16:39:23 [TASK_MANAGER ]I: Task initialization completed successfully (replicationtask.c:3921)&lt;BR /&gt;00010800: 2023-07-21T16:39:23 [SOURCE_CAPTURE ]I: Use any Oracle Archived Redo Log Destination (oracle_endpoint_imp.c:974)&lt;BR /&gt;00010800: 2023-07-21T16:39:23 [SOURCE_CAPTURE ]I: Read '63' blocks backward (oracle_endpoint_imp.c:994)&lt;BR /&gt;00010800: 2023-07-21T16:39:23 [SOURCE_CAPTURE ]I: Oracle CDC uses Oracle File Access mode (oracle_endpoint_imp.c:1010)&lt;BR /&gt;00010800: 2023-07-21T16:39:23 [SOURCE_CAPTURE ]I: Wait '5' minutes for missing Archived Redo log (oracle_endpoint_imp.c:1169)&lt;BR /&gt;00010800: 2023-07-21T16:39:23 [SOURCE_CAPTURE ]I: retry timeout is '120' minutes (oracle_endpoint_imp.c:1184)&lt;BR /&gt;00010800: 2023-07-21T16:39:23 [SOURCE_CAPTURE ]I: Scale is set to 10 for NUMBER Datatype (oracle_endpoint_imp.c:1215)&lt;BR /&gt;00010800: 2023-07-21T16:39:23 [SOURCE_CAPTURE ]I: Retry interval is set to 5 (oracle_endpoint_imp.c:1229)&lt;BR /&gt;00010800: 2023-07-21T16:39:23 [SOURCE_CAPTURE ]I: Oracle source database version is 
19.0.0.0.0 (oracle_endpoint_conn.c:611)&lt;BR /&gt;00010800: 2023-07-21T16:39:23 [SOURCE_CAPTURE ]I: Oracle Client version: 19.3.0.0.0 (oracle_endpoint_conn.c:624)&lt;BR /&gt;00010800: 2023-07-21T16:39:23 [SOURCE_CAPTURE ]I: The classic Oracle source database is used (oracle_endpoint_conn.c:1103)&lt;BR /&gt;00010800: 2023-07-21T16:39:23 [SOURCE_CAPTURE ]I: Oracle compatibility version is 19.0.0 (oracle_endpoint_conn.c:87)&lt;BR /&gt;00010800: 2023-07-21T16:39:23 [SOURCE_CAPTURE ]I: Database role is 'PRIMARY' (oracle_endpoint_conn.c:133)&lt;BR /&gt;00010800: 2023-07-21T16:39:23 [SOURCE_CAPTURE ]I: SUPPLEMENTAL_LOG_DATA_PK is set (oracle_endpoint_conn.c:142)&lt;BR /&gt;00010800: 2023-07-21T16:39:23 [SOURCE_CAPTURE ]I: Resetlog process is supported in Oracle Database (oracle_endpoint_imp.c:1410)&lt;BR /&gt;00010820: 2023-07-21T16:39:23 [TARGET_APPLY ]I: Going to connect to server adb-2446663257414103.3.azuredatabricks.net database wms3check (cloud_imp.c:3965)&lt;BR /&gt;00010820: 2023-07-21T16:39:23 [TARGET_APPLY ]I: Target endpoint 'Databricks Lakehouse (Delta)' is using provider syntax 'DatabricksDelta' (provider_syntax_manager.c:947)&lt;BR /&gt;00010820: 2023-07-21T16:39:23 [TARGET_APPLY ]I: ODBC driver version: '2.6.22.1037' (ar_odbc_conn.c:633)&lt;BR /&gt;00010820: 2023-07-21T16:39:23 [TARGET_APPLY ]I: Connected to server adb-2446663257414103.3.azuredatabricks.net database wms3check successfully. (cloud_imp.c:4004)&lt;BR /&gt;00010820: 2023-07-21T16:39:23 [TARGET_APPLY ]I: Restore bulk state. 
Last bulk last record id - '0', last applied record id - '0', target confirmed record id - '0', sorter confirmed record id - '0' (endpointshell.c:2016)&lt;BR /&gt;00010820: 2023-07-21T16:39:23 [TARGET_APPLY ]I: Set Bulk Timeout = 30000 milliseconds (bulk_apply.c:563)&lt;BR /&gt;00010820: 2023-07-21T16:39:23 [TARGET_APPLY ]I: Set Bulk Timeout Min = 1000 milliseconds (bulk_apply.c:564)&lt;BR /&gt;00010820: 2023-07-21T16:39:23 [TARGET_APPLY ]I: Working in bulk apply mode (endpointshell.c:2024)&lt;BR /&gt;00010820: 2023-07-21T16:39:24 [SOURCE_CAPTURE ]I: Source endpoint 'Oracle' is using provider syntax 'Oracle' (provider_syntax_manager.c:941)&lt;BR /&gt;00009292: 2023-07-21T16:39:24 [SORTER ]I: 'Stop reading when memory limit reached' is set to false (sorter.c:658)&lt;BR /&gt;00009292: 2023-07-21T16:39:24 [STREAM_COMPONEN ]I: Going to connect to Oracle server (DESCRIPTION=(ADDRESS_LIST=(FAILOVER=ON)(LOAD_BALANCE=OFF)(CONNECT_TIMEOUT=5)(ADDRESS=(PROTOCOL=TCP)(HOST=segotl5155.srv.volvo.com)(PORT= 1525))(ADDRESS=(PROTOCOL=TCP)(HOST=segotl5156.srv.volvo.com)(PORT=1525)))(CONNECT_DATA=(SERVICE_NAME=gwms4p01_rw.srv.volvo.com))) with username WMS_REPLSVC (oracle_endpoint_imp.c:871)&lt;BR /&gt;00011668: 2023-07-21T16:39:24 [TASK_MANAGER ]I: All stream components were initialized (replicationtask.c:3694)&lt;BR /&gt;00010800: 2023-07-21T16:39:24 [SOURCE_CAPTURE ]I: Oracle capture start time: now (oracle_endpoint_capture.c:605)&lt;BR /&gt;00009292: 2023-07-21T16:39:24 [SORTER ]I: Sorter last run state: confirmed_record_id = 0, confirmed_stream_position = '' (sorter_transaction.c:3295)&lt;BR /&gt;00010820: 2023-07-21T16:39:24 [TARGET_APPLY ]I: Bulk max file size: 100 MB, 102400 KB (cloud_bulk.c:150)&lt;BR /&gt;00010800: 2023-07-21T16:39:24 [SOURCE_CAPTURE ]I: Used difference between the Replicate machine UTC time and Oracle Local time is '-7200' seconds (oracle_endpoint_capture.c:349)&lt;BR /&gt;00010800: 2023-07-21T16:39:24 [SOURCE_CAPTURE ]I: Used Oracle archived Redo log 
destination id is '1' (oracdc_merger.c:646)&lt;BR /&gt;00010800: 2023-07-21T16:39:24 [SOURCE_CAPTURE ]I: Oracle instance uses more than one archived Redo log destination id. Please configure the correct destination id, if Redo logs of '1' destination cannot be accessed (oracdc_merger.c:650)&lt;BR /&gt;00010800: 2023-07-21T16:39:24 [SOURCE_CAPTURE ]I: Start processing online Redo log sequence 7442 thread 1 name /oracle/redo/gwms4p01/redo_02.log (oradcdc_redo.c:914)&lt;BR /&gt;00010800: 2023-07-21T16:39:24 [SOURCE_CAPTURE ]I: Oracle Redo compatibility version 13000000 (oradcdc_redo.c:916)&lt;BR /&gt;00010800: 2023-07-21T16:39:24 [SOURCE_CAPTURE ]I: Start REDO fetch from the context 0000002f.408daa00.00000001.0000.00.0000:7442.94987.16, thread 1 (oradcdc_thread.c:2652)&lt;BR /&gt;00010800: 2023-07-21T16:39:24 [SOURCE_CAPTURE ]I: Opened transaction list contains '2' transactions (oracle_endpoint_capture.c:830)&lt;BR /&gt;00009292: 2023-07-21T16:39:24 [SORTER ]I: Correcting source database time by 0 microseconds (sorter_transaction.c:197)&lt;BR /&gt;00009292: 2023-07-21T16:39:24 [SORTER ]I: 2 open transactions. Waiting for transaction consistency (sorter_transaction.c:303)&lt;BR /&gt;00011668: 2023-07-21T16:39:24 [TASK_MANAGER ]I: Start waiting for transactional consistency (replicationtask.c:3371)&lt;BR /&gt;00011668: 2023-07-21T16:39:25 [TASK_MANAGER ]I: Task error notification received from subtask 0, thread 1, status 1020401 (replicationtask.c:3517)&lt;BR /&gt;00011668: 2023-07-21T16:39:25 [TASK_MANAGER ]W: Task 'ora_wms' encountered a fatal error (repository.c:5935)&lt;BR /&gt;00009292: 2023-07-21T16:39:25 [SORTER ]I: Final saved task state. 
Stream position 0000002f.408daa00.00000001.0000.00.0000:7442.94987.16, Source id 3, next Target id 1, confirmed Target id 0, last source timestamp 1689950364428554 (sorter.c:781)&lt;BR /&gt;00010820: 2023-07-21T16:39:25 [TARGET_APPLY ]E: Failed (retcode -1) to execute statement: &lt;STRONG&gt;'CREATE OR REPLACE TABLE `wms3check`.`attrep_apply_exceptions` ( `TASK_NAME` VARCHAR(128) NOT NULL, `TABLE_OWNER` VARCHAR(128) NOT NULL, `TABLE_NAME` VARCHAR(128) NOT NULL, `ERROR_TIME` TIMESTAMP NOT NULL, `STATEMENT` STRING NOT NULL, `ERROR` STRING NOT NULL ) USING DELTA LOCATION 'abfss://rawdata@gtooldlsdev.dfs.core.windows.net/Databases/wmsdelta/attrep_apply_exceptions'&lt;/STRONG&gt; TBLPROPERTIES (delta.autoOptimize.optimizeWrite = true)' [1022502] (ar_odbc_stmt.c:4996)&lt;BR /&gt;00010820: 2023-07-21T16:39:25 [TARGET_APPLY ]E: RetCode: SQL_ERROR SqlState: 42000 NativeError: 80 Message: [Simba][Hardy] (80) Syntax or semantic analysis error thrown in server while executing query. Error message from server: org.apache.hive.service.cli.HiveSQLException: Error running query: Failure to initialize configurationInvalid configuration value detected for fs.azure.account.key&lt;BR /&gt;at org.apache.spark.sql.hive.thriftserver.HiveThriftServerErrors$.runningQueryError(HiveThriftServerErrors.scala:56)&lt;BR /&gt;at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation.$anonfun$execute$1(SparkExecuteStatementOperation.scala:498)&lt;BR /&gt;at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)&lt;BR /&gt;at com.databricks.unity.EmptyHandle$.runWith(UCSHandle.scala:124)&lt;BR /&gt;at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation.org$apache$spark$sql$hive$thriftserver$SparkExecuteStatementOperation$$execute(SparkExecuteStatementOperation.scala:410)&lt;BR /&gt;&lt;BR /&gt;&lt;/P&gt;</description>
      <pubDate>Fri, 21 Jul 2023 14:59:03 GMT</pubDate>
      <guid>https://community.qlik.com/t5/Qlik-Replicate/Not-able-to-run-task-while-DatabricksDelta-connector-as-sink/m-p/2096202#M6790</guid>
      <dc:creator>AnujGupta</dc:creator>
      <dc:date>2023-07-21T14:59:03Z</dc:date>
    </item>
    <item>
      <title>Re: Not able to run task while DatabricksDelta connector as sink</title>
      <link>https://community.qlik.com/t5/Qlik-Replicate/Not-able-to-run-task-while-DatabricksDelta-connector-as-sink/m-p/2096287#M6792</link>
      <description>&lt;P&gt;Hello&amp;nbsp;&lt;a href="https://community.qlik.com/t5/user/viewprofilepage/user-id/238493"&gt;@AnujGupta&lt;/a&gt;&amp;nbsp;,&lt;/P&gt;
&lt;P&gt;Thanks for the post in community!&lt;/P&gt;
&lt;P&gt;Several reasons may cause the same error:&lt;/P&gt;
&lt;P&gt;&lt;FONT face="courier new,courier"&gt;&lt;SPAN&gt;Error running query: Failure to initialize configurationInvalid configuration value detected for fs.azure.account.key&lt;/SPAN&gt;&lt;/FONT&gt;&lt;/P&gt;
&lt;P&gt;In most cases it's a configuration issue. Since the Microsoft Azure Databricks target endpoint consists of 2 independent parts, the Databricks ODBC Access and the Azure Storage Data Access, let's try to isolate&amp;nbsp;the issue by following the below steps:&lt;BR /&gt;&lt;BR /&gt;Go to the Microsoft Azure Databricks target endpoint settings.&lt;/P&gt;
&lt;UL&gt;
&lt;LI&gt;Click on "&lt;STRONG&gt;Database&lt;/STRONG&gt;" Browse under Databricks ODBC Access and see if you get an error.&lt;/LI&gt;
&lt;LI&gt;Click on "&lt;SPAN&gt;&lt;STRONG&gt;Staging directory&lt;/STRONG&gt;"&amp;nbsp;&lt;/SPAN&gt;Browse under storage Staging and see if you get an error.&lt;/LI&gt;
&lt;LI&gt;Click on &lt;STRONG&gt;Test Connection&lt;/STRONG&gt; to see if there are any errors.&lt;/LI&gt;
&lt;/UL&gt;
&lt;P&gt;Please go through &lt;A title="Permissions and access" href="https://help.qlik.com/en-US/replicate/November2022/Content/Replicate/Main/Databricks%20Lakehouse%20(Delta)/databricks_delta_lakehouse_prereq_gen_target.htm" target="_blank" rel="noopener"&gt;Databricks&amp;nbsp;Permissions and access endpoint&lt;/A&gt;&amp;nbsp;especially:&lt;/P&gt;
&lt;P&gt;1.&amp;nbsp; &amp;nbsp; When configuring a new cluster with Microsoft Azure Data Lake Storage (ADLS) Gen2, the following line must be added to the "Spark Config" section.&lt;/P&gt;
&lt;P&gt;&lt;FONT face="courier new,courier"&gt;spark.hadoop.hive.server2.enable.doAs false&lt;/FONT&gt;&lt;/P&gt;
&lt;P&gt;2.&amp;nbsp; &amp;nbsp; To be able to access the storage directories from the Databricks cluster, users need to add a configuration (in Spark Config) for that Storage Account and its key.&lt;/P&gt;
&lt;P&gt;Example:&lt;/P&gt;
&lt;P&gt;&lt;FONT face="courier new,courier"&gt;fs.azure.account.key.&lt;EM&gt;&amp;lt;storage-account-name&amp;gt;&lt;/EM&gt;.dfs.core.windows.net &lt;EM&gt;&amp;lt;storage-account-access-key&amp;gt;&lt;/EM&gt;&lt;/FONT&gt;&lt;/P&gt;
&lt;P&gt;For details, refer to the Databricks online help &lt;A title="spark configuration" href="https://docs.databricks.com/clusters/configure.html#spark-configuration" target="_blank" rel="noopener"&gt;spark configuration&lt;/A&gt;&amp;nbsp;.&lt;/P&gt;
&lt;P&gt;3.&amp;nbsp; &amp;nbsp; The&amp;nbsp;&lt;SPAN&gt;&lt;STRONG&gt;Client ID&lt;/STRONG&gt; is configured correctly in the Endpoint.&lt;/SPAN&gt;&lt;/P&gt;
&lt;P&gt;&lt;SPAN&gt;Hope this helps.&lt;/SPAN&gt;&lt;/P&gt;
&lt;P&gt;&lt;SPAN&gt;Regards,&lt;/SPAN&gt;&lt;/P&gt;
&lt;P&gt;&lt;SPAN&gt;John.&lt;/SPAN&gt;&lt;/P&gt;</description>
      <pubDate>Sat, 22 Jul 2023 03:38:19 GMT</pubDate>
      <guid>https://community.qlik.com/t5/Qlik-Replicate/Not-able-to-run-task-while-DatabricksDelta-connector-as-sink/m-p/2096287#M6792</guid>
      <dc:creator>john_wang</dc:creator>
      <dc:date>2023-07-22T03:38:19Z</dc:date>
    </item>
    <item>
      <title>Re: Not able to run task while DatabricksDelta connector as sink</title>
      <link>https://community.qlik.com/t5/Qlik-Replicate/Not-able-to-run-task-while-DatabricksDelta-connector-as-sink/m-p/2096303#M6794</link>
      <description>&lt;P&gt;&lt;SPAN&gt;fs.azure.account.key.&lt;/SPAN&gt;&lt;EM&gt;&amp;lt;storage-account-name&amp;gt;&lt;/EM&gt;&lt;SPAN&gt;.dfs.core.windows.net&amp;nbsp;&lt;/SPAN&gt;&lt;EM&gt;&amp;lt;storage-account-access-key&amp;gt;&lt;/EM&gt;&lt;/P&gt;
&lt;P&gt;&lt;EM&gt;This will work in my case. I need to know currently I have hardcoded my accesskey in Databricks cluster. Is there any way to not hardcode key in spark config setting.&amp;nbsp;&lt;/EM&gt;&lt;/P&gt;
&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Sat, 22 Jul 2023 06:40:10 GMT</pubDate>
      <guid>https://community.qlik.com/t5/Qlik-Replicate/Not-able-to-run-task-while-DatabricksDelta-connector-as-sink/m-p/2096303#M6794</guid>
      <dc:creator>AnujGupta</dc:creator>
      <dc:date>2023-07-22T06:40:10Z</dc:date>
    </item>
    <item>
      <title>Re: Not able to run task while DatabricksDelta connector as sink</title>
      <link>https://community.qlik.com/t5/Qlik-Replicate/Not-able-to-run-task-while-DatabricksDelta-connector-as-sink/m-p/2096309#M6795</link>
      <description>&lt;P&gt;Hello&amp;nbsp;&lt;a href="https://community.qlik.com/t5/user/viewprofilepage/user-id/238493"&gt;@AnujGupta&lt;/a&gt;&amp;nbsp;,&lt;/P&gt;
&lt;P&gt;Good news! Thanks a lot for your update.&lt;/P&gt;
&lt;P&gt;Databricks supports 3&amp;nbsp;&lt;SPAN&gt;credential types to access Azure Data Lake Storage Gen2:&lt;/SPAN&gt;&lt;/P&gt;
&lt;UL&gt;
&lt;LI&gt;&lt;SPAN&gt;OAuth 2.0 with an Azure service principal&lt;/SPAN&gt;&lt;/LI&gt;
&lt;LI&gt;&lt;SPAN&gt;Shared access signatures (SAS)&lt;/SPAN&gt;&lt;/LI&gt;
&lt;LI&gt;&lt;SPAN&gt;Account keys&lt;/SPAN&gt;&lt;/LI&gt;
&lt;/UL&gt;
&lt;P&gt;&lt;SPAN&gt;Account keys are used in current major Replicate versions. You may raise a &lt;A title="Feature Request " href="https://community.qlik.com/t5/About-Ideation/ct-p/qlik-aboutideation" target="_blank" rel="noopener"&gt;Feature Request&lt;/A&gt; if you need other credential types.&lt;/SPAN&gt;&lt;/P&gt;
&lt;P&gt;&lt;SPAN&gt;thanks,&lt;/SPAN&gt;&lt;/P&gt;
&lt;P&gt;&lt;SPAN&gt;John.&lt;/SPAN&gt;&lt;/P&gt;</description>
      <pubDate>Sat, 22 Jul 2023 07:36:49 GMT</pubDate>
      <guid>https://community.qlik.com/t5/Qlik-Replicate/Not-able-to-run-task-while-DatabricksDelta-connector-as-sink/m-p/2096309#M6795</guid>
      <dc:creator>john_wang</dc:creator>
      <dc:date>2023-07-22T07:36:49Z</dc:date>
    </item>
    <item>
      <title>Re: Not able to run task while DatabricksDelta connector as sink</title>
      <link>https://community.qlik.com/t5/Qlik-Replicate/Not-able-to-run-task-while-DatabricksDelta-connector-as-sink/m-p/2096311#M6796</link>
      <description>&lt;P&gt;Please let me know how to raise Feature&amp;nbsp;Request if I need to use service principal.&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Sat, 22 Jul 2023 09:11:59 GMT</pubDate>
      <guid>https://community.qlik.com/t5/Qlik-Replicate/Not-able-to-run-task-while-DatabricksDelta-connector-as-sink/m-p/2096311#M6796</guid>
      <dc:creator>AnujGupta</dc:creator>
      <dc:date>2023-07-22T09:11:59Z</dc:date>
    </item>
    <item>
      <title>Re: Not able to run task while DatabricksDelta connector as sink</title>
      <link>https://community.qlik.com/t5/Qlik-Replicate/Not-able-to-run-task-while-DatabricksDelta-connector-as-sink/m-p/2096372#M6799</link>
      <description>&lt;P&gt;Hello&amp;nbsp;&lt;a href="https://community.qlik.com/t5/user/viewprofilepage/user-id/238493"&gt;@AnujGupta&lt;/a&gt;&amp;nbsp;,&lt;/P&gt;
&lt;P&gt;There is an article&amp;nbsp;&lt;A title="Ideation Guidelines: Getting Started with Ideation" href="https://community.qlik.com/t5/Get-Started/Ideation-Guidelines-Getting-Started-with-Ideation/ta-p/1960217" target="_blank" rel="noopener"&gt;Ideation Guidelines: Getting Started with Ideation&lt;/A&gt;&amp;nbsp;.&lt;/P&gt;
&lt;P&gt;Hope it helps.&lt;/P&gt;
&lt;P&gt;Regards,&lt;/P&gt;
&lt;P&gt;John.&lt;/P&gt;</description>
      <pubDate>Sun, 23 Jul 2023 14:59:50 GMT</pubDate>
      <guid>https://community.qlik.com/t5/Qlik-Replicate/Not-able-to-run-task-while-DatabricksDelta-connector-as-sink/m-p/2096372#M6799</guid>
      <dc:creator>john_wang</dc:creator>
      <dc:date>2023-07-23T14:59:50Z</dc:date>
    </item>
  </channel>
</rss>

