<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>topic Re: Write a delta format file in Talend Studio</title>
    <link>https://community.qlik.com/t5/Talend-Studio/Write-a-delta-format-file/m-p/2346336#M113751</link>
    <description>&lt;P&gt;Hi Sabrina,&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;I think the thing is working even with this error.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Do you know how to avoid this error ?&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Thank you&lt;/P&gt;</description>
    <pubDate>Mon, 29 May 2023 19:04:35 GMT</pubDate>
    <dc:creator>MBourassa1682971203</dc:creator>
    <dc:date>2023-05-29T19:04:35Z</dc:date>
    <item>
      <title>Write a delta format file</title>
      <link>https://community.qlik.com/t5/Talend-Studio/Write-a-delta-format-file/m-p/2346331#M113746</link>
      <description>&lt;P&gt;Hi,&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;I would like to know if there is a way to write in a delta format file ?&lt;/P&gt;&lt;P&gt;My project is to take a table from a db and write it in a delta format file.&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;I have seen "tDeltaLakeOutput properties for Apache Spark Batch" and it seems it is possible to store data in Delta format in files but I don't find this basic setting. I don't know if this could be the solution ?&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Thank you&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;&lt;span class="lia-inline-image-display-wrapper" image-alt="0695b00000ht6gAAAQ.jpg"&gt;&lt;img src="https://community.qlik.com/t5/image/serverpage/image-id/133962i4E50445E0D7E4967/image-size/large?v=v2&amp;amp;px=999" role="button" title="0695b00000ht6gAAAQ.jpg" alt="0695b00000ht6gAAAQ.jpg" /&gt;&lt;/span&gt;&lt;span class="lia-inline-image-display-wrapper" image-alt="0695b00000ht6fvAAA.png"&gt;&lt;img src="https://community.qlik.com/t5/image/serverpage/image-id/146051i9FB4BDFDBB03CCF4/image-size/large?v=v2&amp;amp;px=999" role="button" title="0695b00000ht6fvAAA.png" alt="0695b00000ht6fvAAA.png" /&gt;&lt;/span&gt;&lt;/P&gt;</description>
      <pubDate>Fri, 15 Nov 2024 21:45:44 GMT</pubDate>
      <guid>https://community.qlik.com/t5/Talend-Studio/Write-a-delta-format-file/m-p/2346331#M113746</guid>
      <dc:creator>MBourassa1682971203</dc:creator>
      <dc:date>2024-11-15T21:45:44Z</dc:date>
    </item>
    <item>
      <title>Re: Write a delta format file</title>
      <link>https://community.qlik.com/t5/Talend-Studio/Write-a-delta-format-file/m-p/2346332#M113747</link>
      <description>&lt;P&gt;Hello,&lt;/P&gt;&lt;P&gt;So far, we are able to ingest data with Delta format in dataset.&lt;/P&gt;&lt;P&gt;Could you please let us know if this KB article helps?&lt;/P&gt;&lt;P&gt;&lt;A href="https://community.talend.com/s/article/How-to-ingest-data-to-Azure-Databricks-Delta-Lake-with-Delta-format-in-Standard-job" alt="https://community.talend.com/s/article/How-to-ingest-data-to-Azure-Databricks-Delta-Lake-with-Delta-format-in-Standard-job" target="_blank"&gt;https://community.talend.com/s/article/How-to-ingest-data-to-Azure-Databricks-Delta-Lake-with-Delta-format-in-Standard-job&lt;/A&gt;&lt;/P&gt;&lt;P&gt;Best regards&lt;/P&gt;&lt;P&gt;Sabrina&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Mon, 29 May 2023 03:51:11 GMT</pubDate>
      <guid>https://community.qlik.com/t5/Talend-Studio/Write-a-delta-format-file/m-p/2346332#M113747</guid>
      <dc:creator>Anonymous</dc:creator>
      <dc:date>2023-05-29T03:51:11Z</dc:date>
    </item>
    <item>
      <title>Re: Write a delta format file</title>
      <link>https://community.qlik.com/t5/Talend-Studio/Write-a-delta-format-file/m-p/2346333#M113748</link>
      <description>&lt;P&gt;Hi Sabrina,&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;The page is not working: I get this error: Oops! Looks like we ran into a problem with your request. Please contact Talend Customer Support for further assistance.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;in a talend page.&lt;/P&gt;</description>
      <pubDate>Mon, 29 May 2023 13:25:50 GMT</pubDate>
      <guid>https://community.qlik.com/t5/Talend-Studio/Write-a-delta-format-file/m-p/2346333#M113748</guid>
      <dc:creator>MBourassa1682971203</dc:creator>
      <dc:date>2023-05-29T13:25:50Z</dc:date>
    </item>
    <item>
      <title>Re: Write a delta format file</title>
      <link>https://community.qlik.com/t5/Talend-Studio/Write-a-delta-format-file/m-p/2346334#M113749</link>
      <description>&lt;P&gt;Hi Sabrina,&lt;/P&gt;&lt;P&gt;I think I have found what I'm looking for.&lt;/P&gt;&lt;P&gt;I have seen that it is possible to convert parquet file to delta format file so I thought to get data from a data base, put it in a parquet file and convert it to delta format file like this.&lt;/P&gt;&lt;P&gt;Could you tell me if this could do the work ?&lt;/P&gt;&lt;P&gt;&lt;span class="lia-inline-image-display-wrapper" image-alt="0695b00000htD93AAE.jpg"&gt;&lt;img src="https://community.qlik.com/t5/image/serverpage/image-id/156744i011DE85139D21DB7/image-size/large?v=v2&amp;amp;px=999" role="button" title="0695b00000htD93AAE.jpg" alt="0695b00000htD93AAE.jpg" /&gt;&lt;/span&gt;Thank you&lt;/P&gt;&lt;P&gt;I have tried but I get this error:&lt;/P&gt;&lt;P&gt;Exception in thread "main" java.lang.UnsatisfiedLinkError: 'boolean org.apache.hadoop.io.nativeio.NativeIO$Windows.access0(java.lang.String, int)'&lt;/P&gt;&lt;P&gt;	at org.apache.hadoop.io.nativeio.NativeIO$Windows.access0(Native Method)&lt;/P&gt;&lt;P&gt;	at org.apache.hadoop.io.nativeio.NativeIO$Windows.access(NativeIO.java:793)&lt;/P&gt;&lt;P&gt;	at org.apache.hadoop.fs.FileUtil.canRead(FileUtil.java:1215)&lt;/P&gt;&lt;P&gt;	at org.apache.hadoop.fs.FileUtil.list(FileUtil.java:1420)&lt;/P&gt;&lt;P&gt;	at org.apache.hadoop.fs.RawLocalFileSystem.listStatus(RawLocalFileSystem.java:601)&lt;/P&gt;&lt;P&gt;	at org.apache.hadoop.fs.FileSystem.listStatus(FileSystem.java:1972)&lt;/P&gt;&lt;P&gt;	at org.apache.hadoop.fs.FileSystem.listStatus(FileSystem.java:2014)&lt;/P&gt;&lt;P&gt;	at org.apache.hadoop.fs.ChecksumFileSystem.listStatus(ChecksumFileSystem.java:761)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.storage.HadoopFileSystemLogStore.listFrom(HadoopFileSystemLogStore.scala:83)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.storage.DelegatingLogStore.listFrom(DelegatingLogStore.scala:119)&lt;/P&gt;&lt;P&gt;	at 
org.apache.spark.sql.delta.SnapshotManagement.listFrom(SnapshotManagement.scala:62)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.SnapshotManagement.listFrom$(SnapshotManagement.scala:61)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.DeltaLog.listFrom(DeltaLog.scala:62)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.SnapshotManagement.getLogSegmentForVersion(SnapshotManagement.scala:95)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.SnapshotManagement.getLogSegmentForVersion$(SnapshotManagement.scala:89)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.DeltaLog.getLogSegmentForVersion(DeltaLog.scala:62)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.SnapshotManagement.$anonfun$updateInternal$1(SnapshotManagement.scala:284)&lt;/P&gt;&lt;P&gt;	at com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:77)&lt;/P&gt;&lt;P&gt;	at com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:67)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.DeltaLog.recordOperation(DeltaLog.scala:62)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:112)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:97)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.DeltaLog.recordDeltaOperation(DeltaLog.scala:62)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.SnapshotManagement.updateInternal(SnapshotManagement.scala:282)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.SnapshotManagement.updateInternal$(SnapshotManagement.scala:281)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.DeltaLog.updateInternal(DeltaLog.scala:62)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.SnapshotManagement.$anonfun$update$1(SnapshotManagement.scala:243)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.DeltaLog.lockInterruptibly(DeltaLog.scala:163)&lt;/P&gt;&lt;P&gt;	at 
org.apache.spark.sql.delta.SnapshotManagement.update(SnapshotManagement.scala:243)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.SnapshotManagement.update$(SnapshotManagement.scala:239)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.DeltaLog.update(DeltaLog.scala:62)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.OptimisticTransactionImpl.doCommit(OptimisticTransaction.scala:749)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.OptimisticTransactionImpl.doCommit$(OptimisticTransaction.scala:715)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.OptimisticTransaction.doCommit(OptimisticTransaction.scala:86)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.OptimisticTransactionImpl.$anonfun$doCommitRetryIteratively$2(OptimisticTransaction.scala:684)&lt;/P&gt;&lt;P&gt;	at com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:77)&lt;/P&gt;&lt;P&gt;	at com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:67)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.OptimisticTransaction.recordOperation(OptimisticTransaction.scala:86)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:112)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:97)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.OptimisticTransaction.recordDeltaOperation(OptimisticTransaction.scala:86)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.OptimisticTransactionImpl.$anonfun$doCommitRetryIteratively$1(OptimisticTransaction.scala:680)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.DeltaLog.lockInterruptibly(DeltaLog.scala:163)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.OptimisticTransactionImpl.lockCommitIfEnabled(OptimisticTransaction.scala:659)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.OptimisticTransactionImpl.doCommitRetryIteratively(OptimisticTransaction.scala:674)&lt;/P&gt;&lt;P&gt;	at 
org.apache.spark.sql.delta.OptimisticTransactionImpl.doCommitRetryIteratively$(OptimisticTransaction.scala:671)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.OptimisticTransaction.doCommitRetryIteratively(OptimisticTransaction.scala:86)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.OptimisticTransactionImpl.liftedTree1$1(OptimisticTransaction.scala:522)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.OptimisticTransactionImpl.$anonfun$commit$1(OptimisticTransaction.scala:462)&lt;/P&gt;&lt;P&gt;	at scala.runtime.java8.JFunction0$mcJ$sp.apply(JFunction0$mcJ$sp.java:23)&lt;/P&gt;&lt;P&gt;	at com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:77)&lt;/P&gt;&lt;P&gt;	at com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:67)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.OptimisticTransaction.recordOperation(OptimisticTransaction.scala:86)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:112)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:97)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.OptimisticTransaction.recordDeltaOperation(OptimisticTransaction.scala:86)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.OptimisticTransactionImpl.commit(OptimisticTransaction.scala:459)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.OptimisticTransactionImpl.commit$(OptimisticTransaction.scala:457)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.OptimisticTransaction.commit(OptimisticTransaction.scala:86)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.commands.WriteIntoDelta.$anonfun$run$1(WriteIntoDelta.scala:83)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.commands.WriteIntoDelta.$anonfun$run$1$adapted(WriteIntoDelta.scala:78)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.DeltaLog.withNewTransaction(DeltaLog.scala:198)&lt;/P&gt;&lt;P&gt;	at 
org.apache.spark.sql.delta.commands.WriteIntoDelta.run(WriteIntoDelta.scala:78)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.delta.sources.DeltaDataSource.createRelation(DeltaDataSource.scala:154)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand.run(SaveIntoDataSourceCommand.scala:45)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:75)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:73)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:84)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:97)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:103)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:163)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:90)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:97)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:93)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:481)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:82)&lt;/P&gt;&lt;P&gt;	at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:481)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:30)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)&lt;/P&gt;&lt;P&gt;[WARN ] 12:49:54 org.apache.spark.SparkEnv- Exception while deleting Spark temp dir: C:\tmp\spark-d7580ae9-e76f-4980-aa3f-aa8657aab947\userFiles-a1c7c163-c4f2-461d-814c-617d60412e45&lt;/P&gt;&lt;P&gt;java.io.IOException: Failed to delete: C:\tmp\spark-d7580ae9-e76f-4980-aa3f-aa8657aab947\userFiles-a1c7c163-c4f2-461d-814c-617d60412e45\talend_file_enhanced-1.3.jar&lt;/P&gt;</description>
      <pubDate>Mon, 29 May 2023 17:43:17 GMT</pubDate>
      <guid>https://community.qlik.com/t5/Talend-Studio/Write-a-delta-format-file/m-p/2346334#M113749</guid>
      <dc:creator>MBourassa1682971203</dc:creator>
      <dc:date>2023-05-29T17:43:17Z</dc:date>
    </item>
    <item>
      <title>Re: Write a delta format file</title>
      <link>https://community.qlik.com/t5/Talend-Studio/Write-a-delta-format-file/m-p/2346335#M113750</link>
      <description>&lt;P&gt;I have fixed the first error by adding hadoop.dll in c:\windows\system32&lt;/P&gt;</description>
      <pubDate>Mon, 29 May 2023 18:33:30 GMT</pubDate>
      <guid>https://community.qlik.com/t5/Talend-Studio/Write-a-delta-format-file/m-p/2346335#M113750</guid>
      <dc:creator>MBourassa1682971203</dc:creator>
      <dc:date>2023-05-29T18:33:30Z</dc:date>
    </item>
    <item>
      <title>Re: Write a delta format file</title>
      <link>https://community.qlik.com/t5/Talend-Studio/Write-a-delta-format-file/m-p/2346336#M113751</link>
      <description>&lt;P&gt;Hi Sabrina,&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;I think the thing is working even with this error.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Do you know how to avoid this error ?&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Thank you&lt;/P&gt;</description>
      <pubDate>Mon, 29 May 2023 19:04:35 GMT</pubDate>
      <guid>https://community.qlik.com/t5/Talend-Studio/Write-a-delta-format-file/m-p/2346336#M113751</guid>
      <dc:creator>MBourassa1682971203</dc:creator>
      <dc:date>2023-05-29T19:04:35Z</dc:date>
    </item>
    <item>
      <title>Re: Write a delta format file</title>
      <link>https://community.qlik.com/t5/Talend-Studio/Write-a-delta-format-file/m-p/2346337#M113752</link>
      <description>&lt;P&gt;Hello,&lt;/P&gt;&lt;P&gt;The DB connection is successful with you when creating a JDBC metadata connection?&lt;/P&gt;&lt;P&gt;&lt;span class="lia-inline-image-display-wrapper" image-alt="0695b00000htEu0AAE.png"&gt;&lt;img src="https://community.qlik.com/t5/image/serverpage/image-id/139840iCE8E9C052D7F376B/image-size/large?v=v2&amp;amp;px=999" role="button" title="0695b00000htEu0AAE.png" alt="0695b00000htEu0AAE.png" /&gt;&lt;/span&gt;Could you please check if you untick checkbox "Use Auto-Commit"&lt;/P&gt;&lt;P&gt;Inside tDBOutput component - Advanced settings tab and re-run the job to see if it works?&lt;/P&gt;&lt;P&gt;Best regards&lt;/P&gt;&lt;P&gt;Sabrina&lt;/P&gt;</description>
      <pubDate>Tue, 30 May 2023 03:58:35 GMT</pubDate>
      <guid>https://community.qlik.com/t5/Talend-Studio/Write-a-delta-format-file/m-p/2346337#M113752</guid>
      <dc:creator>Anonymous</dc:creator>
      <dc:date>2023-05-30T03:58:35Z</dc:date>
    </item>
  </channel>
</rss>

