<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>Topic: Loading 200+ Files to 200+ Snowflake tables in Talend Studio</title>
    <link>https://community.qlik.com/t5/Talend-Studio/Loading-200-Files-to-200-Snowflake-tables/m-p/2329926#M99065</link>
    <description>&lt;P&gt;Hi, I have a problem in which I want to load 200+ files into 200+ Snowflake tables (upsert logic). Earlier, for around 50 tables, I had used 50 different tFileList and 50 tFileInputDelimited components. Are there any built-in components to handle this, or a mechanism which could be used to implement it? I don't want to use a tFileList and a tFileInputDelimited for every new file we get.&lt;/P&gt;&lt;P&gt;Some of the incoming files have similar names; to differentiate them I have used a regex.&lt;/P&gt;&lt;P&gt;Is there a possibility to create a hashmap and use it in a single component to load all the files into their respective tables and implement the upsert logic?&lt;/P&gt;</description>
    <pubDate>Fri, 15 Nov 2024 22:25:21 GMT</pubDate>
    <dc:creator>Rishab1</dc:creator>
    <dc:date>2024-11-15T22:25:21Z</dc:date>
    <item>
      <title>Loading 200+ Files to 200+ Snowflake tables</title>
      <link>https://community.qlik.com/t5/Talend-Studio/Loading-200-Files-to-200-Snowflake-tables/m-p/2329926#M99065</link>
      <description>&lt;P&gt;Hi, I have a problem in which I want to load 200+ files into 200+ Snowflake tables (upsert logic). Earlier, for around 50 tables, I had used 50 different tFileList and 50 tFileInputDelimited components. Are there any built-in components to handle this, or a mechanism which could be used to implement it? I don't want to use a tFileList and a tFileInputDelimited for every new file we get.&lt;/P&gt;&lt;P&gt;Some of the incoming files have similar names; to differentiate them I have used a regex.&lt;/P&gt;&lt;P&gt;Is there a possibility to create a hashmap and use it in a single component to load all the files into their respective tables and implement the upsert logic?&lt;/P&gt;</description>
      <pubDate>Fri, 15 Nov 2024 22:25:21 GMT</pubDate>
      <guid>https://community.qlik.com/t5/Talend-Studio/Loading-200-Files-to-200-Snowflake-tables/m-p/2329926#M99065</guid>
      <dc:creator>Rishab1</dc:creator>
      <dc:date>2024-11-15T22:25:21Z</dc:date>
    </item>
    <item>
      <title>Re: Loading 200+ Files to 200+ Snowflake tables</title>
      <link>https://community.qlik.com/t5/Talend-Studio/Loading-200-Files-to-200-Snowflake-tables/m-p/2329927#M99066</link>
      <description>&lt;P&gt;Hi,&lt;/P&gt;&lt;P&gt;To iterate over multiple files in a specified directory, tFileList is the right component to use. If some files are not updated, use tFileList to iterate over all the files, then select only the new files and process them.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Regards&lt;/P&gt;&lt;P&gt;Shong&lt;/P&gt;</description>
      <pubDate>Fri, 28 Oct 2022 05:01:46 GMT</pubDate>
      <guid>https://community.qlik.com/t5/Talend-Studio/Loading-200-Files-to-200-Snowflake-tables/m-p/2329927#M99066</guid>
      <dc:creator>Anonymous</dc:creator>
      <dc:date>2022-10-28T05:01:46Z</dc:date>
    </item>
  </channel>
</rss>

