<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>topic Re: Spark 2.3, Spark 2.4 Local Mode in Talend Studio</title>
    <link>https://community.qlik.com/t5/Talend-Studio/Spark-2-3-Spark-2-4-Local-Mode/m-p/2252059#M35820</link>
    <description>&lt;P&gt;So the bug I opened was rejected and I dug deeper to see what's wrong with my settings.&lt;/P&gt; 
&lt;P&gt;Apparently there is nothing wrong with my settings, just one component, tLogRow doesn't work.&lt;/P&gt; 
&lt;P&gt;When I used other components, it was working fine with Spark 2.4.&lt;/P&gt; 
&lt;P&gt;&amp;nbsp;&lt;/P&gt; 
&lt;P&gt;The following job was successful.&lt;/P&gt; 
&lt;P&gt;&amp;nbsp;&lt;/P&gt; 
&lt;P&gt;&amp;nbsp;&lt;/P&gt; 
&lt;P&gt;&lt;SPAN class="lia-inline-image-display-wrapper lia-image-align-inline" image-alt="Screen Shot 2019-11-25 at 10.46.51 AM.png" style="width: 999px;"&gt;&lt;span class="lia-inline-image-display-wrapper" image-alt="0683p000009M8Fz.png"&gt;&lt;img src="https://community.qlik.com/t5/image/serverpage/image-id/136049iAC0EC37BC212A169/image-size/large?v=v2&amp;amp;px=999" role="button" title="0683p000009M8Fz.png" alt="0683p000009M8Fz.png" /&gt;&lt;/span&gt;&lt;/SPAN&gt;&lt;/P&gt; 
&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
    <pubDate>Mon, 25 Nov 2019 15:50:31 GMT</pubDate>
    <dc:creator>csapparapu</dc:creator>
    <dc:date>2019-11-25T15:50:31Z</dc:date>
    <item>
      <title>Spark 2.3, Spark 2.4 Local Mode</title>
      <link>https://community.qlik.com/t5/Talend-Studio/Spark-2-3-Spark-2-4-Local-Mode/m-p/2252056#M35817</link>
      <description>&lt;P&gt;Hello,&lt;/P&gt; 
&lt;P&gt;I am not able to run a Spark Big Data Batch job using Spark 2.3 or Spark 2.4 version locally.&lt;/P&gt; 
&lt;P&gt;I would like to know if Talend Big Data Platform V7.X works with Spark 2.3 or Spark 2.4 in local mode.&lt;/P&gt; 
&lt;P&gt;&amp;nbsp;&lt;/P&gt; 
&lt;P&gt;I get the following exception. Spark 2.2 version works fine.&lt;/P&gt; 
&lt;P&gt;&amp;nbsp;&lt;/P&gt; 
&lt;P&gt;&amp;nbsp;&lt;/P&gt; 
&lt;PRE&gt;java.lang.IllegalArgumentException
	at org.apache.xbean.asm5.ClassReader.&amp;lt;init&amp;gt;(Unknown Source)
	at org.apache.xbean.asm5.ClassReader.&amp;lt;init&amp;gt;(Unknown Source)
	at org.apache.xbean.asm5.ClassReader.&amp;lt;init&amp;gt;(Unknown Source)
	at org.apache.spark.util.ClosureCleaner$.getClassReader(ClosureCleaner.scala:46)
	at org.apache.spark.util.FieldAccessFinder$$anon$3$$anonfun$visitMethodInsn$2.apply(ClosureCleaner.scala:449)
	at org.apache.spark.util.FieldAccessFinder$$anon$3$$anonfun$visitMethodInsn$2.apply(ClosureCleaner.scala:432)
	at scala.collection.TraversableLike$WithFilter$$anonfun$foreach$1.apply(TraversableLike.scala:733)
	at scala.collection.mutable.HashMap$$anon$1$$anonfun$foreach$2.apply(HashMap.scala:103)
	at scala.collection.mutable.HashMap$$anon$1$$anonfun$foreach$2.apply(HashMap.scala:103)
	at scala.collection.mutable.HashTable$class.foreachEntry(HashTable.scala:230)
	at scala.collection.mutable.HashMap.foreachEntry(HashMap.scala:40)
	at scala.collection.mutable.HashMap$$anon$1.foreach(HashMap.scala:103)
	at scala.collection.TraversableLike$WithFilter.foreach(TraversableLike.scala:732)
	at org.apache.spark.util.FieldAccessFinder$$anon$3.visitMethodInsn(ClosureCleaner.scala:432)
	at org.apache.xbean.asm5.ClassReader.a(Unknown Source)
	at org.apache.xbean.asm5.ClassReader.b(Unknown Source)
	at org.apache.xbean.asm5.ClassReader.accept(Unknown Source)
	at org.apache.xbean.asm5.ClassReader.accept(Unknown Source)
	at org.apache.spark.util.ClosureCleaner$$anonfun$org$apache$spark$util$ClosureCleaner$$clean$14.apply(ClosureCleaner.scala:262)
	at org.apache.spark.util.ClosureCleaner$$anonfun$org$apache$spark$util$ClosureCleaner$$clean$14.apply(ClosureCleaner.scala:261)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.apache.spark.util.ClosureCleaner$.org$apache$spark$util$ClosureCleaner$$clean(ClosureCleaner.scala:261)
	at org.apache.spark.util.ClosureCleaner$.clean(ClosureCleaner.scala:159)
	at org.apache.spark.SparkContext.clean(SparkContext.scala:2292)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:2066)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:2092)
	at org.apache.spark.rdd.RDD$$anonfun$collect$1.apply(RDD.scala:939)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
	at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
	at org.apache.spark.rdd.RDD.collect(RDD.scala:938)
	at org.apache.spark.sql.execution.datasources.parquet.ParquetFileFormat$.mergeSchemasInParallel(ParquetFileFormat.scala:612)
	at org.apache.spark.sql.execution.datasources.parquet.ParquetFileFormat.inferSchema(ParquetFileFormat.scala:241)
	at org.apache.spark.sql.execution.datasources.DataSource$$anonfun$8.apply(DataSource.scala:202)
	at org.apache.spark.sql.execution.datasources.DataSource$$anonfun$8.apply(DataSource.scala:202)
	at scala.Option.orElse(Option.scala:289)
	at org.apache.spark.sql.execution.datasources.DataSource.getOrInferFileFormatSchema(DataSource.scala:201)
	at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:392)
	at org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:239)
	at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:227)
	at org.apache.spark.sql.DataFrameReader.parquet(DataFrameReader.scala:620)
	at org.apache.spark.sql.DataFrameReader.parquet(DataFrameReader.scala:604)&lt;/PRE&gt; 
&lt;P&gt;&amp;nbsp;&lt;/P&gt; 
&lt;P&gt;Thanks,&lt;/P&gt; 
&lt;P&gt;Chandana&lt;/P&gt;</description>
      <pubDate>Sat, 16 Nov 2024 04:08:20 GMT</pubDate>
      <guid>https://community.qlik.com/t5/Talend-Studio/Spark-2-3-Spark-2-4-Local-Mode/m-p/2252056#M35817</guid>
      <dc:creator>csapparapu</dc:creator>
      <dc:date>2024-11-16T04:08:20Z</dc:date>
    </item>
    <item>
      <title>Re: Spark 2.3, Spark 2.4 Local Mode</title>
      <link>https://community.qlik.com/t5/Talend-Studio/Spark-2-3-Spark-2-4-Local-Mode/m-p/2252057#M35818</link>
      <description>&lt;P&gt;&lt;A href="https://community.qlik.com/s/profile/0053p0000078APoAAM"&gt;@csapparapu&lt;/A&gt;&amp;nbsp;, I have used Talend Cloud Real-time Big Data Platform, where 2.3 is working in local mode.&lt;/P&gt;</description>
      <pubDate>Wed, 13 Nov 2019 06:09:23 GMT</pubDate>
      <guid>https://community.qlik.com/t5/Talend-Studio/Spark-2-3-Spark-2-4-Local-Mode/m-p/2252057#M35818</guid>
      <dc:creator>manodwhb</dc:creator>
      <dc:date>2019-11-13T06:09:23Z</dc:date>
    </item>
    <item>
      <title>Re: Spark 2.3, Spark 2.4 Local Mode</title>
      <link>https://community.qlik.com/t5/Talend-Studio/Spark-2-3-Spark-2-4-Local-Mode/m-p/2252058#M35819</link>
      <description>&lt;P&gt;&lt;A href="https://community.qlik.com/s/profile/0053p000007LKmJAAW"&gt;@manodwhb&lt;/A&gt;,&amp;nbsp;thanks for your response.&lt;/P&gt; 
&lt;P&gt;&amp;nbsp;&lt;/P&gt; 
&lt;P&gt;After spending a lot of time on this, I realized the problem is with Talend Big Data platform.&lt;/P&gt; 
&lt;P&gt;I've raised a bug on TalendForge.&lt;/P&gt; 
&lt;P&gt;&amp;nbsp;&lt;/P&gt; 
&lt;P&gt;&lt;A href="https://jira.talendforge.org/browse/TBD-9576" target="_blank" rel="nofollow noopener noreferrer"&gt;https://jira.talendforge.org/browse/TBD-9576&lt;/A&gt;&lt;/P&gt;</description>
      <pubDate>Mon, 18 Nov 2019 15:44:52 GMT</pubDate>
      <guid>https://community.qlik.com/t5/Talend-Studio/Spark-2-3-Spark-2-4-Local-Mode/m-p/2252058#M35819</guid>
      <dc:creator>csapparapu</dc:creator>
      <dc:date>2019-11-18T15:44:52Z</dc:date>
    </item>
    <item>
      <title>Re: Spark 2.3, Spark 2.4 Local Mode</title>
      <link>https://community.qlik.com/t5/Talend-Studio/Spark-2-3-Spark-2-4-Local-Mode/m-p/2252059#M35820</link>
      <description>&lt;P&gt;So the bug I opened was rejected and I dug deeper to see what's wrong with my settings.&lt;/P&gt; 
&lt;P&gt;Apparently there is nothing wrong with my settings, just one component, tLogRow doesn't work.&lt;/P&gt; 
&lt;P&gt;When I used other components, it was working fine with Spark 2.4.&lt;/P&gt; 
&lt;P&gt;&amp;nbsp;&lt;/P&gt; 
&lt;P&gt;The following job was successful.&lt;/P&gt; 
&lt;P&gt;&amp;nbsp;&lt;/P&gt; 
&lt;P&gt;&amp;nbsp;&lt;/P&gt; 
&lt;P&gt;&lt;SPAN class="lia-inline-image-display-wrapper lia-image-align-inline" image-alt="Screen Shot 2019-11-25 at 10.46.51 AM.png" style="width: 999px;"&gt;&lt;span class="lia-inline-image-display-wrapper" image-alt="0683p000009M8Fz.png"&gt;&lt;img src="https://community.qlik.com/t5/image/serverpage/image-id/136049iAC0EC37BC212A169/image-size/large?v=v2&amp;amp;px=999" role="button" title="0683p000009M8Fz.png" alt="0683p000009M8Fz.png" /&gt;&lt;/span&gt;&lt;/SPAN&gt;&lt;/P&gt; 
&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Mon, 25 Nov 2019 15:50:31 GMT</pubDate>
      <guid>https://community.qlik.com/t5/Talend-Studio/Spark-2-3-Spark-2-4-Local-Mode/m-p/2252059#M35820</guid>
      <dc:creator>csapparapu</dc:creator>
      <dc:date>2019-11-25T15:50:31Z</dc:date>
    </item>
  </channel>
</rss>

