Commit fa13155
fix scalastyle issues (#127)
nlu90 authored Apr 12, 2023
1 parent 4619401 commit fa13155
Showing 6 changed files with 10 additions and 3 deletions.
@@ -14,11 +14,14 @@
package org.apache.spark.sql.pulsar

import java.util.concurrent.TimeUnit

import scala.util.{Failure, Success, Try}

import com.google.common.cache._
import org.apache.pulsar.client.api.{Consumer, PulsarClient, SubscriptionInitialPosition}
import org.apache.pulsar.client.api.schema.GenericRecord
import org.apache.pulsar.client.impl.schema.AutoConsumeSchema

import org.apache.spark.SparkEnv
import org.apache.spark.internal.Logging

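Most of this commit adds blank lines like the ones above so that imports fall into the groups the scalastyle import check appears to expect: java/javax first, then scala, then third-party libraries, then org.apache.spark, with a single blank line between groups. A minimal sketch of that convention, using imports taken from the files in this diff purely as examples:

// Illustration of the import-grouping convention applied throughout this commit.
import java.util.concurrent.TimeUnit

import scala.util.{Failure, Success, Try}

import com.google.common.cache._
import org.apache.pulsar.client.api.PulsarClient

import org.apache.spark.internal.Logging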
@@ -23,6 +23,7 @@ import com.google.common.cache._
import com.google.common.util.concurrent.{ExecutionError, UncheckedExecutionException}
import org.apache.pulsar.client.api.PulsarClient
import org.apache.pulsar.client.impl.PulsarClientImpl

import org.apache.spark.SparkEnv
import org.apache.spark.internal.Logging
import org.apache.spark.sql.pulsar.PulsarOptions._
@@ -30,6 +30,7 @@ import org.apache.pulsar.common.api.proto.CommandGetTopicsOfNamespace
import org.apache.pulsar.common.naming.TopicName
import org.apache.pulsar.common.schema.SchemaInfo
import org.apache.pulsar.shade.com.google.common.util.concurrent.Uninterruptibles

import org.apache.spark.internal.Logging
import org.apache.spark.sql.pulsar.PulsarOptions._
import org.apache.spark.sql.types.StructType
3 changes: 2 additions & 1 deletion src/main/scala/org/apache/spark/sql/pulsar/PulsarSinks.scala
@@ -19,8 +19,9 @@ import java.util.concurrent.TimeUnit
import scala.util.control.NonFatal

import org.apache.pulsar.client.api.{Producer, PulsarClientException, Schema}

import org.apache.spark.internal.Logging
- import org.apache.spark.sql.{AnalysisException, DataFrame, SQLContext, SparkSession}
+ import org.apache.spark.sql.{AnalysisException, DataFrame, SparkSession, SQLContext}
import org.apache.spark.sql.catalyst.expressions
import org.apache.spark.sql.catalyst.expressions.{Attribute, Literal}
import org.apache.spark.sql.execution.QueryExecution
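The reordering of SparkSession ahead of SQLContext above suggests the style check compares selector names case-insensitively ("sparksession" sorts before "sqlcontext"). A small illustrative snippet of that comparison, not taken from the checker itself:

// Illustrative only: case-insensitive ordering of import selector names.
val selectors = Seq("AnalysisException", "DataFrame", "SQLContext", "SparkSession")
val sorted = selectors.sortBy(_.toLowerCase)
// sorted == Seq("AnalysisException", "DataFrame", "SparkSession", "SQLContext")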
@@ -99,8 +99,8 @@ private[pulsar] abstract class PulsarSourceRDDBase(
case (_: BatchMessageIdImpl, _: BatchMessageIdImpl) =>
// we seek using a batch message id, we can read next directly in `getNext()`
case (_: MessageIdImpl, cbmid: BatchMessageIdImpl) =>
- // we seek using a message id, this is supposed to be read by previous task since it's
- // inclusive for the last batch (start, end], so we skip this batch
+ // we seek using a message id, this is supposed to be read by previous task since
+ // it's inclusive for the last batch (start, end], so we skip this batch
val newStart = new MessageIdImpl(
cbmid.getLedgerId,
cbmid.getEntryId + 1,
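The rewrapped comment above describes the seek adjustment in PulsarSourceRDDBase: when the start position is a plain MessageIdImpl but the consumer sits at a BatchMessageIdImpl, the previous task already consumed that last batch (offset ranges are end-inclusive, (start, end]), so the reader advances past it to the next ledger entry. A standalone sketch of that logic using names from the diff; the method wrapper and the getPartitionIndex argument are illustrative, since the diff truncates the MessageIdImpl constructor call:

import org.apache.pulsar.client.impl.{BatchMessageIdImpl, MessageIdImpl}

def adjustedStart(start: MessageIdImpl, current: MessageIdImpl): MessageIdImpl =
  (start, current) match {
    case (_: BatchMessageIdImpl, _: BatchMessageIdImpl) =>
      // Seeking with a batch message id: the next message can be read directly.
      current
    case (_: MessageIdImpl, cbmid: BatchMessageIdImpl) =>
      // Seeking with a plain message id: the previous task already read the whole
      // last batch, because the range (start, end] is end-inclusive; skip past it.
      new MessageIdImpl(cbmid.getLedgerId, cbmid.getEntryId + 1, cbmid.getPartitionIndex)
    case _ => current
  }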
@@ -19,6 +19,7 @@ import java.util.function.BiConsumer
import scala.collection.mutable

import org.apache.pulsar.client.api.{MessageId, Producer}

import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.{Attribute, Cast, Literal, UnsafeProjection}
import org.apache.spark.sql.types._
