Scala Spark and MongoDB connection

Hello — how can I connect to MongoDB from my Scala 3 application using Spark?

import org.apache.spark.sql.{SparkSession, DataFrame}

/** Entry point: opens a local Spark session, reads one MongoDB collection,
  * and prints its first rows.
  *
  * NOTE(review): credentials are hard-coded in the URI — move them to
  * configuration/environment before sharing or committing this code.
  */
@main
def main(): Unit = {
  println("Hello world!")

  // Single source of truth for the connection string (was duplicated before).
  val mongoUri = "mongodb://cagri:[email protected]:27017/?authSource=admin"

  val spark = SparkSession.builder()
    .appName("helloSpark")
    .master("local[*]")
    // Connector 10.x reads this key as the default read URI.
    .config("spark.mongodb.read.connection.uri", mongoUri)
    .getOrCreate()

  spark.sparkContext.setLogLevel("WARN")

  // mongo-spark-connector 10.x registers the short format name "mongodb".
  // "com.mongodb.spark.sql.DefaultSource" belongs to the old 3.x connector
  // and does not exist in 10.x.
  val df: DataFrame = spark.read
    .format("mongodb")
    .option("connection.uri", mongoUri) // 10.x option key is "connection.uri", not "uri"
    .option("database", "wifi")               // database name
    .option("collection", "climac_positions_big") // collection name
    .load()

  df.show()

  spark.stop() // release local Spark resources on exit
}

build.sbt:
// build.sbt — all quotes must be plain ASCII '"'; the typographic “…” quotes
// in the original would fail to parse.
ThisBuild / version := "0.1.0-SNAPSHOT"

ThisBuild / scalaVersion := "3.4.2"

lazy val root = (project in file("."))
  .settings(
    name := "main"
  )

val sparkVersion = "3.5.1"

// Spark has no Scala 3 artifacts; for3Use2_13 resolves the _2.13 builds,
// which are binary-compatible with Scala 3. mongo-spark-connector 10.x is
// also published for 2.13, so the same mapping works for it.
libraryDependencies ++= Seq(
  ("org.apache.spark" %% "spark-core" % sparkVersion),
  ("org.apache.spark" %% "spark-sql" % sparkVersion),
  ("org.mongodb.spark" %% "mongo-spark-connector" % "10.3.0")
).map(_.cross(CrossVersion.for3Use2_13))