Add back S3Mock related tests but comment out all code
1 parent 0f87a60 · commit 325ea6e
Showing 2 changed files with 245 additions and 0 deletions.
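
Both restored specs drive a local S3 endpoint through the same S3Mock lifecycle: build a mutable property map, start the mock before the suite, and stop it after. A minimal standalone sketch of that pattern, using only the `S3MockApplication` calls that appear in the diffs below; the bucket name here is a placeholder:

```scala
import com.adobe.testing.s3mock.S3MockApplication

import scala.collection.JavaConverters._
import scala.collection.mutable

object S3MockLifecycleSketch extends App {
  // Same property-map shape as both specs; "example-bucket" is a placeholder.
  val properties: mutable.Map[String, Object] = mutable
    .Map(
      S3MockApplication.PROP_HTTP_PORT -> S3MockApplication.DEFAULT_HTTP_PORT,
      S3MockApplication.PROP_HTTPS_PORT -> S3MockApplication.DEFAULT_HTTPS_PORT,
      S3MockApplication.PROP_SILENT -> true,
      S3MockApplication.PROP_INITIAL_BUCKETS -> "example-bucket"
    )
    .map { case (k, v) => (k, v.asInstanceOf[Object]) }

  // Start the mock (the specs do this in beforeAll) ...
  val s3mock: S3MockApplication = S3MockApplication.start(properties.asJava)
  try {
    // ... exercise code against the mock S3 endpoint here ...
  } finally {
    // ... and always stop it (the specs do this in afterAll).
    s3mock.stop()
  }
}
```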
modified-lar/src/test/scala/hmda/publication/lar/parser/ModifiedLarPublisherSpec.scala (123 additions, 0 deletions)
@@ -0,0 +1,123 @@
//package hmda.publication.lar.publication
//
//import akka.NotUsed
//import akka.actor.ActorSystem
//import akka.actor.typed.{ActorSystem => TypedSystem}
//import akka.actor.typed.scaladsl.AskPattern._
//import akka.actor.typed.scaladsl.Behaviors
//import akka.actor.typed.scaladsl.adapter._
//import akka.actor.typed.{ActorRef, SupervisorStrategy}
//import akka.stream.scaladsl.Source
//import akka.testkit.TestKit
//import akka.util.Timeout
//import com.adobe.testing.s3mock.S3MockApplication
//import hmda.census.records.CensusRecords
//import hmda.messages.submission.HmdaRawDataEvents.LineAdded
//import hmda.model.census.Census
//import hmda.model.filing.lar.{LarGenerators, LoanApplicationRegister}
//import hmda.model.filing.submission.SubmissionId
//import hmda.persistence.util.CassandraUtil
//import hmda.query.repository.ModifiedLarRepository
//import hmda.utils.EmbeddedPostgres
//import hmda.utils.YearUtils.Period
//import io.github.embeddedkafka.EmbeddedKafkaConfig.defaultConfig.{kafkaPort, zooKeeperPort}
//import io.github.embeddedkafka.{EmbeddedK, EmbeddedKafka, EmbeddedKafkaConfig}
//import org.scalacheck.Gen
//import org.scalatest.concurrent.{PatienceConfiguration, ScalaFutures}
//import org.scalatest.time.{Millis, Minutes, Span}
//import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
//import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks
//import slick.basic.DatabaseConfig
//import slick.jdbc.JdbcProfile
//
//import scala.annotation.tailrec
//import scala.collection.JavaConverters._
//import scala.collection.mutable
//import scala.concurrent.duration._
//
//class ModifiedLarPublisherSpec
//  extends TestKit(ActorSystem("publisher-spec"))
//    with WordSpecLike
//    with Matchers
//    with PatienceConfiguration
//    with ScalaFutures
//    with ScalaCheckPropertyChecks
//    with BeforeAndAfterAll
//    with EmbeddedPostgres {
//
//  implicit val typedSystem = system.toTyped
//
//  implicit val timeout = Timeout(3.minutes)
//
//  var s3mock: S3MockApplication = _
//  var kafka: EmbeddedK = _
//
//  implicit val embedKafkaConfig = EmbeddedKafkaConfig(kafkaPort = 9092, zooKeeperPort = 9093)
//
//  override def beforeAll(): Unit = {
//    super.beforeAll()
//    s3mock = S3MockApplication.start(properties.asJava)
//
//    kafka = EmbeddedKafka.start()(embedKafkaConfig)
//
//    CassandraUtil.startEmbeddedCassandra()
//  }
//
//  override def afterAll(): Unit = {
//    super.afterAll()
//    Option(s3mock).foreach(_.stop())
//    Option(kafka).foreach(_.stop(clearLogs = true))
//    CassandraUtil.shutdown()
//  }
//
//  override implicit def patienceConfig: PatienceConfig = PatienceConfig(timeout = Span(2, Minutes), interval = Span(100, Millis))
//
//  "Spawn publisher and upload data to S3 and Postgres" in {
//    @tailrec
//    def generateLarData(gen: Gen[List[LoanApplicationRegister]]): List[LoanApplicationRegister] = {
//      val data = gen.sample // use the passed-in generator rather than rebuilding one
//      if (data.isEmpty) generateLarData(gen) else data.get
//    }
//
//    val larData = generateLarData(LarGenerators.larNGen(100))
//    val censusTractMap2018: Map[String, Census] = CensusRecords.indexedTract2018
//    val censusTractMap2019: Map[String, Census] = CensusRecords.indexedTract2019
//    val censusTractMap2020: Map[String, Census] = CensusRecords.indexedTract2020
//    val censusTractMap2021: Map[String, Census] = CensusRecords.indexedTract2021
//    val censusTractMap2022: Map[String, Census] = CensusRecords.indexedTract2022
//    val censusTractMap2023: Map[String, Census] = CensusRecords.indexedTract2023
//
//    val customData: TypedSystem[_] => SubmissionId => Source[LineAdded, NotUsed] =
//      _ => _ => Source(larData.zipWithIndex.map { case (lar, timestamp) => LineAdded(timestamp, lar.toCSV) })
//
//    val databaseConfig = DatabaseConfig.forConfig[JdbcProfile]("embedded-pg")
//    val repo = new ModifiedLarRepository(databaseConfig)
//    val publisher = system.spawnAnonymous(
//      Behaviors
//        .supervise(ModifiedLarPublisher.behavior(censusTractMap2018, censusTractMap2019, censusTractMap2020, censusTractMap2021, censusTractMap2022, censusTractMap2023, repo, customData))
//        .onFailure(SupervisorStrategy.stop)
//    )
//    val submissionIdA = SubmissionId("B90YWS6AFX2LGWOXJ1LD", Period(2018, None), sequenceNumber = 1)
//    val submissionIdB = SubmissionId("B90YWS6AFX2LGWOXJ1LD", Period(2019, None), sequenceNumber = 1)
//
//    val resultA = publisher ? ((replyTo: ActorRef[PersistModifiedLarResult]) => PersistToS3AndPostgres(submissionIdA, replyTo))
//    val resultB = publisher ? ((replyTo: ActorRef[PersistModifiedLarResult]) => PersistToS3AndPostgres(submissionIdB, replyTo))
//    resultA.futureValue shouldBe PersistModifiedLarResult(submissionIdA, UploadSucceeded)
//    resultB.futureValue shouldBe PersistModifiedLarResult(submissionIdB, UploadSucceeded)
//  }
//
//  val properties: mutable.Map[String, Object] =
//    mutable // S3 Mock mutates the map, so we cannot use an immutable map :(
//      .Map(
//        S3MockApplication.PROP_HTTPS_PORT -> S3MockApplication.DEFAULT_HTTPS_PORT,
//        S3MockApplication.PROP_HTTP_PORT -> S3MockApplication.DEFAULT_HTTP_PORT,
//        S3MockApplication.PROP_SILENT -> true,
//        S3MockApplication.PROP_INITIAL_BUCKETS -> "cfpb-hmda-public-dev"
//      )
//      .map { case (k, v) => (k, v.asInstanceOf[Object]) }
//
//  override def bootstrapSqlFile: String = "modifiedlar.sql"
//}
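
The spec above talks to the publisher with Akka Typed's ask pattern, where `?` takes a function from a fresh reply `ActorRef` to the request message. A hedged sketch of that shape, with a hypothetical `Ping`/`Pong` protocol standing in for `PersistToS3AndPostgres` and `PersistModifiedLarResult`:

```scala
import akka.actor.typed.scaladsl.AskPattern._
import akka.actor.typed.scaladsl.Behaviors
import akka.actor.typed.{ActorRef, ActorSystem}
import akka.util.Timeout

import scala.concurrent.duration._
import scala.concurrent.{Await, Future}

// Hypothetical protocol standing in for PersistToS3AndPostgres / PersistModifiedLarResult.
final case class Ping(replyTo: ActorRef[Pong])
final case class Pong(message: String)

object AskPatternSketch extends App {
  // The responder replies to whatever ActorRef the ask pattern injects.
  val behavior = Behaviors.receiveMessage[Ping] { ping =>
    ping.replyTo ! Pong("upload succeeded")
    Behaviors.same
  }

  implicit val system: ActorSystem[Ping] = ActorSystem(behavior, "ask-sketch")
  implicit val timeout: Timeout = Timeout(3.seconds)

  // `?` hands us the replyTo ref, just like
  // publisher ? (replyTo => PersistToS3AndPostgres(submissionId, replyTo)) in the spec.
  val reply: Future[Pong] = system ? ((replyTo: ActorRef[Pong]) => Ping(replyTo))
  println(Await.result(reply, 5.seconds).message)
  system.terminate()
}
```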
ratespread-calculator/src/test/scala/hmda/calculator/scheduler/APORSchedulerSpec.scala (122 additions, 0 deletions)
@@ -0,0 +1,122 @@
//package hmda.calculator.scheduler
//
//import java.time.LocalDate
//import java.time.format.DateTimeFormatter
//
//import akka.actor.ActorSystem
//import akka.actor.typed.scaladsl.adapter._
//import akka.stream.alpakka.s3.ApiVersion.ListBucketVersion2
//import akka.stream.alpakka.s3.scaladsl.S3
//import akka.stream.alpakka.s3.{ MemoryBufferType, S3Attributes, S3Settings }
//import akka.stream.scaladsl.{ Sink, Source }
//import akka.testkit.{ ImplicitSender, TestKit }
//import akka.util.ByteString
//import com.adobe.testing.s3mock.S3MockApplication
//import hmda.calculator.apor.APOR
//import hmda.calculator.scheduler.APORScheduler.Command
//import org.scalatest.concurrent.{ Eventually, ScalaFutures }
//import org.scalatest.time.{ Millis, Minutes, Span }
//import org.scalatest.{ BeforeAndAfterAll, FreeSpecLike, Matchers }
//import software.amazon.awssdk.auth.credentials.{ AwsBasicCredentials, StaticCredentialsProvider }
//import software.amazon.awssdk.regions.Region
//import software.amazon.awssdk.regions.providers.AwsRegionProvider
//
//import scala.collection.JavaConverters._
//import scala.collection.mutable
//import scala.util.Try
//
//class APORSchedulerSpec
//  extends TestKit(ActorSystem("apor-scheduler-spec"))
//    with ImplicitSender
//    with FreeSpecLike
//    with Matchers
//    with ScalaFutures
//    with BeforeAndAfterAll
//    with Eventually {
//
//  override implicit def patienceConfig: PatienceConfig = PatienceConfig(timeout = Span(2, Minutes), interval = Span(100, Millis))
//
//  var s3mock: S3MockApplication = _
//
//  "APORScheduler should publish data to the S3 bucket" in {
//
//    val awsConfig = system.settings.config.getConfig("aws")
//    val accessKeyId = awsConfig.getString("access-key-id")
//    val secretAccess = awsConfig.getString("secret-access-key")
//    val region = awsConfig.getString("region")
//    val bucket = awsConfig.getString("public-bucket")
//    val environment = awsConfig.getString("environment")
//    val awsCredentialsProvider = StaticCredentialsProvider.create(AwsBasicCredentials.create(accessKeyId, secretAccess))
//    val awsRegionProvider: AwsRegionProvider = () => Region.of(region)
//
//    val s3Settings = S3Settings(system)
//      .withBufferType(MemoryBufferType)
//      .withCredentialsProvider(awsCredentialsProvider)
//      .withS3RegionProvider(awsRegionProvider)
//      .withListBucketApiVersion(ListBucketVersion2)
//
//    val config = system.settings.config
//    val aporConfig = config.getConfig("hmda.apors")
//    val fixedRateFileName = aporConfig.getString("fixed.rate.fileName")
//    val variableRateFileName = aporConfig.getString("variable.rate.fileName")
//    val fixedBucketKey = s"$environment/apor/$fixedRateFileName"
//    val variableBucketKey = s"$environment/apor/$variableRateFileName"
//
//    val exampleAPOR: APOR = APOR(
//      LocalDate.parse("2018-03-22", DateTimeFormatter.ISO_LOCAL_DATE),
//      Seq(1.01, 1.02, 1.03, 1.04, 1.05, 1.06, 1.07, 1.08, 1.09, 1.1, 1.11, 1.12, 1.13, 1.14, 1.15, 1.16, 1.17, 1.18, 1.19, 1.2, 1.21, 1.22,
//        1.23, 1.24, 1.25, 1.26, 1.27, 1.28, 1.29, 1.3, 1.31, 1.32, 1.33, 1.34, 1.35, 1.36, 1.37, 1.38, 1.39, 1.40, 1.41, 1.42, 1.43, 1.44,
//        1.45, 1.46, 1.47, 1.48, 1.49, 1.5)
//    )
//
//    val sinkFixed = S3.multipartUpload(bucket, fixedBucketKey).withAttributes(S3Attributes.settings(s3Settings))
//    val sinkVariable = S3.multipartUpload(bucket, variableBucketKey).withAttributes(S3Attributes.settings(s3Settings))
//
//    whenReady(Source.single(ByteString(exampleAPOR.toCSV)).runWith(sinkFixed))(_ => ())
//    whenReady(Source.single(ByteString(exampleAPOR.toCSV)).runWith(sinkVariable))(_ => ())
//
//    val actor = system.spawn(APORScheduler(), APORScheduler.name)
//    actor ! Command.Initialize
//
//    eventually {
//      val result =
//        S3.getObjectMetadata(bucket, fixedBucketKey)
//          .withAttributes(S3Attributes.settings(s3Settings))
//          .runWith(Sink.head)
//      whenReady(result)(_ should not be empty)
//    }
//
//    eventually {
//      val result =
//        S3.getObjectMetadata(bucket, variableBucketKey)
//          .withAttributes(S3Attributes.settings(s3Settings))
//          .runWith(Sink.head)
//      whenReady(result)(_ should not be empty)
//    }
//
//    watch(actor.toClassic)
//    system.stop(actor.toClassic)
//    expectTerminated(actor.toClassic)
//  }
//
//  override def beforeAll(): Unit = {
//    super.beforeAll()
//    val properties: mutable.Map[String, Object] =
//      mutable // S3 Mock mutates the map, so we cannot use an immutable map :(
//        .Map(
//          S3MockApplication.PROP_HTTPS_PORT -> S3MockApplication.DEFAULT_HTTPS_PORT,
//          S3MockApplication.PROP_HTTP_PORT -> S3MockApplication.DEFAULT_HTTP_PORT,
//          S3MockApplication.PROP_SILENT -> true,
//          S3MockApplication.PROP_INITIAL_BUCKETS -> "cfpb-hmda-public,cfpb-hmda-export"
//        )
//        .map { case (k, v) => (k, v.asInstanceOf[Object]) }
//
//    s3mock = S3MockApplication.start(properties.asJava)
//  }
//
//  override def afterAll(): Unit = {
//    Try(s3mock.stop())
//    super.afterAll()
//  }
//
//}
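
The scheduler spec verifies each upload by reading the object's metadata back from the bucket. A minimal sketch of that upload-then-verify round trip with Alpakka S3, restricted to calls that appear above; the bucket and key are placeholders, and the endpoint and credentials are assumed to come from the `alpakka.s3` configuration:

```scala
import akka.actor.ActorSystem
import akka.stream.alpakka.s3.scaladsl.S3
import akka.stream.alpakka.s3.{ MemoryBufferType, S3Attributes, S3Settings }
import akka.stream.scaladsl.{ Sink, Source }
import akka.util.ByteString

object S3RoundTripSketch extends App {
  implicit val system: ActorSystem = ActorSystem("s3-round-trip")
  import system.dispatcher

  // Placeholder bucket/key; endpoint and credentials come from alpakka.s3 config.
  val bucket = "example-bucket"
  val key = "example/apor.csv"

  val settings = S3Settings(system).withBufferType(MemoryBufferType)
  val upload = S3.multipartUpload(bucket, key).withAttributes(S3Attributes.settings(settings))

  val roundTrip = for {
    // Upload a small CSV payload, as the spec does with exampleAPOR.toCSV ...
    _ <- Source.single(ByteString("2018-03-22,1.01,1.02")).runWith(upload)
    // ... then fetch its metadata; Some(meta) confirms the object exists.
    metadata <- S3.getObjectMetadata(bucket, key)
                  .withAttributes(S3Attributes.settings(settings))
                  .runWith(Sink.head)
  } yield metadata

  roundTrip.onComplete { result =>
    println(s"object metadata after upload: $result")
    system.terminate()
  }
}
```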