placidenduwayo 1 year ago
parent
commit
bf76cab9b8

src/main/java/fr/natan/akkastreamfileprocessingapi/controller/TvSeriesController.java (+46, -10)

@@ -5,8 +5,6 @@ import fr.natan.akkastreamfileprocessingapi.models.ModelsAndJsonMap;
 import fr.natan.akkastreamfileprocessingapi.service.AkkaStreamFileProcessingFuture;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.springframework.http.HttpStatus;
-import org.springframework.http.ResponseEntity;
 import org.springframework.web.bind.annotation.PathVariable;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;
@@ -121,8 +119,8 @@ public class TvSeriesController {
         IndexedSeq<ModelsAndJsonMap.Person> personIndexedSeq = seqCompletableFuture.get();
         List<String> personsList = new ArrayList<>();
         personIndexedSeq.foreach(person ->{
-            JsValue personJs = Json.toJson(person, person.personJsonFormatter());
-            personsList.add(Json.prettyPrint(personJs));
+            JsValue personJsV = Json.toJson(person, person.personJsonFormatter());
+            personsList.add(Json.prettyPrint(personJsV));
             return null;
         });
 
@@ -130,14 +128,52 @@ public class TvSeriesController {
     }
 
     @RequestMapping(value = "/persons", method = RequestMethod.GET)
-    private ResponseEntity<String> getAllPersons() {
-        akkaStreamFilesProcessing.getAllPersonsFuture();
-        return new ResponseEntity<>("is running", HttpStatus.OK);
+    private List<String> getAllPersons() throws ExecutionException, InterruptedException {
+        Future<IndexedSeq<ModelsAndJsonMap.Person>> indexedSeqFuture = akkaStreamFilesProcessing.getMaxPersonsFuture();
+        CompletableFuture<IndexedSeq<ModelsAndJsonMap.Person>> completableFuture =
+                CompletableFutureBuilder.buildcompletableFuture2(indexedSeqFuture);
+        IndexedSeq<ModelsAndJsonMap.Person> personIndexedSeq = completableFuture.get();
+        List<String> personList = new ArrayList<>();
+        personIndexedSeq.foreach(person ->{
+            JsValue personJsV = Json.toJson(person, person.personJsonFormatter());
+            personList.add(Json.prettyPrint(personJsV));
+            return null;
+        });
+
+        return personList;
     }
 
     @RequestMapping(value = "/tvseries", method = RequestMethod.GET)
-    private ResponseEntity<String> getAllvSeries() {
-        akkaStreamFilesProcessing.getAllTvSeriesFuture();
-        return new ResponseEntity<>("is running", HttpStatus.OK);
+    private List<String> getMaxTvSeries() throws ExecutionException, InterruptedException {
+        Future<IndexedSeq<ModelsAndJsonMap.TvSerie>> futureTvSeries = akkaStreamFilesProcessing.getMaxTvSeriesFuture();
+        CompletableFuture<IndexedSeq<ModelsAndJsonMap.TvSerie>> seqCompletableFuture = CompletableFutureBuilder
+                .buildcompletableFuture2(futureTvSeries);
+
+        IndexedSeq<ModelsAndJsonMap.TvSerie> tvSerieIndexedSeq = seqCompletableFuture.get();
+        List<String> tvSerieList = new ArrayList<>();
+        tvSerieIndexedSeq.foreach(tvSerie ->{
+            JsValue tvSerieJsV = Json.toJson(tvSerie, tvSerie.tvSerieJsonFormatter());
+            tvSerieList.add(Json.prettyPrint(tvSerieJsV));
+            return null;
+        });
+
+        return tvSerieList;
+    }
+
+    @RequestMapping(value = "/episodes", method = RequestMethod.GET)
+    private List<String> getMaxEpisodes() throws ExecutionException, InterruptedException {
+        Future<IndexedSeq<ModelsAndJsonMap.Episode>> seqFuture = akkaStreamFilesProcessing.getMaxEpisodes();
+        CompletableFuture<IndexedSeq<ModelsAndJsonMap.Episode>> completableFuture =
+                CompletableFutureBuilder.buildcompletableFuture2(seqFuture);
+        IndexedSeq<ModelsAndJsonMap.Episode> episodeIndexedSeq = completableFuture.get();
+
+        List<String> episodeList = new ArrayList<>();
+        episodeIndexedSeq.foreach(episode->{
+            JsValue episodeJsV = Json.toJson(episode, episode.episodeFormatter());
+            episodeList.add(Json.prettyPrint(episodeJsV));
+            return null;
+        });
+
+        return episodeList;
     }
 }
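Note (not part of this commit): the three handlers above repeat the same foreach / prettyPrint accumulation over a collected IndexedSeq. A hedged sketch of how that rendering could be factored out once on the Scala side, assuming an implicit play-json Writes is in scope for the element type (the helper name JsonRendering.renderAll is an assumption):

import play.api.libs.json.{Json, Writes}

object JsonRendering {
  // Render every element of a collected IndexedSeq as a pretty-printed JSON string,
  // returning a java.util.List so a Spring controller can hand it back directly.
  def renderAll[A](rows: IndexedSeq[A])(implicit writes: Writes[A]): java.util.List[String] = {
    val out = new java.util.ArrayList[String]()
    rows.foreach(row => out.add(Json.prettyPrint(Json.toJson(row))))
    out
  }
}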

src/main/java/fr/natan/akkastreamfileprocessingapi/futurecompleteness/CompletableFutureBuilder.java (+4, -3)

@@ -32,19 +32,20 @@ public class CompletableFutureBuilder<T> {
         return completableFuture;
     }
 
-    public static <T> CompletableFuture<IndexedSeq<T>> buildcompletableFuture2(Future<IndexedSeq<T>> futureListT) {
+    public static <T> CompletableFuture<IndexedSeq<T>> buildcompletableFuture2(Future<IndexedSeq<T>> futureIndexedSeqT) {
 
         CompletableFuture<IndexedSeq<T>> completableFuture = new CompletableFuture<>();
         Executors.newSingleThreadExecutor().submit(() -> {
-            while (!futureListT.isCompleted()) {
+            while (!futureIndexedSeqT.isCompleted()) {
                 try {
                     TimeUnit.MILLISECONDS.sleep(300);
                 } catch (InterruptedException e) {
                     throw new RuntimeException(e);
                 }
+
            }
 
-            IndexedSeq<T> task = futureListT.value().get().get();
+            IndexedSeq<T> task = futureIndexedSeqT.value().get().get();
            completableFuture.complete(task);
 
        });
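Note (not part of this commit): buildcompletableFuture2 bridges a Scala Future to a Java CompletableFuture by polling isCompleted on a dedicated thread every 300 ms. A minimal alternative sketch, assuming Scala 2.13 (on 2.12 the scala-java8-compat library provides the equivalent conversion), which completes via a callback with no busy-waiting:

import java.util.concurrent.CompletableFuture

import scala.concurrent.Future
import scala.jdk.javaapi.FutureConverters

object FutureBridge {
  // Adapt a Scala Future into a Java CompletableFuture; completion is propagated
  // by a callback instead of a polling loop on a dedicated thread.
  def toCompletable[A](future: Future[A]): CompletableFuture[A] =
    FutureConverters.asJava(future).toCompletableFuture
}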

src/main/scala/fr/natan/akkastreamfileprocessingapi/datasource/Datasource.scala (+2, -0)

@@ -7,4 +7,6 @@ object Datasource {
   val titleBasics: File = new File("../title.basics.tsv.gz")
   val nameBasics: File = new File("../name.basics.tsv.gz")
   val titlePrincipalsBasics: File = new File("../title.principals.tsv.gz")
+  val titleEpisode: File = new File("../title.episode.tsv.gz")
+  val MAXI_ROWS : Int = 10000
 }
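Note (not part of this commit): these paths point at gzipped IMDb TSV dumps, and MAXI_ROWS caps how many rows the new endpoints collect. The commit does not show buildAndValidateSource, so the sketch below is only a guess at what such a source typically looks like with the Alpakka CSV connector already imported in AkkaStreamComponents; the tab delimiter and the header-to-Map step are assumptions:

import akka.stream.alpakka.csv.scaladsl.{CsvParsing, CsvToMap}
import akka.stream.scaladsl.{Compression, FileIO, Source}

import java.io.File
import java.nio.charset.StandardCharsets

object TsvSources {
  // Stream a *.tsv.gz file as one Map[column -> value] per row:
  // read bytes -> gunzip -> split tab-separated lines -> zip each row with the header row.
  def tsvSource(file: File): Source[Map[String, String], _] =
    FileIO.fromPath(file.toPath)
      .via(Compression.gunzip())
      .via(CsvParsing.lineScanner(delimiter = CsvParsing.Tab))
      .via(CsvToMap.toMapAsStrings(StandardCharsets.UTF_8))
}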

src/main/scala/fr/natan/akkastreamfileprocessingapi/models/ModelsAndJsonMap.scala (+19, -3)

@@ -6,8 +6,8 @@ object ModelsAndJsonMap {
 
   final case class Person(personID: String,
                           primaryName: String,
-                          birthYear: String,
-                          deathYear: String,
+                          birthYear: Option[String],
+                          deathYear: Option[String],
                           primaryProfession: List[String],
                           knownForTitles: List[String]
                          ) {
@@ -28,7 +28,7 @@ object ModelsAndJsonMap {
                            primaryTitle: String,
                            originalTitle: String,
                            startYear: String,
-                           endYear: String,
+                           endYear: Option[String],
                            runtimeMinutes: String,
                            genres: List[String]
                           ) {
@@ -46,4 +46,20 @@ object ModelsAndJsonMap {
       )
 
   }
+
+
+  final case class Episode(
+                            id: String,
+                            parentId: String,
+                            seasonNumber: Option[String],
+                            episodeNumber: Option[String]
+                          ){
+    implicit val episodeFormatter : Writes[Episode] =
+      (episode:Episode) => Json.obj(
+        "episode id"-> episode.id,
+        "parent id"-> episode.parentId,
+        "season number"->episode.seasonNumber,
+        "episode number"->episode.episodeNumber
+      )
+  }
 }
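Note (not part of this commit): a usage sketch for the new Episode formatter, mirroring what TvSeriesController now does from the Java side; the IDs below are made up:

import fr.natan.akkastreamfileprocessingapi.models.ModelsAndJsonMap.Episode
import play.api.libs.json.Json

object EpisodeJsonExample {
  val episode: Episode = Episode(
    id = "tt0000001",
    parentId = "tt0000002",
    seasonNumber = Some("1"),
    episodeNumber = None // handled by the same Option support Json.obj relies on above
  )

  // Pass the instance's Writes explicitly, as the controller does via episodeFormatter().
  val pretty: String = Json.prettyPrint(Json.toJson(episode)(episode.episodeFormatter))
}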

src/main/scala/fr/natan/akkastreamfileprocessingapi/models/ModelsBuilder.scala (+17, -8)

@@ -1,31 +1,40 @@
 package fr.natan.akkastreamfileprocessingapi.models
-import ModelsAndJsonMap.{Person, TvSerie}
+import ModelsAndJsonMap.{Episode, Person, TvSerie}
 object ModelsBuilder {
 
-  def buildPersonModel(personMap: Map[String, String]): Person =
-  {
+  def buildPersonModel(personMap: Map[String, String]): Person = {
     Person(
       personMap("nconst"),
       personMap("primaryName"),
-      personMap("birthYear"),
-      personMap("deathYear"),
+      Option( personMap("birthYear")),
+      Option(personMap("deathYear")),
       personMap("primaryProfession").split(",").toList,
       personMap("knownForTitles").split(",").toList
     )
   }
 
-  def buildTvSerieModel(tvSerieMap: Map[String, String]): TvSerie =
-  {
+  def buildTvSerieModel(tvSerieMap: Map[String, String]): TvSerie = {
     val tvSerie: TvSerie = TvSerie(
       tvSerieMap("tconst"),
       tvSerieMap("titleType"),
       tvSerieMap("primaryTitle"),
       tvSerieMap("originalTitle"),
       tvSerieMap("startYear"),
-      tvSerieMap("endYear"),
+      Option(tvSerieMap("endYear")),
       tvSerieMap("runtimeMinutes"),
       tvSerieMap("genres").split(",").toList
     )
     tvSerie
   }
+
+  def buildEpisodeModel(episodeMap: Map[String, String]): Episode = {
+    val episode: Episode = Episode(
+      episodeMap("tconst"),
+      episodeMap("parentTconst"),
+      Option(episodeMap("seasonNumber")),
+      Option(episodeMap("episodeNumber")),
+    )
+
+    episode
+  }
 }
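Note (not part of this commit): wrapping with Option(...) only guards against a null value; episodeMap("seasonNumber") still throws if the column is absent, and the IMDb dumps mark missing values with the literal string "\N", which ends up here as Some("\N"). A hedged sketch of a more defensive accessor (the helper name is an assumption):

object ColumnAccess {
  // Treat an absent column, an empty cell, or IMDb's "\N" sentinel as None.
  def optionalColumn(row: Map[String, String], key: String): Option[String] =
    row.get(key).filterNot(value => value.isEmpty || value == "\\N")
}

// e.g. seasonNumber = ColumnAccess.optionalColumn(episodeMap, "seasonNumber")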

src/main/scala/fr/natan/akkastreamfileprocessingapi/service/AkkaStreamComponents.scala (+41, -24)

@@ -1,13 +1,12 @@
 package fr.natan.akkastreamfileprocessingapi.service
 
+import akka.NotUsed
 import akka.actor.ActorSystem
 import akka.stream.alpakka.csv.scaladsl.{CsvParsing, CsvToMap}
 import akka.stream.scaladsl.{Compression, FileIO, Flow, Sink, Source}
-import akka.{Done, NotUsed}
-import com.typesafe.scalalogging.slf4j.Logger
 import fr.natan.akkastreamfileprocessingapi.businessexceptions.FileNotFoundException
-import fr.natan.akkastreamfileprocessingapi.models.ModelsAndJsonMap.{Person, TvSerie}
-import fr.natan.akkastreamfileprocessingapi.models.ModelsBuilder.{buildPersonModel, buildTvSerieModel}
+import fr.natan.akkastreamfileprocessingapi.models.ModelsAndJsonMap.{Episode, Person, TvSerie}
+import fr.natan.akkastreamfileprocessingapi.models.ModelsBuilder.{buildEpisodeModel, buildPersonModel, buildTvSerieModel}
 import fr.natan.akkastreamfileprocessingapi.valitator.Validators.fileExists
 
 import java.io.File
@@ -20,7 +19,7 @@ object AkkaStreamComponents {
   implicit val actorSystem: ActorSystem = ActorSystem("AkkaStreamActor")
 
   //flows building
-  def buildPersonFlow(): Flow[Map[String, String], Person, NotUsed] = {
+  def buildPersonFlow (): Flow[Map[String, String], Person, NotUsed] = {
     val personFlow: Flow[Map[String, String], Person, NotUsed] =
       Flow[Map[String, String]]
         .map((rowMap: Map[String, String]) => {
@@ -30,26 +29,26 @@ object AkkaStreamComponents {
     personFlow
   }
 
-  def filterPersonByIdFlow(personID: String): Flow[Map[String, String], Person, NotUsed]={
-    val personFlowFilter: Flow[Map[String, String], Person, NotUsed]=
+  def filterPersonByIdFlow(personID: String): Flow[Map[String, String], Person, NotUsed] = {
+    val personFlowFilter: Flow[Map[String, String], Person, NotUsed] =
       Flow[Map[String, String]]
-        .filter((rowMap:Map[String, String])=>{
-          rowMap.getOrElse(key="nconst",default="")==personID
+        .filter((rowMap: Map[String, String]) => {
+          rowMap.getOrElse(key = "nconst", default = "") == personID
         })
-        .map(rowMap=>{
+        .map(rowMap => {
           buildPersonModel(personMap = rowMap)
         })
 
     personFlowFilter
   }
 
-  def filterPersonByNameFlow(primaryName: String): Flow[Map[String, String], Person, NotUsed] ={
+  def filterPersonByNameFlow(primaryName: String): Flow[Map[String, String], Person, NotUsed] = {
     val personFlowFilter: Flow[Map[String, String], Person, NotUsed] =
       Flow[Map[String, String]]
-        .filter((rowMap: Map[String, String]) =>{
-          rowMap.getOrElse(key = "primaryName", default = "")==primaryName
+        .filter((rowMap: Map[String, String]) => {
+          rowMap.getOrElse(key = "primaryName", default = "") == primaryName
         })
-        .map((rowMap: Map[String, String])=>{
+        .map((rowMap: Map[String, String]) => {
           buildPersonModel(personMap = rowMap)
         })
 
@@ -66,16 +65,28 @@ object AkkaStreamComponents {
     tvSerieFlow
   }
 
-  def filterTvSerieByIdFlow(tvSerieID: String): Flow[Map[String, String], TvSerie, NotUsed] = {
-    val tvSerieFlowFilter : Flow[Map[String, String], TvSerie, NotUsed] =
+  def buildEpisodeFlow (): Flow[Map[String, String], Episode, NotUsed] ={
+    val episodeFlow: Flow[Map[String, String], Episode, NotUsed] =
       Flow[Map[String, String]]
-        .filter((rowMap: Map[String, String])=>rowMap.getOrElse(key = "tconst", default = "")==tvSerieID)
         .map((rowMap: Map[String, String])=>{
+          buildEpisodeModel(episodeMap = rowMap)
+        })
+
+    episodeFlow
+  }
+
+  //filters flow
+  def filterTvSerieByIdFlow(tvSerieID: String): Flow[Map[String, String], TvSerie, NotUsed] = {
+    val tvSerieFlowFilter: Flow[Map[String, String], TvSerie, NotUsed] =
+      Flow[Map[String, String]]
+        .filter((rowMap: Map[String, String]) => rowMap.getOrElse(key = "tconst", default = "") == tvSerieID)
+        .map((rowMap: Map[String, String]) => {
           buildTvSerieModel(tvSerieMap = rowMap)
         })
 
     tvSerieFlowFilter
   }
+
   def filterTvSerieByPrimaryTitleFlow(tvSeriePrimaryTitle: String): Flow[Map[String, String], TvSerie, NotUsed] = {
     val filterFlowFilter: Flow[Map[String, String], TvSerie, NotUsed] = Flow[Map[String, String]]
       .filter((rows: Map[String, String]) => {
@@ -122,15 +133,21 @@ object AkkaStreamComponents {
   }
 
   //sinks building
+  def buildMaxPersonsSink(): Sink[Person, Future[IndexedSeq[Person]]] = {
+    val listPersonsSink: Sink[Person, Future[IndexedSeq[Person]]] =
+      Sink.collection[Person, IndexedSeq[Person]]
+    listPersonsSink
+  }
 
-  def buildAllTvSeriesSink(logger: Logger): Sink[TvSerie, Future[Done]] = {
-    val tvSeriesSink: Sink[TvSerie, Future[Done]] = Sink
-      .foreach((tvSerie: TvSerie)=>logger.info(s"${tvSerie.toString}"))
+  def buildMaxTvSeriesSink(): Sink[TvSerie, Future[IndexedSeq[TvSerie]]] = {
+    val tvSeriesSink: Sink[TvSerie, Future[IndexedSeq[TvSerie]]] = Sink
+      .collection[TvSerie, IndexedSeq[TvSerie]]
     tvSeriesSink
   }
-  def buildAllPersonsSink(logger: Logger): Sink[Person,Future[Done]] = {
-    val listPersonsSink: Sink[Person, Future[Done]]=
-      Sink.foreach((person: Person)=>logger.info(s"${person.toString}"))
-    listPersonsSink
+
+  def buildMaxEpisodesSink(): Sink[Episode, Future[IndexedSeq[Episode]]] = {
+    val episodeSink: Sink[Episode, Future[IndexedSeq[Episode]]] = Sink
+      .collection[Episode, IndexedSeq[Episode]]
+    episodeSink
   }
 }
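Note (not part of this commit): buildMaxPersonsSink, buildMaxTvSeriesSink and buildMaxEpisodesSink differ only in their element type. A hedged consolidation sketch, relying on the same Sink.collection implicit resolution the concrete versions above already use (the helper name is an assumption):

import akka.stream.scaladsl.Sink

import scala.concurrent.Future

object CollectingSinks {
  // One generic collecting sink instead of three copies; it materializes the streamed
  // elements into a Future[IndexedSeq[A]] exactly like the sinks above.
  def buildMaxSink[A](): Sink[A, Future[IndexedSeq[A]]] =
    Sink.collection[A, IndexedSeq[A]]
}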

src/main/scala/fr/natan/akkastreamfileprocessingapi/service/AkkaStreamFileProcessingFuture.scala (+4, -4)

@@ -1,7 +1,6 @@
 package fr.natan.akkastreamfileprocessingapi.service
 
-import akka.Done
-import fr.natan.akkastreamfileprocessingapi.models.ModelsAndJsonMap.{Person, TvSerie}
+import fr.natan.akkastreamfileprocessingapi.models.ModelsAndJsonMap.{Episode, Person, TvSerie}
 
 import scala.concurrent.Future
 
@@ -14,6 +13,7 @@ trait AkkaStreamFileProcessingFuture {
 
   def getPersonsForTvSerieByTvSerieTitleFuture(tvSeriePrimaryTitle: String): Future[IndexedSeq[Person]]
   def getPersonsForTvSerieByTvSerieIDFuture(tvSerieId: String): Future[IndexedSeq[Person]]
-  def getAllTvSeriesFuture: Future[Done]
-  def getAllPersonsFuture: Future[Done]
+  def getMaxPersonsFuture: Future[IndexedSeq[Person]]
+  def getMaxTvSeriesFuture: Future[IndexedSeq[TvSerie]]
+  def getMaxEpisodes: Future[IndexedSeq[Episode]]
 }

src/main/scala/fr/natan/akkastreamfileprocessingapi/service/AkkaStreamFileProcessingImpl.scala (+50, -27)

@@ -1,13 +1,36 @@
 package fr.natan.akkastreamfileprocessingapi.service
 
-import akka.Done
 import akka.stream.scaladsl.{Sink, Source}
 import com.typesafe.scalalogging.slf4j.Logger
 import fr.natan.akkastreamfileprocessingapi.businessexceptions.PersonNotFoundException
-import fr.natan.akkastreamfileprocessingapi.datasource.Datasource.{nameBasics, titleBasics, titlePrincipalsBasics}
-import fr.natan.akkastreamfileprocessingapi.models.ModelsAndJsonMap.{Person, TvSerie}
-import fr.natan.akkastreamfileprocessingapi.service.AkkaStreamComponents.{actorSystem, buildAllPersonsSink, buildAllTvSeriesSink, buildAndValidateSource, buildPersonFlow, buildTvSerieFlow, filterPersonByIdFlow, filterPersonByNameFlow, filterTvSerieByIdFlow, filterTvSerieByPrimaryTitleFlow}
-import fr.natan.akkastreamfileprocessingapi.service.UtilitiesClass.{getListOfPersonsForTvSerie, getListOfPersonsIDByTvSerieID, getTvSerieIDFuture, getTvSerieIdByPrimaryTitle}
+import fr.natan.akkastreamfileprocessingapi.datasource.Datasource.{
+  MAXI_ROWS,
+  nameBasics,
+  titleBasics,
+  titleEpisode,
+  titlePrincipalsBasics
+}
+import fr.natan.akkastreamfileprocessingapi.models.ModelsAndJsonMap.{Episode, Person, TvSerie}
+import fr.natan.akkastreamfileprocessingapi.service.AkkaStreamComponents.{
+  actorSystem,
+  buildAndValidateSource,
+  buildEpisodeFlow,
+  buildMaxEpisodesSink,
+  buildMaxPersonsSink,
+  buildMaxTvSeriesSink,
+  buildPersonFlow,
+  buildTvSerieFlow,
+  filterPersonByIdFlow,
+  filterPersonByNameFlow,
+  filterTvSerieByIdFlow,
+  filterTvSerieByPrimaryTitleFlow
+}
+import fr.natan.akkastreamfileprocessingapi.service.UtilitiesClass.{
+  getListOfPersonsForTvSerie,
+  getListOfPersonsIDByTvSerieID,
+  getTvSerieIDFuture,
+  getTvSerieIdByPrimaryTitle
+}
 import org.slf4j.LoggerFactory
 import org.springframework.stereotype.Component
 
@@ -48,7 +71,7 @@ class AkkaStreamFileProcessingImpl extends AkkaStreamFileProcessingFuture {
 
     personFuture.onComplete({
       case Failure(exception) => throw new PersonNotFoundException()
-      case Success(value:Option[Person]) =>logger.info(s"${value.get}")
+      case Success(value: Option[Person]) => logger.info(s"${value.get}")
     })
 
     personFuture
175 198
 
176 199
   }
177 200
 
178
-  override def getAllPersonsFuture: Future[Done] = {
201
+  override def getMaxPersonsFuture: Future[IndexedSeq[Person]] = {
179 202
     val personSource: Source[Map[String, String], _] = buildAndValidateSource(inputFile = nameBasics)
180 203
     //graph
181 204
     val startTime: Long = System.currentTimeMillis()
182
-    val result: Future[Done] = personSource
205
+    val result: Future[IndexedSeq[Person]] = personSource
183 206
       .via(flow = buildPersonFlow())
184
-      .runWith(sink = buildAllPersonsSink(logger = logger))
185
-
186
-    result.onComplete {
187
-      case Failure(exception) => logger.error(s"$exception")
188
-      case Success(value) =>
189
-        logger.info(s"$value")
190
-        val time: Long = (System.currentTimeMillis() - startTime) / 100
191
-        logger.info(s"elapsed time: $time")
192
-    }
207
+      .take(n = MAXI_ROWS)
208
+      .runWith(sink = buildMaxPersonsSink())
209
+
193 210
     result
194 211
   }
195 212
 
196
-  override def getAllTvSeriesFuture: Future[Done] = {
213
+  override def getMaxTvSeriesFuture: Future[IndexedSeq[TvSerie]] = {
197 214
     val source: Source[Map[String, String], _] = buildAndValidateSource(inputFile = titleBasics)
198
-    val sink: Sink[TvSerie, Future[Done]] = buildAllTvSeriesSink(logger = logger)
199 215
 
200 216
     val startingTime: Long = System.currentTimeMillis()
201 217
     //graph sink->flow->sink
202
-    val results: Future[Done] = source
218
+    val results: Future[IndexedSeq[TvSerie]] = source
203 219
       .via(flow = buildTvSerieFlow())
204
-      .runWith(sink = sink)
205
-      .andThen {
206
-        case Success(value) =>
207
-          val elapsedTime: Long = (System.currentTimeMillis() - startingTime) / 1000
208
-          logger.info(s"$value: successfully processing file, elapsed time $titleBasics: $elapsedTime sec")
209
-        case Failure(error: Error) => logger.error(s"$error")
210
-      }
220
+      .take(n = MAXI_ROWS)
221
+      .runWith(sink = buildMaxTvSeriesSink())
211 222
 
212 223
     results
213 224
   }
225
+
226
+  override def getMaxEpisodes: Future[IndexedSeq[Episode]] = {
227
+    val source: Source[Map[String, String], _] = buildAndValidateSource(inputFile = titleEpisode)
228
+
229
+    val listEpisodeFuture: Future[IndexedSeq[Episode]] = source
230
+      .via(flow = buildEpisodeFlow())
231
+      .take(n = MAXI_ROWS)
232
+      .runWith(sink = buildMaxEpisodesSink())
233
+
234
+    listEpisodeFuture
235
+  }
214 236
 }
215 237
 
216 238
 
239
+
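Note (not part of this commit): the elapsed-time logging that lived in the old Sink.foreach-based methods was dropped along with them. It can be kept without changing the materialized value by attaching a callback to the returned future; the helper below is only a sketch, with the logger wiring and method name assumed:

import org.slf4j.{Logger, LoggerFactory}

import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success}

object StreamTiming {
  private val logger: Logger = LoggerFactory.getLogger(getClass)

  // Log row count and elapsed time when the collected future completes, then
  // hand the same future back so callers are unaffected.
  def logCompletion[A](label: String, startTimeMillis: Long, result: Future[IndexedSeq[A]])
                      (implicit ec: ExecutionContext): Future[IndexedSeq[A]] = {
    result.onComplete {
      case Success(rows) =>
        val elapsedSec = (System.currentTimeMillis() - startTimeMillis) / 1000
        logger.info(s"$label: collected ${rows.size} rows in $elapsedSec s")
      case Failure(error) => logger.error(s"$label failed", error)
    }
    result
  }
}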

src/main/scala/fr/natan/akkastreamfileprocessingapi/service/UtilitiesClass.scala (+0, -1)

@@ -71,5 +71,4 @@ object UtilitiesClass {
 
     tvSerieIdFuture
   }
-
 }
