@@ -5,15 +5,30 @@ from threading import Lock
 from time import time, sleep
 
 from eventlet import tpool, spawn_n
+from dataclasses import dataclass
 from collections import namedtuple
 
+from pycs import app
 from pycs.database.Project import Project
 from pycs.interfaces.Pipeline import Pipeline
 from pycs.jobs.JobRunner import JobRunner
 from pycs.util.PipelineUtil import load_from_root_folder
 from pycs.util.green_worker import GreenWorker
 
-PipelineEntry = namedtuple("PipelineEntry", "counter pipeline project_id")
+
+@dataclass
+class PipelineEntry(object):
+    counter: int = 1
+    pipeline: Pipeline = None
+    pipeline_name: str = None
+    project_id: int = -1
+
+    def __post_init__(self):
+        if self.pipeline is not None:
+            self.pipeline_name = self.pipeline.__class__.__name__
+
+    def __str__(self):
+        return f"<Pipeline '{self.pipeline_name}' for project #{self.project_id} (counter={self.counter})>"
 
 class PipelineCache(GreenWorker):
     CLOSE_TIMER = 120
@@ -44,6 +59,7 @@ class PipelineCache(GreenWorker):
 
                 # increase reference counter
                 entry.counter += 1
+                app.logger.info(f"Using {entry}")
 
                 # return entry
                 return entry.pipeline
@@ -53,7 +69,9 @@ class PipelineCache(GreenWorker):
 
         # save instance to cache
         with self.__lock:
-            self.__pipelines[root_folder] = PipelineEntry(1, pipeline, project.id)
+            entry = PipelineEntry(counter=1, pipeline=pipeline, project_id=project.id)
+            app.logger.info(f"Cached {entry}")
+            self.__pipelines[root_folder] = entry
 
         # return
         return pipeline
@@ -79,7 +97,7 @@ class PipelineCache(GreenWorker):
     def start_work(self, root_folder, timestamp):
 
         # delegate to work method in a separate thread
-        pipeline, project_id = super().run(root_folder, timestamp)
+        pipeline, project_id = super().start_work(root_folder, timestamp)
 
         project = Project.query.get(project_id)
 
@@ -98,60 +116,62 @@ class PipelineCache(GreenWorker):
             delay = int(timestamp + self.CLOSE_TIMER - time())
 
             if delay > 0:
-                eventlet.sleep(delay)
+                sleep(delay)
 
             # lock and access __pipelines
             with self.__lock:
                 entry: PipelineEntry = self.__pipelines[root_folder]
-                instance: PipelineEntry = self.__pipelines[root_folder]
+                entry: PipelineEntry = self.__pipelines[root_folder]
 
                 # reference counter greater than 1
-                if instance.counter > 1:
+                if entry.counter > 1:
                     # decrease reference counter
-                    instance.counter -= 1
+                    entry.counter -= 1
+                    app.logger.info(f"Decreased counter of {entry}.")
                     continue
 
                 # reference counter equals 1
                 else:
                     # delete instance from __pipelines and return to call `close` function
                     del self.__pipelines[root_folder]
+                    app.logger.info(f"Removed {entry} from cache.")
                     return entry.pipeline, entry.project_id
 
-    def __get(self):
-        while True:
-            # get element from queue
-            root_folder, timestamp = self.__queue.get()
-
-            # sleep if needed
-            delay = int(timestamp + self.CLOSE_TIMER - time())
-
-            if delay > 0:
-                eventlet.sleep(delay)
-
-            # lock and access __pipelines
-            with self.__lock:
-                instance = self.__pipelines[root_folder]
-
-                # reference counter greater than 1
-                if instance[0] > 1:
-                    # decrease reference counter
-                    instance[0] -= 1
-
-                # reference counter equals 1
-                else:
-                    # delete instance from __pipelines and return to call `close` function
-                    del self.__pipelines[root_folder]
-                    return instance[1], instance[2]
-
-    def __run(self):
-        while True:
-            # get pipeline
-            pipeline, project_id = tpool.execute(self.__get)
-            project = Project.query.get(project_id)
-
-            # create job to close pipeline
-            self.__jobs.run(project,
-                            'Model Interaction',
-                            f'{project.name} (close pipeline)',
-                            f'{project.name}/model-interaction',
-                            pipeline.close
-                            )
+    # def __get(self):
+    #     while True:
+    #         # get element from queue
+    #         root_folder, timestamp = self.__queue.get()
+
+    #         # sleep if needed
+    #         delay = int(timestamp + self.CLOSE_TIMER - time())
+
+    #         if delay > 0:
+    #             eventlet.sleep(delay)
+
+    #         # lock and access __pipelines
+    #         with self.__lock:
+    #             instance = self.__pipelines[root_folder]
+
+    #             # reference counter greater than 1
+    #             if instance.counter > 1:
+    #                 # decrease reference counter
+    #                 instance.counter -= 1
+
+    #             # reference counter equals 1
+    #             else:
+    #                 # delete instance from __pipelines and return to call `close` function
+    #                 del self.__pipelines[root_folder]
+    #                 return instance.pipeline, instance.project_id
+
+    # def __run(self):
+    #     while True:
+    #         # get pipeline
+    #         pipeline, project_id = tpool.execute(self.__get)
+    #         project = Project.query.get(project_id)
+
+    #         # create job to close pipeline
+    #         self.__jobs.run(project,
+    #                         'Model Interaction',
+    #                         f'{project.name} (close pipeline)',
+    #                         f'{project.name}/model-interaction',
+    #                         pipeline.close
+    #                         )