@@ -62,7 +62,7 @@ class PipelineCache(GreenWorker):
 
             entry: PipelineEntry = self.__pipelines[root_folder]
             entry.poke()
 
-            app.logger.info(f"[{self.ident}] Using {entry}")
+            self.info(f"Using {entry}")
 
             return entry.pipeline
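This hunk (and the close logic further down) relies on a `PipelineEntry` record carrying the pipeline, its project id, and a `last_used` timestamp that `poke()` refreshes. The class itself is not part of this diff; the sketch below is only an assumption of what such a record could look like:

```python
import datetime as dt
from dataclasses import dataclass, field


@dataclass
class PipelineEntry:
    """Hypothetical sketch of the cache record assumed by this diff."""
    pipeline: object          # the loaded pipeline instance
    project_id: int           # id of the owning project
    last_used: dt.datetime = field(default_factory=dt.datetime.now)

    def poke(self):
        # reset the idle timer; the close check later compares
        # last_used + CLOSE_TIMER against the current time
        self.last_used = dt.datetime.now()
```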
@@ -75,8 +75,9 @@ class PipelineCache(GreenWorker):
         # save instance to cache
         with self.__lock:
             entry = PipelineEntry(pipeline=pipeline, project_id=project.id)
-            app.logger.info(f"[{self.ident}] Cached {entry}")
+            self.info(f"Cached {entry}")
             self.__pipelines[root_folder] = entry
+            self.queue.put((root_folder,))
 
         # return
         return pipeline
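Note that the cache key is enqueued as a one-element tuple; the reworked `work()` further down unpacks whatever it dequeues via `self._check_cache_entry(*res)`, so the shape of queued items and the handler signature have to stay in sync. A tiny stand-alone illustration of that convention (the names here are made up and are not the GreenWorker internals):

```python
import queue

# hypothetical stand-in for the worker queue used above
q: "queue.Queue[tuple]" = queue.Queue()

def check_cache_entry(key):
    return f"checking {key}"

# producer side: enqueue the cache key as a 1-tuple
q.put(("/models/project-a",))

# consumer side: unpack the tuple into the handler's arguments
item = q.get()
print(check_cache_entry(*item))   # -> checking /models/project-a
```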
@@ -89,56 +90,72 @@ class PipelineCache(GreenWorker):
         :param root_folder: path to model root folder
         """
-        # abort if no pipeline with this root folder is loaded
         with self.__lock:
-            if root_folder not in self.__pipelines:
+            if root_folder in self.__pipelines:
+                # reset "last used" to now
+                self.__pipelines[root_folder].poke()
+
+            # abort if no pipeline with this root folder is loaded
+            else:
                 return
 
-        self.queue.put((root_folder,))
 
     # executed as coroutine in the main thread
-    def start_work(self, *args):
-
-        # delegate to work method in a separate thread
-        res = super().start_work(*args)
+    def __run__(self):
+        while True:
+            # get pipeline
+            res = tpool.execute(self.work)
 
-        if res is None:
-            # an error occured in the execution
-            return
+            if res is self.STOP_QUEUE:
+                break
 
-        pipeline, project_id = res
+            pipeline, project_id = res
 
-        if pipeline is None:
-            # pipeline vanished
-            return
+            if pipeline is None:
+                # pipeline vanished from cache
+                continue
 
-        project = Project.query.get(project_id)
-        # create job to close pipeline
-        self.__jobs.run(project,
-                        'Model Interaction',
-                        f'{project.name} (close pipeline)',
-                        f'{project.name}/model-interaction',
-                        pipeline.close
-                        )
+            project = Project.query.get(project_id)
+            # create job to close pipeline
+            self.__jobs.run(project,
+                            'Model Interaction',
+                            f'{project.name} (close pipeline)',
+                            f'{project.name}/model-interaction',
+                            pipeline.close
+                            )
+        self._finish()
 
     # executed in a separate thread
-    def work(self, root_folder):
+    def work(self):
+        while True:
+            res = self.check_queue()
+
+            if res is self.STOP_QUEUE:
+                return res
+
+            elif res is self.CONTINUE_QUEUE:
+                continue
+
+            # an entry was found in the queue
+            return self._check_cache_entry(*res)
+
+    def _check_cache_entry(self, key):
         with self.__lock:
-            entry = self.__pipelines.get(root_folder)
+            entry = self.__pipelines.get(key)
 
             if entry is None:
-                app.logger.info(f"[{self.ident}] Entry for {root_folder} already gone")
-                return
+                self.info(f"Entry for {key} already gone")
+                return None, None
 
-        app.logger.info(f"[{self.ident}] Starting checks for {entry}...")
+        self.info(f"Starting checks for {entry}...")
 
         while True:
             now = dt.datetime.now()
 
             with self.__lock:
-                entry = self.__pipelines.get(root_folder)
+                entry = self.__pipelines.get(key)
 
                 if entry is None:
-                    app.logger.info(f"[{self.ident}] Entry for {root_folder} already gone")
+                    self.info(f"Entry for {key} already gone")
                     return None, None
 
                 delay = entry.last_used + self.CLOSE_TIMER - now
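The refactor splits the worker into a coroutine-side loop (`__run__`, which offloads the blocking part to `tpool.execute`, presumably eventlet's thread pool) and a thread-side `work()` that polls the queue via `check_queue()` and the `STOP_QUEUE`/`CONTINUE_QUEUE` sentinels. Those members come from the `GreenWorker` base class, which is not shown in this diff; the sketch below is only an assumed, simplified version of that contract to make the control flow readable:

```python
import queue


class GreenWorker:
    """Hypothetical sketch of the base-class contract used above;
    the real GreenWorker implementation is not part of this diff."""

    STOP_QUEUE = object()      # sentinel: shut the worker down
    CONTINUE_QUEUE = object()  # sentinel: nothing to do yet, poll again

    def __init__(self):
        self.queue = queue.Queue()

    def check_queue(self, timeout=1.0):
        # return STOP_QUEUE on shutdown, CONTINUE_QUEUE when the queue
        # is currently empty, or the queued tuple itself otherwise
        try:
            item = self.queue.get(timeout=timeout)
        except queue.Empty:
            return self.CONTINUE_QUEUE
        return self.STOP_QUEUE if item is None else item

    def stop(self):
        # a None item is translated into STOP_QUEUE by check_queue()
        self.queue.put(None)

    def info(self, msg):
        print(f"[{self.__class__.__name__}] {msg}")

    def _finish(self):
        self.info("worker finished")
```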
@@ -148,11 +165,12 @@ class PipelineCache(GreenWorker):
                 continue
 
             with self.__lock:
-                entry = self.__pipelines.pop(root_folder, None)
-                app.logger.info(f"[{self.ident}] Entry for {root_folder} already gone")
-                return None, None
+                entry = self.__pipelines.pop(key, None)
+                if entry is None:
+                    self.info(f"Entry for {key} already gone")
+                    return None, None
 
-            app.logger.info(f"[{self.ident}] Removed {entry} from cache")
+            self.info(f"Removed {entry} from cache")
             return entry.pipeline, entry.project_id
 
     # def __get(self):
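Taken together, `_check_cache_entry` implements a sliding idle timeout: every `poke()` pushes `last_used` forward, so the computed `delay` stays positive until the pipeline has been untouched for `CLOSE_TIMER`, and only then is the entry popped and handed back for closing. A compressed, stand-alone illustration of that check; `CLOSE_TIMER`'s real value is not shown in the diff, and the one below is made up:

```python
import datetime as dt
import time

CLOSE_TIMER = dt.timedelta(minutes=5)   # assumed value, for illustration only
last_used = dt.datetime.now()

while True:
    delay = last_used + CLOSE_TIMER - dt.datetime.now()
    if delay > dt.timedelta(0):
        # not idle long enough yet: wait and re-check, because
        # last_used may have been poked again in the meantime
        time.sleep(delay.total_seconds())
        continue
    # idle timeout reached: the entry would be popped and closed here
    break
```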