6
0
Jelajahi Sumber

JobRunner is now singleton

Dimitri Korsch 3 tahun lalu
induk
melakukan
ff1f636711

+ 2 - 1
pycs/database/File.py

@@ -155,7 +155,8 @@ class File(NamedBaseModel):
 
             result.label_id = label
 
-        self.commit()
+        if commit:
+            self.commit()
         return result
 
     def remove_results(self, origin='pipeline') -> List[Result]:

+ 22 - 32
pycs/frontend/WebServer.py

@@ -71,14 +71,6 @@ class WebServer:
         Model.discover("models/")
         LabelProvider.discover("labels/")
 
-        # start job runner
-        self.logger.info('Starting job runner... ')
-        self.jobs = JobRunner()
-
-        # create pipeline cache
-        self.logger.info('Creating pipeline cache')
-        self.pipelines = PipelineCache(self.jobs)
-
         PRODUCTION = os.path.exists('webui/index.html')
 
         init_func = self.production_init if PRODUCTION else self.development_init
@@ -91,8 +83,18 @@ class WebServer:
         # set json encoder so database objects are serialized correctly
         self.app.json_encoder = JSONEncoder
 
+        # create pipeline cache
+        self.logger.info('Creating pipeline cache')
+        self.pipelines = PipelineCache()
+
+        # create job runner
+        self.logger.info('Starting job runner... ')
+
+        # create notification manager
+        self.nm = NotificationManager(self.sio)
+        JobRunner().init_notifications(self.nm)
+
         self.start_runner()
-        self.init_notifications()
         self.define_routes()
         self.logger.info("Server initialized")
 
@@ -100,15 +102,15 @@ class WebServer:
 
     def start_runner(self):
         app.logger.info(f"Main Thread ID: {threading.get_ident()}")
-        self.jobs.start()
+        JobRunner().start()
         self.pipelines.start()
 
     def stop_runner(self):
-        self.jobs.stop()
+        JobRunner().stop()
         self.pipelines.stop()
 
     def wait_for_runner(self):
-        self.jobs.wait_for_empty_queue()
+        JobRunner().wait_for_empty_queue()
         self.pipelines.wait_for_empty_queue()
 
 
@@ -116,16 +118,6 @@ class WebServer:
     def logger(self):
         return self.app.logger
 
-    def init_notifications(self):
-        # create notification manager
-        self.nm = NotificationManager(self.sio)
-
-        self.jobs.on_create(self.nm.create_job)
-        self.jobs.on_start(self.nm.edit_job)
-        self.jobs.on_progress(self.nm.edit_job)
-        self.jobs.on_finish(self.nm.edit_job)
-        self.jobs.on_remove(self.nm.remove_job)
-
     def development_init(self):
 
         self.logger.info('Initializing development build')
@@ -182,11 +174,11 @@ class WebServer:
         # jobs
         self.app.add_url_rule(
             '/jobs',
-            view_func=ListJobs.as_view('list_jobs', self.jobs)
+            view_func=ListJobs.as_view('list_jobs')
         )
         self.app.add_url_rule(
             '/jobs/<identifier>/remove',
-            view_func=RemoveJob.as_view('remove_job', self.jobs)
+            view_func=RemoveJob.as_view('remove_job')
         )
 
         # models
@@ -303,17 +295,17 @@ class WebServer:
         )
         self.app.add_url_rule(
             '/projects',
-            view_func=CreateProject.as_view('create_project', self.nm, self.jobs)
+            view_func=CreateProject.as_view('create_project', self.nm)
         )
         self.app.add_url_rule(
             '/projects/<int:identifier>/label_provider',
             view_func=ExecuteLabelProvider.as_view('execute_label_provider',
-                                                   self.nm, self.jobs)
+                                                   self.nm)
         )
         self.app.add_url_rule(
             '/projects/<int:identifier>/external_storage',
             view_func=ExecuteExternalStorage.as_view('execute_external_storage',
-                                                     self.nm, self.jobs)
+                                                     self.nm)
         )
         self.app.add_url_rule(
             '/projects/<int:identifier>/remove',
@@ -331,17 +323,15 @@ class WebServer:
         # pipelines
         self.app.add_url_rule(
             '/projects/<int:project_id>/pipelines/fit',
-            view_func=FitModel.as_view('fit_model', self.jobs, self.pipelines)
+            view_func=FitModel.as_view('fit_model', self.pipelines)
         )
         self.app.add_url_rule(
             '/projects/<int:project_id>/pipelines/predict',
-            view_func=PredictModel.as_view('predict_model', self.nm,
-                                           self.jobs, self.pipelines)
+            view_func=PredictModel.as_view('predict_model', self.nm, self.pipelines)
         )
         self.app.add_url_rule(
             '/data/<int:file_id>/predict',
-            view_func=PredictFile.as_view('predict_file', self.nm,
-                                          self.jobs, self.pipelines)
+            view_func=PredictFile.as_view('predict_file', self.nm, self.pipelines)
         )
 
     def run(self):

+ 1 - 5
pycs/frontend/endpoints/ListJobs.py

@@ -11,9 +11,5 @@ class ListJobs(View):
     # pylint: disable=arguments-differ
     methods = ['GET']
 
-    def __init__(self, jobs: JobRunner):
-        # pylint: disable=invalid-name
-        self.jobs = jobs
-
     def dispatch_request(self):
-        return jsonify(self.jobs.list())
+        return jsonify(JobRunner().list())

+ 1 - 5
pycs/frontend/endpoints/jobs/RemoveJob.py

@@ -11,10 +11,6 @@ class RemoveJob(View):
     # pylint: disable=arguments-differ
     methods = ['POST']
 
-    def __init__(self, jobs: JobRunner):
-        # pylint: disable=invalid-name
-        self.jobs = jobs
-
     def dispatch_request(self, identifier):
         # extract request data
         data = request.get_json(force=True)
@@ -23,7 +19,7 @@ class RemoveJob(View):
             abort(400)
 
         # remove job
-        self.jobs.remove(identifier)
+        JobRunner().remove(identifier)
 
         # return success response
         return make_response()

+ 2 - 4
pycs/frontend/endpoints/pipelines/FitModel.py

@@ -1,7 +1,6 @@
 from flask import make_response, request, abort
 from flask.views import View
 
-from pycs.database.Database import Database
 from pycs.database.Project import Project
 from pycs.interfaces.MediaStorage import MediaStorage
 from pycs.jobs.JobGroupBusyException import JobGroupBusyException
@@ -16,9 +15,8 @@ class FitModel(View):
     # pylint: disable=arguments-differ
     methods = ['POST']
 
-    def __init__(self, jobs: JobRunner, pipelines: PipelineCache):
+    def __init__(self, pipelines: PipelineCache):
         # pylint: disable=invalid-name
-        self.jobs = jobs
         self.pipelines = pipelines
 
     def dispatch_request(self, project_id):
@@ -35,7 +33,7 @@ class FitModel(View):
 
         # create job
         try:
-            self.jobs.run(project,
+            JobRunner.Run(project,
                           'Model Interaction',
                           f'{project.name} (fit model with new data)',
                           f'{project.name}/model-interaction',

+ 2 - 4
pycs/frontend/endpoints/pipelines/PredictFile.py

@@ -1,7 +1,6 @@
 from flask import make_response, request, abort
 from flask.views import View
 
-from pycs.database.Database import Database
 from pycs.database.File import File
 from pycs.frontend.endpoints.pipelines.PredictModel import PredictModel
 from pycs.frontend.notifications.NotificationList import NotificationList
@@ -18,10 +17,9 @@ class PredictFile(View):
     # pylint: disable=arguments-differ
     methods = ['POST']
 
-    def __init__(self, nm: NotificationManager, jobs: JobRunner, pipelines: PipelineCache):
+    def __init__(self, nm: NotificationManager, pipelines: PipelineCache):
         # pylint: disable=invalid-name
         self.nm = nm
-        self.jobs = jobs
         self.pipelines = pipelines
 
     def dispatch_request(self, file_id):
@@ -43,7 +41,7 @@ class PredictFile(View):
         try:
             notifications = NotificationList(self.nm)
 
-            self.jobs.run(project,
+            JobRunner.Run(project,
                           'Model Interaction',
                           f'{project.name} (create predictions)',
                           f'{project.name}/model-interaction',

+ 2 - 3
pycs/frontend/endpoints/pipelines/PredictModel.py

@@ -26,10 +26,9 @@ class PredictModel(View):
     # pylint: disable=arguments-differ
     methods = ['POST']
 
-    def __init__(self, nm: NotificationManager, jobs: JobRunner, pipelines: PipelineCache):
+    def __init__(self, nm: NotificationManager, pipelines: PipelineCache):
         # pylint: disable=invalid-name
         self.nm = nm
-        self.jobs = jobs
         self.pipelines = pipelines
 
     def dispatch_request(self, project_id):
@@ -48,7 +47,7 @@ class PredictModel(View):
         try:
             notifications = NotificationList(self.nm)
 
-            self.jobs.run(project,
+            JobRunner.Run(project,
                           'Model Interaction',
                           f'{project.name} (create predictions)',
                           f'{project.name}/model-interaction',

+ 4 - 5
pycs/frontend/endpoints/projects/CreateProject.py

@@ -29,10 +29,9 @@ class CreateProject(View):
     # pylint: disable=arguments-differ
     methods = ['POST']
 
-    def __init__(self, nm: NotificationManager, jobs: JobRunner):
+    def __init__(self, nm: NotificationManager):
         # pylint: disable=invalid-name
         self.nm = nm
-        self.jobs = jobs
 
     @property
     def project_folder(self):
@@ -104,7 +103,7 @@ class CreateProject(View):
 
         # execute label provider and add labels to project
         if label_provider is not None:
-            ExecuteLabelProvider.execute_label_provider(self.nm, self.jobs, project,
+            ExecuteLabelProvider.execute_label_provider(self.nm, project,
                                                         label_provider)
 
         root_folder = model.root_folder
@@ -127,7 +126,7 @@ class CreateProject(View):
                         commit=False,
                     )
 
-        self.jobs.run(project,
+        JobRunner.Run(project,
                       'Media Collections',
                       f'{project.name}',
                       f'{project.id}/media-collections',
@@ -136,7 +135,7 @@ class CreateProject(View):
 
         # find media files
         if external_data:
-            ExecuteExternalStorage.find_media_files(self.nm, self.jobs, project)
+            ExecuteExternalStorage.find_media_files(self.nm, project)
 
         # fire event
         self.nm.create_model(model.id)

+ 9 - 11
pycs/frontend/endpoints/projects/ExecuteExternalStorage.py

@@ -22,10 +22,9 @@ class ExecuteExternalStorage(View):
     # pylint: disable=arguments-differ
     methods = ['POST']
 
-    def __init__(self, nm: NotificationManager, jobs: JobRunner):
+    def __init__(self, nm: NotificationManager):
         # pylint: disable=invalid-name
         self.nm = nm
-        self.jobs = jobs
 
     def dispatch_request(self, identifier):
         # extract request data
@@ -44,7 +43,7 @@ class ExecuteExternalStorage(View):
 
         # execute label provider and add labels to project
         try:
-            self.find_media_files(self.nm, self.jobs, project)
+            ExecuteExternalStorage.find_media_files(self.nm, project)
 
         except JobGroupBusyException:
             return abort(400)
@@ -52,13 +51,12 @@ class ExecuteExternalStorage(View):
         return make_response()
 
     @staticmethod
-    def find_media_files(nm: NotificationManager, jobs: JobRunner, project: Project):
+    def find_media_files(nm: NotificationManager, project: Project):
         """
         start a job that finds media files in the projects data_folder and adds them to the
         database afterwards
 
         :param nm: notification manager object
-        :param jobs: job runner object
         :param project: project
         :return:
         """
@@ -113,9 +111,9 @@ class ExecuteExternalStorage(View):
             return current / length
 
         # run job with given functions
-        jobs.run(project,
-                 'Find Media Files',
-                 project.name,
-                 f'{project.id}/find-files',
-                 find,
-                 progress=progress)
+        JobRunner.Run(project,
+            'Find Media Files',
+            project.name,
+            f'{project.id}/find-files',
+            find,
+            progress=progress)

+ 4 - 6
pycs/frontend/endpoints/projects/ExecuteLabelProvider.py

@@ -21,10 +21,9 @@ class ExecuteLabelProvider(View):
     # pylint: disable=arguments-differ
     methods = ['POST']
 
-    def __init__(self, nm: NotificationManager, jobs: JobRunner):
+    def __init__(self, nm: NotificationManager):
         # pylint: disable=invalid-name
         self.nm = nm
-        self.jobs = jobs
 
     def dispatch_request(self, identifier):
         # extract request data
@@ -45,21 +44,20 @@ class ExecuteLabelProvider(View):
 
         # execute label provider and add labels to project
         try:
-            self.execute_label_provider(self.nm, self.jobs, project, label_provider)
+            self.execute_label_provider(self.nm, project, label_provider)
         except JobGroupBusyException:
             return abort(400)
 
         return make_response()
 
     @staticmethod
-    def execute_label_provider(nm: NotificationManager, jobs: JobRunner,
+    def execute_label_provider(nm: NotificationManager,
                                project: Project, label_provider: LabelProvider):
         """
         start a job that loads and executes a label provider and saves its results to the
         database afterwards
 
         :param nm: notification manager object
-        :param jobs: job runner object
         :param project: project
         :param label_provider: label provider
         :return:
@@ -90,7 +88,7 @@ class ExecuteLabelProvider(View):
                     nm.edit_label(label.id)
 
         # run job with given functions
-        jobs.run(project,
+        JobRunner.Run(project,
                  'Label Provider',
                  f'{project.name} ({label_provider.name})',
                  f'{project.id}/label-provider',

+ 4 - 4
pycs/frontend/util/JSONEncoder.py

@@ -4,8 +4,8 @@ from typing import Any
 
 from flask.json import JSONEncoder as Base
 
-from pycs.database.util.JSONEncoder import JSONEncoder as Database
-from pycs.jobs.util.JSONEncoder import JSONEncoder as Jobs
+from pycs.database.util.JSONEncoder import JSONEncoder as DatabaseEncoder
+from pycs.jobs.util.JSONEncoder import JSONEncoder as JobsEncoder
 
 
 class JSONEncoder(Base):
@@ -17,10 +17,10 @@ class JSONEncoder(Base):
         module = o.__class__.__module__
 
         if module.startswith('pycs.database'):
-            return Database().default(o)
+            return DatabaseEncoder().default(o)
 
         if module.startswith('pycs.jobs'):
-            return Jobs().default(o)
+            return JobsEncoder().default(o)
 
         if isinstance(o, datetime.datetime):
             return str(o)

+ 59 - 150
pycs/jobs/JobRunner.py

@@ -1,90 +1,67 @@
+import eventlet
+
+from collections import defaultdict
 from concurrent.futures import ThreadPoolExecutor
 from time import time
 from types import GeneratorType
-from typing import Callable, List, Generator, Optional, Any
-
-import eventlet
+from typing import Any
+from typing import Callable
+from typing import Generator
+from typing import List
+from typing import Optional
 # from eventlet import spawn, spawn_n, tpool
 from eventlet.event import Event
 
 
 from pycs.database.Project import Project
 from pycs.jobs.Job import Job
+from pycs.frontend.notifications.NotificationManager import NotificationManager
 from pycs.jobs.JobGroupBusyException import JobGroupBusyException
+from pycs.util import Callbacks
+from pycs.util import GreenWorker
+from pycs.util import Singleton
 
-from pycs.util.green_worker import GreenWorker
 
-class JobRunner(GreenWorker):
+class JobRunner(GreenWorker, Singleton):
     """
     run jobs in a thread pool, but track progress and process results in eventlet queue
     """
 
     # pylint: disable=too-many-arguments
+    """ Because it is a singleton (as described here:
+        https://www.python.org/download/releases/2.2/descrintro/#__new__),
+        __init__ is called every time the singleton object
+        is requested. Hence, we do the actual initialization
+        in init()!
+    """
     def __init__(self):
+        pass
+
+    def init(self):
         super().__init__()
+        super().init()
         self.__jobs = []
         self.__groups = {}
 
         self.__executor = ThreadPoolExecutor(1)
 
-        self.__create_listeners = []
-        self.__start_listeners = []
-        self.__progress_listeners = []
-        self.__finish_listeners = []
-        self.__remove_listeners = []
+        self.__listeners = defaultdict(Callbacks)
 
-    def list(self) -> List[Job]:
-        """
-        get a list of all jobs including finished ones
+    def init_notifications(self, nm: NotificationManager):
 
-        :return: list of job objects
-        """
-        return self.__jobs
+        self.on("create",      nm.create_job)
+        self.on("start",       nm.edit_job)
+        self.on("progress",    nm.edit_job)
+        self.on("finish",      nm.edit_job)
+        self.on("remove",      nm.remove_job)
 
-    def on_create(self, callback: Callable[[Job], None]) -> None:
-        """
-        register a callback that is executed each time a job is created
-
-        :param callback: callback function
-        :return:
-        """
-        self.__create_listeners.append(callback)
-
-    def on_start(self, callback: Callable[[Job], None]) -> None:
-        """
-        register a callback that is executed each time a job is started
-
-        :param callback: callback function
-        :return:
-        """
-        self.__start_listeners.append(callback)
-
-    def on_progress(self, callback: Callable[[Job], None]) -> None:
-        """
-        register a callback that is executed each time a job changes it's progress
-
-        :param callback: callback function
-        :return:
-        """
-        self.__progress_listeners.append(callback)
-
-    def on_finish(self, callback: Callable[[Job], None]) -> None:
-        """
-        register a callback that is executed each time a job is finished
-
-        :param callback: callback function
-        :return:
-        """
-        self.__finish_listeners.append(callback)
-
-    def on_remove(self, callback: Callable[[Job], None]) -> None:
-        """
-        register a callback that is executed each time a job is removed
+    @classmethod
+    def Run(cls, *args, **kwargs):
+        return cls().run(*args, **kwargs)
 
-        :param callback: callback function
-        :return:
-        """
-        self.__remove_listeners.append(callback)
+    @classmethod
+    def Remove(cls, *args, **kwargs):
+        return cls().remove(*args, **kwargs)
 
     def remove(self, id):
         """
@@ -146,8 +123,7 @@ class JobRunner(GreenWorker):
         self.__jobs.append(job)
 
         # execute create listeners
-        for callback in self.__create_listeners:
-            callback(job)
+        self.__listeners["create"](job)
 
         # add to execution queue
         # self.__queue.put((group, executable, job, progress, result, result_event, args, kwargs))
@@ -156,21 +132,37 @@ class JobRunner(GreenWorker):
         # return job object
         return job
 
+    def list(self) -> List[Job]:
+        """
+        get a list of all jobs including finished ones
+
+        :return: list of job objects
+        """
+        return self.__jobs
+
+    def on(self, operation, callback: Callable[[Job], None]) -> None:
+        """
+        register a callback that is executed each time a operation
+        (create, start, progress, finish, or remove) on a job is performed
+
+        :param callback: callback function
+        :return:
+        """
+        assert operation in ["create", "start", "progress", "finish", "remove"], \
+            f"invalid operation: {operation}"
+        self.__listeners[operation].append(callback)
 
     def _job_started(self, job):
         job.start()
-        for callback in self.__start_listeners:
-            callback(job)
+        self.__listeners["start"](job)
 
     def _job_progress(self, job, progress):
         job.update(progress=progress)
-        for callback in self.__progress_listeners:
-            callback(job)
+        self.__listeners["progress"](job)
 
     def _job_finished(self, job):
         job.finish()
-        for callback in self.__finish_listeners:
-            callback(job)
+        self.__listeners["finish"](job)
 
 
     def process_iterator(self, generator, job, progress_fun):
@@ -232,86 +224,3 @@ class JobRunner(GreenWorker):
             del self.__groups[group]
 
         self._job_finished(job)
-
-    def __run(self):
-
-        while True:
-
-            # get execution function and job from queue
-            group, executable, job, progress_fun, result_fun, result_event, args, kwargs \
-                = self.__queue.get(block=True)
-
-            # execute start listeners
-            job.started = int(time())
-            job.updated = int(time())
-
-            for callback in self.__start_listeners:
-                callback(job)
-
-            # run function and track progress
-            try:
-                # result = generator = executable(*args, **kwargs)
-                future = self.__executor.submit(executable, *args, **kwargs)
-                result = generator = tpool.execute(future.result)
-
-                if isinstance(generator, GeneratorType):
-                    iterator = iter(generator)
-
-                    try:
-                        while True:
-                            # run until next progress event
-                            future = self.__executor.submit(next, iterator)
-                            progress = tpool.execute(future.result)
-                            # progress = next(iterator)
-
-
-                            # execute progress function
-                            if progress_fun is not None:
-                                if isinstance(progress, tuple):
-                                    progress = progress_fun(*progress)
-                                else:
-                                    progress = progress_fun(progress)
-
-                            # execute progress listeners
-                            job.progress = progress
-                            job.updated = int(time())
-
-                            for callback in self.__progress_listeners:
-                                callback(job)
-                    except StopIteration as stop_iteration_exception:
-                        result = stop_iteration_exception.value
-
-                # update progress
-                job.progress = 1
-                job.updated = int(time())
-
-                for callback in self.__progress_listeners:
-                    callback(job)
-
-                # execute result function
-                if result_fun is not None:
-                    if isinstance(result, tuple):
-                        result_fun(*result)
-                    else:
-                        result_fun(result)
-
-                # execute event
-                if result_event is not None:
-                    result_event.send(result)
-
-            # save exceptions to show in ui
-            except Exception as exception:
-                import traceback
-                traceback.print_exc()
-                job.exception = f'{type(exception).__name__} ({str(exception)})'
-
-            # remove from group dict
-            if group is not None:
-                del self.__groups[group]
-
-            # finish job
-            job.finished = int(time())
-            job.updated = int(time())
-
-            for callback in self.__finish_listeners:
-                callback(job)

+ 3 - 4
pycs/util/PipelineCache.py

@@ -16,7 +16,7 @@ from pycs.database.Project import Project
 from pycs.interfaces.Pipeline import Pipeline
 from pycs.jobs.JobRunner import JobRunner
 from pycs.util.PipelineUtil import load_from_root_folder
-from pycs.util.green_worker import GreenWorker
+from pycs.util import GreenWorker
 
 
 @dataclass
@@ -44,9 +44,8 @@ class PipelineCache(GreenWorker):
     """
     CLOSE_TIMER = dt.timedelta(seconds=120)
 
-    def __init__(self, jobs: JobRunner):
+    def __init__(self):
         super().__init__()
-        self.__jobs = jobs
 
         self.__pipelines: dict[PipelineEntry] = {}
         self.__lock = Lock()
@@ -125,7 +124,7 @@ class PipelineCache(GreenWorker):
                 continue
 
             # create job to close pipeline
-            self.__jobs.run(project,
+            JobRunner.Run(project,
                             'Model Interaction',
                             f'{project.name} (close pipeline)',
                             f'{project.name}/model-interaction',

+ 9 - 0
pycs/util/__init__.py

@@ -0,0 +1,9 @@
+from pycs.util.callbacks import Callbacks
+from pycs.util.green_worker import GreenWorker
+from pycs.util.singleton import Singleton
+
+__all__ = [
+    "Callbacks",
+    "GreenWorker",
+    "Singleton",
+]

+ 19 - 0
pycs/util/callbacks.py

@@ -0,0 +1,19 @@
+import warnings
+
+
+class Callbacks(list):
+
+    def append(self, value):
+        assert callable(value), "It only accepts callable values!"
+        super().append(value)
+
+    def __call__(self, *args, **kwargs):
+
+        for callback in self:
+            try:
+                callback(*args, **kwargs)
+
+            except TypeError:
+                warnings.warn(f"{callback} failed with arguments {args} and {kwargs}!")
+
+

+ 5 - 1
pycs/util/green_worker.py

@@ -28,10 +28,14 @@ class GreenWorker(abc.ABC):
     def ident(self):
         return threading.get_ident()
 
+    @property
+    def running(self):
+        return self.__running
 
     def start(self):
-        if self.__running:
+        if self.running:
             return
+        app.logger.info(f'Starting {self.__class__.__name__}... ')
         self._thread = self.pool.spawn_n(self.__run__)
         self.__running = True
 

+ 29 - 0
pycs/util/singleton.py

@@ -0,0 +1,29 @@
+import abc
+
+
+class Singleton(abc.ABC):
+    """ source: https://www.python.org/download/releases/2.2/descrintro/#__new__ """
+    def __new__(cls, *args, **kwds):
+        it = cls.__dict__.get("__it__")
+        if it is not None:
+            return it
+
+        cls.__it__ = it = super(Singleton, cls).__new__(cls)
+        it.init(*args, **kwds)
+        return it
+
+    @abc.abstractmethod
+    def init(self, *args, **kwds):
+        pass
+
+
+
+if __name__ == '__main__':
+
+    class Test(Singleton):
+        def init(self):
+            print("Calling init")
+
+
+    print(id(Test()))
+    print(id(Test()))

+ 23 - 0
testing.py

@@ -0,0 +1,23 @@
+
+class Test(object):
+    _instance = None
+
+    def __new__(cls, *args, **kwargs):
+        if cls._instance is None:
+            cls._instance = super(Test, cls).__new__(cls)
+        return cls._instance
+
+
+    def __init__(self):
+        super(Test, self).__init__()
+
+    @classmethod
+    def foo(cls):
+        print("classmethod")
+
+    def foo(self):
+        print("method", self)
+
+
+Test.foo()
+Test().foo()