Diffstat (limited to 'apscheduler')

 apscheduler/executors/asyncio.py    |  6
 apscheduler/executors/debug.py      |  6
 apscheduler/executors/gevent.py     |  6
 apscheduler/executors/pool.py       |  4
 apscheduler/executors/twisted.py    |  6
 apscheduler/jobstores/memory.py     |  6
 apscheduler/jobstores/mongodb.py    |  2
 apscheduler/jobstores/redis.py      |  2
 apscheduler/jobstores/sqlalchemy.py |  2
 apscheduler/schedulers/base.py      | 96

 10 files changed, 106 insertions(+), 30 deletions(-)
diff --git a/apscheduler/executors/asyncio.py b/apscheduler/executors/asyncio.py
index 198cdb5..2618f61 100644
--- a/apscheduler/executors/asyncio.py
+++ b/apscheduler/executors/asyncio.py
@@ -5,7 +5,11 @@ from apscheduler.executors.base import BaseExecutor, run_job
class AsyncIOExecutor(BaseExecutor):
- """Runs jobs in the default executor of the event loop."""
+ """
+ Runs jobs in the default executor of the event loop.
+
+ Plugin alias: ``asyncio``
+ """
def start(self, scheduler, alias):
super(AsyncIOExecutor, self).start(scheduler, alias)
diff --git a/apscheduler/executors/debug.py b/apscheduler/executors/debug.py
index f9e5959..590d957 100644
--- a/apscheduler/executors/debug.py
+++ b/apscheduler/executors/debug.py
@@ -4,7 +4,11 @@ from apscheduler.executors.base import BaseExecutor, run_job
class DebugExecutor(BaseExecutor):
- """A special executor that executes the target callable directly instead of deferring it to a thread or process."""
+ """
+ A special executor that executes the target callable directly instead of deferring it to a thread or process.
+
+ Plugin alias: ``debug``
+ """
def _do_submit_job(self, job, run_times):
try:
diff --git a/apscheduler/executors/gevent.py b/apscheduler/executors/gevent.py
index bc1069e..6281cdc 100644
--- a/apscheduler/executors/gevent.py
+++ b/apscheduler/executors/gevent.py
@@ -11,7 +11,11 @@ except ImportError: # pragma: nocover
class GeventExecutor(BaseExecutor):
- """Runs jobs as greenlets."""
+ """
+ Runs jobs as greenlets.
+
+ Plugin alias: ``gevent``
+ """
def _do_submit_job(self, job, run_times):
def callback(greenlet):
diff --git a/apscheduler/executors/pool.py b/apscheduler/executors/pool.py
index bb3aa56..63fc06e 100644
--- a/apscheduler/executors/pool.py
+++ b/apscheduler/executors/pool.py
@@ -23,6 +23,8 @@ class ThreadPoolExecutor(BasePoolExecutor):
"""
An executor that runs jobs in a concurrent.futures thread pool.
+ Plugin alias: ``threadpool``
+
:param max_workers: the maximum number of spawned threads.
"""
@@ -35,6 +37,8 @@ class ProcessPoolExecutor(BasePoolExecutor):
"""
An executor that runs jobs in a concurrent.futures process pool.
+ Plugin alias: ``processpool``
+
:param max_workers: the maximum number of spawned processes.
"""
diff --git a/apscheduler/executors/twisted.py b/apscheduler/executors/twisted.py
index 03a212a..a0b1570 100644
--- a/apscheduler/executors/twisted.py
+++ b/apscheduler/executors/twisted.py
@@ -4,7 +4,11 @@ from apscheduler.executors.base import BaseExecutor, run_job
class TwistedExecutor(BaseExecutor):
- """Runs jobs in the reactor's thread pool."""
+ """
+ Runs jobs in the reactor's thread pool.
+
+ Plugin alias: ``twisted``
+ """
def start(self, scheduler, alias):
super(TwistedExecutor, self).start(scheduler, alias)
diff --git a/apscheduler/jobstores/memory.py b/apscheduler/jobstores/memory.py
index 031a105..645391f 100644
--- a/apscheduler/jobstores/memory.py
+++ b/apscheduler/jobstores/memory.py
@@ -5,7 +5,11 @@ from apscheduler.util import datetime_to_utc_timestamp
class MemoryJobStore(BaseJobStore):
- """Stores jobs in an array in RAM. Provides no persistence support."""
+ """
+ Stores jobs in an array in RAM. Provides no persistence support.
+
+ Plugin alias: ``memory``
+ """
def __init__(self):
super(MemoryJobStore, self).__init__()
diff --git a/apscheduler/jobstores/mongodb.py b/apscheduler/jobstores/mongodb.py
index 7f66732..ff762f7 100644
--- a/apscheduler/jobstores/mongodb.py
+++ b/apscheduler/jobstores/mongodb.py
@@ -22,6 +22,8 @@ class MongoDBJobStore(BaseJobStore):
Stores jobs in a MongoDB database. Any leftover keyword arguments are directly passed to pymongo's `MongoClient
<http://api.mongodb.org/python/current/api/pymongo/mongo_client.html#pymongo.mongo_client.MongoClient>`_.
+ Plugin alias: ``mongodb``
+
:param str database: database to store jobs in
:param str collection: collection to store jobs in
:param client: a :class:`~pymongo.mongo_client.MongoClient` instance to use instead of providing connection
diff --git a/apscheduler/jobstores/redis.py b/apscheduler/jobstores/redis.py
index ced4e7c..2b4ffd5 100644
--- a/apscheduler/jobstores/redis.py
+++ b/apscheduler/jobstores/redis.py
@@ -21,6 +21,8 @@ class RedisJobStore(BaseJobStore):
"""
Stores jobs in a Redis database. Any leftover keyword arguments are directly passed to redis's StrictRedis.
+ Plugin alias: ``redis``
+
:param int db: the database number to store jobs in
:param str jobs_key: key to store jobs in
:param str run_times_key: key to store the jobs' run times in
diff --git a/apscheduler/jobstores/sqlalchemy.py b/apscheduler/jobstores/sqlalchemy.py
index 9f3c460..f1692a5 100644
--- a/apscheduler/jobstores/sqlalchemy.py
+++ b/apscheduler/jobstores/sqlalchemy.py
@@ -20,6 +20,8 @@ class SQLAlchemyJobStore(BaseJobStore):
"""
Stores jobs in a database table using SQLAlchemy. The table will be created if it doesn't exist in the database.
+ Plugin alias: ``sqlalchemy``
+
:param str url: connection string (see `SQLAlchemy documentation
<http://docs.sqlalchemy.org/en/latest/core/engines.html?highlight=create_engine#database-urls>`_
on this)
diff --git a/apscheduler/schedulers/base.py b/apscheduler/schedulers/base.py
index 088de87..96e5fc3 100644
--- a/apscheduler/schedulers/base.py
+++ b/apscheduler/schedulers/base.py
@@ -39,6 +39,10 @@ class BaseScheduler(six.with_metaclass(ABCMeta)):
_trigger_plugins = dict((ep.name, ep) for ep in iter_entry_points('apscheduler.triggers'))
_trigger_classes = {}
+ _executor_plugins = dict((ep.name, ep) for ep in iter_entry_points('apscheduler.executors'))
+ _executor_classes = {}
+ _jobstore_plugins = dict((ep.name, ep) for ep in iter_entry_points('apscheduler.jobstores'))
+ _jobstore_classes = {}
_stopped = True
#
@@ -158,11 +162,13 @@ class BaseScheduler(six.with_metaclass(ABCMeta)):
def running(self):
return not self._stopped
- def add_executor(self, executor, alias='default'):
+ def add_executor(self, executor, alias='default', **executor_opts):
"""
- Adds an executor to this scheduler.
+ Adds an executor to this scheduler. Any extra keyword arguments will be passed to the executor plugin's
+ constructor, assuming that the first argument is the name of an executor plugin.
- :param apscheduler.executors.base.BaseExecutor executor: the executor instance to be added
+ :param str|unicode|apscheduler.executors.base.BaseExecutor executor: either an executor instance or the name of
+ an executor plugin
:param str|unicode alias: alias for the scheduler
"""
@@ -170,7 +176,13 @@ class BaseScheduler(six.with_metaclass(ABCMeta)):
if alias in self._executors:
raise KeyError('This scheduler already has an executor by the alias of "%s"' % alias)
- self._executors[alias] = executor
+ if isinstance(executor, BaseExecutor):
+ self._executors[alias] = executor
+ elif isinstance(executor, six.string_types):
+ self._executors[alias] = executor = self._create_plugin_instance('executor', executor, executor_opts)
+ else:
+ raise TypeError('Expected an executor instance or a string, got %s instead' %
+ executor.__class__.__name__)
# Start the executor right away if the scheduler is running
if self.running:
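
With this hunk, add_executor() accepts either an executor instance or the name of an executor plugin, forwarding any extra keyword arguments to the plugin's constructor. A minimal usage sketch (assuming APScheduler's bundled entry points and its BlockingScheduler; the 'processes' alias is only an example):

    from apscheduler.schedulers.blocking import BlockingScheduler

    scheduler = BlockingScheduler()

    # Name the plugin and let the scheduler instantiate it; max_workers is
    # forwarded to ProcessPoolExecutor's constructor via **executor_opts.
    scheduler.add_executor('processpool', alias='processes', max_workers=4)
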
@@ -195,18 +207,26 @@ class BaseScheduler(six.with_metaclass(ABCMeta)):
self._dispatch_event(SchedulerEvent(EVENT_EXECUTOR_REMOVED, alias))
- def add_jobstore(self, jobstore, alias='default'):
+ def add_jobstore(self, jobstore, alias='default', **jobstore_opts):
"""
- Adds a job store to this scheduler.
+ Adds a job store to this scheduler. Any extra keyword arguments will be passed to the job store plugin's
+ constructor, assuming that the first argument is the name of a job store plugin.
- :param apscheduler.jobstores.base.BaseJobStore jobstore: job store to be added
+ :param str|unicode|apscheduler.jobstores.base.BaseJobStore jobstore: job store to be added
:param str|unicode alias: alias for the job store
"""
with self._jobstores_lock:
if alias in self._jobstores:
raise KeyError('This scheduler already has a job store by the alias of "%s"' % alias)
- self._jobstores[alias] = jobstore
+
+ if isinstance(jobstore, BaseJobStore):
+ self._jobstores[alias] = jobstore
+ elif isinstance(jobstore, six.string_types):
+ self._jobstores[alias] = jobstore = self._create_plugin_instance('jobstore', jobstore, jobstore_opts)
+ else:
+ raise TypeError('Expected a job store instance or a string, got %s instead' %
+ jobstore.__class__.__name__)
# Start the job store right away if the scheduler is running
if self.running:
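
add_jobstore() gains the same string form. Continuing the sketch above (assuming SQLAlchemy is installed so the 'sqlalchemy' plugin documented earlier in this diff can load):

    # The url keyword is passed through to SQLAlchemyJobStore's constructor.
    scheduler.add_jobstore('sqlalchemy', alias='persistent', url='sqlite:///jobs.sqlite')
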
@@ -563,9 +583,16 @@ class BaseScheduler(six.with_metaclass(ABCMeta)):
if isinstance(value, BaseExecutor):
self.add_executor(value, alias)
elif isinstance(value, MutableMapping):
- classname = value.pop('class')
- cls = maybe_ref(classname)
- executor = cls(**value)
+ executor_class = value.pop('class', None)
+ plugin = value.pop('type', None)
+ if plugin:
+ executor = self._create_plugin_instance('executor', plugin, value)
+ elif executor_class:
+ cls = maybe_ref(executor_class)
+ executor = cls(**value)
+ else:
+ raise ValueError('Cannot create executor "%s" -- either "type" or "class" must be defined' % alias)
+
self.add_executor(executor, alias)
else:
raise TypeError("Expected executor instance or dict for executors['%s'], got %s instead" % (
@@ -577,9 +604,16 @@ class BaseScheduler(six.with_metaclass(ABCMeta)):
if isinstance(value, BaseJobStore):
self.add_jobstore(value, alias)
elif isinstance(value, MutableMapping):
- classname = value.pop('class')
- cls = maybe_ref(classname)
- jobstore = cls(**value)
+ jobstore_class = value.pop('class', None)
+ plugin = value.pop('type', None)
+ if plugin:
+ jobstore = self._create_plugin_instance('jobstore', plugin, value)
+ elif jobstore_class:
+ cls = maybe_ref(jobstore_class)
+ jobstore = cls(**value)
+ else:
+ raise ValueError('Cannot create job store "%s" -- either "type" or "class" must be defined' % alias)
+
self.add_jobstore(jobstore, alias)
else:
raise TypeError("Expected job store instance or dict for jobstores['%s'], got %s instead" % (
@@ -696,6 +730,27 @@ class BaseScheduler(six.with_metaclass(ABCMeta)):
if wakeup:
self.wakeup()
+ def _create_plugin_instance(self, type_, alias, constructor_kwargs):
+ """Creates an instance of the given plugin type, loading the plugin first if necessary."""
+
+ plugin_container, class_container, base_class = {
+ 'trigger': (self._trigger_plugins, self._trigger_classes, BaseTrigger),
+ 'jobstore': (self._jobstore_plugins, self._jobstore_classes, BaseJobStore),
+ 'executor': (self._executor_plugins, self._executor_classes, BaseExecutor)
+ }[type_]
+
+ try:
+ plugin_cls = class_container[alias]
+ except KeyError:
+ if alias in plugin_container:
+ plugin_cls = class_container[alias] = plugin_container[alias].load()
+ if not issubclass(plugin_cls, base_class):
+ raise TypeError('The {0} entry point does not point to a {0} class'.format(type_))
+ else:
+ raise LookupError('No {0} by the name "{1}" was found'.format(type_, alias))
+
+ return plugin_cls(**constructor_kwargs)
+
def _create_trigger(self, trigger, trigger_args):
if isinstance(trigger, BaseTrigger):
return trigger
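
_create_plugin_instance() resolves an alias through the 'apscheduler.triggers', 'apscheduler.executors' and 'apscheduler.jobstores' entry point groups, so a third-party distribution could register its own plugins. A hypothetical setup.py fragment (all names below are invented for illustration; the referenced classes must subclass BaseExecutor/BaseJobStore or the issubclass check above raises TypeError):

    from setuptools import setup

    setup(
        name='apscheduler-extras',        # hypothetical distribution name
        version='0.1',
        packages=['apscheduler_extras'],
        entry_points={
            'apscheduler.executors': ['myexecutor = apscheduler_extras.executors:MyExecutor'],
            'apscheduler.jobstores': ['myjobstore = apscheduler_extras.jobstores:MyJobStore'],
        },
    )
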
@@ -707,17 +762,8 @@ class BaseScheduler(six.with_metaclass(ABCMeta)):
# Use the scheduler's time zone if nothing else is specified
trigger_args.setdefault('timezone', self.timezone)
- try:
- trigger_cls = self._trigger_classes[trigger]
- except KeyError:
- if trigger in self._trigger_plugins:
- trigger_cls = self._trigger_classes[trigger] = self._trigger_plugins[trigger].load()
- if not issubclass(trigger_cls, BaseTrigger):
- raise TypeError('The trigger entry point does not point to a trigger class')
- else:
- raise LookupError('No trigger by the name "%s" was found' % trigger)
-
- return trigger_cls(**trigger_args)
+ # Instantiate the trigger class
+ return self._create_plugin_instance('trigger', trigger, trigger_args)
def _create_lock(self):
"""Creates a reentrant lock object."""