-
Notifications
You must be signed in to change notification settings - Fork 26
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #94 from amathuria/wip-amathuria-removing-beanstalkd
paddles: Adding a queueing mechanism to Paddles
- Loading branch information
Showing
13 changed files
with
613 additions
and
178 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,55 @@ | ||
"""modify jobs table | ||
Revision ID: e8de4928657 | ||
Revises: 266e6f3efd94 | ||
Create Date: 2021-06-28 13:45:32.717585 | ||
""" | ||
|
||
# revision identifiers, used by Alembic. | ||
revision = 'e8de4928657' | ||
down_revision = '266e6f3efd94' | ||
|
||
from alembic import op | ||
from paddles.models.types import JSONType | ||
from sqlalchemy.schema import Sequence, CreateSequence, DropSequence | ||
|
||
import sqlalchemy as sa | ||
|
||
|
||
def upgrade():
    """Apply the queueing-schema changes to the ``jobs`` table.

    Adds the scheduling/metadata columns introduced with the paddles
    queueing mechanism, indexes the hot lookup columns, creates the
    ``jobs_id_seq`` sequence, and drops the now-unused archive_path index.
    """
    # (column name, SQLAlchemy type) for every column this revision adds.
    # All columns are nullable so existing rows remain valid.
    added_columns = [
        ('priority', sa.Integer()),
        ('repo', sa.String(length=256)),
        ('seed', sa.Integer()),
        ('sleep_before_teardown', sa.Integer()),
        ('subset', sa.String(length=32)),
        ('suite', sa.String(length=256)),
        ('suite_path', sa.String(length=256)),
        ('suite_relpath', sa.String(length=256)),
        ('suite_repo', sa.String(length=256)),
        ('teuthology_branch', sa.String(length=256)),
        ('teuthology_sha1', sa.String(length=256)),
        ('timestamp', sa.DateTime()),
        ('user', sa.String(length=64)),
        ('queue', sa.String(length=64)),
    ]
    for column_name, column_type in added_columns:
        op.add_column('jobs', sa.Column(column_name, column_type, nullable=True))

    # Index the columns used for job lookup by the queueing code.
    op.create_index(op.f('ix_jobs_job_id'), 'jobs', ['job_id'], unique=False)
    op.create_index(op.f('ix_jobs_teuthology_sha1'), 'jobs', ['teuthology_sha1'], unique=False)

    # Dedicated sequence backing jobs.id.
    op.execute(CreateSequence(Sequence('jobs_id_seq')))

    # archive_path is no longer used for lookups.
    op.drop_index('ix_jobs_archive_path', table_name='jobs')
|
||
|
||
def downgrade():
    """Revert :func:`upgrade`.

    Restores the ``ix_jobs_archive_path`` index, drops the two indexes and
    the ``jobs_id_seq`` sequence created by the upgrade, and removes every
    column the upgrade added.

    Fix: the original revision only dropped nine of the fourteen added
    columns — ``queue``, ``teuthology_branch``, ``suite_path``, ``subset``
    and ``seed`` were left behind, so a downgrade did not actually return
    the table to the previous revision's schema.
    """
    op.create_index('ix_jobs_archive_path', 'jobs', ['archive_path'], unique=False)
    op.drop_index(op.f('ix_jobs_teuthology_sha1'), table_name='jobs')
    op.drop_index(op.f('ix_jobs_job_id'), table_name='jobs')
    op.execute(DropSequence(Sequence('jobs_id_seq')))
    # Drop every column added by upgrade(), in reverse order of creation.
    for column_name in (
        'queue',
        'user',
        'timestamp',
        'teuthology_sha1',
        'teuthology_branch',
        'suite_repo',
        'suite_relpath',
        'suite_path',
        'suite',
        'subset',
        'sleep_before_teardown',
        'seed',
        'repo',
        'priority',
    ):
        op.drop_column('jobs', column_name)
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,146 @@ | ||
from pecan import expose, request | ||
from paddles.controllers import error | ||
from paddles.exceptions import PaddlesError | ||
from paddles.models import Queue, Job, Run, Session | ||
|
||
import logging | ||
log = logging.getLogger(__name__) | ||
|
||
|
||
class QueuesController(object):
    """Pecan REST controller for paddles queues.

    Part of the replacement of beanstalkd with a database-backed queueing
    mechanism: queues live in the ``Queue`` table and queued work items are
    ``Job`` rows with ``status == 'queued'``.
    """

    @expose(generic=True, template='json')
    def index(self, machine_type='', paused_by=None):
        """List queues, optionally filtered by machine type and/or pauser.

        A ``machine_type`` containing ``'|'`` (a multi-machine-type request)
        is mapped to the shared ``'multi'`` queue.
        """
        query = Queue.query
        if machine_type:
            if '|' in machine_type:
                query = query.filter(Queue.queue == 'multi')
            else:
                query = query.filter(Queue.queue == machine_type)
        if paused_by:
            query = query.filter(Queue.paused_by == paused_by)
        return [queue.__json__() for queue in query.all()]

    @index.when(method='POST', template='json')
    def index_post(self):
        """
        Create a new Queue

        Expects a JSON body with at least a 'queue' key naming the queue.
        Responds with an error if the body is not JSON, the key is missing,
        or a queue with that name already exists.
        """
        try:
            data = request.json
            queue_name = data.get('queue')
        except ValueError:
            # NOTE(review): assumes error() aborts the request; otherwise
            # queue_name below would be unbound — confirm against
            # paddles.controllers.error.
            error('/errors/invalid/', 'could not decode JSON body')
        if not queue_name:
            error('/errors/invalid/', "could not find required key: 'queue'")

        if Queue.filter_by(queue=queue_name).first():
            error('/errors/invalid/',
                  "Queue %s already exists" % queue_name)
        else:
            self.queue = Queue(queue=queue_name)
            try:
                # Queue.update() applies the remaining JSON fields and may
                # raise PaddlesError for invalid values.
                self.queue.update(data)
            except PaddlesError as exc:
                error(exc.url, str(exc))
            log.info("Created {queue}: {data}".format(
                queue=self.queue,
                data=data,
            ))
        return dict()

    @index.when(method='PUT', template='json')
    def index_put(self):
        """
        Update the Queue

        Expects a JSON body with a 'queue' key naming an existing queue;
        the remaining fields are applied via Queue.update().
        """
        try:
            data = request.json
            queue_name = data.get('queue')
        except ValueError:
            error('/errors/invalid', 'could not decode JSON body')
        if not queue_name:
            error('/errors/invalid/', "could not find required key: 'queue'")
        queue = Queue.filter_by(queue=queue_name).first()
        if queue:
            self.queue = queue
            try:
                self.queue.update(data)
            except PaddlesError as exc:
                error(exc.url, str(exc))
            log.info("Updated {queue}: {data}".format(
                queue=self.queue,
                data=data,
            ))
        else:
            error('/errors/invalid', "specified queue does not exist")
        return dict()

    @expose(template='json')
    def pop_queue(self, queue):
        """Return the highest-priority queued job of the named queue.

        Returns None when the queue does not exist, and errors when the
        queue is paused. Jobs are ordered by ascending ``priority``.
        """
        queue_name = queue
        queue = Queue.filter_by(queue=queue_name).first()
        if queue is None:
            log.info("%s queue is empty! No jobs to retrieve", queue_name)
            return None
        if queue.paused is True:
            error('/errors/unavailable', "queue is paused, cannot retrieve job")
            # Defensive: error() is expected to abort the request — TODO confirm.
            return
        job_query = Job.filter_by(status='queued').filter_by(queue=queue_name)
        # Lower priority value == served first.
        job = job_query.order_by(Job.priority).first()
        return job

    @expose(template='json')
    def stats(self, queue):
        """Report the number of queued jobs (and pause state) for a queue."""
        queue_name = queue
        if not queue_name:
            error('/errors/invalid/', "could not find required key: 'queue'")
        queue = Queue.filter_by(queue=queue_name).first()
        if queue:
            stats = Job.filter_by(queue=queue_name).\
                filter_by(status='queued').\
                all()
            current_jobs_ready = len(stats)

            if queue.__json__()['paused'] is False:
                # Running queue: report only name, backlog size and state.
                return dict(
                    queue=queue_name,
                    queued_jobs=current_jobs_ready,
                    paused=queue.__json__()['paused']
                )
            else:
                # Paused queue: include the full queue record (who paused it,
                # etc.) plus the backlog size.
                paused_stats = queue.__json__()
                paused_stats.update(queued_jobs=current_jobs_ready)
                return paused_stats
        else:
            error('/errors/invalid', "specified queue does not exist")

    @expose(template='json')
    def queued_jobs(self, user=None, run_name=None):
        """
        Retrieve all the queued jobs for a particular user or a particular run

        The queue name is read from the JSON request body ('queue' key).
        """
        try:
            data = request.json
            queue_name = data.get('queue')
        except ValueError:
            error('/errors/invalid', 'could not decode JSON body')
        if not queue_name:
            error('/errors/invalid/', "could not find required key: 'queue'")
        queue = Queue.filter_by(queue=queue_name).first()
        if queue:
            if run_name:
                # NOTE(review): this branch joins Job to Run by run_id but
                # does not filter on queue_name, so it can return queued jobs
                # of this run from other queues — confirm this is intended.
                jobs = Session.query(Job).\
                    filter(Job.status == 'queued').\
                    filter(Run.id == Job.run_id).\
                    filter(Run.name == run_name)
            elif user:
                jobs = Job.filter_by(queue=queue_name).\
                    filter_by(status='queued').\
                    filter_by(user=user)
            else:
                jobs = Job.filter_by(queue=queue_name).\
                    filter_by(status='queued')
            return [job.__json__() for job in jobs.all()]
        else:
            error('/errors/invalid', "specified queue does not exist")
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.