Validation coordinator queue

This notebook creates the coordinator that feeds the data pipeline queue.

  • First, import the required Python libraries:
In [1]:
import sys
import os

import owslib
from owslib.wps import monitorExecution
from owslib.wps import WebProcessingService
import json

import lxml.etree as etree

import cioppy

from shapely.wkt import loads
import getpass

import folium

from datetime import datetime, timedelta
import dateutil.parser

import requests

from nbconvert.preprocessors import ExecutePreprocessor, CellExecutionError
import nbformat as nbf
  • Read the data pipeline configuration information:
In [2]:
%store -r

nb_config = os.path.join('..', 'configuration.ipynb')

nb = nbf.read(nb_config, 4)

# Run the configuration cell from configuration.ipynb so its variable
# definitions become available in this notebook
exec(nb['cells'][2]['source']) in globals(), locals()

app = dict([('artifact_id', app_artifact_id),
            ('version', app_version),
            ('repository', repository),
            ('community', community)])

app_process_id = '%s_%s_%s_%s' % (app['community'].replace('-', '_'),
                                  app['artifact_id'].replace('-', '_'),
                                  app['artifact_id'].replace('-', '_'),
                                  app['version'].replace('.', '_'))

trigger_queue = dict([('artifact_id', trigger_queue_artifact_id),
                      ('version', trigger_queue_version),
                      ('repository', repository),
                      ('folder', folder),
                      ('community', community)])

trigger_queue_process_id = '%s_%s_%s_%s' % (trigger_queue['community'].replace('-', '_'),
                                            trigger_queue['artifact_id'].replace('-', '_'),
                                            trigger_queue['artifact_id'].replace('-', '_'),
                                            trigger_queue['version'].replace('.', '_'))

print 'This notebook will create a coordinator for a queue to invoke the application %s with the trigger %s' % (app_process_id,
                                                                                                                trigger_queue_process_id)
This notebook will create a coordinator for a queue to invoke the application ec_better_ewf_ethz_01_02_01_ewf_ethz_01_02_01_0_7 with the trigger ec_better_tg_ethz_01_02_01_queue_tg_ethz_01_02_01_queue_0_5

Application parameters

In [3]:
bbox = (-180, -90, 180, 90)
min_mag = 5
buffer_size = '0.9'

Coordinator parameters

In [4]:
coordinator_name = 'co_%s_validation_queue' % data_pipeline
In [5]:
start_queue = '${coord:formatTime(coord:dateOffset(coord:nominalTime(), -2, \'DAY\'), "yyyy-MM-dd\'T\'HH:mm:ss\'Z\'")}'
end_queue = '${coord:formatTime(coord:dateOffset(coord:nominalTime(), -1, \'DAY\'), "yyyy-MM-dd\'T\'HH:mm:ss\'Z\'")}'
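
These two Oozie EL expressions resolve, for each coordinator run, to a one-day queue window: from two days before the nominal time to one day before it. As a minimal illustration (the nominal time below is an assumed example; it uses the datetime utilities imported earlier):

nominal_time = datetime(2018, 6, 15, 0, 0, 0)  # assumed example nominal time

# Same offsets as the EL expressions above: -2 days for the window start, -1 day for its end
start_example = (nominal_time - timedelta(days=2)).strftime('%Y-%m-%dT%H:%M:%SZ')
end_example = (nominal_time - timedelta(days=1)).strftime('%Y-%m-%dT%H:%M:%SZ')

print 'queue window: %s to %s' % (start_example, end_example)
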
In [6]:
co_trigger_queue_process_id = 'coordinator_%s' % trigger_queue_process_id
In [7]:
coordinator_date_start = '2016-01-01T00:00Z'
coordinator_date_stop = '2018-12-31T00:00Z'
coordinator_period = '0 0 * * *'
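
The cron expression '0 0 * * *' schedules one coordinator action per day at 00:00 between coordinator_date_start and coordinator_date_stop. A small illustrative sketch of the first few nominal times, using the dateutil and timedelta imports above:

first_nominal = dateutil.parser.parse(coordinator_date_start)

# One action per day at midnight, starting at the coordinator start date
for day in range(3):
    print (first_nominal + timedelta(days=day)).strftime('%Y-%m-%dT%H:%MZ')
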

Common parameters

In [8]:
tg_quotation = 'No'
recovery = 'No'
_T2Username = data_pipeline

Check data transformation application

In [9]:
wps_url_apps = '%s/zoo-bin/zoo_loader.cgi' % apps_deployer

wps = WebProcessingService(wps_url_apps, verbose=False, skip_caps=False)

found_process = False

message = "The process %s is not deployed" % app_process_id

for index, elem in enumerate(wps.processes):

    if elem.identifier == app_process_id:
        message = "The process %s is deployed" % app_process_id
        found_process = True

print message

if not found_process:
    raise Exception(message)
The process ec_better_ewf_ethz_01_02_01_ewf_ethz_01_02_01_0_7 is deployed

Check trigger coordinator

In [11]:
wps_url_triggers = '%s/zoo-bin/zoo_loader.cgi' % trigger_deployer

wps = WebProcessingService(wps_url_triggers, verbose=False, skip_caps=False)

found_process = False

message = "The queue coordinator process %s is not deployed" % co_trigger_queue_process_id

for index, elem in enumerate(wps.processes):

    if elem.identifier == co_trigger_queue_process_id:
        message = "The queue coordinator process %s is deployed" % co_trigger_queue_process_id
        found_process = True

print message

if not found_process:
    raise Exception(message)
The queue coordinator process coordinator_ec_better_tg_ethz_01_02_01_queue_tg_ethz_01_02_01_queue_0_5 is deployed
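
Both deployment checks above follow the same pattern, so they could be factored into a small helper like the sketch below (check_deployed is a name introduced here for illustration, not part of the original notebook):

def check_deployed(wps_endpoint, process_identifier, label='process'):
    # Fetch the WPS capabilities and look for the given process identifier
    wps_check = WebProcessingService(wps_endpoint, verbose=False, skip_caps=False)
    deployed = any(p.identifier == process_identifier for p in wps_check.processes)

    print 'The %s %s is %sdeployed' % (label, process_identifier, '' if deployed else 'not ')

    return deployed

# Equivalent to the two checks above:
# check_deployed(wps_url_apps, app_process_id)
# check_deployed(wps_url_triggers, co_trigger_queue_process_id, label='queue coordinator process')
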

Feed the queue

In [12]:
process = wps.describeprocess(co_trigger_queue_process_id)

print process.title

print process.abstract
ETHZ-01-02-01 Filtered DInSAR interferograms Trigger - Queue Coordinator
Coordinator: Trigger for ETHZ-01-02-01 Filtered DInSAR interferograms - queue
In [13]:
for data_input in process.dataInputs:
    print data_input.identifier
data_pipeline
wps_url
process_id
api_key
username
tg_quotation
start
end
bbox
min_mag
buffer_size
t2_coordinator_date_start
t2_coordinator_date_stop
t2_coordinator_period
t2_coordinator_name
quotation
_T2Username

Define the input parameters

In [14]:
mode = 'Queue'

inputs = [('data_pipeline', data_pipeline),
          ('wps_url', wps_url_apps),
          ('process_id', app_process_id),
          ('api_key', datapipeline_api_key),
          ('username', data_pipeline),
          ('tg_quotation', tg_quotation),
          ('start', start_queue),
          ('end', end_queue),
          ('bbox', ','.join(repr(e) for e in bbox)),
          ('min_mag', str(min_mag)),
          ('buffer_size', buffer_size),
          ('t2_coordinator_date_start', coordinator_date_start),
          ('t2_coordinator_date_stop', coordinator_date_stop),
          ('t2_coordinator_period', coordinator_period),
          ('t2_coordinator_name', coordinator_name),
          ('quotation', tg_quotation),
          ('_T2Username', data_pipeline)]
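
Before submitting, the keys in inputs can be cross-checked against the inputs declared by the coordinator process (a quick sanity check added here for illustration; both lists printed below should be empty):

declared = set(d.identifier for d in process.dataInputs)
provided = set(name for name, value in inputs)

# Declared inputs with no value, and provided inputs the process does not declare
print 'missing inputs:', sorted(declared - provided)
print 'unknown inputs:', sorted(provided - declared)
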

Submit the coordinator request

In [15]:
execution = owslib.wps.WPSExecution(url=wps_url_triggers)

execution_request = execution.buildRequest(co_trigger_queue_process_id,
                                           inputs,
                                           output=[('coordinatorIds', False)])

execution_response = execution.submitRequest(etree.tostring(execution_request, pretty_print=True))

execution.parseResponse(execution_response)

print execution.statusLocation

monitorExecution(execution)

if not execution.isSucceded():

    raise Exception('Coordinator %s creation failed' % co_trigger_queue_process_id)
In [16]:
# The trigger output is a JSON document listing the created coordinators; keep the Oozie id of the first one
coordinator_id = str(json.loads(execution.processOutputs[0].data[0])['coordinatorsId'][0]['oozieId'])
In [17]:
coordinator_id
Out[17]:
'0028600-180330140554685-oozie-oozi-C'
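
The state of the new coordinator can be polled through the same Oozie REST endpoint used in the cells below (a sketch, assuming the Oozie job info call is reachable from this environment and returns the standard job JSON):

status_url = '%s:11000/oozie/v1/job/%s?user.name=oozie&show=info' % (production_centre, coordinator_id)

r = requests.get(status_url)

if r.status_code == 200:
    print 'Coordinator %s status: %s' % (coordinator_id, r.json()['status'])
else:
    print 'Status request failed with HTTP %s' % r.status_code
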

** DANGER ZONE **

Suspend the coordinator

In [59]:
answer = raw_input('Are you sure you want to suspend the coordinator %s (YES I DO to confirm)?' % coordinator_id)

if answer == 'YES I DO':
    url = '%s:11000/oozie/v1/job/%s?user.name=oozie&action=%s' % (production_centre,
                                                                  coordinator_id,
                                                                  'suspend')
    r = requests.put(url)
    print r.status_code

    if r.status_code == 200:
        print 'Coordinator %s suspended' % coordinator_id
200
Coordinator 0026126-180330140554685-oozie-oozi-C suspended
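
Resume the coordinator

A suspended coordinator can be resumed with the same PUT call and action=resume (a sketch mirroring the suspend cell above, not part of the original notebook):

answer = raw_input('Are you sure you want to resume the coordinator %s (YES I DO to confirm)?' % coordinator_id)

if answer == 'YES I DO':
    r = requests.put('%s:11000/oozie/v1/job/%s?user.name=oozie&action=resume' % (production_centre, coordinator_id))

    if r.status_code == 200:
        print 'Coordinator %s resumed' % coordinator_id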

Kill the coordinator

In [57]:
answer = raw_input('Are you sure you want to kill the coordinator %s (YES I DO to confirm)?' % coordinator_id)

if answer == 'YES I DO':
    r = requests.put('%s:11000/oozie/v1/job/%s?user.name=oozie&action=kill' % (production_centre, coordinator_id))
    if r.status_code == 200:
        print 'Coordinator %s killed' % coordinator_id
Coordinator 0026126-180330140554685-oozie-oozi-C killed