Coordinator - better-satcen-00002 pipe¶
This coordinator processes the better-satcen-00002 data pipeline queue items
- First do the imports of the Python libraries required
In [1]:
import sys
import os
import owslib
from owslib.wps import monitorExecution
from owslib.wps import WebProcessingService
import lxml.etree as etree
import json
import cioppy
from shapely.wkt import loads
import getpass
import folium
from datetime import datetime, timedelta
import dateutil.parser
from nbconvert.preprocessors import ExecutePreprocessor, CellExecutionError
import nbformat as nbf
import requests
- Read the data pipeline configuration information:
In [2]:
%store -r
# Load the shared configuration notebook that sits one directory up and
# execute its third cell (index 2) so that its assignments
# (app_artifact_id, app_version, repository, community, ...) land in
# this namespace.  4 is the nbformat major version to read as.
nb_config = os.path.join('..', 'configuration.ipynb')
nb = nbf.read(nb_config, 4)
# NOTE(review): Python 2 `exec ... in` statement form.  Executing
# notebook source with exec assumes configuration.ipynb is trusted —
# never point this at an untrusted file.
exec(nb['cells'][2]['source']) in globals(), locals()
# Describe the data-transformation application whose queue is processed.
app = {'artifact_id': app_artifact_id,
       'version': app_version,
       'repository': repository,
       'community': community}

# WPS process identifier: community, artifact id (repeated twice, matching
# the deployed identifier layout — see the printed id below) and version,
# with '-' and '.' normalised to '_'.
app_process_id = '_'.join([app['community'].replace('-', '_'),
                           app['artifact_id'].replace('-', '_'),
                           app['artifact_id'].replace('-', '_'),
                           app['version'].replace('.', '_')])
# Describe the trigger application that feeds the data pipeline queue.
trigger_pipe = {'artifact_id': trigger_pipe_artifact_id,
                'version': trigger_pipe_version,
                'repository': repository,
                'folder': folder,
                'community': community}

# Same identifier layout as app_process_id: community, artifact id twice,
# then version, '-' and '.' replaced with '_'.
trigger_pipe_process_id = '_'.join([trigger_pipe['community'].replace('-', '_'),
                                    trigger_pipe['artifact_id'].replace('-', '_'),
                                    trigger_pipe['artifact_id'].replace('-', '_'),
                                    trigger_pipe['version'].replace('.', '_')])

print('This notebook will process the queue of %s with the trigger %s' % (app_process_id,
                                                                          trigger_pipe_process_id))
This notebook will process the queue of ec_better_ewf_satcen_01_02_01_ewf_satcen_01_02_01_0_18 with the trigger ec_better_tg_pipe_tg_pipe_0_3
Queue selection parameters¶
In [3]:
# OpenSearch description document of the data pipeline catalogue series.
series = 'https://catalog.terradue.com/{0}/description'.format(data_pipeline)
Coordinator parameters¶
In [4]:
# Name of the Oozie coordinator to create for this data pipeline.
coordinator_name = 'co_%s_pipe' % data_pipeline
# Activity window of the coordinator (UTC timestamps).
coordinator_date_start = '2019-01-17T09:00Z'
coordinator_date_stop = '2019-03-01T11:01Z'
# Cron-style schedule: fire at minute 0 and minute 30 of every hour.
coordinator_period = '0,30 * * * *'
In [5]:
# Oozie EL expressions, left as literal placeholders here and resolved by
# Oozie at each coordinator materialisation: the processing window spans
# the 30 minutes leading up to the nominal (scheduled) time.
start_pipe = '${coord:formatTime(coord:dateOffset(coord:nominalTime(), -30, \'MINUTE\'), "yyyy-MM-dd\'T\'HH:mm:ss\'Z\'")}'
end_pipe = '${coord:formatTime(coord:nominalTime(), "yyyy-MM-dd\'T\'HH:mm:ss\'Z\'")}'
In [6]:
# Identifier of the coordinator flavour of the trigger process.
co_trigger_pipe_process_id = 'coordinator_{0}'.format(trigger_pipe_process_id)
Common Parameters¶
In [7]:
# Do not request a quotation from the trigger.
tg_quotation = 'No'
# This is a regular run, not a recovery run.
recovery = 'No'
# Terradue username under which the processing is accounted.
_T2Username = data_pipeline
Check data transformation application¶
In [8]:
# Verify that the data-transformation application is deployed on the
# application deployer WPS endpoint before creating the coordinator.
wps_url_apps = '%s/zoo-bin/zoo_loader.cgi' % apps_deployer
wps = WebProcessingService(wps_url_apps, verbose=False, skip_caps=False)

found_process = False
message = "The process %s is not deployed" % app_process_id

# The index from enumerate was unused; iterate the processes directly and
# stop at the first match.
for elem in wps.processes:
    if elem.identifier == app_process_id:
        message = "The process %s is deployed" % app_process_id
        found_process = True
        break

print(message)

if not found_process:
    # Fix: pass the message so the failure is self-describing (the
    # original raised a bare Exception(), unlike the trigger check below).
    raise Exception(message)
The process ec_better_ewf_satcen_01_02_01_ewf_satcen_01_02_01_0_18 is deployed
Check trigger coordinator¶
In [9]:
# Check that the pipe coordinator trigger process is deployed on the
# trigger deployer WPS endpoint.
wps_url_triggers = '%s/zoo-bin/zoo_loader.cgi' % trigger_deployer
wps = WebProcessingService(wps_url_triggers, verbose=False, skip_caps=False)

found_process = any(process.identifier == co_trigger_pipe_process_id
                    for process in wps.processes)

if found_process:
    message = "The pipe coordinator process %s is deployed" % co_trigger_pipe_process_id
else:
    message = "The pipe coordinator process %s is not deployed" % co_trigger_pipe_process_id

print(message)

if not found_process:
    raise Exception(message)
The pipe coordinator process coordinator_ec_better_tg_pipe_tg_pipe_0_3 is deployed
Process the queue¶
In [10]:
# Retrieve the trigger coordinator process description and show its
# title and abstract.
process = wps.describeprocess(co_trigger_pipe_process_id)
for text in (process.title, process.abstract):
    print(text)
Trigger pipe BETTER Coordinator
Coordinator: Trigger pipe for all BETTER data pipelines
In [11]:
# List the identifiers of the inputs the coordinator process expects.
for wps_input in process.dataInputs:
    print(wps_input.identifier)
series
data_pipeline
api_key
process_id
count
cat
update
t2_coordinator_date_start
t2_coordinator_date_stop
t2_coordinator_period
t2_coordinator_name
quotation
_T2Username
In [12]:
# Maximum number of catalogue entries the trigger handles per run.
count = '200'
# Only pick queue entries whose category is 'in' or 'queue'.
cat = '{in,queue}'

# Inputs for the coordinator creation request.
# NOTE(review): 'recovery' and 'tg_quotation' are not among the inputs
# advertised by describeprocess above — presumably ignored server-side;
# confirm against the trigger process definition.
inputs = [('series', series),
          ('data_pipeline', data_pipeline),
          ('api_key', datapipeline_api_key),
          ('process_id', app_process_id),
          ('count', count),
          ('cat', cat),
          # Processing window placeholders resolved by Oozie at run time.
          ('update', '%s/%s' % (start_pipe, end_pipe)),
          # Consistency fix: use the `recovery` variable defined above
          # (same value 'No') instead of repeating the literal.
          ('recovery', recovery),
          ('tg_quotation', tg_quotation),
          ('t2_coordinator_date_start', coordinator_date_start),
          ('t2_coordinator_date_stop', coordinator_date_stop),
          ('t2_coordinator_period', coordinator_period),
          ('t2_coordinator_name', coordinator_name),
          ('quotation', tg_quotation),
          ('_T2Username', data_pipeline)]
Submit the coordinator request¶
In [13]:
# Build, submit and monitor the WPS execution that creates the coordinator.
execution = owslib.wps.WPSExecution(url=wps_url_triggers)

request_xml = execution.buildRequest(co_trigger_pipe_process_id,
                                     inputs,
                                     output=[('coordinatorIds', False)])

response = execution.submitRequest(etree.tostring(request_xml, pretty_print=True))
execution.parseResponse(response)

# Displayed as the cell output in the notebook.
execution.statusLocation

# Block until the asynchronous execution reaches a final state.
monitorExecution(execution)

# isSucceded is owslib's (misspelled) API name.
if not execution.isSucceded():
    raise Exception('Coordinator %s creation failed' % co_trigger_pipe_process_id)
In [14]:
# Extract the Oozie id of the first coordinator reported by the trigger.
outputs = json.loads(execution.processOutputs[0].data[0])
coordinator_id = str(outputs['coordinatorsId'][0]['oozieId'])
In [15]:
# Display the coordinator id (notebook cell output).
coordinator_id
Out[15]:
'0001393-181221095105003-oozie-oozi-C'
** DANGER ZONE **
In [ ]:
# Interactive kill switch: ask for explicit confirmation, then ask the
# Oozie REST API on the production centre to kill the coordinator.
answer = raw_input('Are you sure you want to kill the coordinator %s (YES I DO to confirm)?' % coordinator_id)
if answer == 'YES I DO':
    r = requests.put('%s:11000/oozie/v1/job/%s?user.name=oozie&action=kill' % (production_centre, coordinator_id))
    # Bug fix: every HTTP status code (including 4xx/5xx) is a truthy
    # integer, so the original printed 'killed' unconditionally.  Only
    # report success on HTTP 200; otherwise surface the status code.
    if r.status_code == 200:
        print('Coordinator %s killed' % coordinator_id)
    else:
        print('Failed to kill coordinator %s (HTTP %s)' % (coordinator_id, r.status_code))
Coordinator 0001391-181221095105003-oozie-oozi-C killed