Test the application

This Jupyter Notebook queries the catalog for a pair of Sentinel-1 SLC products, creates a Web Processing Service (WPS) request invoking the data transformation application that was deployed in the Deploy step, monitors the WPS request execution and finally retrieves the data transformation execution results

  • First, import the required Python libraries
In [1]:
import os
import owslib
from owslib.wps import monitorExecution
from owslib.wps import WebProcessingService
import lxml.etree as etree
import cioppy
from datetime import datetime, timedelta
import dateutil.parser
from shapely.wkt import loads


from nbconvert.preprocessors import ExecutePreprocessor, CellExecutionError
import nbformat as nbf

import geopandas as gp


import numpy as np
import ipyleaflet
from shapely.geometry import box
from shapely.geometry import multipolygon
from shapely.wkt import loads
from ipyleaflet import Map, Polygon
  • Read the data pipeline configuration information:
In [2]:
# Restore variables previously persisted with %store (the deployer endpoint,
# data pipeline name/API key and app_* values used below come from here and
# from the configuration notebook executed a few lines down)
%store -r

# Path to the shared data-pipeline configuration notebook
nb_config = os.path.join('../operations', 'configuration.ipynb')

# Read the configuration notebook as an nbformat version-4 document
nb = nbf.read(nb_config, 4)

# Execute the source of the third cell of the configuration notebook so that
# app_artifact_id, app_version, repository and community are defined here.
# NOTE(review): Python 2 exec statement form — "in globals(), locals()"
# selects the target namespaces; confirm before porting to Python 3.
exec(nb['cells'][2]['source']) in globals(), locals()

app = dict([('artifact_id', app_artifact_id),
            ('version', app_version),
            ('repository', repository),
            ('community', community)])

# WPS process identifier: community + artifact_id (repeated twice — this matches
# the deployed process id shown later in the notebook) + version, with '-' and
# '.' mapped to '_'
app_process_id = '%s_%s_%s_%s' % (app['community'].replace('-', '_'), app['artifact_id'].replace('-', '_'), app['artifact_id'].replace('-', '_'), app['version'].replace('.', '_'))
  • Define the search parameters: the catalog series OpenSearch endpoint, the time of interest and the area of interest
In [3]:
# OpenSearch endpoint of the Sentinel-1 catalogue series
series = 'https://catalog.terradue.com/sentinel1/search'

# Product type of interest (Single Look Complex).
# NOTE(review): not referenced again in this notebook — the slave's product
# type is taken from its catalogue entry instead; confirm whether still needed.
s1_prd_type = 'SLC'

# Area of interest as a WKT polygon (lon/lat order)
crop_wkt = 'POLYGON ((10.65344444444444 12.17622222222222, 10.65344444444444 15.48261111111111, 8.398666666666667 15.48261111111111, 8.398666666666667 12.17622222222222, 10.65344444444444 12.17622222222222))'

Search for a master

In [4]:
# Catalogue entry (self link) of the slave Sentinel-1 SLC product
slave_self = 'https://catalog.terradue.com/sentinel1/search?uid=S1B_S1_SLC__1SDV_20180712T051859_20180712T051915_011773_015A8D_A652'
# Area of interest as a shapely geometry
aoi = loads(crop_wkt)
In [5]:
ciop = cioppy.Cioppy()
# get the slave metadata from the catalogue (EOP data model), keeping only the
# fields needed to pair it with a master and to build the WPS request
slave_search = ciop.search(end_point = slave_self,
                           params = [],
                           output_fields='self,productType,track,enclosure,identifier,startdate,wkt',
                           model='EOP')
In [6]:
slave = slave_search[0]

# look for a master up to one repeat cycle (12 days) before the slave
master_search_start_date = (dateutil.parser.parse(slave['startdate']) + timedelta(days=-12)).isoformat()
master_search_stop_date = (dateutil.parser.parse(slave['startdate']) + timedelta(days=-1)).isoformat()

# catalogue search constrained to the slave footprint, track and product type
master_search_params = dict([('geom', slave['wkt']),
                             ('track', slave['track']),
                             ('pt', slave['productType']),
                             ('start', master_search_start_date),
                             ('stop', master_search_stop_date),
                             ('q', 'sensorOperationalMode.text:S*')])

master_search = ciop.search(end_point=series,
                            params=master_search_params,
                            output_fields='identifier,enclosure,self,startdate,wkt',
                            model='EOP')

# hoist loop invariants: the slave footprint and start date were previously
# re-parsed on every iteration
slave_geom = loads(slave['wkt'])
slave_start = dateutil.parser.parse(slave['startdate'])

result = []

# build one record per candidate master with the AOI/slave overlap percentages
for elem in master_search:

    master_wkt = loads(elem['wkt'])

    result.append({'self': elem['self'],
                   'identifier': elem['identifier'],
                   'enclosure': elem['enclosure'],
                   'wkt': master_wkt,
                   'aoi_intersec': (master_wkt.intersection(aoi).area / aoi.area) * 100,
                   'slave_intersec': (master_wkt.intersection(slave_geom).area / slave_geom.area) * 100,
                   'contains': master_wkt.contains(aoi),
                   'days': (slave_start - dateutil.parser.parse(elem['startdate'])).days
                  })

masters = gp.GeoDataFrame(result)

# select the master with the largest AOI overlap (ties broken by the oldest, i.e.
# largest temporal baseline)
master = master_search[masters.sort_values(['aoi_intersec', 'days'],
                                           ascending=[False, False]).iloc[0].name]
In [7]:
# Inspect the candidate masters data frame
masters
Out[7]:
aoi_intersec contains days enclosure identifier self slave_intersec wkt
0 9.87878 False 12 https://store.terradue.com/download/sentinel1/... S1B_S1_SLC__1SDV_20180630T051858_20180630T0519... https://catalog.terradue.com/sentinel1/search?... 99.838596 POLYGON ((9.931101999999999 12.372849, 9.20709...
In [8]:
# Inspect the selected master metadata
master
Out[8]:
{'enclosure': 'https://store.terradue.com/download/sentinel1/files/v1/S1B_S1_SLC__1SDV_20180630T051858_20180630T051915_011598_015523_67BF',
 'identifier': 'S1B_S1_SLC__1SDV_20180630T051858_20180630T051915_011598_015523_67BF',
 'self': 'https://catalog.terradue.com/sentinel1/search?format=atom&uid=S1B_S1_SLC__1SDV_20180630T051858_20180630T051915_011598_015523_67BF',
 'startdate': '2018-06-30T05:18:58.9393970Z',
 'wkt': 'POLYGON((9.931102 12.372849,9.207095 12.519132,9.409536 13.493165,10.136349 13.347219,9.931102 12.372849))'}
In [9]:
# Inspect the slave metadata
slave
Out[9]:
{'enclosure': 'https://store.terradue.com/download/sentinel1/files/v1/S1B_S1_SLC__1SDV_20180712T051859_20180712T051915_011773_015A8D_A652',
 'identifier': 'S1B_S1_SLC__1SDV_20180712T051859_20180712T051915_011773_015A8D_A652',
 'productType': 'SLC',
 'self': 'https://catalog.terradue.com/sentinel1/search?format=atom&uid=S1B_S1_SLC__1SDV_20180712T051859_20180712T051915_011773_015A8D_A652',
 'startdate': '2018-07-12T05:18:59.2852070Z',
 'track': '22',
 'wkt': 'POLYGON((9.932179 12.373003,9.208238 12.519269,9.41065 13.493176,10.137398 13.347246,9.932179 12.373003))'}
In [10]:
# Slave footprint vertices as (lat, lon) pairs, as expected by ipyleaflet
slave_locations = [list(vertex)[::-1] for vertex in loads(slave['wkt']).exterior.coords]
In [11]:
# Master footprint vertices as (lat, lon) pairs, as expected by ipyleaflet
master_locations = [list(vertex)[::-1] for vertex in loads(master['wkt']).exterior.coords]
In [12]:
# AOI vertices as (lat, lon) pairs, as expected by ipyleaflet
aoi_locations = [list(vertex)[::-1] for vertex in loads(crop_wkt).exterior.coords]
In [13]:
# Centre the overview map on the AOI centroid
aoi_centroid = loads(crop_wkt).centroid
map_center_x = aoi_centroid.x
map_center_y = aoi_centroid.y
In [14]:
# Build an interactive map centred on the AOI (ipyleaflet expects (lat, lon))
m = Map(center=(map_center_y,
                map_center_x),
                zoom=7)

# slave footprint in blue/cyan
m += Polygon(locations=slave_locations,
                 color="blue",
                 fill_color="cyan",
                 weight=2,
                 fill_opacity=0.1)

# master footprint in yellow/orange
m += Polygon(locations=master_locations,
                 color="yellow",
                 fill_color="orange",
                 weight=1,
                 fill_opacity=0.1)


# area of interest in green
m += Polygon(locations=aoi_locations,
             color="green",
             fill_color="green",
             weight=1,
             fill_opacity=0.1)
m
In [15]:
# Comma-separated catalogue references of the master and slave products,
# passed as the 's1_products' WPS input below
source = ','.join([master['self'], slave['self']])
In [16]:
# Inspect the composed product reference string
source
Out[16]:
'https://catalog.terradue.com/sentinel1/search?format=atom&uid=S1B_S1_SLC__1SDV_20180630T051858_20180630T051915_011598_015523_67BF,https://catalog.terradue.com/sentinel1/search?format=atom&uid=S1B_S1_SLC__1SDV_20180712T051859_20180712T051915_011773_015A8D_A652'
In [17]:
# Inspect the AOI WKT
crop_wkt
Out[17]:
'POLYGON ((10.65344444444444 12.17622222222222, 10.65344444444444 15.48261111111111, 8.398666666666667 15.48261111111111, 8.398666666666667 12.17622222222222, 10.65344444444444 12.17622222222222))'
In [18]:
# Algorithm selector passed to the WPS process as the 'algorithm' input
algorithm = 'FULL'
  • Connect to the WPS server
In [19]:
# ZOO-Project WPS endpoint on the applications deployer (apps_deployer comes
# from the restored configuration)
wps_url = '%s/zoo-bin/zoo_loader.cgi' % apps_deployer

# skip_caps=True defers the GetCapabilities request to the next cell
wps = WebProcessingService(wps_url,
                           verbose=False,
                           skip_caps=True)
  • Do a GetCapabilities WPS request and list the process:
In [20]:
# Issue the GetCapabilities request to populate wps.processes
wps.getcapabilities()
In [21]:
# Verify the target application is among the processes advertised by the server
app_deployed = any(wps_process.identifier == app_process_id
                   for wps_process in wps.processes)

if not app_deployed:
    raise Exception('Process %s not deployed' % app_process_id)

print('Process %s deployed' % app_process_id)
Process ec_better_ewf_satcen_01_02_01_ewf_satcen_01_02_01_0_18 deployed
  • Select the process and print the title and abstract after having submitted a WPS DescribeProcess request
In [22]:
# Fetch the process description and show its title and abstract
process = wps.describeprocess(app_process_id)

print(process.title)
print(process.abstract)
SATCEN-01-02-01 Sentinel-1 Multi-temporal SLC and Coherence Stack
SATCEN-01-02-01 Sentinel-1 Multi-temporal SLC and Coherence Stack
  • List the WPS process inputs:
In [23]:
# List the identifiers of the inputs expected by the deployed process
for wps_input in process.dataInputs:
    print(wps_input.identifier)
s1_products
crop_wkt
algorithm
_T2Username
  • Create a Python dictionary with the inputs:
In [24]:
# WPS Execute inputs as (identifier, value) pairs.
# NOTE(review): 'quotation' is not among the inputs listed by DescribeProcess
# above — confirm the server accepts or ignores it.
inputs = [('s1_products', source),
          ('crop_wkt', crop_wkt),
          ('algorithm', algorithm),
          ('quotation', 'No'),
          ('_T2Username', data_pipeline)]
  • Submit the Execute WPS request:
In [25]:
# Build and submit the Execute request manually so the raw XML can be reused;
# 'result_osd' is requested as a reference (False = not returned inline)
execution = owslib.wps.WPSExecution(url=wps.url)

execution_request = execution.buildRequest(app_process_id,
                                           inputs,
                                           output=[('result_osd', False)])

execution_response = execution.submitRequest(etree.tostring(execution_request))

execution.parseResponse(execution_response)
  • Monitor the request:
In [26]:
# URL to poll for the asynchronous execution status
execution.statusLocation
Out[26]:
'http://ec-better-apps-deployer.terradue.com/zoo-bin/zoo_loader.cgi?request=Execute&service=WPS&version=1.0.0&Identifier=GetStatus&DataInputs=sid=934a9a62-35dc-11e9-93fc-0242ac11000f&RawDataOutput=Result'
In [ ]:
# Block until the asynchronous WPS execution completes
monitorExecution(execution)
  • Check the outcome of the processing request
In [ ]:
# isSucceded() (sic — owslib's spelling) reports the final WPS status
if not execution.isSucceded():
    raise Exception('Processing failed')
  • Search for the results produced
In [ ]:
# Reference to the OpenSearch Description Document of the execution results
results_osd = execution.processOutputs[0].reference

print(results_osd)
In [ ]:
# Publish the results via the Terradue 'recast' dataPublication WPS process
recast_process_id = 'dataPublication'
recast_wps_url = 'https://recast.terradue.com/t2api/ows'

wps = WebProcessingService(recast_wps_url,
                           verbose=False,
                           skip_caps=False)

# Fix: the API key and username values were swapped — _T2ApiKey takes the
# data pipeline API key and _T2Username the data pipeline name, consistent
# with the Execute request earlier in this notebook
recast_inputs = [('items', results_osd),
                 ('index', data_pipeline),
                 ('_T2ApiKey', datapipeline_api_key),
                 ('_T2Username', data_pipeline)]

recast_execution = wps.execute(recast_process_id,
                               recast_inputs,
                               output=[('result_osd', True)])

# Poll every 60 s until the publication completes
monitorExecution(recast_execution, sleepSecs=60)

# Extract the href of the published results OpenSearch endpoint
etree.fromstring(recast_execution.processOutputs[0].data[0]).xpath('./@href')[0]