Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# NOTE(review): fragment of a PyWPS Process subclass constructor for an "L0"
# processor.  The snippet is truncated mid-call: the super().__init__(...)
# argument list is never closed, so this block is not runnable as-is.
def __init__(self):
# One GeoTIFF input ('crude') feeding the processor.
inputs = [
ComplexInput('crude', 'Crude Data', [Format('image/tiff; subtype=geotiff')])
]
# One GeoTIFF output ('RAW') produced by the processor.
outputs = [
ComplexOutput('RAW', 'RAW output', [Format('image/tiff; subtype=geotiff')])
]
# Register the process metadata with the PyWPS base class.
# (Call is cut off here — remaining kwargs and the closing paren are missing.)
super(L0, self).__init__(
self._handler,
identifier='L0',
version='0.1',
title='L0 Processor',
abstract='L0 Processor, which generates the RAW product',
profile='',
metadata=[Metadata('Level L0'), Metadata('Processor')],
inputs=inputs,
outputs=outputs,
# NOTE(review): fragment — the enclosing handler's definition starts before
# this snippet, so the origin of inGeometry/outLayer/response is not visible.
# Buffer the input geometry by 100 km (units presumably those of the layer's
# CRS — TODO confirm with the caller).
buff = inGeometry.Buffer(float(100000))
# create output feature to the file
outFeature = ogr.Feature(feature_def=outLayer.GetLayerDefn())
# SetGeometryDirectly transfers ownership of 'buff' to the feature.
outFeature.SetGeometryDirectly(buff)
outLayer.CreateFeature(outFeature)
outFeature.Destroy()
# Hand the written file back to PyWPS as a GML-formatted output.
response.outputs['output'].data_format = FORMATS.GML
response.outputs['output'].file = outPath
return response
# Factory return: wrap the 'feature' handler above in a PyWPS Process with
# one GML input and one GML output.
return Process(handler=feature,
identifier='feature',
title='Process Feature',
inputs=[ComplexInput('input', 'Input', supported_formats=[get_format('GML')])],
outputs=[ComplexOutput('output', 'Output', supported_formats=[get_format('GML')])])
def test_complex_input_default_and_supported(self):
    """A ComplexInput with two supported formats serializes both, and the
    first one becomes the default data format."""
    format_pair = [Format('a/b'), Format('c/d')]
    complex_in = ComplexInput(
        'foo', 'Complex foo', default='default', supported_formats=format_pair)
    serialized = complex_in.json
    # Both declared formats survive the round-trip to JSON.
    assert len(serialized["supported_formats"]) == 2
    # The first supported format is used as the input's data format.
    assert serialized["data_format"]["mime_type"] == "a/b"
def create_complex_proces():
    """Build a Process that echoes its complex input to its complex output."""

    def echo_complex(request, response):
        # Pass the first 'complex' input straight through to the output.
        response.outputs['complex'].data = request.inputs['complex'][0].data
        return response

    frmt = Format(mime_type='application/gml', extension=".gml")  # this is unknown mimetype

    echo_input = ComplexInput(
        'complex',
        'Complex input',
        default="DEFAULT COMPLEX DATA",
        supported_formats=[frmt],
    )
    echo_output = ComplexOutput(
        'complex',
        'Complex output',
        supported_formats=[frmt],
    )
    return Process(
        handler=echo_complex,
        identifier='my_complex_process',
        title='Complex process',
        inputs=[echo_input],
        outputs=[echo_output],
    )
# NOTE(review): fragment of SubsetcountryProcess.__init__.  Truncated inside
# the super().__init__(...) call, and 'output'/'metalink' on the outputs line
# are defined outside this snippet — not runnable as-is.
def __init__(self):
inputs = [
# ISO-3166 alpha-3 country code(s); defaults to Germany ('DEU').
LiteralInput('region', 'Region',
data_type='string',
# abstract= countries_longname(),
# need to handle special non-ascii char in countries.
abstract="Country code, see ISO-3166-3:\
https://en.wikipedia.org/wiki/ISO_3166-1_alpha-3#Officially_assigned_code_elements",
min_occurs=1,
max_occurs=len(countries()),
default='DEU',
allowed_values=countries()),
# NetCDF data to subset, either as plain files or tar/zip archives.
ComplexInput('resource', 'Resource',
abstract='NetCDF Files or archive (tar/zip) containing NetCDF files.',
min_occurs=1,
max_occurs=1000,
supported_formats=[
Format('application/x-netcdf'),
Format('application/x-tar'),
Format('application/zip'),
]),
]
# 'output' and 'metalink' are presumably module-level outputs — TODO confirm.
outputs = [output, metalink]
# Register with the PyWPS base class (call cut off after 'title=').
super(SubsetcountryProcess, self).__init__(
self._handler,
identifier="subset_countries",
title="Subset Countries",
# NOTE(review): fragment of another Process constructor.  Truncated inside a
# Metadata(...) call at the end — not runnable as-is.
def __init__(self):
inputs = [
# Optional NetCDF dataset(s), plain or archived (tar/zip); up to 100.
ComplexInput('dataset', 'Dataset',
abstract="URL pointing to a NetCDF File"
" or an archive (tar/zip) containing NetCDF files.",
min_occurs=0,
max_occurs=100,
supported_formats=[
Format('application/x-netcdf'),
Format('application/x-tar'),
Format('application/zip'),
]),
# Alternative to 'dataset': a remote OpenDAP URL given as a string.
LiteralInput('dataset_opendap', 'Remote OpenDAP Data URL',
data_type='string',
abstract="Remote OpenDAP data URL, for example:"
" http://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/ncep.reanalysis2.dailyavgs/surface/mslp.2016.nc", # noqa
metadata=[
Metadata(
# NOTE(review): fragment of GrassBuffer.__init__ (a GRASS v.buffer wrapper).
# Truncated inside the super().__init__(...) call — not runnable as-is.
def __init__(self):
inputs = [ComplexInput('poly_in', 'Input1',
supported_formats=[Format('application/gml+xml')],
mode=MODE.SIMPLE),
# Buffer distance; allowed values mix discrete points (0, 1, 10) with
# ranges given as tuples — presumably (min, spacing, max); TODO confirm
# against the PyWPS AllowedValue convention.
LiteralInput('buffer', 'Buffer', data_type='float',
allowed_values=(0, 1, 10, (10, 10, 100), (100, 100, 1000)))]
outputs = [ComplexOutput('buff_out', 'Buffered',
supported_formats=[
Format('application/gml+xml')
])]
# Register with the PyWPS base class (call cut off after 'abstract=').
super(GrassBuffer, self).__init__(
self._handler,
identifier='grassbuffer',
version='0.1',
title="GRASS v.buffer",
abstract='The process uses the GRASS GIS \
v.buffer module to generate buffers around inputs ',
# NOTE(review): fragment of a climate-index Process constructor.  Truncated
# inside a commented-out LiteralInput block — not runnable as-is.
def __init__(self):
inputs = [
# NetCDF data, plain or archived (tar/zip); up to 1000 occurrences.
ComplexInput('resource', 'Resource',
abstract="NetCDF Files or archive (tar/zip) containing netCDF files.",
min_occurs=1,
max_occurs=1000,
# maxmegabites=5000,
supported_formats=[
Format('application/x-netcdf'),
Format('application/x-tar'),
Format('application/zip'),
]),
# LiteralInput("indices", "Index",
# abstract='Select an index',
# default='TG',
# data_type='string',
# min_occurs=1,
# max_occurs=1, # len(indices()),
# NOTE(review): fragment of NCMergeProcess.__init__.  Truncated immediately
# after opening the super().__init__( call — not runnable as-is.
def __init__(self):
inputs = [
# NetCDF sources to merge; may be OPeNDAP URLs or tar/zip archives.
ComplexInput('resource',
'NetCDF resource',
abstract='NetCDF files, can be OPEnDAP urls.',
max_occurs=1000,
supported_formats=[
Format('application/x-netcdf'),
Format('application/x-tar'),
Format('application/zip')])]
outputs = [
# Single NetCDF result, returned by reference rather than inline.
ComplexOutput('output',
'Merged NetCDF files',
abstract='Temporally merged NetCDF files.',
as_reference=True,
supported_formats=[Format('application/x-netcdf')])]
super(NCMergeProcess, self).__init__(
# NOTE(review): fragment of a regridding Process constructor.  Truncated
# inside the 'dest' input's supported_formats list — not runnable as-is.
def __init__(self):
inputs = [
# Source NetCDF data, plain or archived (tar/zip); up to 1000.
ComplexInput('resource', 'Resource',
abstract='NetCDF Files or archive (tar/zip) containing NetCDF files.',
metadata=[Metadata('Info')],
min_occurs=1,
max_occurs=1000,
supported_formats=[
Format('application/x-netcdf'),
Format('application/x-tar'),
Format('application/zip'),
]),
# Exactly one NetCDF file whose grid is the interpolation target.
ComplexInput('dest', 'Grid destination',
abstract='NetCDF file whose grid defines the interpolation target.',
metadata=[Metadata('Info')],
min_occurs=1,
max_occurs=1,
supported_formats=[