Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# --- Catchment export setup (fragment: the enclosing function's header is
# --- outside this view; outputDir/format/catchmentFilename are its args) ---
catchmentFeatureDBPath = os.path.abspath(catchmentFeatureDBPath)

# Validate the output directory before doing any OGR work.
if not os.path.isdir(outputDir):
    raise IOError(errno.ENOTDIR, "Output directory %s is not a directory" % (outputDir,))
if not os.access(outputDir, os.W_OK):
    raise IOError(errno.EACCES, "Not allowed to write to output directory %s" % (outputDir,))
outputDir = os.path.abspath(outputDir)

if format not in OGR_DRIVERS:
    raise Exception("Output format '%s' is not known" % (format,))
catchmentFilename = "%s%s%s" % (catchmentFilename, os.extsep, OGR_DRIVERS[format])
catchmentFilepath = os.path.join(outputDir, catchmentFilename)

# Open input layer
ogr.UseExceptions()
poDS = ogr.Open(catchmentFeatureDBPath, OGR_UPDATE_MODE)
if not poDS:
    # BUG FIX: the original omitted the '%' operator here, so a failed open
    # raised "TypeError: 'str' object is not callable" instead of this message.
    raise Exception("Unable to open catchment feature database %s" % (catchmentFeatureDBPath,))
assert poDS.GetLayerCount() > 0
poLayer = poDS.GetLayer(0)
assert poLayer

# Create output data source
poDriver = ogr.GetDriverByName(format)
assert poDriver
poODS = poDriver.CreateDataSource(catchmentFilepath)
assert poODS is not None
# NOTE(review): geometry type is hard-coded to wkbMultiPolygon rather than
# poLayer.GetGeomType() — the original's commented-out alternatives suggest
# this was deliberate; confirm against the input data.
poOLayer = poODS.CreateLayer("catchment", poLayer.GetSpatialRef(), ogr.wkbMultiPolygon)
def ogr_export(database_uri, driver, file_ident, table_metadata, cursor, format, valid_geo_ids, inner_path):
"""Export census geometries for *valid_geo_ids* via OGR.

Connects to the PostgreSQL/PostGIS database described by *database_uri*
and prepares the SELECT that fetches geometry, geoid and display name for
each requested id.  Raises if the database connection cannot be opened.

NOTE(review): the visible body ends right after the SQL is mogrified; the
code that executes the query and drives *exporter* lies outside this
excerpt.  Python 2 code — relies on the `urlparse` module.
"""
# Raise Python exceptions instead of returning OGR error codes.
ogr.UseExceptions()
# Pull connection details out of a URI like postgres://user:pass@host/dbname
db_details = urlparse.urlparse(database_uri)
host = db_details.hostname
user = db_details.username
password = db_details.password
database = db_details.path[1:]
in_driver = ogr.GetDriverByName("PostgreSQL")
conn = in_driver.Open("PG: host=%s dbname=%s user=%s password=%s" % (host, database, user, password))
if conn is None:
raise Exception("Could not connect to database to generate download.")
exporter = OGRExporter(inner_path, driver, file_ident, format, table_metadata)
# mogrify() interpolates the geoid tuple as a bound parameter (not string
# concatenation), returning the final SQL text.
sql = cursor.mogrify("""SELECT the_geom,full_geoid,display_name
FROM tiger2012.census_name_lookup
WHERE full_geoid IN %s
ORDER BY full_geoid""", [tuple(valid_geo_ids)])
import re
import sys
import tempfile
import logging
from csv import DictReader
from cStringIO import StringIO
from django.template import Context, Template
from geoserver.support import DimensionInfo
from dateutil.parser import parse
from django import db
from django.conf import settings
from django.utils.module_loading import import_by_path
# Module-level logger, named after this module per logging convention.
logger = logging.getLogger(__name__)
# Make OGR raise Python exceptions instead of returning error codes.
ogr.UseExceptions()
# Default chain of import handlers, applied in order; each entry is a dotted
# path resolved later (see import_by_path in the imports above).
DEFAULT_IMPORT_HANDLERS = ['mapstory.importer.import_handlers.FieldConverterHandler',
'mapstory.importer.import_handlers.GeoserverPublishHandler',
'mapstory.importer.import_handlers.GeoServerTimeHandler',
'mapstory.importer.import_handlers.GeoNodePublishHandler']
# Deployments may override the handler chain via the IMPORT_HANDLERS setting.
IMPORT_HANDLERS = getattr(settings, 'IMPORT_HANDLERS', DEFAULT_IMPORT_HANDLERS)
# Map OGR wkbGeometryType codes to human-readable names (flat 2D types).
GDAL_GEOMETRY_TYPES = {
    0: 'Unknown',
    1: 'Point',
    2: 'LineString',
    3: 'Polygon',
    4: 'MultiPoint',
    5: 'MultiLineString',
    # NOTE(review): the literal was truncated in this view after code 5;
    # closed it with the remaining standard OGR codes so it is well-formed.
    6: 'MultiPolygon',
    7: 'GeometryCollection',
}
# -*- coding: UTF-8 -*-
"""Geodatabase class representing a file geodatabase object."""
import ogr
ogr.UseExceptions()
########################################################################
class Geodatabase(object):
"""File geodatabase object."""
# ----------------------------------------------------------------------
def __init__(self, path):
    """Wrap the file geodatabase at *path*.

    The OGR data source handle (``self.ds``) starts out unset; it is
    opened elsewhere on demand.
    """
    self.path = path
    self.ds = None
# ----------------------------------------------------------------------
def get_items(self):
"""Get list of tables and feature classes inside a file gdb."""
from osgeo import osr
except:
import gdal
import ogr
import osr
__version__ = '0.1.0'
# Supported allocation strategies accepted elsewhere in this module.
_allocation_methods = ['proportional', 'equal', 'good_practices']
# Default verbosity flag; presumably toggled by callers — TODO confirm.
VERBOSE = False
# Switch GDAL/OGR from error-code returns to Python exceptions, and make
# sure every format driver is registered before any datasets are opened.
gdal.UseExceptions()
gdal.AllRegister()
ogr.UseExceptions()
ogr.RegisterAll()
# Timestamped INFO-level console logging for the whole process.
logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',
level=logging.INFO,
datefmt='%H:%M:%S')
logger = logging.getLogger(__name__)
def str2num(string):
    """Coerce *string* to a number: int when it parses as one, else float.

    A string that is neither lets float() raise ValueError to the caller.
    """
    try:
        return int(string)
    except ValueError:
        return float(string)
def import_file(self, *args, **kwargs):
"""
Loads data that has been uploaded into whatever format we need for serving.
Expects kwarg "configuration_options" which is a list of dicts, one for each layer to import.
each dict must contain "upload_layer_id" referencing the UploadLayer being imported
and must contain "index" which is a 0-based index to identify which layer from the file is being referenced.
and can contain an optional "layer_name" to assign a custom name. "layer_name" may be ignored
if it is already in use.
"""
# NOTE(review): the function appears to continue past this excerpt; only
# the setup/validation portion is visible here.
filename = self.file
self.completed_layers = []
# Capture GDAL error output through a custom handler while still raising
# Python exceptions for hard errors.
err = GdalErrorHandler()
gdal.PushErrorHandler(err.handler)
gdal.UseExceptions()
ogr.UseExceptions()
configuration_options = kwargs.get('configuration_options', [{'index': 0}])
# Configuration options should be a list at this point since the
# importer can process multiple layers in a single import
if isinstance(configuration_options, dict):
configuration_options = [configuration_options]
# Ensure that upload_layer_id exists in configuration for each layer
nbad_config = 0
for co in configuration_options:
if 'upload_layer_id' not in co:
nbad_config += 1
# Fail the whole import up front rather than partway through a batch.
if nbad_config > 0:
msg = '{} of {} configs missing upload_layer_id'.format(nbad_config, len(configuration_options))
logger.critical(msg)
raise Exception(msg)