# NOTE(review): the line below is a web-page advertising banner that leaked into
# the source during extraction; kept as a comment so the file remains valid Python.
# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# Parse the sample DDL as Oracle SQL and print the table-level results:
# the BigQuery field definitions, the table metadata, and the field
# definitions again with names forced to lower / upper case.
parser.source_database = DdlParse.DATABASE.oracle
parser.ddl = sample_ddl
table = parser.parse()

print("* BigQuery Fields * : Oracle")
print(table.to_bigquery_fields())

print("* TABLE *")
table_summary = "schema = {} : name = {} : is_temp = {}".format(
    table.schema, table.name, table.is_temp)
print(table_summary)

print("* BigQuery Fields *")
print(table.to_bigquery_fields())

print("* BigQuery Fields - column name to lower case / upper case *")
for name_case in (DdlParse.NAME_CASE.lower, DdlParse.NAME_CASE.upper):
    print(table.to_bigquery_fields(name_case))
print("* COLUMN *")
for col in table.columns.values():
    # Gather every column attribute into one dict and dump it as JSON.
    # The original assigned "constraint", "not_null", "PK" and "unique"
    # twice with identical values; the duplicates were redundant no-ops
    # and have been removed (dict insertion order is unaffected).
    col_info = {
        "name": col.name,
        "data_type": col.data_type,
        "length": col.length,
        "precision(=length)": col.precision,
        "scale": col.scale,
        "is_unsigned": col.is_unsigned,
        "is_zerofill": col.is_zerofill,
        "constraint": col.constraint,
        "not_null": col.not_null,
        "PK": col.primary_key,
        "unique": col.unique,
        "bq_legacy_data_type": col.bigquery_legacy_data_type,
        "bq_standard_data_type": col.bigquery_standard_data_type,
        "comment": col.comment,
        "description(=comment)": col.description,
        # to_bigquery_field() returns a JSON string; parse it so it nests
        # as an object instead of an escaped string in the dump below.
        "bigquery_field": json.loads(col.to_bigquery_field()),
    }
    print(json.dumps(col_info, indent=2, ensure_ascii=False))
# Emit the generated CREATE TABLE DDL — first with names as-is, then with
# dataset/table/column names forced to lower and upper case — and finally
# demonstrate that column lookup by name is case-insensitive.
print("* DDL (CREATE TABLE) statements *")
print(table.to_bigquery_ddl())

print("* DDL (CREATE TABLE) statements - dataset name, table name and column name to lower case / upper case *")
for name_case in (DdlParse.NAME_CASE.lower, DdlParse.NAME_CASE.upper):
    print(table.to_bigquery_ddl(name_case))

print("* Get Column object (case insensitive) *")
print(table.columns["total"])