Skip to content

Commit

Permalink
Merge pull request #252 from xnuinside/release_v1.4.0
Browse files Browse the repository at this point in the history
release 1.4.0 temp + is_global for oracle
  • Loading branch information
xnuinside committed May 14, 2024
2 parents d1c0a63 + 61d72a8 commit 09e5dab
Show file tree
Hide file tree
Showing 11 changed files with 18,240 additions and 18,069 deletions.
11 changes: 11 additions & 0 deletions CHANGELOG.txt
Original file line number Diff line number Diff line change
@@ -1,3 +1,14 @@
**v1.4.0**

### Fixes
#### BigQuery:
1. Indexes without a schema caused issues in the BigQuery dialect - fixed.

### Improvements
#### Oracle:
1. Added new output keywords in table definition - `temp` & `is_global`. Added support for `CREATE GLOBAL TEMPORARY TABLE` statements - https://github.com/xnuinside/simple-ddl-parser/issues/182


**v1.3.0**

### Fixes
Expand Down
11 changes: 11 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -486,6 +486,17 @@ for help with debugging & testing support for BigQuery dialect DDLs:


## Changelog
**v1.4.0**

### Fixes
#### BigQuery:
1. Indexes without a schema caused issues in the BigQuery dialect - fixed.

### Improvements
#### Oracle:
1. Added new output keywords in table definition - `temp` & `is_global`. Added support for `CREATE GLOBAL TEMPORARY TABLE` statements - https://github.com/xnuinside/simple-ddl-parser/issues/182


**v1.3.0**

### Fixes
Expand Down
20 changes: 20 additions & 0 deletions docs/README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -549,6 +549,26 @@ for help with debugging & testing support for BigQuery dialect DDLs:
Changelog
---------

**v1.4.0**

Fixes
^^^^^

BigQuery:
~~~~~~~~~


#. Indexes without a schema caused issues in the BigQuery dialect - fixed.

Improvements
^^^^^^^^^^^^

Oracle:
~~~~~~~


#. Added new output keywords in table definition - ``temp`` & ``is_global``. Added support for ``CREATE GLOBAL TEMPORARY TABLE`` statements - https://github.com/xnuinside/simple-ddl-parser/issues/182

**v1.3.0**

Fixes
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "simple-ddl-parser"
version = "1.3.0"
version = "1.4.0"
description = "Simple DDL Parser to parse SQL & dialects like HQL, TSQL (MSSQL), Oracle, AWS Redshift, Snowflake, MySQL, PostgreSQL, etc ddl files to json/python dict with full information about columns: types, defaults, primary keys, etc.; sequences, alters, custom types & other entities from ddl."
authors = ["Iuliia Volkova <[email protected]>"]
license = "MIT"
Expand Down
14 changes: 11 additions & 3 deletions simple_ddl_parser/dialects/sql.py
Original file line number Diff line number Diff line change
Expand Up @@ -161,6 +161,7 @@ def p_create_table(self, p: List):
| CREATE OR REPLACE TABLE
| CREATE id TABLE IF NOT EXISTS
| CREATE id TABLE
| CREATE id id TABLE
| CREATE OR REPLACE id TABLE IF NOT EXISTS
| CREATE OR REPLACE id TABLE
Expand All @@ -173,14 +174,21 @@ def p_create_table(self, p: List):

if "REPLACE" in p_list:
p[0]["replace"] = True

id_key = p_list[4] if "REPLACE" in p_list else p_list[2]
if "REPLACE" in p_list:
id_key = p_list[4]
elif len(p_list) == 5:
id_key = p_list[3]
else:
id_key = p_list[2]
id_key = id_key.upper()

if id_key in ["EXTERNAL", "TRANSIENT"]:
p[0][id_key.lower()] = True
elif id_key in ["GLOBAL"]:
p[0]["is_global"] = True
elif id_key in ["TEMP", "TEMPORARY"]:
p[0]["temp"] = True
if len(p_list) == 5 and p_list[2].upper() == "GLOBAL":
p[0]["is_global"] = True


class Column:
Expand Down
9 changes: 7 additions & 2 deletions simple_ddl_parser/output/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,16 +38,21 @@ def get_table_from_tables_data(self, schema: str, table_name: str) -> Dict:
return target_table

def clean_up_index_statement(self, statement: Dict) -> None:
del statement[self.schema_key]
try:
del statement[self.schema_key]
except KeyError:
del statement["schema"]
del statement["table_name"]

if self.output_mode != "mssql":
del statement["clustered"]

def add_index_to_table(self, statement: Dict) -> None:
"""populate 'index' key in output data"""

target_table = self.get_table_from_tables_data(
statement[self.schema_key], statement["table_name"]
statement.get(self.schema_key) or statement.get("schema"),
statement["table_name"],
)
self.clean_up_index_statement(statement)
target_table.index.append(statement)
Expand Down
5 changes: 3 additions & 2 deletions simple_ddl_parser/output/dialects.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,7 @@ class MySSQL(Dialect):
@dataclass
@dialect(name="bigquery")
class BigQuery(Dialect):
dataset: Optional[str] = field(default=False)
dataset: Optional[str] = field(default=None)
project: Optional[str] = field(
default=None, metadata={"exclude_if_not_provided": True}
)
Expand Down Expand Up @@ -186,6 +186,7 @@ class PostgreSQL(Dialect):
class Oracle(Dialect):
# https://oracle-base.com/articles/8i/index-organized-tables

is_global: Optional[bool] = field(default=False)
organization_index: Optional[bool] = field(
default=False, metadata={"exclude_if_not_provided": True}
)
Expand Down Expand Up @@ -253,7 +254,7 @@ class CommonDialectsFieldsMixin(Dialect):
"""base fields & mixed between dialects"""

temp: Optional[bool] = field(
default=False, metadata={"output_modes": add_dialects([HQL, Redshift])}
default=False, metadata={"output_modes": add_dialects([HQL, Redshift, Oracle])}
)
tblproperties: Optional[dict] = field(
default_factory=dict,
Expand Down
1 change: 0 additions & 1 deletion simple_ddl_parser/output/table_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,6 @@ def pre_load_mods(cls, main_cls, kwargs):
table_properties = {
k.lower(): v for k, v in kwargs.items() if k.lower() not in table_main_args
}

init_data = {}
init_data.update(table_main_args)
init_data.update(table_properties)
Expand Down
36,134 changes: 18,074 additions & 18,060 deletions simple_ddl_parser/parsetab.py

Large diffs are not rendered by default.

43 changes: 43 additions & 0 deletions tests/dialects/test_bigquery.py
Original file line number Diff line number Diff line change
Expand Up @@ -994,3 +994,46 @@ def test_date_trunc():
]

assert result == expected


def test_index_without_schema():
    """A CREATE INDEX without a schema prefix must still attach to its table
    when parsing with the BigQuery output mode (regression test)."""
    ddl = """CREATE TABLE t1 (
    val INT,
    );
    CREATE INDEX idx1 ON t1(val);"""

    parsed = DDLParser(ddl).run(output_mode="bigquery")

    # Single INT column, no constraints.
    val_column = {
        "check": None,
        "default": None,
        "name": "val",
        "nullable": True,
        "references": None,
        "size": None,
        "type": "INT",
        "unique": False,
    }
    # The schema-less index should land in the table's "index" list.
    idx1 = {
        "columns": ["val"],
        "detailed_columns": [{"name": "val", "nulls": "LAST", "order": "ASC"}],
        "index_name": "idx1",
        "unique": False,
    }
    expected = [
        {
            "alter": {},
            "checks": [],
            "columns": [val_column],
            "dataset": None,
            "index": [idx1],
            "partitioned_by": [],
            "primary_key": [],
            "table_name": "t1",
            "tablespace": None,
        }
    ]
    assert parsed == expected
59 changes: 59 additions & 0 deletions tests/dialects/test_oracle.py
Original file line number Diff line number Diff line change
Expand Up @@ -255,6 +255,8 @@ def test_oracle_output_mode():
"schema": None,
"table_name": "employee",
"tablespace": None,
"temp": False,
"is_global": False,
},
{
"alter": {},
Expand Down Expand Up @@ -311,6 +313,8 @@ def test_oracle_output_mode():
"schema": None,
"table_name": "employee_2",
"tablespace": None,
"temp": False,
"is_global": False,
},
],
"types": [],
Expand Down Expand Up @@ -381,6 +385,8 @@ def test_storage():
"storage": {"initial": "5m", "maxextents": "Unlimited", "next": "5m"},
"table_name": "emp_table",
"tablespace": None,
"temp": False,
"is_global": False,
}
],
"types": [],
Expand Down Expand Up @@ -502,6 +508,8 @@ def test_partition_by():
"schema": None,
"table_name": "order_items",
"tablespace": None,
"temp": False,
"is_global": False,
}
],
"types": [],
Expand Down Expand Up @@ -677,6 +685,8 @@ def test_star_in_columns_siize():
"schema": None,
"table_name": "ACT_RU_VARIABLE",
"tablespace": None,
"temp": False,
"is_global": False,
}
],
"types": [],
Expand Down Expand Up @@ -766,6 +776,8 @@ def test_organization_index():
"schema": None,
"table_name": "meta_criteria_combo",
"tablespace": None,
"temp": False,
"is_global": False,
}
],
"types": [],
Expand Down Expand Up @@ -826,6 +838,8 @@ def test_oracle_sizes():
"schema": None,
"table_name": "test",
"tablespace": None,
"temp": False,
"is_global": False,
}
],
"types": [],
Expand Down Expand Up @@ -862,6 +876,8 @@ def test_oracle_constraintin_column_def():
"schema": None,
"table_name": "event_types",
"tablespace": None,
"temp": False,
"is_global": False,
}
]

Expand Down Expand Up @@ -897,6 +913,8 @@ def test_generated_by_as_null():
"schema": None,
"table_name": "event_types",
"tablespace": None,
"temp": False,
"is_global": False,
}
]
assert result == expected
Expand Down Expand Up @@ -931,6 +949,8 @@ def test_generated_by_default():
"schema": None,
"table_name": "event_types",
"tablespace": None,
"temp": False,
"is_global": False,
}
]

Expand Down Expand Up @@ -966,6 +986,45 @@ def test_generated_always_as_identity():
"schema": None,
"table_name": "event_types",
"tablespace": None,
"temp": False,
"is_global": False,
}
]
assert expected == result


def test_is_global_and_temp():
    """CREATE GLOBAL TEMPORARY TABLE must set both the ``is_global`` and
    ``temp`` flags in the Oracle output mode."""
    ddl = """
    create global temporary table test (id number);
    """

    parsed = DDLParser(ddl).run(output_mode="oracle")

    # Single "number" column with no extra attributes.
    id_column = {
        "check": None,
        "default": None,
        "encrypt": None,
        "name": "id",
        "nullable": True,
        "references": None,
        "size": None,
        "type": "number",
        "unique": False,
    }
    expected = [
        {
            "alter": {},
            "checks": [],
            "columns": [id_column],
            "index": [],
            "is_global": True,
            "partitioned_by": [],
            "primary_key": [],
            "schema": None,
            "table_name": "test",
            "tablespace": None,
            "temp": True,
        }
    ]
    assert parsed == expected

0 comments on commit 09e5dab

Please sign in to comment.