sqlglot.dialects.tsql
1from __future__ import annotations 2 3import datetime 4import re 5import typing as t 6from functools import partial, reduce 7 8from sqlglot import exp, generator, parser, tokens, transforms 9from sqlglot.dialects.dialect import ( 10 Dialect, 11 NormalizationStrategy, 12 any_value_to_max_sql, 13 date_delta_sql, 14 datestrtodate_sql, 15 generatedasidentitycolumnconstraint_sql, 16 max_or_greatest, 17 min_or_least, 18 build_date_delta, 19 rename_func, 20 trim_sql, 21 timestrtotime_sql, 22) 23from sqlglot.helper import seq_get 24from sqlglot.parser import build_coalesce 25from sqlglot.time import format_time 26from sqlglot.tokens import TokenType 27 28if t.TYPE_CHECKING: 29 from sqlglot._typing import E 30 31FULL_FORMAT_TIME_MAPPING = { 32 "weekday": "%A", 33 "dw": "%A", 34 "w": "%A", 35 "month": "%B", 36 "mm": "%B", 37 "m": "%B", 38} 39 40DATE_DELTA_INTERVAL = { 41 "year": "year", 42 "yyyy": "year", 43 "yy": "year", 44 "quarter": "quarter", 45 "qq": "quarter", 46 "q": "quarter", 47 "month": "month", 48 "mm": "month", 49 "m": "month", 50 "week": "week", 51 "ww": "week", 52 "wk": "week", 53 "day": "day", 54 "dd": "day", 55 "d": "day", 56} 57 58 59DATE_FMT_RE = re.compile("([dD]{1,2})|([mM]{1,2})|([yY]{1,4})|([hH]{1,2})|([sS]{1,2})") 60 61# N = Numeric, C=Currency 62TRANSPILE_SAFE_NUMBER_FMT = {"N", "C"} 63 64DEFAULT_START_DATE = datetime.date(1900, 1, 1) 65 66BIT_TYPES = {exp.EQ, exp.NEQ, exp.Is, exp.In, exp.Select, exp.Alias} 67 68# Unsupported options: 69# - OPTIMIZE FOR ( @variable_name { UNKNOWN | = <literal_constant> } [ , ...n ] ) 70# - TABLE HINT 71OPTIONS: parser.OPTIONS_TYPE = { 72 **dict.fromkeys( 73 ( 74 "DISABLE_OPTIMIZED_PLAN_FORCING", 75 "FAST", 76 "IGNORE_NONCLUSTERED_COLUMNSTORE_INDEX", 77 "LABEL", 78 "MAXDOP", 79 "MAXRECURSION", 80 "MAX_GRANT_PERCENT", 81 "MIN_GRANT_PERCENT", 82 "NO_PERFORMANCE_SPOOL", 83 "QUERYTRACEON", 84 "RECOMPILE", 85 ), 86 tuple(), 87 ), 88 "CONCAT": ("UNION",), 89 "DISABLE": ("EXTERNALPUSHDOWN", "SCALEOUTEXECUTION"), 90 "EXPAND": ("VIEWS",), 91 "FORCE": ("EXTERNALPUSHDOWN", "ORDER", "SCALEOUTEXECUTION"), 92 "HASH": ("GROUP", "JOIN", "UNION"), 93 "KEEP": ("PLAN",), 94 "KEEPFIXED": ("PLAN",), 95 "LOOP": ("JOIN",), 96 "MERGE": ("JOIN", "UNION"), 97 "OPTIMIZE": (("FOR", "UNKNOWN"),), 98 "ORDER": ("GROUP",), 99 "PARAMETERIZATION": ("FORCED", "SIMPLE"), 100 "ROBUST": ("PLAN",), 101 "USE": ("PLAN",), 102} 103 104OPTIONS_THAT_REQUIRE_EQUAL = ("MAX_GRANT_PERCENT", "MIN_GRANT_PERCENT", "LABEL") 105 106 107def _build_formatted_time( 108 exp_class: t.Type[E], full_format_mapping: t.Optional[bool] = None 109) -> t.Callable[[t.List], E]: 110 def _builder(args: t.List) -> E: 111 assert len(args) == 2 112 113 return exp_class( 114 this=exp.cast(args[1], exp.DataType.Type.DATETIME), 115 format=exp.Literal.string( 116 format_time( 117 args[0].name.lower(), 118 ( 119 {**TSQL.TIME_MAPPING, **FULL_FORMAT_TIME_MAPPING} 120 if full_format_mapping 121 else TSQL.TIME_MAPPING 122 ), 123 ) 124 ), 125 ) 126 127 return _builder 128 129 130def _build_format(args: t.List) -> exp.NumberToStr | exp.TimeToStr: 131 this = seq_get(args, 0) 132 fmt = seq_get(args, 1) 133 culture = seq_get(args, 2) 134 135 number_fmt = fmt and (fmt.name in TRANSPILE_SAFE_NUMBER_FMT or not DATE_FMT_RE.search(fmt.name)) 136 137 if number_fmt: 138 return exp.NumberToStr(this=this, format=fmt, culture=culture) 139 140 if fmt: 141 fmt = exp.Literal.string( 142 format_time(fmt.name, TSQL.FORMAT_TIME_MAPPING) 143 if len(fmt.name) == 1 144 else format_time(fmt.name, TSQL.TIME_MAPPING) 145 ) 146 147 return exp.TimeToStr(this=this, format=fmt, culture=culture) 148 149 150def _build_eomonth(args: t.List) -> exp.LastDay: 151 date = exp.TsOrDsToDate(this=seq_get(args, 0)) 152 month_lag = seq_get(args, 1) 153 154 if month_lag is None: 155 this: exp.Expression = date 156 else: 157 unit = DATE_DELTA_INTERVAL.get("month") 158 this = exp.DateAdd(this=date, expression=month_lag, unit=unit and exp.var(unit)) 159 160 return exp.LastDay(this=this) 161 162 163def _build_hashbytes(args: t.List) -> exp.Expression: 164 kind, data = args 165 kind = kind.name.upper() if kind.is_string else "" 166 167 if kind == "MD5": 168 args.pop(0) 169 return exp.MD5(this=data) 170 if kind in ("SHA", "SHA1"): 171 args.pop(0) 172 return exp.SHA(this=data) 173 if kind == "SHA2_256": 174 return exp.SHA2(this=data, length=exp.Literal.number(256)) 175 if kind == "SHA2_512": 176 return exp.SHA2(this=data, length=exp.Literal.number(512)) 177 178 return exp.func("HASHBYTES", *args) 179 180 181DATEPART_ONLY_FORMATS = {"DW", "HOUR", "QUARTER"} 182 183 184def _format_sql(self: TSQL.Generator, expression: exp.NumberToStr | exp.TimeToStr) -> str: 185 fmt = expression.args["format"] 186 187 if not isinstance(expression, exp.NumberToStr): 188 if fmt.is_string: 189 mapped_fmt = format_time(fmt.name, TSQL.INVERSE_TIME_MAPPING) 190 191 name = (mapped_fmt or "").upper() 192 if name in DATEPART_ONLY_FORMATS: 193 return self.func("DATEPART", name, expression.this) 194 195 fmt_sql = self.sql(exp.Literal.string(mapped_fmt)) 196 else: 197 fmt_sql = self.format_time(expression) or self.sql(fmt) 198 else: 199 fmt_sql = self.sql(fmt) 200 201 return self.func("FORMAT", expression.this, fmt_sql, expression.args.get("culture")) 202 203 204def _string_agg_sql(self: TSQL.Generator, expression: exp.GroupConcat) -> str: 205 this = expression.this 206 distinct = expression.find(exp.Distinct) 207 if distinct: 208 # exp.Distinct can appear below an exp.Order or an exp.GroupConcat expression 209 self.unsupported("T-SQL STRING_AGG doesn't support DISTINCT.") 210 this = distinct.pop().expressions[0] 211 212 order = "" 213 if isinstance(expression.this, exp.Order): 214 if expression.this.this: 215 this = expression.this.this.pop() 216 # Order has a leading space 217 order = f" WITHIN GROUP ({self.sql(expression.this)[1:]})" 218 219 separator = expression.args.get("separator") or exp.Literal.string(",") 220 return f"STRING_AGG({self.format_args(this, separator)}){order}" 221 222 223def _build_date_delta( 224 exp_class: t.Type[E], unit_mapping: t.Optional[t.Dict[str, str]] = None 225) -> t.Callable[[t.List], E]: 226 def _builder(args: t.List) -> E: 227 unit = seq_get(args, 0) 228 if unit and unit_mapping: 229 unit = exp.var(unit_mapping.get(unit.name.lower(), unit.name)) 230 231 start_date = seq_get(args, 1) 232 if start_date and start_date.is_number: 233 # Numeric types are valid DATETIME values 234 if start_date.is_int: 235 adds = DEFAULT_START_DATE + datetime.timedelta(days=int(start_date.this)) 236 start_date = exp.Literal.string(adds.strftime("%F")) 237 else: 238 # We currently don't handle float values, i.e. they're not converted to equivalent DATETIMEs. 239 # This is not a problem when generating T-SQL code, it is when transpiling to other dialects. 240 return exp_class(this=seq_get(args, 2), expression=start_date, unit=unit) 241 242 return exp_class( 243 this=exp.TimeStrToTime(this=seq_get(args, 2)), 244 expression=exp.TimeStrToTime(this=start_date), 245 unit=unit, 246 ) 247 248 return _builder 249 250 251def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression: 252 """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.""" 253 alias = expression.args.get("alias") 254 255 if ( 256 isinstance(expression, (exp.CTE, exp.Subquery)) 257 and isinstance(alias, exp.TableAlias) 258 and not alias.columns 259 ): 260 from sqlglot.optimizer.qualify_columns import qualify_outputs 261 262 # We keep track of the unaliased column projection indexes instead of the expressions 263 # themselves, because the latter are going to be replaced by new nodes when the aliases 264 # are added and hence we won't be able to reach these newly added Alias parents 265 query = expression.this 266 unaliased_column_indexes = ( 267 i for i, c in enumerate(query.selects) if isinstance(c, exp.Column) and not c.alias 268 ) 269 270 qualify_outputs(query) 271 272 # Preserve the quoting information of columns for newly added Alias nodes 273 query_selects = query.selects 274 for select_index in unaliased_column_indexes: 275 alias = query_selects[select_index] 276 column = alias.this 277 if isinstance(column.this, exp.Identifier): 278 alias.args["alias"].set("quoted", column.this.quoted) 279 280 return expression 281 282 283# https://learn.microsoft.com/en-us/sql/t-sql/functions/datetimefromparts-transact-sql?view=sql-server-ver16#syntax 284def _build_datetimefromparts(args: t.List) -> exp.TimestampFromParts: 285 return exp.TimestampFromParts( 286 year=seq_get(args, 0), 287 month=seq_get(args, 1), 288 day=seq_get(args, 2), 289 hour=seq_get(args, 3), 290 min=seq_get(args, 4), 291 sec=seq_get(args, 5), 292 milli=seq_get(args, 6), 293 ) 294 295 296# https://learn.microsoft.com/en-us/sql/t-sql/functions/timefromparts-transact-sql?view=sql-server-ver16#syntax 297def _build_timefromparts(args: t.List) -> exp.TimeFromParts: 298 return exp.TimeFromParts( 299 hour=seq_get(args, 0), 300 min=seq_get(args, 1), 301 sec=seq_get(args, 2), 302 fractions=seq_get(args, 3), 303 precision=seq_get(args, 4), 304 ) 305 306 307def _build_with_arg_as_text( 308 klass: t.Type[exp.Expression], 309) -> t.Callable[[t.List[exp.Expression]], exp.Expression]: 310 def _parse(args: t.List[exp.Expression]) -> exp.Expression: 311 this = seq_get(args, 0) 312 313 if this and not this.is_string: 314 this = exp.cast(this, exp.DataType.Type.TEXT) 315 316 expression = seq_get(args, 1) 317 kwargs = {"this": this} 318 319 if expression: 320 kwargs["expression"] = expression 321 322 return klass(**kwargs) 323 324 return _parse 325 326 327# https://learn.microsoft.com/en-us/sql/t-sql/functions/parsename-transact-sql?view=sql-server-ver16 328def _build_parsename(args: t.List) -> exp.SplitPart | exp.Anonymous: 329 # PARSENAME(...) will be stored into exp.SplitPart if: 330 # - All args are literals 331 # - The part index (2nd arg) is <= 4 (max valid value, otherwise TSQL returns NULL) 332 if len(args) == 2 and all(isinstance(arg, exp.Literal) for arg in args): 333 this = args[0] 334 part_index = args[1] 335 split_count = len(this.name.split(".")) 336 if split_count <= 4: 337 return exp.SplitPart( 338 this=this, 339 delimiter=exp.Literal.string("."), 340 part_index=exp.Literal.number(split_count + 1 - part_index.to_py()), 341 ) 342 343 return exp.Anonymous(this="PARSENAME", expressions=args) 344 345 346def _build_json_query(args: t.List, dialect: Dialect) -> exp.JSONExtract: 347 if len(args) == 1: 348 # The default value for path is '$'. As a result, if you don't provide a 349 # value for path, JSON_QUERY returns the input expression. 350 args.append(exp.Literal.string("$")) 351 352 return parser.build_extract_json_with_path(exp.JSONExtract)(args, dialect) 353 354 355def _json_extract_sql( 356 self: TSQL.Generator, expression: exp.JSONExtract | exp.JSONExtractScalar 357) -> str: 358 json_query = self.func("JSON_QUERY", expression.this, expression.expression) 359 json_value = self.func("JSON_VALUE", expression.this, expression.expression) 360 return self.func("ISNULL", json_query, json_value) 361 362 363def _timestrtotime_sql(self: TSQL.Generator, expression: exp.TimeStrToTime): 364 sql = timestrtotime_sql(self, expression) 365 if expression.args.get("zone"): 366 # If there is a timezone, produce an expression like: 367 # CAST('2020-01-01 12:13:14-08:00' AS DATETIMEOFFSET) AT TIME ZONE 'UTC' 368 # If you dont have AT TIME ZONE 'UTC', wrapping that expression in another cast back to DATETIME2 just drops the timezone information 369 return self.sql(exp.AtTimeZone(this=sql, zone=exp.Literal.string("UTC"))) 370 return sql 371 372 373class TSQL(Dialect): 374 SUPPORTS_SEMI_ANTI_JOIN = False 375 LOG_BASE_FIRST = False 376 TYPED_DIVISION = True 377 CONCAT_COALESCE = True 378 NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE 379 380 TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'" 381 382 TIME_MAPPING = { 383 "year": "%Y", 384 "dayofyear": "%j", 385 "day": "%d", 386 "dy": "%d", 387 "y": "%Y", 388 "week": "%W", 389 "ww": "%W", 390 "wk": "%W", 391 "hour": "%h", 392 "hh": "%I", 393 "minute": "%M", 394 "mi": "%M", 395 "n": "%M", 396 "second": "%S", 397 "ss": "%S", 398 "s": "%-S", 399 "millisecond": "%f", 400 "ms": "%f", 401 "weekday": "%W", 402 "dw": "%W", 403 "month": "%m", 404 "mm": "%M", 405 "m": "%-M", 406 "Y": "%Y", 407 "YYYY": "%Y", 408 "YY": "%y", 409 "MMMM": "%B", 410 "MMM": "%b", 411 "MM": "%m", 412 "M": "%-m", 413 "dddd": "%A", 414 "dd": "%d", 415 "d": "%-d", 416 "HH": "%H", 417 "H": "%-H", 418 "h": "%-I", 419 "ffffff": "%f", 420 "yyyy": "%Y", 421 "yy": "%y", 422 } 423 424 CONVERT_FORMAT_MAPPING = { 425 "0": "%b %d %Y %-I:%M%p", 426 "1": "%m/%d/%y", 427 "2": "%y.%m.%d", 428 "3": "%d/%m/%y", 429 "4": "%d.%m.%y", 430 "5": "%d-%m-%y", 431 "6": "%d %b %y", 432 "7": "%b %d, %y", 433 "8": "%H:%M:%S", 434 "9": "%b %d %Y %-I:%M:%S:%f%p", 435 "10": "mm-dd-yy", 436 "11": "yy/mm/dd", 437 "12": "yymmdd", 438 "13": "%d %b %Y %H:%M:ss:%f", 439 "14": "%H:%M:%S:%f", 440 "20": "%Y-%m-%d %H:%M:%S", 441 "21": "%Y-%m-%d %H:%M:%S.%f", 442 "22": "%m/%d/%y %-I:%M:%S %p", 443 "23": "%Y-%m-%d", 444 "24": "%H:%M:%S", 445 "25": "%Y-%m-%d %H:%M:%S.%f", 446 "100": "%b %d %Y %-I:%M%p", 447 "101": "%m/%d/%Y", 448 "102": "%Y.%m.%d", 449 "103": "%d/%m/%Y", 450 "104": "%d.%m.%Y", 451 "105": "%d-%m-%Y", 452 "106": "%d %b %Y", 453 "107": "%b %d, %Y", 454 "108": "%H:%M:%S", 455 "109": "%b %d %Y %-I:%M:%S:%f%p", 456 "110": "%m-%d-%Y", 457 "111": "%Y/%m/%d", 458 "112": "%Y%m%d", 459 "113": "%d %b %Y %H:%M:%S:%f", 460 "114": "%H:%M:%S:%f", 461 "120": "%Y-%m-%d %H:%M:%S", 462 "121": "%Y-%m-%d %H:%M:%S.%f", 463 } 464 465 FORMAT_TIME_MAPPING = { 466 "y": "%B %Y", 467 "d": "%m/%d/%Y", 468 "H": "%-H", 469 "h": "%-I", 470 "s": "%Y-%m-%d %H:%M:%S", 471 "D": "%A,%B,%Y", 472 "f": "%A,%B,%Y %-I:%M %p", 473 "F": "%A,%B,%Y %-I:%M:%S %p", 474 "g": "%m/%d/%Y %-I:%M %p", 475 "G": "%m/%d/%Y %-I:%M:%S %p", 476 "M": "%B %-d", 477 "m": "%B %-d", 478 "O": "%Y-%m-%dT%H:%M:%S", 479 "u": "%Y-%M-%D %H:%M:%S%z", 480 "U": "%A, %B %D, %Y %H:%M:%S%z", 481 "T": "%-I:%M:%S %p", 482 "t": "%-I:%M", 483 "Y": "%a %Y", 484 } 485 486 class Tokenizer(tokens.Tokenizer): 487 IDENTIFIERS = [("[", "]"), '"'] 488 QUOTES = ["'", '"'] 489 HEX_STRINGS = [("0x", ""), ("0X", "")] 490 VAR_SINGLE_TOKENS = {"@", "$", "#"} 491 492 KEYWORDS = { 493 **tokens.Tokenizer.KEYWORDS, 494 "CLUSTERED INDEX": TokenType.INDEX, 495 "DATETIME2": TokenType.DATETIME, 496 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 497 "DECLARE": TokenType.DECLARE, 498 "EXEC": TokenType.COMMAND, 499 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 500 "IMAGE": TokenType.IMAGE, 501 "MONEY": TokenType.MONEY, 502 "NONCLUSTERED INDEX": TokenType.INDEX, 503 "NTEXT": TokenType.TEXT, 504 "OPTION": TokenType.OPTION, 505 "OUTPUT": TokenType.RETURNING, 506 "PRINT": TokenType.COMMAND, 507 "PROC": TokenType.PROCEDURE, 508 "REAL": TokenType.FLOAT, 509 "ROWVERSION": TokenType.ROWVERSION, 510 "SMALLDATETIME": TokenType.DATETIME, 511 "SMALLMONEY": TokenType.SMALLMONEY, 512 "SQL_VARIANT": TokenType.VARIANT, 513 "SYSTEM_USER": TokenType.CURRENT_USER, 514 "TOP": TokenType.TOP, 515 "TIMESTAMP": TokenType.ROWVERSION, 516 "TINYINT": TokenType.UTINYINT, 517 "UNIQUEIDENTIFIER": TokenType.UNIQUEIDENTIFIER, 518 "UPDATE STATISTICS": TokenType.COMMAND, 519 "XML": TokenType.XML, 520 } 521 KEYWORDS.pop("/*+") 522 523 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END} 524 525 class Parser(parser.Parser): 526 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 527 LOG_DEFAULTS_TO_LN = True 528 ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False 529 STRING_ALIASES = True 530 NO_PAREN_IF_COMMANDS = False 531 532 QUERY_MODIFIER_PARSERS = { 533 **parser.Parser.QUERY_MODIFIER_PARSERS, 534 TokenType.OPTION: lambda self: ("options", self._parse_options()), 535 } 536 537 FUNCTIONS = { 538 **parser.Parser.FUNCTIONS, 539 "CHARINDEX": lambda args: exp.StrPosition( 540 this=seq_get(args, 1), 541 substr=seq_get(args, 0), 542 position=seq_get(args, 2), 543 ), 544 "COUNT": lambda args: exp.Count( 545 this=seq_get(args, 0), expressions=args[1:], big_int=False 546 ), 547 "COUNT_BIG": lambda args: exp.Count( 548 this=seq_get(args, 0), expressions=args[1:], big_int=True 549 ), 550 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 551 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 552 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 553 "DATEPART": _build_formatted_time(exp.TimeToStr), 554 "DATETIMEFROMPARTS": _build_datetimefromparts, 555 "EOMONTH": _build_eomonth, 556 "FORMAT": _build_format, 557 "GETDATE": exp.CurrentTimestamp.from_arg_list, 558 "HASHBYTES": _build_hashbytes, 559 "ISNULL": build_coalesce, 560 "JSON_QUERY": _build_json_query, 561 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 562 "LEN": _build_with_arg_as_text(exp.Length), 563 "LEFT": _build_with_arg_as_text(exp.Left), 564 "RIGHT": _build_with_arg_as_text(exp.Right), 565 "PARSENAME": _build_parsename, 566 "REPLICATE": exp.Repeat.from_arg_list, 567 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 568 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 569 "SUSER_NAME": exp.CurrentUser.from_arg_list, 570 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 571 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 572 "TIMEFROMPARTS": _build_timefromparts, 573 } 574 575 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 576 577 PROCEDURE_OPTIONS = dict.fromkeys( 578 ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple() 579 ) 580 581 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 582 TokenType.TABLE, 583 *parser.Parser.TYPE_TOKENS, 584 } 585 586 STATEMENT_PARSERS = { 587 **parser.Parser.STATEMENT_PARSERS, 588 TokenType.DECLARE: lambda self: self._parse_declare(), 589 } 590 591 RANGE_PARSERS = { 592 **parser.Parser.RANGE_PARSERS, 593 TokenType.DCOLON: lambda self, this: self.expression( 594 exp.ScopeResolution, 595 this=this, 596 expression=self._parse_function() or self._parse_var(any_token=True), 597 ), 598 } 599 600 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 601 COLUMN_OPERATORS = { 602 **parser.Parser.COLUMN_OPERATORS, 603 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 604 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 605 else self.expression(exp.ScopeResolution, this=this, expression=to), 606 } 607 608 def _parse_dcolon(self) -> t.Optional[exp.Expression]: 609 # We want to use _parse_types() if the first token after :: is a known type, 610 # otherwise we could parse something like x::varchar(max) into a function 611 if self._match_set(self.TYPE_TOKENS, advance=False): 612 return self._parse_types() 613 614 return self._parse_function() or self._parse_types() 615 616 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 617 if not self._match(TokenType.OPTION): 618 return None 619 620 def _parse_option() -> t.Optional[exp.Expression]: 621 option = self._parse_var_from_options(OPTIONS) 622 if not option: 623 return None 624 625 self._match(TokenType.EQ) 626 return self.expression( 627 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 628 ) 629 630 return self._parse_wrapped_csv(_parse_option) 631 632 def _parse_projections(self) -> t.List[exp.Expression]: 633 """ 634 T-SQL supports the syntax alias = expression in the SELECT's projection list, 635 so we transform all parsed Selects to convert their EQ projections into Aliases. 636 637 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 638 """ 639 return [ 640 ( 641 exp.alias_(projection.expression, projection.this.this, copy=False) 642 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 643 else projection 644 ) 645 for projection in super()._parse_projections() 646 ] 647 648 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 649 """Applies to SQL Server and Azure SQL Database 650 COMMIT [ { TRAN | TRANSACTION } 651 [ transaction_name | @tran_name_variable ] ] 652 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 653 654 ROLLBACK { TRAN | TRANSACTION } 655 [ transaction_name | @tran_name_variable 656 | savepoint_name | @savepoint_variable ] 657 """ 658 rollback = self._prev.token_type == TokenType.ROLLBACK 659 660 self._match_texts(("TRAN", "TRANSACTION")) 661 this = self._parse_id_var() 662 663 if rollback: 664 return self.expression(exp.Rollback, this=this) 665 666 durability = None 667 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 668 self._match_text_seq("DELAYED_DURABILITY") 669 self._match(TokenType.EQ) 670 671 if self._match_text_seq("OFF"): 672 durability = False 673 else: 674 self._match(TokenType.ON) 675 durability = True 676 677 self._match_r_paren() 678 679 return self.expression(exp.Commit, this=this, durability=durability) 680 681 def _parse_transaction(self) -> exp.Transaction | exp.Command: 682 """Applies to SQL Server and Azure SQL Database 683 BEGIN { TRAN | TRANSACTION } 684 [ { transaction_name | @tran_name_variable } 685 [ WITH MARK [ 'description' ] ] 686 ] 687 """ 688 if self._match_texts(("TRAN", "TRANSACTION")): 689 transaction = self.expression(exp.Transaction, this=self._parse_id_var()) 690 if self._match_text_seq("WITH", "MARK"): 691 transaction.set("mark", self._parse_string()) 692 693 return transaction 694 695 return self._parse_as_command(self._prev) 696 697 def _parse_returns(self) -> exp.ReturnsProperty: 698 table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS) 699 returns = super()._parse_returns() 700 returns.set("table", table) 701 return returns 702 703 def _parse_convert( 704 self, strict: bool, safe: t.Optional[bool] = None 705 ) -> t.Optional[exp.Expression]: 706 this = self._parse_types() 707 self._match(TokenType.COMMA) 708 args = [this, *self._parse_csv(self._parse_assignment)] 709 convert = exp.Convert.from_arg_list(args) 710 convert.set("safe", safe) 711 convert.set("strict", strict) 712 return convert 713 714 def _parse_user_defined_function( 715 self, kind: t.Optional[TokenType] = None 716 ) -> t.Optional[exp.Expression]: 717 this = super()._parse_user_defined_function(kind=kind) 718 719 if ( 720 kind == TokenType.FUNCTION 721 or isinstance(this, exp.UserDefinedFunction) 722 or self._match(TokenType.ALIAS, advance=False) 723 ): 724 return this 725 726 if not self._match(TokenType.WITH, advance=False): 727 expressions = self._parse_csv(self._parse_function_parameter) 728 else: 729 expressions = None 730 731 return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions) 732 733 def _parse_id_var( 734 self, 735 any_token: bool = True, 736 tokens: t.Optional[t.Collection[TokenType]] = None, 737 ) -> t.Optional[exp.Expression]: 738 is_temporary = self._match(TokenType.HASH) 739 is_global = is_temporary and self._match(TokenType.HASH) 740 741 this = super()._parse_id_var(any_token=any_token, tokens=tokens) 742 if this: 743 if is_global: 744 this.set("global", True) 745 elif is_temporary: 746 this.set("temporary", True) 747 748 return this 749 750 def _parse_create(self) -> exp.Create | exp.Command: 751 create = super()._parse_create() 752 753 if isinstance(create, exp.Create): 754 table = create.this.this if isinstance(create.this, exp.Schema) else create.this 755 if isinstance(table, exp.Table) and table.this.args.get("temporary"): 756 if not create.args.get("properties"): 757 create.set("properties", exp.Properties(expressions=[])) 758 759 create.args["properties"].append("expressions", exp.TemporaryProperty()) 760 761 return create 762 763 def _parse_if(self) -> t.Optional[exp.Expression]: 764 index = self._index 765 766 if self._match_text_seq("OBJECT_ID"): 767 self._parse_wrapped_csv(self._parse_string) 768 if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP): 769 return self._parse_drop(exists=True) 770 self._retreat(index) 771 772 return super()._parse_if() 773 774 def _parse_unique(self) -> exp.UniqueColumnConstraint: 775 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 776 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 777 else: 778 this = self._parse_schema(self._parse_id_var(any_token=False)) 779 780 return self.expression(exp.UniqueColumnConstraint, this=this) 781 782 def _parse_partition(self) -> t.Optional[exp.Partition]: 783 if not self._match_text_seq("WITH", "(", "PARTITIONS"): 784 return None 785 786 def parse_range(): 787 low = self._parse_bitwise() 788 high = self._parse_bitwise() if self._match_text_seq("TO") else None 789 790 return ( 791 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 792 ) 793 794 partition = self.expression( 795 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 796 ) 797 798 self._match_r_paren() 799 800 return partition 801 802 def _parse_declare(self) -> exp.Declare | exp.Command: 803 index = self._index 804 expressions = self._try_parse(partial(self._parse_csv, self._parse_declareitem)) 805 806 if not expressions or self._curr: 807 self._retreat(index) 808 return self._parse_as_command(self._prev) 809 810 return self.expression(exp.Declare, expressions=expressions) 811 812 def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]: 813 var = self._parse_id_var() 814 if not var: 815 return None 816 817 value = None 818 self._match(TokenType.ALIAS) 819 if self._match(TokenType.TABLE): 820 data_type = self._parse_schema() 821 else: 822 data_type = self._parse_types() 823 if self._match(TokenType.EQ): 824 value = self._parse_bitwise() 825 826 return self.expression(exp.DeclareItem, this=var, kind=data_type, default=value) 827 828 class Generator(generator.Generator): 829 LIMIT_IS_TOP = True 830 QUERY_HINTS = False 831 RETURNING_END = False 832 NVL2_SUPPORTED = False 833 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 834 LIMIT_FETCH = "FETCH" 835 COMPUTED_COLUMN_WITH_TYPE = False 836 CTE_RECURSIVE_KEYWORD_REQUIRED = False 837 ENSURE_BOOLS = True 838 NULL_ORDERING_SUPPORTED = None 839 SUPPORTS_SINGLE_ARG_CONCAT = False 840 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 841 SUPPORTS_SELECT_INTO = True 842 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 843 SUPPORTS_TO_NUMBER = False 844 SET_OP_MODIFIERS = False 845 COPY_PARAMS_EQ_REQUIRED = True 846 PARSE_JSON_NAME = None 847 EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False 848 849 EXPRESSIONS_WITHOUT_NESTED_CTES = { 850 exp.Create, 851 exp.Delete, 852 exp.Insert, 853 exp.Intersect, 854 exp.Except, 855 exp.Merge, 856 exp.Select, 857 exp.Subquery, 858 exp.Union, 859 exp.Update, 860 } 861 862 SUPPORTED_JSON_PATH_PARTS = { 863 exp.JSONPathKey, 864 exp.JSONPathRoot, 865 exp.JSONPathSubscript, 866 } 867 868 TYPE_MAPPING = { 869 **generator.Generator.TYPE_MAPPING, 870 exp.DataType.Type.BOOLEAN: "BIT", 871 exp.DataType.Type.DECIMAL: "NUMERIC", 872 exp.DataType.Type.DATETIME: "DATETIME2", 873 exp.DataType.Type.DOUBLE: "FLOAT", 874 exp.DataType.Type.INT: "INTEGER", 875 exp.DataType.Type.ROWVERSION: "ROWVERSION", 876 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 877 exp.DataType.Type.TIMESTAMP: "DATETIME2", 878 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 879 exp.DataType.Type.UTINYINT: "TINYINT", 880 exp.DataType.Type.VARIANT: "SQL_VARIANT", 881 } 882 883 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 884 TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR) 885 886 TRANSFORMS = { 887 **generator.Generator.TRANSFORMS, 888 exp.AnyValue: any_value_to_max_sql, 889 exp.ArrayToString: rename_func("STRING_AGG"), 890 exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", 891 exp.DateAdd: date_delta_sql("DATEADD"), 892 exp.DateDiff: date_delta_sql("DATEDIFF"), 893 exp.CTE: transforms.preprocess([qualify_derived_table_outputs]), 894 exp.CurrentDate: rename_func("GETDATE"), 895 exp.CurrentTimestamp: rename_func("GETDATE"), 896 exp.DateStrToDate: datestrtodate_sql, 897 exp.Extract: rename_func("DATEPART"), 898 exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql, 899 exp.GroupConcat: _string_agg_sql, 900 exp.If: rename_func("IIF"), 901 exp.JSONExtract: _json_extract_sql, 902 exp.JSONExtractScalar: _json_extract_sql, 903 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 904 exp.Max: max_or_greatest, 905 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), 906 exp.Min: min_or_least, 907 exp.NumberToStr: _format_sql, 908 exp.Repeat: rename_func("REPLICATE"), 909 exp.Select: transforms.preprocess( 910 [ 911 transforms.eliminate_distinct_on, 912 transforms.eliminate_semi_and_anti_joins, 913 transforms.eliminate_qualify, 914 transforms.unnest_generate_date_array_using_recursive_cte, 915 ] 916 ), 917 exp.Stddev: rename_func("STDEV"), 918 exp.StrPosition: lambda self, e: self.func( 919 "CHARINDEX", e.args.get("substr"), e.this, e.args.get("position") 920 ), 921 exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]), 922 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), 923 exp.SHA2: lambda self, e: self.func( 924 "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this 925 ), 926 exp.TemporaryProperty: lambda self, e: "", 927 exp.TimeStrToTime: _timestrtotime_sql, 928 exp.TimeToStr: _format_sql, 929 exp.Trim: trim_sql, 930 exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True), 931 exp.TsOrDsDiff: date_delta_sql("DATEDIFF"), 932 } 933 934 TRANSFORMS.pop(exp.ReturnsProperty) 935 936 PROPERTIES_LOCATION = { 937 **generator.Generator.PROPERTIES_LOCATION, 938 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, 939 } 940 941 def scope_resolution(self, rhs: str, scope_name: str) -> str: 942 return f"{scope_name}::{rhs}" 943 944 def select_sql(self, expression: exp.Select) -> str: 945 if expression.args.get("offset"): 946 if not expression.args.get("order"): 947 # ORDER BY is required in order to use OFFSET in a query, so we use 948 # a noop order by, since we don't really care about the order. 949 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 950 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 951 952 limit = expression.args.get("limit") 953 if isinstance(limit, exp.Limit): 954 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 955 # we replace here because otherwise TOP would be generated in select_sql 956 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 957 958 return super().select_sql(expression) 959 960 def convert_sql(self, expression: exp.Convert) -> str: 961 name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT" 962 return self.func( 963 name, expression.this, expression.expression, expression.args.get("style") 964 ) 965 966 def queryoption_sql(self, expression: exp.QueryOption) -> str: 967 option = self.sql(expression, "this") 968 value = self.sql(expression, "expression") 969 if value: 970 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 971 return f"{option} {optional_equal_sign}{value}" 972 return option 973 974 def lateral_op(self, expression: exp.Lateral) -> str: 975 cross_apply = expression.args.get("cross_apply") 976 if cross_apply is True: 977 return "CROSS APPLY" 978 if cross_apply is False: 979 return "OUTER APPLY" 980 981 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 982 self.unsupported("LATERAL clause is not supported.") 983 return "LATERAL" 984 985 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 986 this = expression.this 987 split_count = len(this.name.split(".")) 988 delimiter = expression.args.get("delimiter") 989 part_index = expression.args.get("part_index") 990 991 if ( 992 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 993 or (delimiter and delimiter.name != ".") 994 or not part_index 995 or split_count > 4 996 ): 997 self.unsupported( 998 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 999 ) 1000 return "" 1001 1002 return self.func( 1003 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1004 ) 1005 1006 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1007 nano = expression.args.get("nano") 1008 if nano is not None: 1009 nano.pop() 1010 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1011 1012 if expression.args.get("fractions") is None: 1013 expression.set("fractions", exp.Literal.number(0)) 1014 if expression.args.get("precision") is None: 1015 expression.set("precision", exp.Literal.number(0)) 1016 1017 return rename_func("TIMEFROMPARTS")(self, expression) 1018 1019 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1020 zone = expression.args.get("zone") 1021 if zone is not None: 1022 zone.pop() 1023 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1024 1025 nano = expression.args.get("nano") 1026 if nano is not None: 1027 nano.pop() 1028 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 1029 1030 if expression.args.get("milli") is None: 1031 expression.set("milli", exp.Literal.number(0)) 1032 1033 return rename_func("DATETIMEFROMPARTS")(self, expression) 1034 1035 def setitem_sql(self, expression: exp.SetItem) -> str: 1036 this = expression.this 1037 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1038 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 1039 return f"{self.sql(this.left)} {self.sql(this.right)}" 1040 1041 return super().setitem_sql(expression) 1042 1043 def boolean_sql(self, expression: exp.Boolean) -> str: 1044 if type(expression.parent) in BIT_TYPES or isinstance( 1045 expression.find_ancestor(exp.Values, exp.Select), exp.Values 1046 ): 1047 return "1" if expression.this else "0" 1048 1049 return "(1 = 1)" if expression.this else "(1 = 0)" 1050 1051 def is_sql(self, expression: exp.Is) -> str: 1052 if isinstance(expression.expression, exp.Boolean): 1053 return self.binary(expression, "=") 1054 return self.binary(expression, "IS") 1055 1056 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 1057 sql = self.sql(expression, "this") 1058 properties = expression.args.get("properties") 1059 1060 if sql[:1] != "#" and any( 1061 isinstance(prop, exp.TemporaryProperty) 1062 for prop in (properties.expressions if properties else []) 1063 ): 1064 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 1065 1066 return sql 1067 1068 def create_sql(self, expression: exp.Create) -> str: 1069 kind = expression.kind 1070 exists = expression.args.pop("exists", None) 1071 1072 like_property = expression.find(exp.LikeProperty) 1073 if like_property: 1074 ctas_expression = like_property.this 1075 else: 1076 ctas_expression = expression.expression 1077 1078 if kind == "VIEW": 1079 expression.this.set("catalog", None) 1080 with_ = expression.args.get("with") 1081 if ctas_expression and with_: 1082 # We've already preprocessed the Create expression to bubble up any nested CTEs, 1083 # but CREATE VIEW actually requires the WITH clause to come after it so we need 1084 # to amend the AST by moving the CTEs to the CREATE VIEW statement's query. 1085 ctas_expression.set("with", with_.pop()) 1086 1087 sql = super().create_sql(expression) 1088 1089 table = expression.find(exp.Table) 1090 1091 # Convert CTAS statement to SELECT .. INTO .. 1092 if kind == "TABLE" and ctas_expression: 1093 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 1094 ctas_expression = ctas_expression.subquery() 1095 1096 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 1097 select_into.set("into", exp.Into(this=table)) 1098 1099 if like_property: 1100 select_into.limit(0, copy=False) 1101 1102 sql = self.sql(select_into) 1103 1104 if exists: 1105 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 1106 sql_with_ctes = self.prepend_ctes(expression, sql) 1107 sql_literal = self.sql(exp.Literal.string(sql_with_ctes)) 1108 if kind == "SCHEMA": 1109 return f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql_literal})""" 1110 elif kind == "TABLE": 1111 assert table 1112 where = exp.and_( 1113 exp.column("table_name").eq(table.name), 1114 exp.column("table_schema").eq(table.db) if table.db else None, 1115 exp.column("table_catalog").eq(table.catalog) if table.catalog else None, 1116 ) 1117 return f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql_literal})""" 1118 elif kind == "INDEX": 1119 index = self.sql(exp.Literal.string(expression.this.text("this"))) 1120 return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})""" 1121 elif expression.args.get("replace"): 1122 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 1123 1124 return self.prepend_ctes(expression, sql) 1125 1126 def count_sql(self, expression: exp.Count) -> str: 1127 func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT" 1128 return rename_func(func_name)(self, expression) 1129 1130 def offset_sql(self, expression: exp.Offset) -> str: 1131 return f"{super().offset_sql(expression)} ROWS" 1132 1133 def version_sql(self, expression: exp.Version) -> str: 1134 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1135 this = f"FOR {name}" 1136 expr = expression.expression 1137 kind = expression.text("kind") 1138 if kind in ("FROM", "BETWEEN"): 1139 args = expr.expressions 1140 sep = "TO" if kind == "FROM" else "AND" 1141 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1142 else: 1143 expr_sql = self.sql(expr) 1144 1145 expr_sql = f" {expr_sql}" if expr_sql else "" 1146 return f"{this} {kind}{expr_sql}" 1147 1148 def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str: 1149 table = expression.args.get("table") 1150 table = f"{table} " if table else "" 1151 return f"RETURNS {table}{self.sql(expression, 'this')}" 1152 1153 def returning_sql(self, expression: exp.Returning) -> str: 1154 into = self.sql(expression, "into") 1155 into = self.seg(f"INTO {into}") if into else "" 1156 return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}" 1157 1158 def transaction_sql(self, expression: exp.Transaction) -> str: 1159 this = self.sql(expression, "this") 1160 this = f" {this}" if this else "" 1161 mark = self.sql(expression, "mark") 1162 mark = f" WITH MARK {mark}" if mark else "" 1163 return f"BEGIN TRANSACTION{this}{mark}" 1164 1165 def commit_sql(self, expression: exp.Commit) -> str: 1166 this = self.sql(expression, "this") 1167 this = f" {this}" if this else "" 1168 durability = expression.args.get("durability") 1169 durability = ( 1170 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1171 if durability is not None 1172 else "" 1173 ) 1174 return f"COMMIT TRANSACTION{this}{durability}" 1175 1176 def rollback_sql(self, expression: exp.Rollback) -> str: 1177 this = self.sql(expression, "this") 1178 this = f" {this}" if this else "" 1179 return f"ROLLBACK TRANSACTION{this}" 1180 1181 def identifier_sql(self, expression: exp.Identifier) -> str: 1182 identifier = super().identifier_sql(expression) 1183 1184 if expression.args.get("global"): 1185 identifier = f"##{identifier}" 1186 elif expression.args.get("temporary"): 1187 identifier = f"#{identifier}" 1188 1189 return identifier 1190 1191 def constraint_sql(self, expression: exp.Constraint) -> str: 1192 this = self.sql(expression, "this") 1193 expressions = self.expressions(expression, flat=True, sep=" ") 1194 return f"CONSTRAINT {this} {expressions}" 1195 1196 def length_sql(self, expression: exp.Length) -> str: 1197 return self._uncast_text(expression, "LEN") 1198 1199 def right_sql(self, expression: exp.Right) -> str: 1200 return self._uncast_text(expression, "RIGHT") 1201 1202 def left_sql(self, expression: exp.Left) -> str: 1203 return self._uncast_text(expression, "LEFT") 1204 1205 def _uncast_text(self, expression: exp.Expression, name: str) -> str: 1206 this = expression.this 1207 if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT): 1208 this_sql = self.sql(this, "this") 1209 else: 1210 this_sql = self.sql(this) 1211 expression_sql = self.sql(expression, "expression") 1212 return self.func(name, this_sql, expression_sql if expression_sql else None) 1213 1214 def partition_sql(self, expression: exp.Partition) -> str: 1215 return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))" 1216 1217 def alter_sql(self, expression: exp.Alter) -> str: 1218 action = seq_get(expression.args.get("actions") or [], 0) 1219 if isinstance(action, exp.AlterRename): 1220 return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'" 1221 return super().alter_sql(expression) 1222 1223 def drop_sql(self, expression: exp.Drop) -> str: 1224 if expression.args["kind"] == "VIEW": 1225 expression.this.set("catalog", None) 1226 return super().drop_sql(expression) 1227 1228 def declare_sql(self, expression: exp.Declare) -> str: 1229 return f"DECLARE {self.expressions(expression, flat=True)}" 1230 1231 def declareitem_sql(self, expression: exp.DeclareItem) -> str: 1232 variable = self.sql(expression, "this") 1233 default = self.sql(expression, "default") 1234 default = f" = {default}" if default else "" 1235 1236 kind = self.sql(expression, "kind") 1237 if isinstance(expression.args.get("kind"), exp.Schema): 1238 kind = f"TABLE {kind}" 1239 1240 return f"{variable} AS {kind}{default}" 1241 1242 def options_modifier(self, expression: exp.Expression) -> str: 1243 options = self.expressions(expression, key="options") 1244 return f" OPTION{self.wrap(options)}" if options else "" 1245 1246 def dpipe_sql(self, expression: exp.DPipe) -> str: 1247 return self.sql( 1248 reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten()) 1249 )
252def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression: 253 """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.""" 254 alias = expression.args.get("alias") 255 256 if ( 257 isinstance(expression, (exp.CTE, exp.Subquery)) 258 and isinstance(alias, exp.TableAlias) 259 and not alias.columns 260 ): 261 from sqlglot.optimizer.qualify_columns import qualify_outputs 262 263 # We keep track of the unaliased column projection indexes instead of the expressions 264 # themselves, because the latter are going to be replaced by new nodes when the aliases 265 # are added and hence we won't be able to reach these newly added Alias parents 266 query = expression.this 267 unaliased_column_indexes = ( 268 i for i, c in enumerate(query.selects) if isinstance(c, exp.Column) and not c.alias 269 ) 270 271 qualify_outputs(query) 272 273 # Preserve the quoting information of columns for newly added Alias nodes 274 query_selects = query.selects 275 for select_index in unaliased_column_indexes: 276 alias = query_selects[select_index] 277 column = alias.this 278 if isinstance(column.this, exp.Identifier): 279 alias.args["alias"].set("quoted", column.this.quoted) 280 281 return expression
Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.
374class TSQL(Dialect): 375 SUPPORTS_SEMI_ANTI_JOIN = False 376 LOG_BASE_FIRST = False 377 TYPED_DIVISION = True 378 CONCAT_COALESCE = True 379 NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE 380 381 TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'" 382 383 TIME_MAPPING = { 384 "year": "%Y", 385 "dayofyear": "%j", 386 "day": "%d", 387 "dy": "%d", 388 "y": "%Y", 389 "week": "%W", 390 "ww": "%W", 391 "wk": "%W", 392 "hour": "%h", 393 "hh": "%I", 394 "minute": "%M", 395 "mi": "%M", 396 "n": "%M", 397 "second": "%S", 398 "ss": "%S", 399 "s": "%-S", 400 "millisecond": "%f", 401 "ms": "%f", 402 "weekday": "%W", 403 "dw": "%W", 404 "month": "%m", 405 "mm": "%M", 406 "m": "%-M", 407 "Y": "%Y", 408 "YYYY": "%Y", 409 "YY": "%y", 410 "MMMM": "%B", 411 "MMM": "%b", 412 "MM": "%m", 413 "M": "%-m", 414 "dddd": "%A", 415 "dd": "%d", 416 "d": "%-d", 417 "HH": "%H", 418 "H": "%-H", 419 "h": "%-I", 420 "ffffff": "%f", 421 "yyyy": "%Y", 422 "yy": "%y", 423 } 424 425 CONVERT_FORMAT_MAPPING = { 426 "0": "%b %d %Y %-I:%M%p", 427 "1": "%m/%d/%y", 428 "2": "%y.%m.%d", 429 "3": "%d/%m/%y", 430 "4": "%d.%m.%y", 431 "5": "%d-%m-%y", 432 "6": "%d %b %y", 433 "7": "%b %d, %y", 434 "8": "%H:%M:%S", 435 "9": "%b %d %Y %-I:%M:%S:%f%p", 436 "10": "mm-dd-yy", 437 "11": "yy/mm/dd", 438 "12": "yymmdd", 439 "13": "%d %b %Y %H:%M:ss:%f", 440 "14": "%H:%M:%S:%f", 441 "20": "%Y-%m-%d %H:%M:%S", 442 "21": "%Y-%m-%d %H:%M:%S.%f", 443 "22": "%m/%d/%y %-I:%M:%S %p", 444 "23": "%Y-%m-%d", 445 "24": "%H:%M:%S", 446 "25": "%Y-%m-%d %H:%M:%S.%f", 447 "100": "%b %d %Y %-I:%M%p", 448 "101": "%m/%d/%Y", 449 "102": "%Y.%m.%d", 450 "103": "%d/%m/%Y", 451 "104": "%d.%m.%Y", 452 "105": "%d-%m-%Y", 453 "106": "%d %b %Y", 454 "107": "%b %d, %Y", 455 "108": "%H:%M:%S", 456 "109": "%b %d %Y %-I:%M:%S:%f%p", 457 "110": "%m-%d-%Y", 458 "111": "%Y/%m/%d", 459 "112": "%Y%m%d", 460 "113": "%d %b %Y %H:%M:%S:%f", 461 "114": "%H:%M:%S:%f", 462 "120": "%Y-%m-%d %H:%M:%S", 463 "121": "%Y-%m-%d %H:%M:%S.%f", 464 } 465 466 FORMAT_TIME_MAPPING = { 467 "y": "%B %Y", 468 "d": "%m/%d/%Y", 469 "H": "%-H", 470 "h": "%-I", 471 "s": "%Y-%m-%d %H:%M:%S", 472 "D": "%A,%B,%Y", 473 "f": "%A,%B,%Y %-I:%M %p", 474 "F": "%A,%B,%Y %-I:%M:%S %p", 475 "g": "%m/%d/%Y %-I:%M %p", 476 "G": "%m/%d/%Y %-I:%M:%S %p", 477 "M": "%B %-d", 478 "m": "%B %-d", 479 "O": "%Y-%m-%dT%H:%M:%S", 480 "u": "%Y-%M-%D %H:%M:%S%z", 481 "U": "%A, %B %D, %Y %H:%M:%S%z", 482 "T": "%-I:%M:%S %p", 483 "t": "%-I:%M", 484 "Y": "%a %Y", 485 } 486 487 class Tokenizer(tokens.Tokenizer): 488 IDENTIFIERS = [("[", "]"), '"'] 489 QUOTES = ["'", '"'] 490 HEX_STRINGS = [("0x", ""), ("0X", "")] 491 VAR_SINGLE_TOKENS = {"@", "$", "#"} 492 493 KEYWORDS = { 494 **tokens.Tokenizer.KEYWORDS, 495 "CLUSTERED INDEX": TokenType.INDEX, 496 "DATETIME2": TokenType.DATETIME, 497 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 498 "DECLARE": TokenType.DECLARE, 499 "EXEC": TokenType.COMMAND, 500 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 501 "IMAGE": TokenType.IMAGE, 502 "MONEY": TokenType.MONEY, 503 "NONCLUSTERED INDEX": TokenType.INDEX, 504 "NTEXT": TokenType.TEXT, 505 "OPTION": TokenType.OPTION, 506 "OUTPUT": TokenType.RETURNING, 507 "PRINT": TokenType.COMMAND, 508 "PROC": TokenType.PROCEDURE, 509 "REAL": TokenType.FLOAT, 510 "ROWVERSION": TokenType.ROWVERSION, 511 "SMALLDATETIME": TokenType.DATETIME, 512 "SMALLMONEY": TokenType.SMALLMONEY, 513 "SQL_VARIANT": TokenType.VARIANT, 514 "SYSTEM_USER": TokenType.CURRENT_USER, 515 "TOP": TokenType.TOP, 516 "TIMESTAMP": TokenType.ROWVERSION, 517 "TINYINT": TokenType.UTINYINT, 518 "UNIQUEIDENTIFIER": TokenType.UNIQUEIDENTIFIER, 519 "UPDATE STATISTICS": TokenType.COMMAND, 520 "XML": TokenType.XML, 521 } 522 KEYWORDS.pop("/*+") 523 524 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END} 525 526 class Parser(parser.Parser): 527 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 528 LOG_DEFAULTS_TO_LN = True 529 ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False 530 STRING_ALIASES = True 531 NO_PAREN_IF_COMMANDS = False 532 533 QUERY_MODIFIER_PARSERS = { 534 **parser.Parser.QUERY_MODIFIER_PARSERS, 535 TokenType.OPTION: lambda self: ("options", self._parse_options()), 536 } 537 538 FUNCTIONS = { 539 **parser.Parser.FUNCTIONS, 540 "CHARINDEX": lambda args: exp.StrPosition( 541 this=seq_get(args, 1), 542 substr=seq_get(args, 0), 543 position=seq_get(args, 2), 544 ), 545 "COUNT": lambda args: exp.Count( 546 this=seq_get(args, 0), expressions=args[1:], big_int=False 547 ), 548 "COUNT_BIG": lambda args: exp.Count( 549 this=seq_get(args, 0), expressions=args[1:], big_int=True 550 ), 551 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 552 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 553 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 554 "DATEPART": _build_formatted_time(exp.TimeToStr), 555 "DATETIMEFROMPARTS": _build_datetimefromparts, 556 "EOMONTH": _build_eomonth, 557 "FORMAT": _build_format, 558 "GETDATE": exp.CurrentTimestamp.from_arg_list, 559 "HASHBYTES": _build_hashbytes, 560 "ISNULL": build_coalesce, 561 "JSON_QUERY": _build_json_query, 562 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 563 "LEN": _build_with_arg_as_text(exp.Length), 564 "LEFT": _build_with_arg_as_text(exp.Left), 565 "RIGHT": _build_with_arg_as_text(exp.Right), 566 "PARSENAME": _build_parsename, 567 "REPLICATE": exp.Repeat.from_arg_list, 568 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 569 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 570 "SUSER_NAME": exp.CurrentUser.from_arg_list, 571 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 572 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 573 "TIMEFROMPARTS": _build_timefromparts, 574 } 575 576 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 577 578 PROCEDURE_OPTIONS = dict.fromkeys( 579 ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple() 580 ) 581 582 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 583 TokenType.TABLE, 584 *parser.Parser.TYPE_TOKENS, 585 } 586 587 STATEMENT_PARSERS = { 588 **parser.Parser.STATEMENT_PARSERS, 589 TokenType.DECLARE: lambda self: self._parse_declare(), 590 } 591 592 RANGE_PARSERS = { 593 **parser.Parser.RANGE_PARSERS, 594 TokenType.DCOLON: lambda self, this: self.expression( 595 exp.ScopeResolution, 596 this=this, 597 expression=self._parse_function() or self._parse_var(any_token=True), 598 ), 599 } 600 601 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 602 COLUMN_OPERATORS = { 603 **parser.Parser.COLUMN_OPERATORS, 604 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 605 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 606 else self.expression(exp.ScopeResolution, this=this, expression=to), 607 } 608 609 def _parse_dcolon(self) -> t.Optional[exp.Expression]: 610 # We want to use _parse_types() if the first token after :: is a known type, 611 # otherwise we could parse something like x::varchar(max) into a function 612 if self._match_set(self.TYPE_TOKENS, advance=False): 613 return self._parse_types() 614 615 return self._parse_function() or self._parse_types() 616 617 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 618 if not self._match(TokenType.OPTION): 619 return None 620 621 def _parse_option() -> t.Optional[exp.Expression]: 622 option = self._parse_var_from_options(OPTIONS) 623 if not option: 624 return None 625 626 self._match(TokenType.EQ) 627 return self.expression( 628 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 629 ) 630 631 return self._parse_wrapped_csv(_parse_option) 632 633 def _parse_projections(self) -> t.List[exp.Expression]: 634 """ 635 T-SQL supports the syntax alias = expression in the SELECT's projection list, 636 so we transform all parsed Selects to convert their EQ projections into Aliases. 637 638 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 639 """ 640 return [ 641 ( 642 exp.alias_(projection.expression, projection.this.this, copy=False) 643 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 644 else projection 645 ) 646 for projection in super()._parse_projections() 647 ] 648 649 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 650 """Applies to SQL Server and Azure SQL Database 651 COMMIT [ { TRAN | TRANSACTION } 652 [ transaction_name | @tran_name_variable ] ] 653 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 654 655 ROLLBACK { TRAN | TRANSACTION } 656 [ transaction_name | @tran_name_variable 657 | savepoint_name | @savepoint_variable ] 658 """ 659 rollback = self._prev.token_type == TokenType.ROLLBACK 660 661 self._match_texts(("TRAN", "TRANSACTION")) 662 this = self._parse_id_var() 663 664 if rollback: 665 return self.expression(exp.Rollback, this=this) 666 667 durability = None 668 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 669 self._match_text_seq("DELAYED_DURABILITY") 670 self._match(TokenType.EQ) 671 672 if self._match_text_seq("OFF"): 673 durability = False 674 else: 675 self._match(TokenType.ON) 676 durability = True 677 678 self._match_r_paren() 679 680 return self.expression(exp.Commit, this=this, durability=durability) 681 682 def _parse_transaction(self) -> exp.Transaction | exp.Command: 683 """Applies to SQL Server and Azure SQL Database 684 BEGIN { TRAN | TRANSACTION } 685 [ { transaction_name | @tran_name_variable } 686 [ WITH MARK [ 'description' ] ] 687 ] 688 """ 689 if self._match_texts(("TRAN", "TRANSACTION")): 690 transaction = self.expression(exp.Transaction, this=self._parse_id_var()) 691 if self._match_text_seq("WITH", "MARK"): 692 transaction.set("mark", self._parse_string()) 693 694 return transaction 695 696 return self._parse_as_command(self._prev) 697 698 def _parse_returns(self) -> exp.ReturnsProperty: 699 table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS) 700 returns = super()._parse_returns() 701 returns.set("table", table) 702 return returns 703 704 def _parse_convert( 705 self, strict: bool, safe: t.Optional[bool] = None 706 ) -> t.Optional[exp.Expression]: 707 this = self._parse_types() 708 self._match(TokenType.COMMA) 709 args = [this, *self._parse_csv(self._parse_assignment)] 710 convert = exp.Convert.from_arg_list(args) 711 convert.set("safe", safe) 712 convert.set("strict", strict) 713 return convert 714 715 def _parse_user_defined_function( 716 self, kind: t.Optional[TokenType] = None 717 ) -> t.Optional[exp.Expression]: 718 this = super()._parse_user_defined_function(kind=kind) 719 720 if ( 721 kind == TokenType.FUNCTION 722 or isinstance(this, exp.UserDefinedFunction) 723 or self._match(TokenType.ALIAS, advance=False) 724 ): 725 return this 726 727 if not self._match(TokenType.WITH, advance=False): 728 expressions = self._parse_csv(self._parse_function_parameter) 729 else: 730 expressions = None 731 732 return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions) 733 734 def _parse_id_var( 735 self, 736 any_token: bool = True, 737 tokens: t.Optional[t.Collection[TokenType]] = None, 738 ) -> t.Optional[exp.Expression]: 739 is_temporary = self._match(TokenType.HASH) 740 is_global = is_temporary and self._match(TokenType.HASH) 741 742 this = super()._parse_id_var(any_token=any_token, tokens=tokens) 743 if this: 744 if is_global: 745 this.set("global", True) 746 elif is_temporary: 747 this.set("temporary", True) 748 749 return this 750 751 def _parse_create(self) -> exp.Create | exp.Command: 752 create = super()._parse_create() 753 754 if isinstance(create, exp.Create): 755 table = create.this.this if isinstance(create.this, exp.Schema) else create.this 756 if isinstance(table, exp.Table) and table.this.args.get("temporary"): 757 if not create.args.get("properties"): 758 create.set("properties", exp.Properties(expressions=[])) 759 760 create.args["properties"].append("expressions", exp.TemporaryProperty()) 761 762 return create 763 764 def _parse_if(self) -> t.Optional[exp.Expression]: 765 index = self._index 766 767 if self._match_text_seq("OBJECT_ID"): 768 self._parse_wrapped_csv(self._parse_string) 769 if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP): 770 return self._parse_drop(exists=True) 771 self._retreat(index) 772 773 return super()._parse_if() 774 775 def _parse_unique(self) -> exp.UniqueColumnConstraint: 776 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 777 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 778 else: 779 this = self._parse_schema(self._parse_id_var(any_token=False)) 780 781 return self.expression(exp.UniqueColumnConstraint, this=this) 782 783 def _parse_partition(self) -> t.Optional[exp.Partition]: 784 if not self._match_text_seq("WITH", "(", "PARTITIONS"): 785 return None 786 787 def parse_range(): 788 low = self._parse_bitwise() 789 high = self._parse_bitwise() if self._match_text_seq("TO") else None 790 791 return ( 792 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 793 ) 794 795 partition = self.expression( 796 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 797 ) 798 799 self._match_r_paren() 800 801 return partition 802 803 def _parse_declare(self) -> exp.Declare | exp.Command: 804 index = self._index 805 expressions = self._try_parse(partial(self._parse_csv, self._parse_declareitem)) 806 807 if not expressions or self._curr: 808 self._retreat(index) 809 return self._parse_as_command(self._prev) 810 811 return self.expression(exp.Declare, expressions=expressions) 812 813 def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]: 814 var = self._parse_id_var() 815 if not var: 816 return None 817 818 value = None 819 self._match(TokenType.ALIAS) 820 if self._match(TokenType.TABLE): 821 data_type = self._parse_schema() 822 else: 823 data_type = self._parse_types() 824 if self._match(TokenType.EQ): 825 value = self._parse_bitwise() 826 827 return self.expression(exp.DeclareItem, this=var, kind=data_type, default=value) 828 829 class Generator(generator.Generator): 830 LIMIT_IS_TOP = True 831 QUERY_HINTS = False 832 RETURNING_END = False 833 NVL2_SUPPORTED = False 834 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 835 LIMIT_FETCH = "FETCH" 836 COMPUTED_COLUMN_WITH_TYPE = False 837 CTE_RECURSIVE_KEYWORD_REQUIRED = False 838 ENSURE_BOOLS = True 839 NULL_ORDERING_SUPPORTED = None 840 SUPPORTS_SINGLE_ARG_CONCAT = False 841 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 842 SUPPORTS_SELECT_INTO = True 843 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 844 SUPPORTS_TO_NUMBER = False 845 SET_OP_MODIFIERS = False 846 COPY_PARAMS_EQ_REQUIRED = True 847 PARSE_JSON_NAME = None 848 EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False 849 850 EXPRESSIONS_WITHOUT_NESTED_CTES = { 851 exp.Create, 852 exp.Delete, 853 exp.Insert, 854 exp.Intersect, 855 exp.Except, 856 exp.Merge, 857 exp.Select, 858 exp.Subquery, 859 exp.Union, 860 exp.Update, 861 } 862 863 SUPPORTED_JSON_PATH_PARTS = { 864 exp.JSONPathKey, 865 exp.JSONPathRoot, 866 exp.JSONPathSubscript, 867 } 868 869 TYPE_MAPPING = { 870 **generator.Generator.TYPE_MAPPING, 871 exp.DataType.Type.BOOLEAN: "BIT", 872 exp.DataType.Type.DECIMAL: "NUMERIC", 873 exp.DataType.Type.DATETIME: "DATETIME2", 874 exp.DataType.Type.DOUBLE: "FLOAT", 875 exp.DataType.Type.INT: "INTEGER", 876 exp.DataType.Type.ROWVERSION: "ROWVERSION", 877 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 878 exp.DataType.Type.TIMESTAMP: "DATETIME2", 879 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 880 exp.DataType.Type.UTINYINT: "TINYINT", 881 exp.DataType.Type.VARIANT: "SQL_VARIANT", 882 } 883 884 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 885 TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR) 886 887 TRANSFORMS = { 888 **generator.Generator.TRANSFORMS, 889 exp.AnyValue: any_value_to_max_sql, 890 exp.ArrayToString: rename_func("STRING_AGG"), 891 exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", 892 exp.DateAdd: date_delta_sql("DATEADD"), 893 exp.DateDiff: date_delta_sql("DATEDIFF"), 894 exp.CTE: transforms.preprocess([qualify_derived_table_outputs]), 895 exp.CurrentDate: rename_func("GETDATE"), 896 exp.CurrentTimestamp: rename_func("GETDATE"), 897 exp.DateStrToDate: datestrtodate_sql, 898 exp.Extract: rename_func("DATEPART"), 899 exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql, 900 exp.GroupConcat: _string_agg_sql, 901 exp.If: rename_func("IIF"), 902 exp.JSONExtract: _json_extract_sql, 903 exp.JSONExtractScalar: _json_extract_sql, 904 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 905 exp.Max: max_or_greatest, 906 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), 907 exp.Min: min_or_least, 908 exp.NumberToStr: _format_sql, 909 exp.Repeat: rename_func("REPLICATE"), 910 exp.Select: transforms.preprocess( 911 [ 912 transforms.eliminate_distinct_on, 913 transforms.eliminate_semi_and_anti_joins, 914 transforms.eliminate_qualify, 915 transforms.unnest_generate_date_array_using_recursive_cte, 916 ] 917 ), 918 exp.Stddev: rename_func("STDEV"), 919 exp.StrPosition: lambda self, e: self.func( 920 "CHARINDEX", e.args.get("substr"), e.this, e.args.get("position") 921 ), 922 exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]), 923 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), 924 exp.SHA2: lambda self, e: self.func( 925 "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this 926 ), 927 exp.TemporaryProperty: lambda self, e: "", 928 exp.TimeStrToTime: _timestrtotime_sql, 929 exp.TimeToStr: _format_sql, 930 exp.Trim: trim_sql, 931 exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True), 932 exp.TsOrDsDiff: date_delta_sql("DATEDIFF"), 933 } 934 935 TRANSFORMS.pop(exp.ReturnsProperty) 936 937 PROPERTIES_LOCATION = { 938 **generator.Generator.PROPERTIES_LOCATION, 939 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, 940 } 941 942 def scope_resolution(self, rhs: str, scope_name: str) -> str: 943 return f"{scope_name}::{rhs}" 944 945 def select_sql(self, expression: exp.Select) -> str: 946 if expression.args.get("offset"): 947 if not expression.args.get("order"): 948 # ORDER BY is required in order to use OFFSET in a query, so we use 949 # a noop order by, since we don't really care about the order. 950 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 951 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 952 953 limit = expression.args.get("limit") 954 if isinstance(limit, exp.Limit): 955 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 956 # we replace here because otherwise TOP would be generated in select_sql 957 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 958 959 return super().select_sql(expression) 960 961 def convert_sql(self, expression: exp.Convert) -> str: 962 name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT" 963 return self.func( 964 name, expression.this, expression.expression, expression.args.get("style") 965 ) 966 967 def queryoption_sql(self, expression: exp.QueryOption) -> str: 968 option = self.sql(expression, "this") 969 value = self.sql(expression, "expression") 970 if value: 971 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 972 return f"{option} {optional_equal_sign}{value}" 973 return option 974 975 def lateral_op(self, expression: exp.Lateral) -> str: 976 cross_apply = expression.args.get("cross_apply") 977 if cross_apply is True: 978 return "CROSS APPLY" 979 if cross_apply is False: 980 return "OUTER APPLY" 981 982 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 983 self.unsupported("LATERAL clause is not supported.") 984 return "LATERAL" 985 986 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 987 this = expression.this 988 split_count = len(this.name.split(".")) 989 delimiter = expression.args.get("delimiter") 990 part_index = expression.args.get("part_index") 991 992 if ( 993 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 994 or (delimiter and delimiter.name != ".") 995 or not part_index 996 or split_count > 4 997 ): 998 self.unsupported( 999 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 1000 ) 1001 return "" 1002 1003 return self.func( 1004 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1005 ) 1006 1007 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1008 nano = expression.args.get("nano") 1009 if nano is not None: 1010 nano.pop() 1011 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1012 1013 if expression.args.get("fractions") is None: 1014 expression.set("fractions", exp.Literal.number(0)) 1015 if expression.args.get("precision") is None: 1016 expression.set("precision", exp.Literal.number(0)) 1017 1018 return rename_func("TIMEFROMPARTS")(self, expression) 1019 1020 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1021 zone = expression.args.get("zone") 1022 if zone is not None: 1023 zone.pop() 1024 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1025 1026 nano = expression.args.get("nano") 1027 if nano is not None: 1028 nano.pop() 1029 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 1030 1031 if expression.args.get("milli") is None: 1032 expression.set("milli", exp.Literal.number(0)) 1033 1034 return rename_func("DATETIMEFROMPARTS")(self, expression) 1035 1036 def setitem_sql(self, expression: exp.SetItem) -> str: 1037 this = expression.this 1038 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1039 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 1040 return f"{self.sql(this.left)} {self.sql(this.right)}" 1041 1042 return super().setitem_sql(expression) 1043 1044 def boolean_sql(self, expression: exp.Boolean) -> str: 1045 if type(expression.parent) in BIT_TYPES or isinstance( 1046 expression.find_ancestor(exp.Values, exp.Select), exp.Values 1047 ): 1048 return "1" if expression.this else "0" 1049 1050 return "(1 = 1)" if expression.this else "(1 = 0)" 1051 1052 def is_sql(self, expression: exp.Is) -> str: 1053 if isinstance(expression.expression, exp.Boolean): 1054 return self.binary(expression, "=") 1055 return self.binary(expression, "IS") 1056 1057 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 1058 sql = self.sql(expression, "this") 1059 properties = expression.args.get("properties") 1060 1061 if sql[:1] != "#" and any( 1062 isinstance(prop, exp.TemporaryProperty) 1063 for prop in (properties.expressions if properties else []) 1064 ): 1065 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 1066 1067 return sql 1068 1069 def create_sql(self, expression: exp.Create) -> str: 1070 kind = expression.kind 1071 exists = expression.args.pop("exists", None) 1072 1073 like_property = expression.find(exp.LikeProperty) 1074 if like_property: 1075 ctas_expression = like_property.this 1076 else: 1077 ctas_expression = expression.expression 1078 1079 if kind == "VIEW": 1080 expression.this.set("catalog", None) 1081 with_ = expression.args.get("with") 1082 if ctas_expression and with_: 1083 # We've already preprocessed the Create expression to bubble up any nested CTEs, 1084 # but CREATE VIEW actually requires the WITH clause to come after it so we need 1085 # to amend the AST by moving the CTEs to the CREATE VIEW statement's query. 1086 ctas_expression.set("with", with_.pop()) 1087 1088 sql = super().create_sql(expression) 1089 1090 table = expression.find(exp.Table) 1091 1092 # Convert CTAS statement to SELECT .. INTO .. 1093 if kind == "TABLE" and ctas_expression: 1094 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 1095 ctas_expression = ctas_expression.subquery() 1096 1097 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 1098 select_into.set("into", exp.Into(this=table)) 1099 1100 if like_property: 1101 select_into.limit(0, copy=False) 1102 1103 sql = self.sql(select_into) 1104 1105 if exists: 1106 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 1107 sql_with_ctes = self.prepend_ctes(expression, sql) 1108 sql_literal = self.sql(exp.Literal.string(sql_with_ctes)) 1109 if kind == "SCHEMA": 1110 return f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql_literal})""" 1111 elif kind == "TABLE": 1112 assert table 1113 where = exp.and_( 1114 exp.column("table_name").eq(table.name), 1115 exp.column("table_schema").eq(table.db) if table.db else None, 1116 exp.column("table_catalog").eq(table.catalog) if table.catalog else None, 1117 ) 1118 return f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql_literal})""" 1119 elif kind == "INDEX": 1120 index = self.sql(exp.Literal.string(expression.this.text("this"))) 1121 return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})""" 1122 elif expression.args.get("replace"): 1123 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 1124 1125 return self.prepend_ctes(expression, sql) 1126 1127 def count_sql(self, expression: exp.Count) -> str: 1128 func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT" 1129 return rename_func(func_name)(self, expression) 1130 1131 def offset_sql(self, expression: exp.Offset) -> str: 1132 return f"{super().offset_sql(expression)} ROWS" 1133 1134 def version_sql(self, expression: exp.Version) -> str: 1135 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1136 this = f"FOR {name}" 1137 expr = expression.expression 1138 kind = expression.text("kind") 1139 if kind in ("FROM", "BETWEEN"): 1140 args = expr.expressions 1141 sep = "TO" if kind == "FROM" else "AND" 1142 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1143 else: 1144 expr_sql = self.sql(expr) 1145 1146 expr_sql = f" {expr_sql}" if expr_sql else "" 1147 return f"{this} {kind}{expr_sql}" 1148 1149 def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str: 1150 table = expression.args.get("table") 1151 table = f"{table} " if table else "" 1152 return f"RETURNS {table}{self.sql(expression, 'this')}" 1153 1154 def returning_sql(self, expression: exp.Returning) -> str: 1155 into = self.sql(expression, "into") 1156 into = self.seg(f"INTO {into}") if into else "" 1157 return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}" 1158 1159 def transaction_sql(self, expression: exp.Transaction) -> str: 1160 this = self.sql(expression, "this") 1161 this = f" {this}" if this else "" 1162 mark = self.sql(expression, "mark") 1163 mark = f" WITH MARK {mark}" if mark else "" 1164 return f"BEGIN TRANSACTION{this}{mark}" 1165 1166 def commit_sql(self, expression: exp.Commit) -> str: 1167 this = self.sql(expression, "this") 1168 this = f" {this}" if this else "" 1169 durability = expression.args.get("durability") 1170 durability = ( 1171 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1172 if durability is not None 1173 else "" 1174 ) 1175 return f"COMMIT TRANSACTION{this}{durability}" 1176 1177 def rollback_sql(self, expression: exp.Rollback) -> str: 1178 this = self.sql(expression, "this") 1179 this = f" {this}" if this else "" 1180 return f"ROLLBACK TRANSACTION{this}" 1181 1182 def identifier_sql(self, expression: exp.Identifier) -> str: 1183 identifier = super().identifier_sql(expression) 1184 1185 if expression.args.get("global"): 1186 identifier = f"##{identifier}" 1187 elif expression.args.get("temporary"): 1188 identifier = f"#{identifier}" 1189 1190 return identifier 1191 1192 def constraint_sql(self, expression: exp.Constraint) -> str: 1193 this = self.sql(expression, "this") 1194 expressions = self.expressions(expression, flat=True, sep=" ") 1195 return f"CONSTRAINT {this} {expressions}" 1196 1197 def length_sql(self, expression: exp.Length) -> str: 1198 return self._uncast_text(expression, "LEN") 1199 1200 def right_sql(self, expression: exp.Right) -> str: 1201 return self._uncast_text(expression, "RIGHT") 1202 1203 def left_sql(self, expression: exp.Left) -> str: 1204 return self._uncast_text(expression, "LEFT") 1205 1206 def _uncast_text(self, expression: exp.Expression, name: str) -> str: 1207 this = expression.this 1208 if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT): 1209 this_sql = self.sql(this, "this") 1210 else: 1211 this_sql = self.sql(this) 1212 expression_sql = self.sql(expression, "expression") 1213 return self.func(name, this_sql, expression_sql if expression_sql else None) 1214 1215 def partition_sql(self, expression: exp.Partition) -> str: 1216 return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))" 1217 1218 def alter_sql(self, expression: exp.Alter) -> str: 1219 action = seq_get(expression.args.get("actions") or [], 0) 1220 if isinstance(action, exp.AlterRename): 1221 return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'" 1222 return super().alter_sql(expression) 1223 1224 def drop_sql(self, expression: exp.Drop) -> str: 1225 if expression.args["kind"] == "VIEW": 1226 expression.this.set("catalog", None) 1227 return super().drop_sql(expression) 1228 1229 def declare_sql(self, expression: exp.Declare) -> str: 1230 return f"DECLARE {self.expressions(expression, flat=True)}" 1231 1232 def declareitem_sql(self, expression: exp.DeclareItem) -> str: 1233 variable = self.sql(expression, "this") 1234 default = self.sql(expression, "default") 1235 default = f" = {default}" if default else "" 1236 1237 kind = self.sql(expression, "kind") 1238 if isinstance(expression.args.get("kind"), exp.Schema): 1239 kind = f"TABLE {kind}" 1240 1241 return f"{variable} AS {kind}{default}" 1242 1243 def options_modifier(self, expression: exp.Expression) -> str: 1244 options = self.expressions(expression, key="options") 1245 return f" OPTION{self.wrap(options)}" if options else "" 1246 1247 def dpipe_sql(self, expression: exp.DPipe) -> str: 1248 return self.sql( 1249 reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten()) 1250 )
Whether the base comes first in the LOG
function.
Possible values: True
, False
, None
(two arguments are not supported by LOG
)
Whether the behavior of a / b
depends on the types of a
and b
.
False means a / b
is always float division.
True means a / b
is integer division if both a
and b
are integers.
A NULL
arg in CONCAT
yields NULL
by default, but in some dialects it yields an empty string.
Specifies the strategy according to which identifiers should be normalized.
Associates this dialect's time formats with their equivalent Python strftime
formats.
Inherited Members
- sqlglot.dialects.dialect.Dialect
- Dialect
- INDEX_OFFSET
- WEEK_OFFSET
- UNNEST_COLUMN_ONLY
- ALIAS_POST_TABLESAMPLE
- TABLESAMPLE_SIZE_IS_PERCENT
- IDENTIFIERS_CAN_START_WITH_DIGIT
- DPIPE_IS_STRING_CONCAT
- STRICT_STRING_CONCAT
- SUPPORTS_USER_DEFINED_TYPES
- COPY_PARAMS_ARE_CSV
- NORMALIZE_FUNCTIONS
- NULL_ORDERING
- SAFE_DIVISION
- HEX_LOWERCASE
- DATE_FORMAT
- DATEINT_FORMAT
- FORMAT_MAPPING
- UNESCAPED_SEQUENCES
- PSEUDOCOLUMNS
- PREFER_CTE_ALIAS_COLUMN
- FORCE_EARLY_ALIAS_REF_EXPANSION
- EXPAND_ALIAS_REFS_EARLY_ONLY_IN_GROUP_BY
- SUPPORTS_ORDER_BY_ALL
- HAS_DISTINCT_ARRAY_CONSTRUCTORS
- SUPPORTS_FIXED_SIZE_ARRAYS
- STRICT_JSON_PATH_SYNTAX
- ON_CONDITION_EMPTY_BEFORE_ERROR
- ARRAY_AGG_INCLUDES_NULLS
- REGEXP_EXTRACT_DEFAULT_GROUP
- SET_OP_DISTINCT_BY_DEFAULT
- CREATABLE_KIND_MAPPING
- DATE_PART_MAPPING
- TYPE_TO_EXPRESSIONS
- ANNOTATORS
- get_or_raise
- format_time
- settings
- normalize_identifier
- case_sensitive
- can_identify
- quote_identifier
- to_json_path
- parse
- parse_into
- generate
- transpile
- tokenize
- tokenizer
- jsonpath_tokenizer
- parser
- generator
487 class Tokenizer(tokens.Tokenizer): 488 IDENTIFIERS = [("[", "]"), '"'] 489 QUOTES = ["'", '"'] 490 HEX_STRINGS = [("0x", ""), ("0X", "")] 491 VAR_SINGLE_TOKENS = {"@", "$", "#"} 492 493 KEYWORDS = { 494 **tokens.Tokenizer.KEYWORDS, 495 "CLUSTERED INDEX": TokenType.INDEX, 496 "DATETIME2": TokenType.DATETIME, 497 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 498 "DECLARE": TokenType.DECLARE, 499 "EXEC": TokenType.COMMAND, 500 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 501 "IMAGE": TokenType.IMAGE, 502 "MONEY": TokenType.MONEY, 503 "NONCLUSTERED INDEX": TokenType.INDEX, 504 "NTEXT": TokenType.TEXT, 505 "OPTION": TokenType.OPTION, 506 "OUTPUT": TokenType.RETURNING, 507 "PRINT": TokenType.COMMAND, 508 "PROC": TokenType.PROCEDURE, 509 "REAL": TokenType.FLOAT, 510 "ROWVERSION": TokenType.ROWVERSION, 511 "SMALLDATETIME": TokenType.DATETIME, 512 "SMALLMONEY": TokenType.SMALLMONEY, 513 "SQL_VARIANT": TokenType.VARIANT, 514 "SYSTEM_USER": TokenType.CURRENT_USER, 515 "TOP": TokenType.TOP, 516 "TIMESTAMP": TokenType.ROWVERSION, 517 "TINYINT": TokenType.UTINYINT, 518 "UNIQUEIDENTIFIER": TokenType.UNIQUEIDENTIFIER, 519 "UPDATE STATISTICS": TokenType.COMMAND, 520 "XML": TokenType.XML, 521 } 522 KEYWORDS.pop("/*+") 523 524 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END}
Inherited Members
- sqlglot.tokens.Tokenizer
- Tokenizer
- SINGLE_TOKENS
- BIT_STRINGS
- BYTE_STRINGS
- RAW_STRINGS
- HEREDOC_STRINGS
- UNICODE_STRINGS
- IDENTIFIER_ESCAPES
- STRING_ESCAPES
- HEREDOC_TAG_IS_IDENTIFIER
- HEREDOC_STRING_ALTERNATIVE
- STRING_ESCAPES_ALLOWED_IN_RAW_STRINGS
- NESTED_COMMENTS
- WHITE_SPACE
- COMMAND_PREFIX_TOKENS
- NUMERIC_LITERALS
- COMMENTS
- dialect
- reset
- tokenize
- tokenize_rs
- size
- sql
- tokens
526 class Parser(parser.Parser): 527 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 528 LOG_DEFAULTS_TO_LN = True 529 ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False 530 STRING_ALIASES = True 531 NO_PAREN_IF_COMMANDS = False 532 533 QUERY_MODIFIER_PARSERS = { 534 **parser.Parser.QUERY_MODIFIER_PARSERS, 535 TokenType.OPTION: lambda self: ("options", self._parse_options()), 536 } 537 538 FUNCTIONS = { 539 **parser.Parser.FUNCTIONS, 540 "CHARINDEX": lambda args: exp.StrPosition( 541 this=seq_get(args, 1), 542 substr=seq_get(args, 0), 543 position=seq_get(args, 2), 544 ), 545 "COUNT": lambda args: exp.Count( 546 this=seq_get(args, 0), expressions=args[1:], big_int=False 547 ), 548 "COUNT_BIG": lambda args: exp.Count( 549 this=seq_get(args, 0), expressions=args[1:], big_int=True 550 ), 551 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 552 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 553 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 554 "DATEPART": _build_formatted_time(exp.TimeToStr), 555 "DATETIMEFROMPARTS": _build_datetimefromparts, 556 "EOMONTH": _build_eomonth, 557 "FORMAT": _build_format, 558 "GETDATE": exp.CurrentTimestamp.from_arg_list, 559 "HASHBYTES": _build_hashbytes, 560 "ISNULL": build_coalesce, 561 "JSON_QUERY": _build_json_query, 562 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 563 "LEN": _build_with_arg_as_text(exp.Length), 564 "LEFT": _build_with_arg_as_text(exp.Left), 565 "RIGHT": _build_with_arg_as_text(exp.Right), 566 "PARSENAME": _build_parsename, 567 "REPLICATE": exp.Repeat.from_arg_list, 568 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 569 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 570 "SUSER_NAME": exp.CurrentUser.from_arg_list, 571 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 572 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 573 "TIMEFROMPARTS": _build_timefromparts, 574 } 575 576 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 577 578 PROCEDURE_OPTIONS = dict.fromkeys( 579 ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple() 580 ) 581 582 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 583 TokenType.TABLE, 584 *parser.Parser.TYPE_TOKENS, 585 } 586 587 STATEMENT_PARSERS = { 588 **parser.Parser.STATEMENT_PARSERS, 589 TokenType.DECLARE: lambda self: self._parse_declare(), 590 } 591 592 RANGE_PARSERS = { 593 **parser.Parser.RANGE_PARSERS, 594 TokenType.DCOLON: lambda self, this: self.expression( 595 exp.ScopeResolution, 596 this=this, 597 expression=self._parse_function() or self._parse_var(any_token=True), 598 ), 599 } 600 601 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 602 COLUMN_OPERATORS = { 603 **parser.Parser.COLUMN_OPERATORS, 604 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 605 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 606 else self.expression(exp.ScopeResolution, this=this, expression=to), 607 } 608 609 def _parse_dcolon(self) -> t.Optional[exp.Expression]: 610 # We want to use _parse_types() if the first token after :: is a known type, 611 # otherwise we could parse something like x::varchar(max) into a function 612 if self._match_set(self.TYPE_TOKENS, advance=False): 613 return self._parse_types() 614 615 return self._parse_function() or self._parse_types() 616 617 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 618 if not self._match(TokenType.OPTION): 619 return None 620 621 def _parse_option() -> t.Optional[exp.Expression]: 622 option = self._parse_var_from_options(OPTIONS) 623 if not option: 624 return None 625 626 self._match(TokenType.EQ) 627 return self.expression( 628 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 629 ) 630 631 return self._parse_wrapped_csv(_parse_option) 632 633 def _parse_projections(self) -> t.List[exp.Expression]: 634 """ 635 T-SQL supports the syntax alias = expression in the SELECT's projection list, 636 so we transform all parsed Selects to convert their EQ projections into Aliases. 637 638 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 639 """ 640 return [ 641 ( 642 exp.alias_(projection.expression, projection.this.this, copy=False) 643 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 644 else projection 645 ) 646 for projection in super()._parse_projections() 647 ] 648 649 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 650 """Applies to SQL Server and Azure SQL Database 651 COMMIT [ { TRAN | TRANSACTION } 652 [ transaction_name | @tran_name_variable ] ] 653 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 654 655 ROLLBACK { TRAN | TRANSACTION } 656 [ transaction_name | @tran_name_variable 657 | savepoint_name | @savepoint_variable ] 658 """ 659 rollback = self._prev.token_type == TokenType.ROLLBACK 660 661 self._match_texts(("TRAN", "TRANSACTION")) 662 this = self._parse_id_var() 663 664 if rollback: 665 return self.expression(exp.Rollback, this=this) 666 667 durability = None 668 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 669 self._match_text_seq("DELAYED_DURABILITY") 670 self._match(TokenType.EQ) 671 672 if self._match_text_seq("OFF"): 673 durability = False 674 else: 675 self._match(TokenType.ON) 676 durability = True 677 678 self._match_r_paren() 679 680 return self.expression(exp.Commit, this=this, durability=durability) 681 682 def _parse_transaction(self) -> exp.Transaction | exp.Command: 683 """Applies to SQL Server and Azure SQL Database 684 BEGIN { TRAN | TRANSACTION } 685 [ { transaction_name | @tran_name_variable } 686 [ WITH MARK [ 'description' ] ] 687 ] 688 """ 689 if self._match_texts(("TRAN", "TRANSACTION")): 690 transaction = self.expression(exp.Transaction, this=self._parse_id_var()) 691 if self._match_text_seq("WITH", "MARK"): 692 transaction.set("mark", self._parse_string()) 693 694 return transaction 695 696 return self._parse_as_command(self._prev) 697 698 def _parse_returns(self) -> exp.ReturnsProperty: 699 table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS) 700 returns = super()._parse_returns() 701 returns.set("table", table) 702 return returns 703 704 def _parse_convert( 705 self, strict: bool, safe: t.Optional[bool] = None 706 ) -> t.Optional[exp.Expression]: 707 this = self._parse_types() 708 self._match(TokenType.COMMA) 709 args = [this, *self._parse_csv(self._parse_assignment)] 710 convert = exp.Convert.from_arg_list(args) 711 convert.set("safe", safe) 712 convert.set("strict", strict) 713 return convert 714 715 def _parse_user_defined_function( 716 self, kind: t.Optional[TokenType] = None 717 ) -> t.Optional[exp.Expression]: 718 this = super()._parse_user_defined_function(kind=kind) 719 720 if ( 721 kind == TokenType.FUNCTION 722 or isinstance(this, exp.UserDefinedFunction) 723 or self._match(TokenType.ALIAS, advance=False) 724 ): 725 return this 726 727 if not self._match(TokenType.WITH, advance=False): 728 expressions = self._parse_csv(self._parse_function_parameter) 729 else: 730 expressions = None 731 732 return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions) 733 734 def _parse_id_var( 735 self, 736 any_token: bool = True, 737 tokens: t.Optional[t.Collection[TokenType]] = None, 738 ) -> t.Optional[exp.Expression]: 739 is_temporary = self._match(TokenType.HASH) 740 is_global = is_temporary and self._match(TokenType.HASH) 741 742 this = super()._parse_id_var(any_token=any_token, tokens=tokens) 743 if this: 744 if is_global: 745 this.set("global", True) 746 elif is_temporary: 747 this.set("temporary", True) 748 749 return this 750 751 def _parse_create(self) -> exp.Create | exp.Command: 752 create = super()._parse_create() 753 754 if isinstance(create, exp.Create): 755 table = create.this.this if isinstance(create.this, exp.Schema) else create.this 756 if isinstance(table, exp.Table) and table.this.args.get("temporary"): 757 if not create.args.get("properties"): 758 create.set("properties", exp.Properties(expressions=[])) 759 760 create.args["properties"].append("expressions", exp.TemporaryProperty()) 761 762 return create 763 764 def _parse_if(self) -> t.Optional[exp.Expression]: 765 index = self._index 766 767 if self._match_text_seq("OBJECT_ID"): 768 self._parse_wrapped_csv(self._parse_string) 769 if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP): 770 return self._parse_drop(exists=True) 771 self._retreat(index) 772 773 return super()._parse_if() 774 775 def _parse_unique(self) -> exp.UniqueColumnConstraint: 776 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 777 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 778 else: 779 this = self._parse_schema(self._parse_id_var(any_token=False)) 780 781 return self.expression(exp.UniqueColumnConstraint, this=this) 782 783 def _parse_partition(self) -> t.Optional[exp.Partition]: 784 if not self._match_text_seq("WITH", "(", "PARTITIONS"): 785 return None 786 787 def parse_range(): 788 low = self._parse_bitwise() 789 high = self._parse_bitwise() if self._match_text_seq("TO") else None 790 791 return ( 792 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 793 ) 794 795 partition = self.expression( 796 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 797 ) 798 799 self._match_r_paren() 800 801 return partition 802 803 def _parse_declare(self) -> exp.Declare | exp.Command: 804 index = self._index 805 expressions = self._try_parse(partial(self._parse_csv, self._parse_declareitem)) 806 807 if not expressions or self._curr: 808 self._retreat(index) 809 return self._parse_as_command(self._prev) 810 811 return self.expression(exp.Declare, expressions=expressions) 812 813 def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]: 814 var = self._parse_id_var() 815 if not var: 816 return None 817 818 value = None 819 self._match(TokenType.ALIAS) 820 if self._match(TokenType.TABLE): 821 data_type = self._parse_schema() 822 else: 823 data_type = self._parse_types() 824 if self._match(TokenType.EQ): 825 value = self._parse_bitwise() 826 827 return self.expression(exp.DeclareItem, this=var, kind=data_type, default=value)
Parser consumes a list of tokens produced by the Tokenizer and produces a parsed syntax tree.
Arguments:
- error_level: The desired error level. Default: ErrorLevel.IMMEDIATE
- error_message_context: The amount of context to capture from a query string when displaying the error message (in number of characters). Default: 100
- max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
Inherited Members
- sqlglot.parser.Parser
- Parser
- NO_PAREN_FUNCTIONS
- STRUCT_TYPE_TOKENS
- NESTED_TYPE_TOKENS
- ENUM_TYPE_TOKENS
- AGGREGATE_TYPE_TOKENS
- TYPE_TOKENS
- SIGNED_TO_UNSIGNED_TYPE_TOKEN
- SUBQUERY_PREDICATES
- RESERVED_TOKENS
- DB_CREATABLES
- CREATABLES
- ALTERABLES
- INTERVAL_VARS
- ALIAS_TOKENS
- ARRAY_CONSTRUCTORS
- COMMENT_TABLE_ALIAS_TOKENS
- UPDATE_ALIAS_TOKENS
- TRIM_TYPES
- FUNC_TOKENS
- CONJUNCTION
- ASSIGNMENT
- DISJUNCTION
- EQUALITY
- COMPARISON
- BITWISE
- TERM
- FACTOR
- EXPONENT
- TIMES
- TIMESTAMPS
- SET_OPERATIONS
- JOIN_METHODS
- JOIN_SIDES
- JOIN_KINDS
- LAMBDAS
- EXPRESSION_PARSERS
- UNARY_PARSERS
- STRING_PARSERS
- NUMERIC_PARSERS
- PRIMARY_PARSERS
- PLACEHOLDER_PARSERS
- PROPERTY_PARSERS
- CONSTRAINT_PARSERS
- ALTER_PARSERS
- ALTER_ALTER_PARSERS
- SCHEMA_UNNAMED_CONSTRAINTS
- NO_PAREN_FUNCTION_PARSERS
- INVALID_FUNC_NAME_TOKENS
- FUNCTIONS_WITH_ALIASED_ARGS
- KEY_VALUE_DEFINITIONS
- FUNCTION_PARSERS
- SET_PARSERS
- SHOW_PARSERS
- TYPE_LITERAL_PARSERS
- TYPE_CONVERTERS
- DDL_SELECT_TOKENS
- PRE_VOLATILE_TOKENS
- TRANSACTION_KIND
- TRANSACTION_CHARACTERISTICS
- CONFLICT_ACTIONS
- CREATE_SEQUENCE
- ISOLATED_LOADING_OPTIONS
- USABLES
- CAST_ACTIONS
- SCHEMA_BINDING_OPTIONS
- EXECUTE_AS_OPTIONS
- KEY_CONSTRAINT_OPTIONS
- INSERT_ALTERNATIVES
- CLONE_KEYWORDS
- HISTORICAL_DATA_PREFIX
- HISTORICAL_DATA_KIND
- OPCLASS_FOLLOW_KEYWORDS
- OPTYPE_FOLLOW_TOKENS
- TABLE_INDEX_HINT_TOKENS
- VIEW_ATTRIBUTES
- WINDOW_ALIAS_TOKENS
- WINDOW_BEFORE_PAREN_TOKENS
- WINDOW_SIDES
- JSON_KEY_VALUE_SEPARATOR_TOKENS
- FETCH_TOKENS
- ADD_CONSTRAINT_TOKENS
- DISTINCT_TOKENS
- NULL_TOKENS
- UNNEST_OFFSET_ALIAS_TOKENS
- SELECT_START_TOKENS
- COPY_INTO_VARLEN_OPTIONS
- IS_JSON_PREDICATE_KIND
- ODBC_DATETIME_LITERALS
- ON_CONDITION_TOKENS
- PRIVILEGE_FOLLOW_TOKENS
- DESCRIBE_STYLES
- OPERATION_MODIFIERS
- STRICT_CAST
- PREFIXED_PIVOT_COLUMNS
- IDENTIFY_PIVOT_STRINGS
- TABLESAMPLE_CSV
- DEFAULT_SAMPLING_METHOD
- TRIM_PATTERN_FIRST
- MODIFIERS_ATTACHED_TO_SET_OP
- SET_OP_MODIFIERS
- JSON_ARROWS_REQUIRE_JSON_TYPE
- COLON_IS_VARIANT_EXTRACT
- VALUES_FOLLOWED_BY_PAREN
- SUPPORTS_IMPLICIT_UNNEST
- INTERVAL_SPANS
- SUPPORTS_PARTITION_SELECTION
- error_level
- error_message_context
- max_errors
- dialect
- reset
- parse
- parse_into
- check_errors
- raise_error
- expression
- validate_expression
- errors
- sql
829 class Generator(generator.Generator): 830 LIMIT_IS_TOP = True 831 QUERY_HINTS = False 832 RETURNING_END = False 833 NVL2_SUPPORTED = False 834 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 835 LIMIT_FETCH = "FETCH" 836 COMPUTED_COLUMN_WITH_TYPE = False 837 CTE_RECURSIVE_KEYWORD_REQUIRED = False 838 ENSURE_BOOLS = True 839 NULL_ORDERING_SUPPORTED = None 840 SUPPORTS_SINGLE_ARG_CONCAT = False 841 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 842 SUPPORTS_SELECT_INTO = True 843 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 844 SUPPORTS_TO_NUMBER = False 845 SET_OP_MODIFIERS = False 846 COPY_PARAMS_EQ_REQUIRED = True 847 PARSE_JSON_NAME = None 848 EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False 849 850 EXPRESSIONS_WITHOUT_NESTED_CTES = { 851 exp.Create, 852 exp.Delete, 853 exp.Insert, 854 exp.Intersect, 855 exp.Except, 856 exp.Merge, 857 exp.Select, 858 exp.Subquery, 859 exp.Union, 860 exp.Update, 861 } 862 863 SUPPORTED_JSON_PATH_PARTS = { 864 exp.JSONPathKey, 865 exp.JSONPathRoot, 866 exp.JSONPathSubscript, 867 } 868 869 TYPE_MAPPING = { 870 **generator.Generator.TYPE_MAPPING, 871 exp.DataType.Type.BOOLEAN: "BIT", 872 exp.DataType.Type.DECIMAL: "NUMERIC", 873 exp.DataType.Type.DATETIME: "DATETIME2", 874 exp.DataType.Type.DOUBLE: "FLOAT", 875 exp.DataType.Type.INT: "INTEGER", 876 exp.DataType.Type.ROWVERSION: "ROWVERSION", 877 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 878 exp.DataType.Type.TIMESTAMP: "DATETIME2", 879 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 880 exp.DataType.Type.UTINYINT: "TINYINT", 881 exp.DataType.Type.VARIANT: "SQL_VARIANT", 882 } 883 884 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 885 TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR) 886 887 TRANSFORMS = { 888 **generator.Generator.TRANSFORMS, 889 exp.AnyValue: any_value_to_max_sql, 890 exp.ArrayToString: rename_func("STRING_AGG"), 891 exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", 892 exp.DateAdd: date_delta_sql("DATEADD"), 893 exp.DateDiff: date_delta_sql("DATEDIFF"), 894 exp.CTE: transforms.preprocess([qualify_derived_table_outputs]), 895 exp.CurrentDate: rename_func("GETDATE"), 896 exp.CurrentTimestamp: rename_func("GETDATE"), 897 exp.DateStrToDate: datestrtodate_sql, 898 exp.Extract: rename_func("DATEPART"), 899 exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql, 900 exp.GroupConcat: _string_agg_sql, 901 exp.If: rename_func("IIF"), 902 exp.JSONExtract: _json_extract_sql, 903 exp.JSONExtractScalar: _json_extract_sql, 904 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 905 exp.Max: max_or_greatest, 906 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), 907 exp.Min: min_or_least, 908 exp.NumberToStr: _format_sql, 909 exp.Repeat: rename_func("REPLICATE"), 910 exp.Select: transforms.preprocess( 911 [ 912 transforms.eliminate_distinct_on, 913 transforms.eliminate_semi_and_anti_joins, 914 transforms.eliminate_qualify, 915 transforms.unnest_generate_date_array_using_recursive_cte, 916 ] 917 ), 918 exp.Stddev: rename_func("STDEV"), 919 exp.StrPosition: lambda self, e: self.func( 920 "CHARINDEX", e.args.get("substr"), e.this, e.args.get("position") 921 ), 922 exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]), 923 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), 924 exp.SHA2: lambda self, e: self.func( 925 "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this 926 ), 927 exp.TemporaryProperty: lambda self, e: "", 928 exp.TimeStrToTime: _timestrtotime_sql, 929 exp.TimeToStr: _format_sql, 930 exp.Trim: trim_sql, 931 exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True), 932 exp.TsOrDsDiff: date_delta_sql("DATEDIFF"), 933 } 934 935 TRANSFORMS.pop(exp.ReturnsProperty) 936 937 PROPERTIES_LOCATION = { 938 **generator.Generator.PROPERTIES_LOCATION, 939 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, 940 } 941 942 def scope_resolution(self, rhs: str, scope_name: str) -> str: 943 return f"{scope_name}::{rhs}" 944 945 def select_sql(self, expression: exp.Select) -> str: 946 if expression.args.get("offset"): 947 if not expression.args.get("order"): 948 # ORDER BY is required in order to use OFFSET in a query, so we use 949 # a noop order by, since we don't really care about the order. 950 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 951 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 952 953 limit = expression.args.get("limit") 954 if isinstance(limit, exp.Limit): 955 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 956 # we replace here because otherwise TOP would be generated in select_sql 957 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 958 959 return super().select_sql(expression) 960 961 def convert_sql(self, expression: exp.Convert) -> str: 962 name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT" 963 return self.func( 964 name, expression.this, expression.expression, expression.args.get("style") 965 ) 966 967 def queryoption_sql(self, expression: exp.QueryOption) -> str: 968 option = self.sql(expression, "this") 969 value = self.sql(expression, "expression") 970 if value: 971 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 972 return f"{option} {optional_equal_sign}{value}" 973 return option 974 975 def lateral_op(self, expression: exp.Lateral) -> str: 976 cross_apply = expression.args.get("cross_apply") 977 if cross_apply is True: 978 return "CROSS APPLY" 979 if cross_apply is False: 980 return "OUTER APPLY" 981 982 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 983 self.unsupported("LATERAL clause is not supported.") 984 return "LATERAL" 985 986 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 987 this = expression.this 988 split_count = len(this.name.split(".")) 989 delimiter = expression.args.get("delimiter") 990 part_index = expression.args.get("part_index") 991 992 if ( 993 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 994 or (delimiter and delimiter.name != ".") 995 or not part_index 996 or split_count > 4 997 ): 998 self.unsupported( 999 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 1000 ) 1001 return "" 1002 1003 return self.func( 1004 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1005 ) 1006 1007 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1008 nano = expression.args.get("nano") 1009 if nano is not None: 1010 nano.pop() 1011 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1012 1013 if expression.args.get("fractions") is None: 1014 expression.set("fractions", exp.Literal.number(0)) 1015 if expression.args.get("precision") is None: 1016 expression.set("precision", exp.Literal.number(0)) 1017 1018 return rename_func("TIMEFROMPARTS")(self, expression) 1019 1020 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1021 zone = expression.args.get("zone") 1022 if zone is not None: 1023 zone.pop() 1024 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1025 1026 nano = expression.args.get("nano") 1027 if nano is not None: 1028 nano.pop() 1029 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 1030 1031 if expression.args.get("milli") is None: 1032 expression.set("milli", exp.Literal.number(0)) 1033 1034 return rename_func("DATETIMEFROMPARTS")(self, expression) 1035 1036 def setitem_sql(self, expression: exp.SetItem) -> str: 1037 this = expression.this 1038 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1039 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 1040 return f"{self.sql(this.left)} {self.sql(this.right)}" 1041 1042 return super().setitem_sql(expression) 1043 1044 def boolean_sql(self, expression: exp.Boolean) -> str: 1045 if type(expression.parent) in BIT_TYPES or isinstance( 1046 expression.find_ancestor(exp.Values, exp.Select), exp.Values 1047 ): 1048 return "1" if expression.this else "0" 1049 1050 return "(1 = 1)" if expression.this else "(1 = 0)" 1051 1052 def is_sql(self, expression: exp.Is) -> str: 1053 if isinstance(expression.expression, exp.Boolean): 1054 return self.binary(expression, "=") 1055 return self.binary(expression, "IS") 1056 1057 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 1058 sql = self.sql(expression, "this") 1059 properties = expression.args.get("properties") 1060 1061 if sql[:1] != "#" and any( 1062 isinstance(prop, exp.TemporaryProperty) 1063 for prop in (properties.expressions if properties else []) 1064 ): 1065 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 1066 1067 return sql 1068 1069 def create_sql(self, expression: exp.Create) -> str: 1070 kind = expression.kind 1071 exists = expression.args.pop("exists", None) 1072 1073 like_property = expression.find(exp.LikeProperty) 1074 if like_property: 1075 ctas_expression = like_property.this 1076 else: 1077 ctas_expression = expression.expression 1078 1079 if kind == "VIEW": 1080 expression.this.set("catalog", None) 1081 with_ = expression.args.get("with") 1082 if ctas_expression and with_: 1083 # We've already preprocessed the Create expression to bubble up any nested CTEs, 1084 # but CREATE VIEW actually requires the WITH clause to come after it so we need 1085 # to amend the AST by moving the CTEs to the CREATE VIEW statement's query. 1086 ctas_expression.set("with", with_.pop()) 1087 1088 sql = super().create_sql(expression) 1089 1090 table = expression.find(exp.Table) 1091 1092 # Convert CTAS statement to SELECT .. INTO .. 1093 if kind == "TABLE" and ctas_expression: 1094 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 1095 ctas_expression = ctas_expression.subquery() 1096 1097 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 1098 select_into.set("into", exp.Into(this=table)) 1099 1100 if like_property: 1101 select_into.limit(0, copy=False) 1102 1103 sql = self.sql(select_into) 1104 1105 if exists: 1106 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 1107 sql_with_ctes = self.prepend_ctes(expression, sql) 1108 sql_literal = self.sql(exp.Literal.string(sql_with_ctes)) 1109 if kind == "SCHEMA": 1110 return f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql_literal})""" 1111 elif kind == "TABLE": 1112 assert table 1113 where = exp.and_( 1114 exp.column("table_name").eq(table.name), 1115 exp.column("table_schema").eq(table.db) if table.db else None, 1116 exp.column("table_catalog").eq(table.catalog) if table.catalog else None, 1117 ) 1118 return f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql_literal})""" 1119 elif kind == "INDEX": 1120 index = self.sql(exp.Literal.string(expression.this.text("this"))) 1121 return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})""" 1122 elif expression.args.get("replace"): 1123 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 1124 1125 return self.prepend_ctes(expression, sql) 1126 1127 def count_sql(self, expression: exp.Count) -> str: 1128 func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT" 1129 return rename_func(func_name)(self, expression) 1130 1131 def offset_sql(self, expression: exp.Offset) -> str: 1132 return f"{super().offset_sql(expression)} ROWS" 1133 1134 def version_sql(self, expression: exp.Version) -> str: 1135 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1136 this = f"FOR {name}" 1137 expr = expression.expression 1138 kind = expression.text("kind") 1139 if kind in ("FROM", "BETWEEN"): 1140 args = expr.expressions 1141 sep = "TO" if kind == "FROM" else "AND" 1142 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1143 else: 1144 expr_sql = self.sql(expr) 1145 1146 expr_sql = f" {expr_sql}" if expr_sql else "" 1147 return f"{this} {kind}{expr_sql}" 1148 1149 def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str: 1150 table = expression.args.get("table") 1151 table = f"{table} " if table else "" 1152 return f"RETURNS {table}{self.sql(expression, 'this')}" 1153 1154 def returning_sql(self, expression: exp.Returning) -> str: 1155 into = self.sql(expression, "into") 1156 into = self.seg(f"INTO {into}") if into else "" 1157 return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}" 1158 1159 def transaction_sql(self, expression: exp.Transaction) -> str: 1160 this = self.sql(expression, "this") 1161 this = f" {this}" if this else "" 1162 mark = self.sql(expression, "mark") 1163 mark = f" WITH MARK {mark}" if mark else "" 1164 return f"BEGIN TRANSACTION{this}{mark}" 1165 1166 def commit_sql(self, expression: exp.Commit) -> str: 1167 this = self.sql(expression, "this") 1168 this = f" {this}" if this else "" 1169 durability = expression.args.get("durability") 1170 durability = ( 1171 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1172 if durability is not None 1173 else "" 1174 ) 1175 return f"COMMIT TRANSACTION{this}{durability}" 1176 1177 def rollback_sql(self, expression: exp.Rollback) -> str: 1178 this = self.sql(expression, "this") 1179 this = f" {this}" if this else "" 1180 return f"ROLLBACK TRANSACTION{this}" 1181 1182 def identifier_sql(self, expression: exp.Identifier) -> str: 1183 identifier = super().identifier_sql(expression) 1184 1185 if expression.args.get("global"): 1186 identifier = f"##{identifier}" 1187 elif expression.args.get("temporary"): 1188 identifier = f"#{identifier}" 1189 1190 return identifier 1191 1192 def constraint_sql(self, expression: exp.Constraint) -> str: 1193 this = self.sql(expression, "this") 1194 expressions = self.expressions(expression, flat=True, sep=" ") 1195 return f"CONSTRAINT {this} {expressions}" 1196 1197 def length_sql(self, expression: exp.Length) -> str: 1198 return self._uncast_text(expression, "LEN") 1199 1200 def right_sql(self, expression: exp.Right) -> str: 1201 return self._uncast_text(expression, "RIGHT") 1202 1203 def left_sql(self, expression: exp.Left) -> str: 1204 return self._uncast_text(expression, "LEFT") 1205 1206 def _uncast_text(self, expression: exp.Expression, name: str) -> str: 1207 this = expression.this 1208 if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT): 1209 this_sql = self.sql(this, "this") 1210 else: 1211 this_sql = self.sql(this) 1212 expression_sql = self.sql(expression, "expression") 1213 return self.func(name, this_sql, expression_sql if expression_sql else None) 1214 1215 def partition_sql(self, expression: exp.Partition) -> str: 1216 return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))" 1217 1218 def alter_sql(self, expression: exp.Alter) -> str: 1219 action = seq_get(expression.args.get("actions") or [], 0) 1220 if isinstance(action, exp.AlterRename): 1221 return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'" 1222 return super().alter_sql(expression) 1223 1224 def drop_sql(self, expression: exp.Drop) -> str: 1225 if expression.args["kind"] == "VIEW": 1226 expression.this.set("catalog", None) 1227 return super().drop_sql(expression) 1228 1229 def declare_sql(self, expression: exp.Declare) -> str: 1230 return f"DECLARE {self.expressions(expression, flat=True)}" 1231 1232 def declareitem_sql(self, expression: exp.DeclareItem) -> str: 1233 variable = self.sql(expression, "this") 1234 default = self.sql(expression, "default") 1235 default = f" = {default}" if default else "" 1236 1237 kind = self.sql(expression, "kind") 1238 if isinstance(expression.args.get("kind"), exp.Schema): 1239 kind = f"TABLE {kind}" 1240 1241 return f"{variable} AS {kind}{default}" 1242 1243 def options_modifier(self, expression: exp.Expression) -> str: 1244 options = self.expressions(expression, key="options") 1245 return f" OPTION{self.wrap(options)}" if options else "" 1246 1247 def dpipe_sql(self, expression: exp.DPipe) -> str: 1248 return self.sql( 1249 reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten()) 1250 )
Generator converts a given syntax tree to the corresponding SQL string.
Arguments:
- pretty: Whether to format the produced SQL string. Default: False.
- identify: Determines when an identifier should be quoted. Possible values are: False (default): Never quote, except in cases where it's mandatory by the dialect. True or 'always': Always quote. 'safe': Only quote identifiers that are case insensitive.
- normalize: Whether to normalize identifiers to lowercase. Default: False.
- pad: The pad size in a formatted string. For example, this affects the indentation of a projection in a query, relative to its nesting level. Default: 2.
- indent: The indentation size in a formatted string. For example, this affects the
indentation of subqueries and filters under a
WHERE
clause. Default: 2. - normalize_functions: How to normalize function names. Possible values are: "upper" or True (default): Convert names to uppercase. "lower": Convert names to lowercase. False: Disables function name normalization.
- unsupported_level: Determines the generator's behavior when it encounters unsupported expressions. Default ErrorLevel.WARN.
- max_unsupported: Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3
- leading_comma: Whether the comma is leading or trailing in select expressions. This is only relevant when generating in pretty mode. Default: False
- max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
- comments: Whether to preserve comments in the output SQL code. Default: True
945 def select_sql(self, expression: exp.Select) -> str: 946 if expression.args.get("offset"): 947 if not expression.args.get("order"): 948 # ORDER BY is required in order to use OFFSET in a query, so we use 949 # a noop order by, since we don't really care about the order. 950 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 951 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 952 953 limit = expression.args.get("limit") 954 if isinstance(limit, exp.Limit): 955 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 956 # we replace here because otherwise TOP would be generated in select_sql 957 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 958 959 return super().select_sql(expression)
967 def queryoption_sql(self, expression: exp.QueryOption) -> str: 968 option = self.sql(expression, "this") 969 value = self.sql(expression, "expression") 970 if value: 971 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 972 return f"{option} {optional_equal_sign}{value}" 973 return option
975 def lateral_op(self, expression: exp.Lateral) -> str: 976 cross_apply = expression.args.get("cross_apply") 977 if cross_apply is True: 978 return "CROSS APPLY" 979 if cross_apply is False: 980 return "OUTER APPLY" 981 982 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 983 self.unsupported("LATERAL clause is not supported.") 984 return "LATERAL"
986 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 987 this = expression.this 988 split_count = len(this.name.split(".")) 989 delimiter = expression.args.get("delimiter") 990 part_index = expression.args.get("part_index") 991 992 if ( 993 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 994 or (delimiter and delimiter.name != ".") 995 or not part_index 996 or split_count > 4 997 ): 998 self.unsupported( 999 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 1000 ) 1001 return "" 1002 1003 return self.func( 1004 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1005 )
1007 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1008 nano = expression.args.get("nano") 1009 if nano is not None: 1010 nano.pop() 1011 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1012 1013 if expression.args.get("fractions") is None: 1014 expression.set("fractions", exp.Literal.number(0)) 1015 if expression.args.get("precision") is None: 1016 expression.set("precision", exp.Literal.number(0)) 1017 1018 return rename_func("TIMEFROMPARTS")(self, expression)
1020 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1021 zone = expression.args.get("zone") 1022 if zone is not None: 1023 zone.pop() 1024 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1025 1026 nano = expression.args.get("nano") 1027 if nano is not None: 1028 nano.pop() 1029 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 1030 1031 if expression.args.get("milli") is None: 1032 expression.set("milli", exp.Literal.number(0)) 1033 1034 return rename_func("DATETIMEFROMPARTS")(self, expression)
1036 def setitem_sql(self, expression: exp.SetItem) -> str: 1037 this = expression.this 1038 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1039 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 1040 return f"{self.sql(this.left)} {self.sql(this.right)}" 1041 1042 return super().setitem_sql(expression)
1057 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 1058 sql = self.sql(expression, "this") 1059 properties = expression.args.get("properties") 1060 1061 if sql[:1] != "#" and any( 1062 isinstance(prop, exp.TemporaryProperty) 1063 for prop in (properties.expressions if properties else []) 1064 ): 1065 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 1066 1067 return sql
1069 def create_sql(self, expression: exp.Create) -> str: 1070 kind = expression.kind 1071 exists = expression.args.pop("exists", None) 1072 1073 like_property = expression.find(exp.LikeProperty) 1074 if like_property: 1075 ctas_expression = like_property.this 1076 else: 1077 ctas_expression = expression.expression 1078 1079 if kind == "VIEW": 1080 expression.this.set("catalog", None) 1081 with_ = expression.args.get("with") 1082 if ctas_expression and with_: 1083 # We've already preprocessed the Create expression to bubble up any nested CTEs, 1084 # but CREATE VIEW actually requires the WITH clause to come after it so we need 1085 # to amend the AST by moving the CTEs to the CREATE VIEW statement's query. 1086 ctas_expression.set("with", with_.pop()) 1087 1088 sql = super().create_sql(expression) 1089 1090 table = expression.find(exp.Table) 1091 1092 # Convert CTAS statement to SELECT .. INTO .. 1093 if kind == "TABLE" and ctas_expression: 1094 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 1095 ctas_expression = ctas_expression.subquery() 1096 1097 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 1098 select_into.set("into", exp.Into(this=table)) 1099 1100 if like_property: 1101 select_into.limit(0, copy=False) 1102 1103 sql = self.sql(select_into) 1104 1105 if exists: 1106 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 1107 sql_with_ctes = self.prepend_ctes(expression, sql) 1108 sql_literal = self.sql(exp.Literal.string(sql_with_ctes)) 1109 if kind == "SCHEMA": 1110 return f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql_literal})""" 1111 elif kind == "TABLE": 1112 assert table 1113 where = exp.and_( 1114 exp.column("table_name").eq(table.name), 1115 exp.column("table_schema").eq(table.db) if table.db else None, 1116 exp.column("table_catalog").eq(table.catalog) if table.catalog else None, 1117 ) 1118 return f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql_literal})""" 1119 elif kind == "INDEX": 1120 index = self.sql(exp.Literal.string(expression.this.text("this"))) 1121 return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})""" 1122 elif expression.args.get("replace"): 1123 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 1124 1125 return self.prepend_ctes(expression, sql)
1134 def version_sql(self, expression: exp.Version) -> str: 1135 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1136 this = f"FOR {name}" 1137 expr = expression.expression 1138 kind = expression.text("kind") 1139 if kind in ("FROM", "BETWEEN"): 1140 args = expr.expressions 1141 sep = "TO" if kind == "FROM" else "AND" 1142 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1143 else: 1144 expr_sql = self.sql(expr) 1145 1146 expr_sql = f" {expr_sql}" if expr_sql else "" 1147 return f"{this} {kind}{expr_sql}"
1166 def commit_sql(self, expression: exp.Commit) -> str: 1167 this = self.sql(expression, "this") 1168 this = f" {this}" if this else "" 1169 durability = expression.args.get("durability") 1170 durability = ( 1171 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1172 if durability is not None 1173 else "" 1174 ) 1175 return f"COMMIT TRANSACTION{this}{durability}"
1182 def identifier_sql(self, expression: exp.Identifier) -> str: 1183 identifier = super().identifier_sql(expression) 1184 1185 if expression.args.get("global"): 1186 identifier = f"##{identifier}" 1187 elif expression.args.get("temporary"): 1188 identifier = f"#{identifier}" 1189 1190 return identifier
1232 def declareitem_sql(self, expression: exp.DeclareItem) -> str: 1233 variable = self.sql(expression, "this") 1234 default = self.sql(expression, "default") 1235 default = f" = {default}" if default else "" 1236 1237 kind = self.sql(expression, "kind") 1238 if isinstance(expression.args.get("kind"), exp.Schema): 1239 kind = f"TABLE {kind}" 1240 1241 return f"{variable} AS {kind}{default}"
Inherited Members
- sqlglot.generator.Generator
- Generator
- IGNORE_NULLS_IN_FUNC
- LOCKING_READS_SUPPORTED
- WRAP_DERIVED_VALUES
- CREATE_FUNCTION_RETURN_AS
- MATCHED_BY_SOURCE
- SINGLE_STRING_INTERVAL
- INTERVAL_ALLOWS_PLURAL_FORM
- LIMIT_ONLY_LITERALS
- RENAME_TABLE_WITH_DB
- GROUPINGS_SEP
- INDEX_ON
- JOIN_HINTS
- TABLE_HINTS
- QUERY_HINT_SEP
- IS_BOOL_ALLOWED
- DUPLICATE_KEY_UPDATE_WITH_SET
- EXTRACT_ALLOWS_QUOTES
- TZ_TO_WITH_TIME_ZONE
- VALUES_AS_TABLE
- UNNEST_WITH_ORDINALITY
- AGGREGATE_FILTER_SUPPORTED
- SEMI_ANTI_JOIN_WITH_SIDE
- SUPPORTS_TABLE_COPY
- TABLESAMPLE_REQUIRES_PARENS
- TABLESAMPLE_SIZE_IS_ROWS
- TABLESAMPLE_KEYWORDS
- TABLESAMPLE_WITH_METHOD
- COLLATE_IS_FUNC
- DATA_TYPE_SPECIFIERS_ALLOWED
- LAST_DAY_SUPPORTS_DATE_PART
- SUPPORTS_TABLE_ALIAS_COLUMNS
- UNPIVOT_ALIASES_ARE_IDENTIFIERS
- JSON_KEY_VALUE_PAIR_SEP
- INSERT_OVERWRITE
- SUPPORTS_UNLOGGED_TABLES
- SUPPORTS_CREATE_TABLE_LIKE
- LIKE_PROPERTY_INSIDE_SCHEMA
- MULTI_ARG_DISTINCT
- JSON_TYPE_REQUIRED_FOR_EXTRACTION
- JSON_PATH_SINGLE_QUOTE_ESCAPE
- CAN_IMPLEMENT_ARRAY_ANY
- COPY_PARAMS_ARE_WRAPPED
- COPY_HAS_INTO_KEYWORD
- STAR_EXCEPT
- HEX_FUNC
- WITH_PROPERTIES_PREFIX
- QUOTE_JSON_PATH
- PAD_FILL_PATTERN_IS_REQUIRED
- SUPPORTS_EXPLODING_PROJECTIONS
- ARRAY_CONCAT_IS_VAR_LEN
- SUPPORTS_CONVERT_TIMEZONE
- TIME_PART_SINGULARS
- TOKEN_MAPPING
- STRUCT_DELIMITER
- PARAMETER_TOKEN
- NAMED_PLACEHOLDER_TOKEN
- RESERVED_KEYWORDS
- WITH_SEPARATED_COMMENTS
- EXCLUDE_COMMENTS
- UNWRAPPED_INTERVAL_VALUES
- PARAMETERIZABLE_TEXT_TYPES
- SENTINEL_LINE_BREAK
- pretty
- identify
- normalize
- pad
- unsupported_level
- max_unsupported
- leading_comma
- max_text_width
- comments
- dialect
- normalize_functions
- unsupported_messages
- generate
- preprocess
- unsupported
- sep
- seg
- pad_comment
- maybe_comment
- wrap
- no_identify
- normalize_func
- indent
- sql
- uncache_sql
- cache_sql
- characterset_sql
- column_parts
- column_sql
- columnposition_sql
- columndef_sql
- columnconstraint_sql
- computedcolumnconstraint_sql
- autoincrementcolumnconstraint_sql
- compresscolumnconstraint_sql
- generatedasidentitycolumnconstraint_sql
- generatedasrowcolumnconstraint_sql
- periodforsystemtimeconstraint_sql
- notnullcolumnconstraint_sql
- transformcolumnconstraint_sql
- primarykeycolumnconstraint_sql
- uniquecolumnconstraint_sql
- sequenceproperties_sql
- clone_sql
- describe_sql
- heredoc_sql
- prepend_ctes
- with_sql
- cte_sql
- tablealias_sql
- bitstring_sql
- hexstring_sql
- bytestring_sql
- unicodestring_sql
- rawstring_sql
- datatypeparam_sql
- datatype_sql
- directory_sql
- delete_sql
- set_operation
- set_operations
- fetch_sql
- filter_sql
- hint_sql
- indexparameters_sql
- index_sql
- hex_sql
- lowerhex_sql
- inputoutputformat_sql
- national_sql
- properties_sql
- root_properties
- properties
- with_properties
- locate_properties
- property_name
- property_sql
- likeproperty_sql
- fallbackproperty_sql
- journalproperty_sql
- freespaceproperty_sql
- checksumproperty_sql
- mergeblockratioproperty_sql
- datablocksizeproperty_sql
- blockcompressionproperty_sql
- isolatedloadingproperty_sql
- partitionboundspec_sql
- partitionedofproperty_sql
- lockingproperty_sql
- withdataproperty_sql
- withsystemversioningproperty_sql
- insert_sql
- introducer_sql
- kill_sql
- pseudotype_sql
- objectidentifier_sql
- onconflict_sql
- rowformatdelimitedproperty_sql
- withtablehint_sql
- indextablehint_sql
- historicaldata_sql
- table_parts
- table_sql
- tablesample_sql
- pivot_sql
- tuple_sql
- update_sql
- values_sql
- var_sql
- into_sql
- from_sql
- groupingsets_sql
- rollup_sql
- cube_sql
- group_sql
- having_sql
- connect_sql
- prior_sql
- join_sql
- lambda_sql
- lateral_sql
- limit_sql
- set_sql
- pragma_sql
- lock_sql
- literal_sql
- escape_str
- loaddata_sql
- null_sql
- order_sql
- withfill_sql
- cluster_sql
- distribute_sql
- sort_sql
- ordered_sql
- matchrecognizemeasure_sql
- matchrecognize_sql
- query_modifiers
- offset_limit_modifiers
- after_limit_modifiers
- schema_sql
- schema_columns_sql
- star_sql
- parameter_sql
- sessionparameter_sql
- placeholder_sql
- subquery_sql
- qualify_sql
- unnest_sql
- prewhere_sql
- where_sql
- window_sql
- partition_by_sql
- windowspec_sql
- withingroup_sql
- between_sql
- bracket_offset_expressions
- bracket_sql
- all_sql
- any_sql
- exists_sql
- case_sql
- nextvaluefor_sql
- extract_sql
- trim_sql
- convert_concat_args
- concat_sql
- concatws_sql
- check_sql
- foreignkey_sql
- primarykey_sql
- if_sql
- matchagainst_sql
- jsonkeyvalue_sql
- jsonpath_sql
- json_path_part
- formatjson_sql
- jsonobject_sql
- jsonobjectagg_sql
- jsonarray_sql
- jsonarrayagg_sql
- jsoncolumndef_sql
- jsonschema_sql
- jsontable_sql
- openjsoncolumndef_sql
- openjson_sql
- in_sql
- in_unnest_op
- interval_sql
- return_sql
- reference_sql
- anonymous_sql
- paren_sql
- neg_sql
- not_sql
- alias_sql
- pivotalias_sql
- aliases_sql
- atindex_sql
- attimezone_sql
- fromtimezone_sql
- add_sql
- and_sql
- or_sql
- xor_sql
- connector_sql
- bitwiseand_sql
- bitwiseleftshift_sql
- bitwisenot_sql
- bitwiseor_sql
- bitwiserightshift_sql
- bitwisexor_sql
- cast_sql
- currentdate_sql
- collate_sql
- command_sql
- comment_sql
- mergetreettlaction_sql
- mergetreettl_sql
- altercolumn_sql
- alterdiststyle_sql
- altersortkey_sql
- alterrename_sql
- renamecolumn_sql
- alterset_sql
- add_column_sql
- droppartition_sql
- addconstraint_sql
- distinct_sql
- ignorenulls_sql
- respectnulls_sql
- havingmax_sql
- intdiv_sql
- div_sql
- overlaps_sql
- distance_sql
- dot_sql
- eq_sql
- propertyeq_sql
- escape_sql
- glob_sql
- gt_sql
- gte_sql
- ilike_sql
- ilikeany_sql
- like_sql
- likeany_sql
- similarto_sql
- lt_sql
- lte_sql
- mod_sql
- mul_sql
- neq_sql
- nullsafeeq_sql
- nullsafeneq_sql
- slice_sql
- sub_sql
- trycast_sql
- try_sql
- log_sql
- use_sql
- binary
- function_fallback_sql
- func
- format_args
- too_wide
- format_time
- expressions
- op_expressions
- naked_property
- tag_sql
- token_sql
- userdefinedfunction_sql
- joinhint_sql
- kwarg_sql
- when_sql
- merge_sql
- tochar_sql
- tonumber_sql
- dictproperty_sql
- dictrange_sql
- dictsubproperty_sql
- duplicatekeyproperty_sql
- distributedbyproperty_sql
- oncluster_sql
- clusteredbyproperty_sql
- anyvalue_sql
- querytransform_sql
- indexconstraintoption_sql
- checkcolumnconstraint_sql
- indexcolumnconstraint_sql
- nvl2_sql
- comprehension_sql
- columnprefix_sql
- opclass_sql
- predict_sql
- forin_sql
- refresh_sql
- toarray_sql
- tsordstotime_sql
- tsordstotimestamp_sql
- tsordstodate_sql
- unixdate_sql
- lastday_sql
- dateadd_sql
- arrayany_sql
- struct_sql
- partitionrange_sql
- truncatetable_sql
- copyparameter_sql
- credentials_sql
- copy_sql
- semicolon_sql
- datadeletionproperty_sql
- maskingpolicycolumnconstraint_sql
- gapfill_sql
- scoperesolution_sql
- parsejson_sql
- rand_sql
- changes_sql
- pad_sql
- summarize_sql
- explodinggenerateseries_sql
- arrayconcat_sql
- converttimezone_sql
- json_sql
- jsonvalue_sql
- conditionalinsert_sql
- multitableinserts_sql
- oncondition_sql
- jsonexists_sql
- arrayagg_sql
- apply_sql
- grant_sql
- grantprivilege_sql
- grantprincipal_sql
- columns_sql
- overlay_sql
- todouble_sql