Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
46 changes: 36 additions & 10 deletions sqlglot/dialects/tsql.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
strposition_sql,
timestrtotime_sql,
trim_sql,
map_date_part,
)
from sqlglot.helper import seq_get
from sqlglot.parser import build_coalesce
Expand Down Expand Up @@ -57,6 +58,11 @@
"d": "day",
}

# Generator-side inverse of the canonical date-part names: maps the
# dialect-neutral part name stored on exp.Extract back to the spelling
# T-SQL's DATEPART expects (e.g. canonical WEEKISO -> T-SQL ISO_WEEK).
# Parts not listed here are emitted unchanged.
DATE_PART_UNMAPPING = {
"WEEKISO": "ISO_WEEK",
"DAYOFWEEK": "WEEKDAY",
"TIMEZONE_MINUTE": "TZOFFSET",
}

DATE_FMT_RE = re.compile("([dD]{1,2})|([mM]{1,2})|([yY]{1,4})|([hH]{1,2})|([sS]{1,2})")

Expand Down Expand Up @@ -201,20 +207,12 @@ def _build_hashbytes(args: t.List) -> exp.Expression:
return exp.func("HASHBYTES", *args)


DATEPART_ONLY_FORMATS = {"DW", "WK", "HOUR", "QUARTER", "ISO_WEEK"}


def _format_sql(self: TSQL.Generator, expression: exp.NumberToStr | exp.TimeToStr) -> str:
fmt = expression.args["format"]

if not isinstance(expression, exp.NumberToStr):
if fmt.is_string:
mapped_fmt = format_time(fmt.name, TSQL.INVERSE_TIME_MAPPING)

name = (mapped_fmt or "").upper()
if name in DATEPART_ONLY_FORMATS:
return self.func("DATEPART", name, expression.this)

fmt_sql = self.sql(exp.Literal.string(mapped_fmt))
else:
fmt_sql = self.format_time(expression) or self.sql(fmt)
Expand Down Expand Up @@ -418,6 +416,22 @@ class TSQL(Dialect):

EXPRESSION_METADATA = EXPRESSION_METADATA.copy()

# Parser-side normalization of T-SQL DATEPART abbreviations to canonical
# part names, layered on top of the base Dialect mapping. Covers the
# T-SQL-only short forms (QQ, N, SS, MCS, TZ/TZOFFSET, ISOWK/ISOWW, ...).
# NOTE(review): "Y" maps to DAYOFYEAR (T-SQL semantics), not YEAR.
DATE_PART_MAPPING = {
**Dialect.DATE_PART_MAPPING,
"QQ": "QUARTER",
"M": "MONTH",
"Y": "DAYOFYEAR",
"WW": "WEEK",
"N": "MINUTE",
"SS": "SECOND",
"MCS": "MICROSECOND",
"TZOFFSET": "TIMEZONE_MINUTE",
"TZ": "TIMEZONE_MINUTE",
"ISO_WEEK": "WEEKISO",
"ISOWK": "WEEKISO",
"ISOWW": "WEEKISO",
}

TIME_MAPPING = {
"year": "%Y",
"dayofyear": "%j",
Expand Down Expand Up @@ -604,7 +618,6 @@ class Parser(parser.Parser):
exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL, big_int=True
),
"DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True),
"DATEPART": _build_formatted_time(exp.TimeToStr),
"DATETIMEFROMPARTS": _build_datetimefromparts,
"EOMONTH": _build_eomonth,
"FORMAT": _build_format,
Expand Down Expand Up @@ -670,6 +683,7 @@ class Parser(parser.Parser):
order=self._parse_order(),
null_handling=self._parse_on_handling("NULL", "NULL", "ABSENT"),
),
"DATEPART": lambda self: self._parse_datepart(),
}

# The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL
Expand All @@ -688,6 +702,13 @@ class Parser(parser.Parser):
"ts": exp.Timestamp,
}

def _parse_datepart(self) -> exp.Extract:
    """Parse T-SQL `DATEPART(<part>, <expr>)` into a canonical `exp.Extract`.

    The date-part token is normalized through the dialect's
    `DATE_PART_MAPPING` (via `map_date_part`) so downstream generators see
    the canonical part name rather than a T-SQL abbreviation.
    """
    part = self._parse_var()
    # `_match` short-circuits: no comma means no expression argument.
    value = self._match(TokenType.COMMA) and self._parse_bitwise()

    return self.expression(
        exp.Extract,
        this=map_date_part(part, self.dialect),
        expression=value,
    )

def _parse_alter_table_set(self) -> exp.AlterSet:
return self._parse_wrapped(super()._parse_alter_table_set)

Expand Down Expand Up @@ -1044,7 +1065,6 @@ class Generator(generator.Generator):
exp.CurrentTimestamp: rename_func("GETDATE"),
exp.CurrentTimestampLTZ: rename_func("SYSDATETIMEOFFSET"),
exp.DateStrToDate: datestrtodate_sql,
exp.Extract: rename_func("DATEPART"),
exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql,
exp.GroupConcat: _string_agg_sql,
exp.If: rename_func("IIF"),
Expand Down Expand Up @@ -1166,6 +1186,12 @@ def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str:
"PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py())
)

def extract_sql(self, expression: exp.Extract) -> str:
    """Render `exp.Extract` as a T-SQL `DATEPART(<part>, <expr>)` call.

    Canonical part names that T-SQL spells differently are translated
    back through `DATE_PART_UNMAPPING`; unmapped parts pass through as-is.
    """
    unit = expression.this
    tsql_unit = DATE_PART_UNMAPPING.get(unit.name.upper())

    return self.func("DATEPART", tsql_unit or unit, expression.expression)

def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
nano = expression.args.get("nano")
if nano is not None:
Expand Down
113 changes: 78 additions & 35 deletions tests/dialects/test_tsql.py
Original file line number Diff line number Diff line change
Expand Up @@ -1491,33 +1491,76 @@ def test_datename(self):
)

def test_datepart(self):
self.validate_identity(
"DATEPART(QUARTER, x)",
"DATEPART(QUARTER, CAST(x AS DATETIME2))",
)
self.validate_identity(
"DATEPART(YEAR, x)",
"FORMAT(CAST(x AS DATETIME2), 'yyyy')",
)
self.validate_identity(
"DATEPART(HOUR, date_and_time)",
"DATEPART(HOUR, CAST(date_and_time AS DATETIME2))",
)
self.validate_identity(
"DATEPART(WEEKDAY, date_and_time)",
"DATEPART(DW, CAST(date_and_time AS DATETIME2))",
)
self.validate_identity(
"DATEPART(DW, date_and_time)",
"DATEPART(DW, CAST(date_and_time AS DATETIME2))",
)

for fmt in ("QUARTER", "qq", "q"):
self.validate_identity(
f"DATEPART({fmt}, x)",
"DATEPART(QUARTER, x)",
)
for fmt in ("YEAR", "yy", "yyyy"):
self.validate_identity(
f"DATEPART({fmt}, x)",
"DATEPART(YEAR, x)",
)
for fmt in ("HOUR", "hh"):
self.validate_identity(
f"DATEPART({fmt}, date_and_time)",
"DATEPART(HOUR, date_and_time)",
)
for fmt in ("MINUTE", "mi", "n"):
self.validate_identity(
f"DATEPART({fmt}, date_and_time)",
"DATEPART(MINUTE, date_and_time)",
)
for fmt in ("SECOND", "ss", "s"):
self.validate_identity(
f"DATEPART({fmt}, date_and_time)",
"DATEPART(SECOND, date_and_time)",
)
for fmt in ("MILLISECOND", "ms"):
self.validate_identity(
f"DATEPART({fmt}, date_and_time)",
"DATEPART(MILLISECOND, date_and_time)",
)
for fmt in ("MICROSECOND", "mcs"):
self.validate_identity(
f"DATEPART({fmt}, date_and_time)",
"DATEPART(MICROSECOND, date_and_time)",
)
for fmt in ("NANOSECOND", "ns"):
self.validate_identity(
f"DATEPART({fmt}, date_and_time)",
"DATEPART(NANOSECOND, date_and_time)",
)
for fmt in ("WEEKDAY", "dw"):
self.validate_identity(
f"DATEPART({fmt}, date_and_time)",
"DATEPART(WEEKDAY, date_and_time)",
)
for fmt in ("TZOFFSET", "tz"):
self.validate_identity(
f"DATEPART({fmt}, date_and_time)",
"DATEPART(TZOFFSET, date_and_time)",
)
for fmt in ("MONTH", "mm", "m"):
self.validate_identity(
f"DATEPART({fmt}, date_and_time)",
"DATEPART(MONTH, date_and_time)",
)
for fmt in ("DAYOFYEAR", "dy", "y"):
self.validate_identity(
f"DATEPART({fmt}, date_and_time)",
"DATEPART(DAYOFYEAR, date_and_time)",
)
for fmt in ("DAY", "dd", "d"):
self.validate_identity(
f"DATEPART({fmt}, date_and_time)",
"DATEPART(DAY, date_and_time)",
)
self.validate_all(
"SELECT DATEPART(month,'1970-01-01')",
write={
"postgres": "SELECT TO_CHAR(CAST('1970-01-01' AS TIMESTAMP), 'MM')",
"spark": "SELECT DATE_FORMAT(CAST('1970-01-01' AS TIMESTAMP), 'MM')",
"tsql": "SELECT FORMAT(CAST('1970-01-01' AS DATETIME2), 'MM')",
"spark": "SELECT EXTRACT(month FROM '1970-01-01')",
"tsql": "SELECT DATEPART(month, '1970-01-01')",
},
)
self.validate_all(
Expand All @@ -1526,9 +1569,9 @@ def test_datepart(self):
"postgres": "SELECT DATE_PART('YEAR', '2017-01-01'::DATE)",
},
write={
"postgres": "SELECT TO_CHAR(CAST(CAST('2017-01-01' AS DATE) AS TIMESTAMP), 'YYYY')",
"spark": "SELECT DATE_FORMAT(CAST(CAST('2017-01-01' AS DATE) AS TIMESTAMP), 'yyyy')",
"tsql": "SELECT FORMAT(CAST(CAST('2017-01-01' AS DATE) AS DATETIME2), 'yyyy')",
"postgres": "SELECT EXTRACT(YEAR FROM CAST('2017-01-01' AS DATE))",
"spark": "SELECT EXTRACT(YEAR FROM CAST('2017-01-01' AS DATE))",
"tsql": "SELECT DATEPART(YEAR, CAST('2017-01-01' AS DATE))",
},
)
self.validate_all(
Expand All @@ -1537,9 +1580,9 @@ def test_datepart(self):
"postgres": "SELECT DATE_PART('month', '2017-03-01'::DATE)",
},
write={
"postgres": "SELECT TO_CHAR(CAST(CAST('2017-03-01' AS DATE) AS TIMESTAMP), 'MM')",
"spark": "SELECT DATE_FORMAT(CAST(CAST('2017-03-01' AS DATE) AS TIMESTAMP), 'MM')",
"tsql": "SELECT FORMAT(CAST(CAST('2017-03-01' AS DATE) AS DATETIME2), 'MM')",
"postgres": "SELECT EXTRACT(month FROM CAST('2017-03-01' AS DATE))",
"spark": "SELECT EXTRACT(month FROM CAST('2017-03-01' AS DATE))",
"tsql": "SELECT DATEPART(month, CAST('2017-03-01' AS DATE))",
},
)
self.validate_all(
Expand All @@ -1548,22 +1591,22 @@ def test_datepart(self):
"postgres": "SELECT DATE_PART('day', '2017-01-02'::DATE)",
},
write={
"postgres": "SELECT TO_CHAR(CAST(CAST('2017-01-02' AS DATE) AS TIMESTAMP), 'DD')",
"spark": "SELECT DATE_FORMAT(CAST(CAST('2017-01-02' AS DATE) AS TIMESTAMP), 'dd')",
"tsql": "SELECT FORMAT(CAST(CAST('2017-01-02' AS DATE) AS DATETIME2), 'dd')",
"postgres": "SELECT EXTRACT(day FROM CAST('2017-01-02' AS DATE))",
"spark": "SELECT EXTRACT(day FROM CAST('2017-01-02' AS DATE))",
"tsql": "SELECT DATEPART(day, CAST('2017-01-02' AS DATE))",
},
)

for fmt in ("WEEK", "WW", "WK"):
self.validate_identity(
f"SELECT DATEPART({fmt}, '2024-11-21')",
"SELECT DATEPART(WK, CAST('2024-11-21' AS DATETIME2))",
"SELECT DATEPART(WEEK, '2024-11-21')",
)

for fmt in ("ISOWK", "ISOWW", "ISO_WEEK"):
self.validate_identity(
f"SELECT DATEPART({fmt}, '2024-11-21')",
"SELECT DATEPART(ISO_WEEK, CAST('2024-11-21' AS DATETIME2))",
"SELECT DATEPART(ISO_WEEK, '2024-11-21')",
)

def test_convert(self):
Expand Down