|
17 | 17 |
|
18 | 18 | import os
|
19 | 19 | import re
|
20 |
| -import sys |
| 20 | + |
21 | 21 | from collections import namedtuple
|
22 | 22 | from textwrap import dedent
|
23 | 23 |
|
|
31 | 31 | "SQLConfEntry", ["name", "default", "description", "version"])
|
32 | 32 |
|
33 | 33 |
|
34 |
| -def get_public_sql_configs(jvm, group): |
| 34 | +def get_sql_configs(jvm, group): |
35 | 35 | if group == "static":
|
36 | 36 | config_set = jvm.org.apache.spark.sql.api.python.PythonSQLUtils.listStaticSQLConfigs()
|
37 | 37 | else:
|
38 |
| - config_set = jvm.org.apache.spark.sql.api.python.PythonSQLUtils.listSQLConfigs() |
| 38 | + config_set = jvm.org.apache.spark.sql.api.python.PythonSQLUtils.listRuntimeSQLConfigs() |
39 | 39 | sql_configs = [
|
40 | 40 | SQLConfEntry(
|
41 | 41 | name=_sql_config._1(),
|
@@ -81,7 +81,11 @@ def generate_sql_configs_table_html(sql_configs, path):
|
81 | 81 | """
|
82 | 82 | ))
|
83 | 83 | for config in sorted(sql_configs, key=lambda x: x.name):
|
84 |
| - if config.default == "<undefined>": |
| 84 | + if config.name == "spark.sql.session.timeZone": |
| 85 | + default = "(value of local timezone)" |
| 86 | + elif config.name == "spark.sql.warehouse.dir": |
| 87 | + default = "(value of <code>$PWD/spark-warehouse</code>)" |
| 88 | + elif config.default == "<undefined>": |
85 | 89 | default = "(none)"
|
86 | 90 | elif config.default.startswith("<value of "):
|
87 | 91 | referenced_config_name = value_reference_pattern.match(config.default).group(1)
|
@@ -119,17 +123,13 @@ def generate_sql_configs_table_html(sql_configs, path):
|
119 | 123 |
|
120 | 124 |
|
if __name__ == "__main__":
    jvm = launch_gateway().jvm

    # This script lives in sql/; the generated tables belong in <spark root>/docs.
    docs_root_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), "docs")

    # Emit one HTML table per config group (runtime and static) so the docs
    # build can include them separately. The two passes are identical except
    # for the group name, so drive both from a single loop instead of two
    # copy-pasted stanzas.
    for group in ("runtime", "static"):
        sql_configs = get_sql_configs(jvm, group)
        sql_configs_table_path = os.path.join(
            docs_root_dir, "generated-" + group + "-sql-config-table.html")
        generate_sql_configs_table_html(sql_configs, path=sql_configs_table_path)
|
0 commit comments