chore(doc): Fix json schema generation after pydantic v2 move (#14926)

Authored by skrydal on 2025-10-04 15:32:45 +02:00, committed by GitHub
parent 2d7c009b81
commit eea46700f6
2 changed files with 18 additions and 16 deletions
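Background for the change, as a minimal sketch: pydantic v2 nests reused sub-models under "$defs" and references them via "#/$defs/<name>", whereas v1 used a "definitions" block, so the hand-maintained base schema and the merging script have to follow suit. The Recipe and FileSinkConfig models below are illustrative stand-ins, not DataHub classes, and the snippet assumes pydantic v2 is installed.

    from pydantic import BaseModel


    class FileSinkConfig(BaseModel):  # illustrative stand-in, not a DataHub class
        filename: str


    class Recipe(BaseModel):  # illustrative stand-in
        sink: FileSinkConfig


    schema = Recipe.model_json_schema()

    # Pydantic v2 emits shared sub-schemas under "$defs" (v1 used "definitions")
    # and points at them with "#/$defs/<name>" references.
    assert "$defs" in schema and "definitions" not in schema
    print(schema["properties"]["sink"])  # e.g. {'$ref': '#/$defs/FileSinkConfig'}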

File 1 of 2

@@ -11,7 +11,7 @@ def get_base() -> Any:
         "id": "https://json.schemastore.org/datahub-ingestion",
         "title": "Datahub Ingestion",
         "description": "Root schema of Datahub Ingestion",
-        "definitions": {
+        "$defs": {
             "console_sink": {
                 "type": "object",
                 "properties": {
@@ -26,7 +26,7 @@ def get_base() -> Any:
             "type": "object",
             "properties": {
                 "type": {"type": "string", "enum": ["file"]},
-                "config": {"$ref": "#/definitions/file_sink_config"},
+                "config": {"$ref": "#/$defs/file_sink_config"},
             },
             "required": ["type", "config"],
         },
@@ -45,7 +45,7 @@ def get_base() -> Any:
             "type": "object",
             "properties": {
                 "type": {"type": "string", "enum": ["datahub-rest"]},
-                "config": {"$ref": "#/definitions/datahub_rest_sink_config"},
+                "config": {"$ref": "#/$defs/datahub_rest_sink_config"},
             },
             "required": ["type", "config"],
             "additionalProperties": False,
@@ -102,7 +102,7 @@ def get_base() -> Any:
             "type": "object",
             "properties": {
                 "type": {"type": "string", "enum": ["datahub-kafka"]},
-                "config": {"$ref": "#/definitions/datahub_kafka_sink_config"},
+                "config": {"$ref": "#/$defs/datahub_kafka_sink_config"},
             },
             "required": ["type", "config"],
             "additionalProperties": False,
@@ -178,10 +178,10 @@ def get_base() -> Any:
             "sink": {
                 "description": "sink",
                 "anyOf": [
-                    {"$ref": "#/definitions/datahub_kafka_sink"},
-                    {"$ref": "#/definitions/datahub_rest_sink"},
-                    {"$ref": "#/definitions/console_sink"},
-                    {"$ref": "#/definitions/file_sink"},
+                    {"$ref": "#/$defs/datahub_kafka_sink"},
+                    {"$ref": "#/$defs/datahub_rest_sink"},
+                    {"$ref": "#/$defs/console_sink"},
+                    {"$ref": "#/$defs/file_sink"},
                 ],
             },
         },
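The reference paths in the base schema have to agree with the key the generated definitions are merged under, otherwise $ref resolution breaks at validation time. A minimal sketch of that constraint, assuming the jsonschema package is installed (the file_sink_config body below is illustrative, not the real generated schema):

    from jsonschema import Draft202012Validator

    schema = {
        "$defs": {
            "file_sink_config": {
                "type": "object",
                "properties": {"filename": {"type": "string"}},
                "required": ["filename"],
            }
        },
        "type": "object",
        "properties": {"sink": {"$ref": "#/$defs/file_sink_config"}},
    }

    Draft202012Validator.check_schema(schema)
    Draft202012Validator(schema).validate({"sink": {"filename": "out.json"}})
    # A ref still pointing at "#/definitions/file_sink_config" would fail to
    # resolve here, since nothing lives under "definitions" any more.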
@@ -216,22 +216,22 @@ for jfile in glob.glob(f"{config_schemas_dir}/*"):
         "type": "object",
         "properties": {
             "type": {"type": "string", "enum": [f"{config_name}"]},
-            "config": {"$ref": f"#/definitions/{config_name}_config"},
+            "config": {"$ref": f"#/$defs/{config_name}_config"},
         },
         "required": ["type", "config"],
     }
     configs[f"{config_name}"] = source_obj
-    if "definitions" in data:
-        definitions.update(data["definitions"])
-        data.pop("definitions", None)
+    if "$defs" in data:
+        definitions.update(data["$defs"])
+        data.pop("$defs", None)
     configs[f"{config_name}_config"] = data
-    ref = {"$ref": f"#/definitions/{config_name}"}
+    ref = {"$ref": f"#/$defs/{config_name}"}
     refs.append(ref)
 base = get_base()
-base["definitions"].update(configs)
-base["definitions"].update(definitions)
+base["$defs"].update(configs)
+base["$defs"].update(definitions)
 print(base["properties"]["source"])
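The loop above hoists each generated schema's own "$defs" into one shared pool before everything is attached to the base schema, so references stay flat instead of nesting $defs inside $defs. A self-contained sketch of that hoisting pattern (function and variable names here are illustrative, not the script's):

    from typing import Any, Dict


    def hoist_defs(config_name: str, data: Dict[str, Any],
                   configs: Dict[str, Any], definitions: Dict[str, Any]) -> None:
        # Pull the generated schema's nested "$defs" into the shared pool ...
        if "$defs" in data:
            definitions.update(data["$defs"])
            data.pop("$defs", None)
        # ... and register the plugin's config schema under "<name>_config",
        # which the wrapper object then references as "#/$defs/<name>_config".
        configs[f"{config_name}_config"] = data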

File 2 of 2

@@ -243,7 +243,9 @@ def create_plugin_from_capability_data(
     if hasattr(source_type, "get_config_class"):
         source_config_class: ConfigModel = source_type.get_config_class()
-        plugin.config_json_schema = source_config_class.schema_json(indent=2)
+        plugin.config_json_schema = json.dumps(
+            source_config_class.model_json_schema(), indent=2
+        )
         plugin.config_md = gen_md_table_from_pydantic(
             source_config_class, current_source=plugin_name
         )
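For reference, a minimal sketch of the call pattern used above: in pydantic v2 the schema comes back as a plain dict from model_json_schema(), so it is serialized with json.dumps to keep the old indent=2 string output. ExampleSourceConfig is an illustrative stand-in for a DataHub ConfigModel subclass.

    import json

    from pydantic import BaseModel


    class ExampleSourceConfig(BaseModel):  # illustrative stand-in for a ConfigModel
        host: str
        port: int = 5432


    config_json_schema = json.dumps(ExampleSourceConfig.model_json_schema(), indent=2)
    print(config_json_schema)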