chore: bump release number for unstable version (#3251)

* bump version for unstable

* allow generation of rc schemas

* update schemas
Author: Massimiliano Pippi, 2022-09-21 16:58:06 +02:00 (committed by GitHub)
Parent: 938e6fda5b
Commit: 8f76d64f6f
GPG Key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
7 changed files with 7492 additions and 30 deletions


@@ -1 +1 @@
-1.8.1rc0
+1.10.0rc0
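
For context: "1.10.0rc0" is a PEP 440 pre-release identifier, so it sorts before the final 1.10.0 release. A quick check (not part of this commit) using the packaging library:

from packaging.version import Version

# rc ("release candidate") versions are pre-releases and compare lower than the final release
assert Version("1.10.0rc0").is_prerelease
assert Version("1.10.0rc0") < Version("1.10.0")
assert Version("1.8.1rc0") < Version("1.10.0rc0")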

(File diff suppressed because it is too large.)


@@ -2,7 +2,7 @@
   "openapi": "3.0.2",
   "info": {
     "title": "Haystack REST API",
-    "version": "1.8.1rc0"
+    "version": "1.10.0rc0"
   },
   "paths": {
     "/initialized": {

(File diff suppressed because it is too large.)


@@ -214,6 +214,20 @@
           "$ref": "https://raw.githubusercontent.com/deepset-ai/haystack/main/haystack/json-schemas/haystack-pipeline-1.8.0.schema.json"
         }
       ]
+    },
+    {
+      "allOf": [
+        {
+          "properties": {
+            "version": {
+              "const": "1.10.0rc0"
+            }
+          }
+        },
+        {
+          "$ref": "https://raw.githubusercontent.com/deepset-ai/haystack/main/haystack/json-schemas/haystack-pipeline-1.10.0rc0.schema.json"
+        }
+      ]
     }
   ]
 }
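
Each entry in the index's oneOf array pins a version const and points to the matching per-version schema via $ref, so a pipeline YAML declaring version: 1.10.0rc0 now resolves to the rc schema. A minimal sketch of that lookup (the helper below is hypothetical, not part of the commit, and the index path is assumed):

import json
from typing import Optional


def find_schema_ref(index: dict, version: str) -> Optional[str]:
    """Return the $ref of the per-version schema whose version const matches, if any."""
    for entry in index.get("oneOf", []):
        clauses = entry.get("allOf", [])
        consts = [c.get("properties", {}).get("version", {}).get("const") for c in clauses]
        if version in consts:
            refs = [c["$ref"] for c in clauses if "$ref" in c]
            return refs[0] if refs else None
    return None


with open("haystack/json-schemas/haystack-pipeline.schema.json") as f:  # path assumed
    index = json.load(f)
print(find_schema_ref(index, "1.10.0rc0"))  # -> ...haystack-pipeline-1.10.0rc0.schema.json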


@@ -406,36 +406,33 @@ def inject_definition_in_schema(node_class: Type[BaseComponent], schema: Dict[st

 def update_json_schema(destination_path: Path = JSON_SCHEMAS_PATH):
     """
-    If the version contains "rc", only update main's schema.
-    Otherwise, create (or update) a new schema.
+    Create (or update) a new schema.
     """
-    # Update mains's schema
+    # `main` schema is always updated and will contain the same data as the latest
+    # commit from `main` or a release branch
     filename = f"haystack-pipeline-main.schema.json"
     with open(destination_path / filename, "w") as json_file:
         json.dump(get_json_schema(filename=filename, version="ignore"), json_file, indent=2)

-    # If it's not an rc version:
-    if "rc" not in haystack_version:
-
-        # Create/update the specific version file too
-        filename = f"haystack-pipeline-{haystack_version}.schema.json"
-        with open(destination_path / filename, "w") as json_file:
-            json.dump(get_json_schema(filename=filename, version=haystack_version), json_file, indent=2)
-
-        # Update the index
-        index_name = "haystack-pipeline.schema.json"
-        with open(destination_path / index_name, "r") as json_file:
-            index = json.load(json_file)
-            new_entry = {
-                "allOf": [
-                    {"properties": {"version": {"const": haystack_version}}},
-                    {
-                        "$ref": "https://raw.githubusercontent.com/deepset-ai/haystack/main/haystack/json-schemas/"
-                        f"haystack-pipeline-{haystack_version}.schema.json"
-                    },
-                ]
-            }
-            if new_entry not in index["oneOf"]:
-                index["oneOf"].append(new_entry)
-        with open(destination_path / index_name, "w") as json_file:
-            json.dump(index, json_file, indent=2)
+    # Create/update the specific version file too
+    filename = f"haystack-pipeline-{haystack_version}.schema.json"
+    with open(destination_path / filename, "w") as json_file:
+        json.dump(get_json_schema(filename=filename, version=haystack_version), json_file, indent=2)
+
+    # Update the index
+    index_name = "haystack-pipeline.schema.json"
+    with open(destination_path / index_name, "r") as json_file:
+        index = json.load(json_file)
+    new_entry = {
+        "allOf": [
+            {"properties": {"version": {"const": haystack_version}}},
+            {
+                "$ref": "https://raw.githubusercontent.com/deepset-ai/haystack/main/haystack/json-schemas/"
+                f"haystack-pipeline-{haystack_version}.schema.json"
+            },
+        ]
+    }
+    if new_entry not in index["oneOf"]:
+        index["oneOf"].append(new_entry)
+    with open(destination_path / index_name, "w") as json_file:
+        json.dump(index, json_file, indent=2)
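
Usage sketch of the updated helper (the import path and destination below are assumptions, since this excerpt does not show the file name). With the rc guard removed, one call on an rc build now writes the versioned schema and extends the index in addition to regenerating the main schema:

from pathlib import Path

from haystack.nodes._json_schema import update_json_schema  # module path assumed

# With haystack.__version__ == "1.10.0rc0", a single call now writes:
#   haystack-pipeline-main.schema.json        (always regenerated)
#   haystack-pipeline-1.10.0rc0.schema.json   (previously skipped for rc versions)
#   haystack-pipeline.schema.json             (index, with the rc entry appended)
update_json_schema(destination_path=Path("haystack/json-schemas"))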


@@ -4,9 +4,14 @@ version: ignore
 components:
   - name: FileTypeClassifier
     type: FileTypeClassifier

 pipelines:
   - name: query
     nodes:
       - name: FileTypeClassifier
         inputs: [File]
+  - name: indexing
+    nodes:
+      - name: FileTypeClassifier
+        inputs: [File]
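
With the indexing pipeline added to the sample, both pipelines can be loaded by name. A short sketch using Haystack's YAML loader (the sample's file path is illustrative, as it is not shown in this excerpt):

from pathlib import Path

from haystack.pipelines import Pipeline

yaml_path = Path("test/samples/pipeline/sample_pipeline.yaml")  # illustrative path

query_pipeline = Pipeline.load_from_yaml(yaml_path, pipeline_name="query")
indexing_pipeline = Pipeline.load_from_yaml(yaml_path, pipeline_name="indexing")  # pipeline added in this diff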