fix(git-ignore): Git-ignore generated Python and Avro artifacts (#3320)

Dexter Lee 2021-10-06 11:54:30 -07:00 committed by GitHub
parent cfc97107e8
commit 4c038d7cfe
40 changed files with 5 additions and 24752 deletions

@@ -63,6 +63,7 @@ jobs:
run: ./metadata-ingestion/scripts/install_deps.sh
- name: Run metadata-ingestion tests
run: ./gradlew :metadata-ingestion:build :metadata-ingestion:check
metadata-ingestion-by-version:
runs-on: ubuntu-latest
strategy:
@@ -75,6 +76,8 @@ jobs:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: ./metadata-ingestion/scripts/install_deps.sh && python -m pip install --upgrade pip && pip install tox tox-gh-actions
- name: Codegen
run: ./gradlew :metadata-ingestion:codegen
- name: Run tox tests
run: cd metadata-ingestion && tox

.gitignore

@@ -15,6 +15,7 @@
**/src/mainGenerated*
**/src/testGenerated*
metadata-events/mxe-registration/src/main/resources/**/*.avsc
metadata-ingestion/src/datahub/metadata
# Java
.java-version

@@ -18,7 +18,7 @@ The reporter interface enables the source to report statistics, warnings, failures
The core for the source is the `get_workunits` method, which produces a stream of MCE objects. The [file source](./src/datahub/ingestion/source/file.py) is a good and simple example.
The MetadataChangeEventClass is defined in the [metadata models](./src/datahub/metadata/schema_classes.py). There are also some [convenience methods](./src/datahub/emitter/mce_builder.py) for commonly used operations.
The MetadataChangeEventClass is defined in the metadata models which are generated under `metadata-ingestion/src/datahub/metadata/schema_classes.py`. There are also some [convenience methods](./src/datahub/emitter/mce_builder.py) for commonly used operations.
### 4. Set up the dependencies

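For orientation, a minimal sketch of what the passage above describes, assuming the generated schema_classes module has been produced by the codegen step (the platform and table names are invented): an aspect is attached to a snapshot, which is wrapped in a MetadataChangeEventClass, with the URN built by an mce_builder convenience helper.

# A hedged sketch: build one MCE for a dataset using the generated
# classes plus an mce_builder convenience helper.
from datahub.emitter.mce_builder import make_dataset_urn
from datahub.metadata.schema_classes import (
    DatasetSnapshotClass,
    MetadataChangeEventClass,
    StatusClass,
)

mce = MetadataChangeEventClass(
    proposedSnapshot=DatasetSnapshotClass(
        urn=make_dataset_urn(platform="hive", name="db.table", env="PROD"),
        aspects=[StatusClass(removed=False)],
    )
)

A source's get_workunits implementation would yield a stream of such objects wrapped in workunits.
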
@@ -1,7 +0,0 @@
# flake8: noqa
# This file is autogenerated by /metadata-ingestion/scripts/avro_codegen.py
# Do not modify manually!
# fmt: off
# fmt: on

@@ -1,7 +0,0 @@
# flake8: noqa
# This file is autogenerated by /metadata-ingestion/scripts/avro_codegen.py
# Do not modify manually!
# fmt: off
# fmt: on

@@ -1,7 +0,0 @@
# flake8: noqa
# This file is autogenerated by /metadata-ingestion/scripts/avro_codegen.py
# Do not modify manually!
# fmt: off
# fmt: on

@@ -1,11 +0,0 @@
# flake8: noqa
# This file is autogenerated by /metadata-ingestion/scripts/avro_codegen.py
# Do not modify manually!
# fmt: off
from ....schema_classes import KafkaAuditHeaderClass
KafkaAuditHeader = KafkaAuditHeaderClass
# fmt: on

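Each deleted module above and below follows one pattern: it re-exports a class from the central schema_classes module under its bare Avro record name, so KafkaAuditHeader and KafkaAuditHeaderClass are the same type. A sketch under the assumption that the codegen emits keyword-argument constructors mirroring the Avro fields (all values below are invented):

from datahub.metadata.schema_classes import KafkaAuditHeaderClass

# time/server/appName/messageId are the required KafkaAuditHeader
# fields; messageId is a fixed(16) Avro field, hence 16 raw bytes.
header = KafkaAuditHeaderClass(
    time=1633546470000,
    server="ingest-host-01",
    appName="datahub-ingestion",
    messageId=b"\x00" * 16,
)
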
@@ -1,7 +0,0 @@
# flake8: noqa
# This file is autogenerated by /metadata-ingestion/scripts/avro_codegen.py
# Do not modify manually!
# fmt: off
# fmt: on

@@ -1,19 +0,0 @@
# flake8: noqa
# This file is autogenerated by /metadata-ingestion/scripts/avro_codegen.py
# Do not modify manually!
# fmt: off
from .....schema_classes import ChartInfoClass
from .....schema_classes import ChartQueryClass
from .....schema_classes import ChartQueryTypeClass
from .....schema_classes import ChartTypeClass
from .....schema_classes import EditableChartPropertiesClass
ChartInfo = ChartInfoClass
ChartQuery = ChartQueryClass
ChartQueryType = ChartQueryTypeClass
ChartType = ChartTypeClass
EditableChartProperties = EditableChartPropertiesClass
# fmt: on

@@ -1,59 +0,0 @@
# flake8: noqa
# This file is autogenerated by /metadata-ingestion/scripts/avro_codegen.py
# Do not modify manually!
# fmt: off
from .....schema_classes import AccessLevelClass
from .....schema_classes import AuditStampClass
from .....schema_classes import BrowsePathsClass
from .....schema_classes import ChangeAuditStampsClass
from .....schema_classes import CostClass
from .....schema_classes import CostCostClass
from .....schema_classes import CostCostDiscriminatorClass
from .....schema_classes import CostTypeClass
from .....schema_classes import DeprecationClass
from .....schema_classes import FabricTypeClass
from .....schema_classes import GlobalTagsClass
from .....schema_classes import GlossaryTermAssociationClass
from .....schema_classes import GlossaryTermsClass
from .....schema_classes import InstitutionalMemoryClass
from .....schema_classes import InstitutionalMemoryMetadataClass
from .....schema_classes import MLFeatureDataTypeClass
from .....schema_classes import OwnerClass
from .....schema_classes import OwnershipClass
from .....schema_classes import OwnershipSourceClass
from .....schema_classes import OwnershipSourceTypeClass
from .....schema_classes import OwnershipTypeClass
from .....schema_classes import StatusClass
from .....schema_classes import TagAssociationClass
from .....schema_classes import VersionTagClass
from .....schema_classes import WindowDurationClass
AccessLevel = AccessLevelClass
AuditStamp = AuditStampClass
BrowsePaths = BrowsePathsClass
ChangeAuditStamps = ChangeAuditStampsClass
Cost = CostClass
CostCost = CostCostClass
CostCostDiscriminator = CostCostDiscriminatorClass
CostType = CostTypeClass
Deprecation = DeprecationClass
FabricType = FabricTypeClass
GlobalTags = GlobalTagsClass
GlossaryTermAssociation = GlossaryTermAssociationClass
GlossaryTerms = GlossaryTermsClass
InstitutionalMemory = InstitutionalMemoryClass
InstitutionalMemoryMetadata = InstitutionalMemoryMetadataClass
MLFeatureDataType = MLFeatureDataTypeClass
Owner = OwnerClass
Ownership = OwnershipClass
OwnershipSource = OwnershipSourceClass
OwnershipSourceType = OwnershipSourceTypeClass
OwnershipType = OwnershipTypeClass
Status = StatusClass
TagAssociation = TagAssociationClass
VersionTag = VersionTagClass
WindowDuration = WindowDurationClass
# fmt: on

@@ -1,13 +0,0 @@
# flake8: noqa
# This file is autogenerated by /metadata-ingestion/scripts/avro_codegen.py
# Do not modify manually!
# fmt: off
from ......schema_classes import TransformationTypeClass
from ......schema_classes import UDFTransformerClass
TransformationType = TransformationTypeClass
UDFTransformer = UDFTransformerClass
# fmt: on

@@ -1,13 +0,0 @@
# flake8: noqa
# This file is autogenerated by /metadata-ingestion/scripts/avro_codegen.py
# Do not modify manually!
# fmt: off
from .....schema_classes import DashboardInfoClass
from .....schema_classes import EditableDashboardPropertiesClass
DashboardInfo = DashboardInfoClass
EditableDashboardProperties = EditableDashboardPropertiesClass
# fmt: on

@@ -1,21 +0,0 @@
# flake8: noqa
# This file is autogenerated by /metadata-ingestion/scripts/avro_codegen.py
# Do not modify manually!
# fmt: off
from .....schema_classes import DataFlowInfoClass
from .....schema_classes import DataJobInfoClass
from .....schema_classes import DataJobInputOutputClass
from .....schema_classes import EditableDataFlowPropertiesClass
from .....schema_classes import EditableDataJobPropertiesClass
from .....schema_classes import JobStatusClass
DataFlowInfo = DataFlowInfoClass
DataJobInfo = DataJobInfoClass
DataJobInputOutput = DataJobInputOutputClass
EditableDataFlowProperties = EditableDataFlowPropertiesClass
EditableDataJobProperties = EditableDataJobPropertiesClass
JobStatus = JobStatusClass
# fmt: on

@@ -1,11 +0,0 @@
# flake8: noqa
# This file is autogenerated by /metadata-ingestion/scripts/avro_codegen.py
# Do not modify manually!
# fmt: off
from ......schema_classes import AzkabanJobTypeClass
AzkabanJobType = AzkabanJobTypeClass
# fmt: on

@@ -1,13 +0,0 @@
# flake8: noqa
# This file is autogenerated by /metadata-ingestion/scripts/avro_codegen.py
# Do not modify manually!
# fmt: off
from .....schema_classes import DataPlatformInfoClass
from .....schema_classes import PlatformTypeClass
DataPlatformInfo = DataPlatformInfoClass
PlatformType = PlatformTypeClass
# fmt: on

@@ -1,11 +0,0 @@
# flake8: noqa
# This file is autogenerated by /metadata-ingestion/scripts/avro_codegen.py
# Do not modify manually!
# fmt: off
from .....schema_classes import DataProcessInfoClass
DataProcessInfo = DataProcessInfoClass
# fmt: on

@@ -1,41 +0,0 @@
# flake8: noqa
# This file is autogenerated by /metadata-ingestion/scripts/avro_codegen.py
# Do not modify manually!
# fmt: off
from .....schema_classes import DatasetDeprecationClass
from .....schema_classes import DatasetFieldMappingClass
from .....schema_classes import DatasetFieldProfileClass
from .....schema_classes import DatasetFieldUsageCountsClass
from .....schema_classes import DatasetLineageTypeClass
from .....schema_classes import DatasetProfileClass
from .....schema_classes import DatasetPropertiesClass
from .....schema_classes import DatasetUpstreamLineageClass
from .....schema_classes import DatasetUsageStatisticsClass
from .....schema_classes import DatasetUserUsageCountsClass
from .....schema_classes import EditableDatasetPropertiesClass
from .....schema_classes import HistogramClass
from .....schema_classes import QuantileClass
from .....schema_classes import UpstreamClass
from .....schema_classes import UpstreamLineageClass
from .....schema_classes import ValueFrequencyClass
DatasetDeprecation = DatasetDeprecationClass
DatasetFieldMapping = DatasetFieldMappingClass
DatasetFieldProfile = DatasetFieldProfileClass
DatasetFieldUsageCounts = DatasetFieldUsageCountsClass
DatasetLineageType = DatasetLineageTypeClass
DatasetProfile = DatasetProfileClass
DatasetProperties = DatasetPropertiesClass
DatasetUpstreamLineage = DatasetUpstreamLineageClass
DatasetUsageStatistics = DatasetUsageStatisticsClass
DatasetUserUsageCounts = DatasetUserUsageCountsClass
EditableDatasetProperties = EditableDatasetPropertiesClass
Histogram = HistogramClass
Quantile = QuantileClass
Upstream = UpstreamClass
UpstreamLineage = UpstreamLineageClass
ValueFrequency = ValueFrequencyClass
# fmt: on

@@ -1,7 +0,0 @@
# flake8: noqa
# This file is autogenerated by /metadata-ingestion/scripts/avro_codegen.py
# Do not modify manually!
# fmt: off
# fmt: on

@@ -1,11 +0,0 @@
# flake8: noqa
# This file is autogenerated by /metadata-ingestion/scripts/avro_codegen.py
# Do not modify manually!
# fmt: off
from ......schema_classes import ChangeTypeClass
ChangeType = ChangeTypeClass
# fmt: on

@@ -1,15 +0,0 @@
# flake8: noqa
# This file is autogenerated by /metadata-ingestion/scripts/avro_codegen.py
# Do not modify manually!
# fmt: off
from .....schema_classes import GlossaryNodeInfoClass
from .....schema_classes import GlossaryRelatedTermsClass
from .....schema_classes import GlossaryTermInfoClass
GlossaryNodeInfo = GlossaryNodeInfoClass
GlossaryRelatedTerms = GlossaryRelatedTermsClass
GlossaryTermInfo = GlossaryTermInfoClass
# fmt: on

@@ -1,17 +0,0 @@
# flake8: noqa
# This file is autogenerated by /metadata-ingestion/scripts/avro_codegen.py
# Do not modify manually!
# fmt: off
from .....schema_classes import CorpGroupInfoClass
from .....schema_classes import CorpUserEditableInfoClass
from .....schema_classes import CorpUserInfoClass
from .....schema_classes import GroupMembershipClass
CorpGroupInfo = CorpGroupInfoClass
CorpUserEditableInfo = CorpUserEditableInfoClass
CorpUserInfo = CorpUserInfoClass
GroupMembership = GroupMembershipClass
# fmt: on

@@ -1,7 +0,0 @@
# flake8: noqa
# This file is autogenerated by /metadata-ingestion/scripts/avro_codegen.py
# Do not modify manually!
# fmt: off
# fmt: on

@@ -1,49 +0,0 @@
# flake8: noqa
# This file is autogenerated by /metadata-ingestion/scripts/avro_codegen.py
# Do not modify manually!
# fmt: off
from ......schema_classes import ChartKeyClass
from ......schema_classes import CorpGroupKeyClass
from ......schema_classes import CorpUserKeyClass
from ......schema_classes import DashboardKeyClass
from ......schema_classes import DataFlowKeyClass
from ......schema_classes import DataHubPolicyKeyClass
from ......schema_classes import DataJobKeyClass
from ......schema_classes import DataPlatformKeyClass
from ......schema_classes import DataProcessKeyClass
from ......schema_classes import DatasetKeyClass
from ......schema_classes import GlossaryNodeKeyClass
from ......schema_classes import GlossaryTermKeyClass
from ......schema_classes import MLFeatureKeyClass
from ......schema_classes import MLFeatureTableKeyClass
from ......schema_classes import MLModelDeploymentKeyClass
from ......schema_classes import MLModelGroupKeyClass
from ......schema_classes import MLModelKeyClass
from ......schema_classes import MLPrimaryKeyKeyClass
from ......schema_classes import SchemaFieldKeyClass
from ......schema_classes import TagKeyClass
ChartKey = ChartKeyClass
CorpGroupKey = CorpGroupKeyClass
CorpUserKey = CorpUserKeyClass
DashboardKey = DashboardKeyClass
DataFlowKey = DataFlowKeyClass
DataHubPolicyKey = DataHubPolicyKeyClass
DataJobKey = DataJobKeyClass
DataPlatformKey = DataPlatformKeyClass
DataProcessKey = DataProcessKeyClass
DatasetKey = DatasetKeyClass
GlossaryNodeKey = GlossaryNodeKeyClass
GlossaryTermKey = GlossaryTermKeyClass
MLFeatureKey = MLFeatureKeyClass
MLFeatureTableKey = MLFeatureTableKeyClass
MLModelDeploymentKey = MLModelDeploymentKeyClass
MLModelGroupKey = MLModelGroupKeyClass
MLModelKey = MLModelKeyClass
MLPrimaryKeyKey = MLPrimaryKeyKeyClass
SchemaFieldKey = SchemaFieldKeyClass
TagKey = TagKeyClass
# fmt: on

@@ -1,49 +0,0 @@
# flake8: noqa
# This file is autogenerated by /metadata-ingestion/scripts/avro_codegen.py
# Do not modify manually!
# fmt: off
from ......schema_classes import ChartSnapshotClass
from ......schema_classes import CorpGroupSnapshotClass
from ......schema_classes import CorpUserSnapshotClass
from ......schema_classes import DashboardSnapshotClass
from ......schema_classes import DataFlowSnapshotClass
from ......schema_classes import DataHubPolicySnapshotClass
from ......schema_classes import DataJobSnapshotClass
from ......schema_classes import DataPlatformSnapshotClass
from ......schema_classes import DataProcessSnapshotClass
from ......schema_classes import DatasetSnapshotClass
from ......schema_classes import GlossaryNodeSnapshotClass
from ......schema_classes import GlossaryTermSnapshotClass
from ......schema_classes import MLFeatureSnapshotClass
from ......schema_classes import MLFeatureTableSnapshotClass
from ......schema_classes import MLModelDeploymentSnapshotClass
from ......schema_classes import MLModelGroupSnapshotClass
from ......schema_classes import MLModelSnapshotClass
from ......schema_classes import MLPrimaryKeySnapshotClass
from ......schema_classes import SchemaFieldSnapshotClass
from ......schema_classes import TagSnapshotClass
ChartSnapshot = ChartSnapshotClass
CorpGroupSnapshot = CorpGroupSnapshotClass
CorpUserSnapshot = CorpUserSnapshotClass
DashboardSnapshot = DashboardSnapshotClass
DataFlowSnapshot = DataFlowSnapshotClass
DataHubPolicySnapshot = DataHubPolicySnapshotClass
DataJobSnapshot = DataJobSnapshotClass
DataPlatformSnapshot = DataPlatformSnapshotClass
DataProcessSnapshot = DataProcessSnapshotClass
DatasetSnapshot = DatasetSnapshotClass
GlossaryNodeSnapshot = GlossaryNodeSnapshotClass
GlossaryTermSnapshot = GlossaryTermSnapshotClass
MLFeatureSnapshot = MLFeatureSnapshotClass
MLFeatureTableSnapshot = MLFeatureTableSnapshotClass
MLModelDeploymentSnapshot = MLModelDeploymentSnapshotClass
MLModelGroupSnapshot = MLModelGroupSnapshotClass
MLModelSnapshot = MLModelSnapshotClass
MLPrimaryKeySnapshot = MLPrimaryKeySnapshotClass
SchemaFieldSnapshot = SchemaFieldSnapshotClass
TagSnapshot = TagSnapshotClass
# fmt: on

@@ -1,7 +0,0 @@
# flake8: noqa
# This file is autogenerated by /metadata-ingestion/scripts/avro_codegen.py
# Do not modify manually!
# fmt: off
# fmt: on

@@ -1,57 +0,0 @@
# flake8: noqa
# This file is autogenerated by /metadata-ingestion/scripts/avro_codegen.py
# Do not modify manually!
# fmt: off
from ......schema_classes import BaseDataClass
from ......schema_classes import CaveatDetailsClass
from ......schema_classes import CaveatsAndRecommendationsClass
from ......schema_classes import DeploymentStatusClass
from ......schema_classes import EthicalConsiderationsClass
from ......schema_classes import EvaluationDataClass
from ......schema_classes import IntendedUseClass
from ......schema_classes import IntendedUserTypeClass
from ......schema_classes import MLFeaturePropertiesClass
from ......schema_classes import MLFeatureTablePropertiesClass
from ......schema_classes import MLHyperParamClass
from ......schema_classes import MLMetricClass
from ......schema_classes import MLModelDeploymentPropertiesClass
from ......schema_classes import MLModelFactorPromptsClass
from ......schema_classes import MLModelFactorsClass
from ......schema_classes import MLModelGroupPropertiesClass
from ......schema_classes import MLModelPropertiesClass
from ......schema_classes import MLPrimaryKeyPropertiesClass
from ......schema_classes import MetricsClass
from ......schema_classes import QuantitativeAnalysesClass
from ......schema_classes import SourceCodeClass
from ......schema_classes import SourceCodeUrlClass
from ......schema_classes import SourceCodeUrlTypeClass
from ......schema_classes import TrainingDataClass
BaseData = BaseDataClass
CaveatDetails = CaveatDetailsClass
CaveatsAndRecommendations = CaveatsAndRecommendationsClass
DeploymentStatus = DeploymentStatusClass
EthicalConsiderations = EthicalConsiderationsClass
EvaluationData = EvaluationDataClass
IntendedUse = IntendedUseClass
IntendedUserType = IntendedUserTypeClass
MLFeatureProperties = MLFeaturePropertiesClass
MLFeatureTableProperties = MLFeatureTablePropertiesClass
MLHyperParam = MLHyperParamClass
MLMetric = MLMetricClass
MLModelDeploymentProperties = MLModelDeploymentPropertiesClass
MLModelFactorPrompts = MLModelFactorPromptsClass
MLModelFactors = MLModelFactorsClass
MLModelGroupProperties = MLModelGroupPropertiesClass
MLModelProperties = MLModelPropertiesClass
MLPrimaryKeyProperties = MLPrimaryKeyPropertiesClass
Metrics = MetricsClass
QuantitativeAnalyses = QuantitativeAnalysesClass
SourceCode = SourceCodeClass
SourceCodeUrl = SourceCodeUrlClass
SourceCodeUrlType = SourceCodeUrlTypeClass
TrainingData = TrainingDataClass
# fmt: on

@@ -1,17 +0,0 @@
# flake8: noqa
# This file is autogenerated by /metadata-ingestion/scripts/avro_codegen.py
# Do not modify manually!
# fmt: off
from .....schema_classes import GenericAspectClass
from .....schema_classes import MetadataChangeEventClass
from .....schema_classes import MetadataChangeProposalClass
from .....schema_classes import SystemMetadataClass
GenericAspect = GenericAspectClass
MetadataChangeEvent = MetadataChangeEventClass
MetadataChangeProposal = MetadataChangeProposalClass
SystemMetadata = SystemMetadataClass
# fmt: on

@@ -1,15 +0,0 @@
# flake8: noqa
# This file is autogenerated by /metadata-ingestion/scripts/avro_codegen.py
# Do not modify manually!
# fmt: off
from .....schema_classes import DataHubActorFilterClass
from .....schema_classes import DataHubPolicyInfoClass
from .....schema_classes import DataHubResourceFilterClass
DataHubActorFilter = DataHubActorFilterClass
DataHubPolicyInfo = DataHubPolicyInfoClass
DataHubResourceFilter = DataHubResourceFilterClass
# fmt: on

@@ -1,73 +0,0 @@
# flake8: noqa
# This file is autogenerated by /metadata-ingestion/scripts/avro_codegen.py
# Do not modify manually!
# fmt: off
from .....schema_classes import ArrayTypeClass
from .....schema_classes import BinaryJsonSchemaClass
from .....schema_classes import BooleanTypeClass
from .....schema_classes import BytesTypeClass
from .....schema_classes import DatasetFieldForeignKeyClass
from .....schema_classes import DateTypeClass
from .....schema_classes import EditableSchemaFieldInfoClass
from .....schema_classes import EditableSchemaMetadataClass
from .....schema_classes import EnumTypeClass
from .....schema_classes import EspressoSchemaClass
from .....schema_classes import FixedTypeClass
from .....schema_classes import ForeignKeyConstraintClass
from .....schema_classes import ForeignKeySpecClass
from .....schema_classes import KafkaSchemaClass
from .....schema_classes import KeyValueSchemaClass
from .....schema_classes import MapTypeClass
from .....schema_classes import MySqlDDLClass
from .....schema_classes import NullTypeClass
from .....schema_classes import NumberTypeClass
from .....schema_classes import OracleDDLClass
from .....schema_classes import OrcSchemaClass
from .....schema_classes import OtherSchemaClass
from .....schema_classes import PrestoDDLClass
from .....schema_classes import RecordTypeClass
from .....schema_classes import SchemaFieldClass
from .....schema_classes import SchemaFieldDataTypeClass
from .....schema_classes import SchemaMetadataClass
from .....schema_classes import SchemalessClass
from .....schema_classes import StringTypeClass
from .....schema_classes import TimeTypeClass
from .....schema_classes import UnionTypeClass
from .....schema_classes import UrnForeignKeyClass
ArrayType = ArrayTypeClass
BinaryJsonSchema = BinaryJsonSchemaClass
BooleanType = BooleanTypeClass
BytesType = BytesTypeClass
DatasetFieldForeignKey = DatasetFieldForeignKeyClass
DateType = DateTypeClass
EditableSchemaFieldInfo = EditableSchemaFieldInfoClass
EditableSchemaMetadata = EditableSchemaMetadataClass
EnumType = EnumTypeClass
EspressoSchema = EspressoSchemaClass
FixedType = FixedTypeClass
ForeignKeyConstraint = ForeignKeyConstraintClass
ForeignKeySpec = ForeignKeySpecClass
KafkaSchema = KafkaSchemaClass
KeyValueSchema = KeyValueSchemaClass
MapType = MapTypeClass
MySqlDDL = MySqlDDLClass
NullType = NullTypeClass
NumberType = NumberTypeClass
OracleDDL = OracleDDLClass
OrcSchema = OrcSchemaClass
OtherSchema = OtherSchemaClass
PrestoDDL = PrestoDDLClass
RecordType = RecordTypeClass
SchemaField = SchemaFieldClass
SchemaFieldDataType = SchemaFieldDataTypeClass
SchemaMetadata = SchemaMetadataClass
Schemaless = SchemalessClass
StringType = StringTypeClass
TimeType = TimeTypeClass
UnionType = UnionTypeClass
UrnForeignKey = UrnForeignKeyClass
# fmt: on

@@ -1,11 +0,0 @@
# flake8: noqa
# This file is autogenerated by /metadata-ingestion/scripts/avro_codegen.py
# Do not modify manually!
# fmt: off
from .....schema_classes import TagPropertiesClass
TagProperties = TagPropertiesClass
# fmt: on

@@ -1,17 +0,0 @@
# flake8: noqa
# This file is autogenerated by /metadata-ingestion/scripts/avro_codegen.py
# Do not modify manually!
# fmt: off
from .....schema_classes import CalendarIntervalClass
from .....schema_classes import PartitionSpecClass
from .....schema_classes import TimeWindowClass
from .....schema_classes import TimeWindowSizeClass
CalendarInterval = CalendarIntervalClass
PartitionSpec = PartitionSpecClass
TimeWindow = TimeWindowClass
TimeWindowSize = TimeWindowSizeClass
# fmt: on

@@ -1,17 +0,0 @@
# flake8: noqa
# This file is autogenerated by /metadata-ingestion/scripts/avro_codegen.py
# Do not modify manually!
# fmt: off
from .....schema_classes import FieldUsageCountsClass
from .....schema_classes import UsageAggregationClass
from .....schema_classes import UsageAggregationMetricsClass
from .....schema_classes import UserUsageCountsClass
FieldUsageCounts = FieldUsageCountsClass
UsageAggregation = UsageAggregationClass
UsageAggregationMetrics = UsageAggregationMetricsClass
UserUsageCounts = UserUsageCountsClass
# fmt: on

File diff suppressed because it is too large.

File diff suppressed because it is too large.

@@ -1,297 +0,0 @@
{
"type": "record",
"name": "DatasetProfile",
"namespace": "com.linkedin.pegasus2avro.dataset",
"doc": "Stats corresponding to datasets",
"fields": [
{
"name": "timestampMillis",
"type": "long",
"doc": "The event timestamp field as epoch at UTC in milli seconds."
},
{
"name": "eventGranularity",
"type": [
"null",
{
"type": "record",
"name": "TimeWindowSize",
"namespace": "com.linkedin.pegasus2avro.timeseries",
"doc": "Defines the size of a time window.",
"fields": [
{
"name": "unit",
"type": {
"type": "enum",
"name": "CalendarInterval",
"symbols": [
"SECOND",
"MINUTE",
"HOUR",
"DAY",
"WEEK",
"MONTH",
"QUARTER",
"YEAR"
]
},
"doc": "Interval unit such as minute/hour/day etc."
},
{
"name": "multiple",
"type": "int",
"doc": "How many units. Defaults to 1.",
"default": 1
}
]
}
],
"doc": "Granularity of the event if applicable",
"default": null
},
{
"name": "partitionSpec",
"type": [
"null",
{
"type": "record",
"name": "PartitionSpec",
"namespace": "com.linkedin.pegasus2avro.timeseries",
"doc": "Defines how the data is partitioned",
"fields": [
{
"name": "partition",
"type": "string",
"doc": "String representation of the partition"
},
{
"name": "timePartition",
"type": [
"null",
{
"type": "record",
"name": "TimeWindow",
"fields": [
{
"name": "startTimeMillis",
"type": "long",
"doc": "Start time as epoch at UTC."
},
{
"name": "length",
"type": "TimeWindowSize",
"doc": "The length of the window."
}
]
}
],
"doc": "Time window of the partition if applicable",
"default": null
}
]
}
],
"doc": "The optional partition specification.",
"default": null
},
{
"name": "rowCount",
"type": [
"null",
"long"
],
"default": null
},
{
"name": "columnCount",
"type": [
"null",
"long"
],
"default": null
},
{
"name": "fieldProfiles",
"type": [
"null",
{
"type": "array",
"items": {
"type": "record",
"name": "DatasetFieldProfile",
"doc": "Stats corresponding to fields in a dataset",
"fields": [
{
"name": "fieldPath",
"type": "string"
},
{
"name": "uniqueCount",
"type": [
"null",
"long"
],
"default": null
},
{
"name": "uniqueProportion",
"type": [
"null",
"float"
],
"default": null
},
{
"name": "nullCount",
"type": [
"null",
"long"
],
"default": null
},
{
"name": "nullProportion",
"type": [
"null",
"float"
],
"default": null
},
{
"name": "min",
"type": [
"null",
"string"
],
"default": null
},
{
"name": "max",
"type": [
"null",
"string"
],
"default": null
},
{
"name": "mean",
"type": [
"null",
"string"
],
"default": null
},
{
"name": "median",
"type": [
"null",
"string"
],
"default": null
},
{
"name": "stdev",
"type": [
"null",
"string"
],
"default": null
},
{
"name": "quantiles",
"type": [
"null",
{
"type": "array",
"items": {
"type": "record",
"name": "Quantile",
"fields": [
{
"name": "quantile",
"type": "string"
},
{
"name": "value",
"type": "string"
}
]
}
}
],
"default": null
},
{
"name": "distinctValueFrequencies",
"type": [
"null",
{
"type": "array",
"items": {
"type": "record",
"name": "ValueFrequency",
"fields": [
{
"name": "value",
"type": "string"
},
{
"name": "frequency",
"type": "long"
}
]
}
}
],
"default": null
},
{
"name": "histogram",
"type": [
"null",
{
"type": "record",
"name": "Histogram",
"fields": [
{
"name": "boundaries",
"type": {
"type": "array",
"items": "string"
}
},
{
"name": "heights",
"type": {
"type": "array",
"items": "float"
}
}
]
}
],
"default": null
},
{
"name": "sampleValues",
"type": [
"null",
{
"type": "array",
"items": "string"
}
],
"default": null
}
]
}
}
],
"default": null
}
],
"Aspect": {
"name": "datasetProfile",
"type": "timeseries"
}
}

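As a usage sketch, the DatasetProfile record above maps onto a generated DatasetProfileClass whose constructor mirrors the Avro fields; only timestampMillis is required. The counts are invented:

import time

from datahub.metadata.schema_classes import (
    DatasetFieldProfileClass,
    DatasetProfileClass,
)

profile = DatasetProfileClass(
    timestampMillis=int(time.time() * 1000),  # epoch millis at UTC
    rowCount=1000,
    columnCount=3,
    fieldProfiles=[
        DatasetFieldProfileClass(fieldPath="id", uniqueCount=1000, nullCount=0),
    ],
)
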
@@ -1,212 +0,0 @@
{
"type": "record",
"name": "DatasetUsageStatistics",
"namespace": "com.linkedin.pegasus2avro.dataset",
"doc": "Stats corresponding to dataset's usage.",
"fields": [
{
"name": "timestampMillis",
"type": "long",
"doc": "The event timestamp field as epoch at UTC in milli seconds."
},
{
"name": "eventGranularity",
"type": [
"null",
{
"type": "record",
"name": "TimeWindowSize",
"namespace": "com.linkedin.pegasus2avro.timeseries",
"doc": "Defines the size of a time window.",
"fields": [
{
"name": "unit",
"type": {
"type": "enum",
"name": "CalendarInterval",
"symbols": [
"SECOND",
"MINUTE",
"HOUR",
"DAY",
"WEEK",
"MONTH",
"QUARTER",
"YEAR"
]
},
"doc": "Interval unit such as minute/hour/day etc."
},
{
"name": "multiple",
"type": "int",
"doc": "How many units. Defaults to 1.",
"default": 1
}
]
}
],
"doc": "Granularity of the event if applicable",
"default": null
},
{
"name": "partitionSpec",
"type": [
"null",
{
"type": "record",
"name": "PartitionSpec",
"namespace": "com.linkedin.pegasus2avro.timeseries",
"doc": "Defines how the data is partitioned",
"fields": [
{
"name": "partition",
"type": "string",
"doc": "String representation of the partition"
},
{
"name": "timePartition",
"type": [
"null",
{
"type": "record",
"name": "TimeWindow",
"fields": [
{
"name": "startTimeMillis",
"type": "long",
"doc": "Start time as epoch at UTC."
},
{
"name": "length",
"type": "TimeWindowSize",
"doc": "The length of the window."
}
]
}
],
"doc": "Time window of the partition if applicable",
"default": null
}
]
}
],
"doc": "The optional partition specification.",
"default": null
},
{
"name": "uniqueUserCount",
"type": [
"null",
"int"
],
"doc": "Unique user count",
"default": null,
"TimeseriesField": {}
},
{
"name": "totalSqlQueries",
"type": [
"null",
"int"
],
"doc": "Total SQL query count",
"default": null,
"TimeseriesField": {}
},
{
"name": "topSqlQueries",
"type": [
"null",
{
"type": "array",
"items": "string"
}
],
"doc": "Frequent SQL queries; mostly makes sense for datasets in SQL databases",
"default": null,
"TimeseriesField": {}
},
{
"name": "userCounts",
"type": [
"null",
{
"type": "array",
"items": {
"type": "record",
"name": "DatasetUserUsageCounts",
"doc": "Records a single user's usage counts for a given resource",
"fields": [
{
"name": "user",
"type": "string",
"doc": "The unique id of the user.",
"java": {
"class": "com.linkedin.pegasus2avro.common.urn.Urn"
}
},
{
"name": "count",
"type": "int",
"doc": "Number of times the dataset has been used by the user.",
"TimeseriesField": {}
},
{
"name": "userEmail",
"type": [
"null",
"string"
],
"doc": "If user_email is set, we attempt to resolve the user's urn upon ingest",
"default": null,
"TimeseriesField": {}
}
]
}
}
],
"doc": "Users within this bucket, with frequency counts",
"default": null,
"TimeseriesFieldCollection": {
"key": "user"
}
},
{
"name": "fieldCounts",
"type": [
"null",
{
"type": "array",
"items": {
"type": "record",
"name": "DatasetFieldUsageCounts",
"doc": "Records field-level usage counts for a given dataset",
"fields": [
{
"name": "fieldPath",
"type": "string",
"doc": "The name of the field."
},
{
"name": "count",
"type": "int",
"doc": "Number of times the field has been used.",
"TimeseriesField": {}
}
]
}
}
],
"doc": "Field-level usage stats",
"default": null,
"TimeseriesFieldCollection": {
"key": "fieldPath"
}
}
],
"Aspect": {
"name": "datasetUsageStatistics",
"type": "timeseries"
}
}

@@ -1,222 +0,0 @@
{
"type": "record",
"name": "MetadataChangeProposal",
"namespace": "com.linkedin.pegasus2avro.mxe",
"doc": "Kafka event for proposing a metadata change for an entity. A corresponding MetadataChangeLog is emitted when the change is accepted and committed, otherwise a FailedMetadataChangeProposal will be emitted instead.",
"fields": [
{
"name": "auditHeader",
"type": [
"null",
{
"type": "record",
"name": "KafkaAuditHeader",
"namespace": "com.linkedin.events",
"doc": "This header records information about the context of an event as it is emitted into kafka and is intended to be used by the kafka audit application. For more information see go/kafkaauditheader",
"fields": [
{
"name": "time",
"type": "long",
"doc": "The time at which the event was emitted into kafka.",
"compliance": [
{
"policy": "EVENT_TIME"
}
]
},
{
"name": "server",
"type": "string",
"doc": "The fully qualified name of the host from which the event is being emitted.",
"compliance": "NONE"
},
{
"name": "instance",
"type": [
"null",
"string"
],
"doc": "The instance on the server from which the event is being emitted. e.g. i001",
"default": null,
"compliance": "NONE"
},
{
"name": "appName",
"type": "string",
"doc": "The name of the application from which the event is being emitted. see go/appname",
"compliance": "NONE"
},
{
"name": "messageId",
"type": {
"type": "fixed",
"name": "UUID",
"size": 16
},
"doc": "A unique identifier for the message",
"compliance": "NONE"
},
{
"name": "auditVersion",
"type": [
"null",
"int"
],
"doc": "The version that is being used for auditing. In version 0, the audit trail buckets events into 10 minute audit windows based on the EventHeader timestamp. In version 1, the audit trail buckets events as follows: if the schema has an outer KafkaAuditHeader, use the outer audit header timestamp for bucketing; else if the EventHeader has an inner KafkaAuditHeader use that inner audit header's timestamp for bucketing",
"default": null,
"compliance": "NONE"
},
{
"name": "fabricUrn",
"type": [
"null",
"string"
],
"doc": "The fabricUrn of the host from which the event is being emitted. Fabric Urn in the format of urn:li:fabric:{fabric_name}. See go/fabric.",
"default": null,
"compliance": "NONE"
},
{
"name": "clusterConnectionString",
"type": [
"null",
"string"
],
"doc": "This is a String that the client uses to establish some kind of connection with the Kafka cluster. The exact format of it depends on specific versions of clients and brokers. This information could potentially identify the fabric and cluster with which the client is producing to or consuming from.",
"default": null,
"compliance": "NONE"
}
]
}
],
"doc": "Kafka audit header. See go/kafkaauditheader for more info.",
"default": null
},
{
"name": "entityType",
"type": "string",
"doc": "Type of the entity being written to"
},
{
"name": "entityUrn",
"type": [
"null",
"string"
],
"doc": "Urn of the entity being written\n",
"default": null,
"java": {
"class": "com.linkedin.pegasus2avro.common.urn.Urn"
}
},
{
"name": "entityKeyAspect",
"type": [
"null",
{
"type": "record",
"name": "GenericAspect",
"doc": "Generic record structure for serializing an Aspect\n",
"fields": [
{
"name": "value",
"type": "bytes"
},
{
"name": "contentType",
"type": "string"
}
]
}
],
"doc": "Key aspect of the entity being written",
"default": null
},
{
"name": "changeType",
"type": {
"type": "enum",
"name": "ChangeType",
"namespace": "com.linkedin.pegasus2avro.events.metadata",
"doc": "Descriptor for a change action",
"symbols": [
"UPSERT",
"CREATE",
"UPDATE",
"DELETE",
"PATCH"
],
"symbolDocs": {
"CREATE": "NOT SUPPORTED YET\ninsert if not exists. otherwise fail",
"DELETE": "NOT SUPPORTED YET\ndelete action",
"PATCH": "NOT SUPPORTED YET\npatch the changes instead of full replace",
"UPDATE": "NOT SUPPORTED YET\nupdate if exists. otherwise fail",
"UPSERT": "insert if not exists. otherwise update"
}
},
"doc": "Type of change being proposed"
},
{
"name": "aspectName",
"type": [
"null",
"string"
],
"doc": "Aspect of the entity being written to\nNot filling this out implies that the writer wants to affect the entire entity\nNote: This is only valid for CREATE and DELETE operations.\n",
"default": null
},
{
"name": "aspect",
"type": [
"null",
"GenericAspect"
],
"default": null
},
{
"name": "systemMetadata",
"type": [
"null",
{
"type": "record",
"name": "SystemMetadata",
"doc": "Kafka event for proposing a metadata change for an entity. A corresponding MetadataAuditEvent is emitted when the change is accepted and committed, otherwise a FailedMetadataChangeEvent will be emitted instead.",
"fields": [
{
"name": "lastObserved",
"type": [
"long",
"null"
],
"doc": "The timestamp the metadata was observed at",
"default": 0
},
{
"name": "runId",
"type": [
"string",
"null"
],
"doc": "The run id that produced the metadata",
"default": "no-run-id-provided"
},
{
"name": "properties",
"type": [
"null",
{
"type": "map",
"values": "string"
}
],
"doc": "Additional properties",
"default": null
}
]
}
],
"doc": "A string->string map of custom properties that one might want to attach to an event\n",
"default": null
}
]
}

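A sketch of an UPSERT proposal matching the schema above, with the aspect payload carried as serialized bytes in the GenericAspect envelope and contentType declaring the encoding (the URN and aspect are illustrative):

import json

from datahub.metadata.schema_classes import (
    ChangeTypeClass,
    GenericAspectClass,
    MetadataChangeProposalClass,
)

mcp = MetadataChangeProposalClass(
    entityType="dataset",
    entityUrn="urn:li:dataset:(urn:li:dataPlatform:hive,db.table,PROD)",
    changeType=ChangeTypeClass.UPSERT,
    aspectName="status",
    aspect=GenericAspectClass(
        value=json.dumps({"removed": False}).encode("utf-8"),
        contentType="application/json",
    ),
)
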
@@ -1,147 +0,0 @@
{
"type": "record",
"name": "UsageAggregation",
"namespace": "com.linkedin.pegasus2avro.usage",
"doc": "Usage data for a given resource, rolled up into a bucket.",
"fields": [
{
"name": "bucket",
"type": "long",
"doc": " Bucket start time in milliseconds "
},
{
"name": "duration",
"type": {
"type": "enum",
"name": "WindowDuration",
"namespace": "com.linkedin.pegasus2avro.common",
"doc": "Enum to define the length of a bucket when doing aggregations",
"symbols": [
"YEAR",
"MONTH",
"WEEK",
"DAY",
"HOUR"
]
},
"doc": " Bucket duration "
},
{
"name": "resource",
"type": "string",
"doc": " Resource associated with these usage stats ",
"java": {
"class": "com.linkedin.pegasus2avro.common.urn.Urn"
}
},
{
"name": "metrics",
"type": {
"type": "record",
"name": "UsageAggregationMetrics",
"doc": "Metrics for usage data for a given resource and bucket. Not all fields\nmake sense for all buckets, so every field is optional.",
"fields": [
{
"name": "uniqueUserCount",
"type": [
"null",
"int"
],
"doc": " Unique user count ",
"default": null
},
{
"name": "users",
"type": [
"null",
{
"type": "array",
"items": {
"type": "record",
"name": "UserUsageCounts",
"doc": " Records a single user's usage counts for a given resource ",
"fields": [
{
"name": "user",
"type": [
"null",
"string"
],
"default": null,
"java": {
"class": "com.linkedin.pegasus2avro.common.urn.Urn"
}
},
{
"name": "count",
"type": "int"
},
{
"name": "userEmail",
"type": [
"null",
"string"
],
"doc": " If user_email is set, we attempt to resolve the user's urn upon ingest ",
"default": null
}
]
}
}
],
"doc": " Users within this bucket, with frequency counts ",
"default": null
},
{
"name": "totalSqlQueries",
"type": [
"null",
"int"
],
"doc": " Total SQL query count ",
"default": null
},
{
"name": "topSqlQueries",
"type": [
"null",
{
"type": "array",
"items": "string"
}
],
"doc": " Frequent SQL queries; mostly makes sense for datasets in SQL databases ",
"default": null
},
{
"name": "fields",
"type": [
"null",
{
"type": "array",
"items": {
"type": "record",
"name": "FieldUsageCounts",
"doc": " Records field-level usage counts for a given resource ",
"fields": [
{
"name": "fieldName",
"type": "string"
},
{
"name": "count",
"type": "int"
}
]
}
}
],
"doc": " Field-level usage stats ",
"default": null
}
]
},
"doc": " Metrics associated with this bucket "
}
]
}

@@ -1,325 +0,0 @@
# flake8: noqa
# This file is autogenerated by /metadata-ingestion/scripts/avro_codegen.py
# Do not modify manually!
# fmt: off
import functools
import pathlib
def _load_schema(schema_name: str) -> str:
return (pathlib.Path(__file__).parent / f"{schema_name}.avsc").read_text()
@functools.lru_cache(maxsize=None)
def getMetadataChangeEventSchema() -> str:
return _load_schema("MetadataChangeEvent")
@functools.lru_cache(maxsize=None)
def getMetadataChangeProposalSchema() -> str:
return _load_schema("MetadataChangeProposal")
@functools.lru_cache(maxsize=None)
def getUsageAggregationSchema() -> str:
return _load_schema("UsageAggregation")
@functools.lru_cache(maxsize=None)
def getChartInfoSchema() -> str:
return _load_schema("ChartInfo")
@functools.lru_cache(maxsize=None)
def getChartQuerySchema() -> str:
return _load_schema("ChartQuery")
@functools.lru_cache(maxsize=None)
def getEditableChartPropertiesSchema() -> str:
return _load_schema("EditableChartProperties")
@functools.lru_cache(maxsize=None)
def getBrowsePathsSchema() -> str:
return _load_schema("BrowsePaths")
@functools.lru_cache(maxsize=None)
def getCostSchema() -> str:
return _load_schema("Cost")
@functools.lru_cache(maxsize=None)
def getDeprecationSchema() -> str:
return _load_schema("Deprecation")
@functools.lru_cache(maxsize=None)
def getGlobalTagsSchema() -> str:
return _load_schema("GlobalTags")
@functools.lru_cache(maxsize=None)
def getGlossaryTermsSchema() -> str:
return _load_schema("GlossaryTerms")
@functools.lru_cache(maxsize=None)
def getInstitutionalMemorySchema() -> str:
return _load_schema("InstitutionalMemory")
@functools.lru_cache(maxsize=None)
def getOwnershipSchema() -> str:
return _load_schema("Ownership")
@functools.lru_cache(maxsize=None)
def getStatusSchema() -> str:
return _load_schema("Status")
@functools.lru_cache(maxsize=None)
def getDashboardInfoSchema() -> str:
return _load_schema("DashboardInfo")
@functools.lru_cache(maxsize=None)
def getEditableDashboardPropertiesSchema() -> str:
return _load_schema("EditableDashboardProperties")
@functools.lru_cache(maxsize=None)
def getDataFlowInfoSchema() -> str:
return _load_schema("DataFlowInfo")
@functools.lru_cache(maxsize=None)
def getDataJobInfoSchema() -> str:
return _load_schema("DataJobInfo")
@functools.lru_cache(maxsize=None)
def getDataJobInputOutputSchema() -> str:
return _load_schema("DataJobInputOutput")
@functools.lru_cache(maxsize=None)
def getEditableDataFlowPropertiesSchema() -> str:
return _load_schema("EditableDataFlowProperties")
@functools.lru_cache(maxsize=None)
def getEditableDataJobPropertiesSchema() -> str:
return _load_schema("EditableDataJobProperties")
@functools.lru_cache(maxsize=None)
def getDataPlatformInfoSchema() -> str:
return _load_schema("DataPlatformInfo")
@functools.lru_cache(maxsize=None)
def getDataProcessInfoSchema() -> str:
return _load_schema("DataProcessInfo")
@functools.lru_cache(maxsize=None)
def getDatasetDeprecationSchema() -> str:
return _load_schema("DatasetDeprecation")
@functools.lru_cache(maxsize=None)
def getDatasetProfileSchema() -> str:
return _load_schema("DatasetProfile")
@functools.lru_cache(maxsize=None)
def getDatasetPropertiesSchema() -> str:
return _load_schema("DatasetProperties")
@functools.lru_cache(maxsize=None)
def getDatasetUpstreamLineageSchema() -> str:
return _load_schema("DatasetUpstreamLineage")
@functools.lru_cache(maxsize=None)
def getDatasetUsageStatisticsSchema() -> str:
return _load_schema("DatasetUsageStatistics")
@functools.lru_cache(maxsize=None)
def getEditableDatasetPropertiesSchema() -> str:
return _load_schema("EditableDatasetProperties")
@functools.lru_cache(maxsize=None)
def getUpstreamLineageSchema() -> str:
return _load_schema("UpstreamLineage")
@functools.lru_cache(maxsize=None)
def getGlossaryNodeInfoSchema() -> str:
return _load_schema("GlossaryNodeInfo")
@functools.lru_cache(maxsize=None)
def getGlossaryRelatedTermsSchema() -> str:
return _load_schema("GlossaryRelatedTerms")
@functools.lru_cache(maxsize=None)
def getGlossaryTermInfoSchema() -> str:
return _load_schema("GlossaryTermInfo")
@functools.lru_cache(maxsize=None)
def getCorpGroupInfoSchema() -> str:
return _load_schema("CorpGroupInfo")
@functools.lru_cache(maxsize=None)
def getCorpUserEditableInfoSchema() -> str:
return _load_schema("CorpUserEditableInfo")
@functools.lru_cache(maxsize=None)
def getCorpUserInfoSchema() -> str:
return _load_schema("CorpUserInfo")
@functools.lru_cache(maxsize=None)
def getGroupMembershipSchema() -> str:
return _load_schema("GroupMembership")
@functools.lru_cache(maxsize=None)
def getChartKeySchema() -> str:
return _load_schema("ChartKey")
@functools.lru_cache(maxsize=None)
def getCorpGroupKeySchema() -> str:
return _load_schema("CorpGroupKey")
@functools.lru_cache(maxsize=None)
def getCorpUserKeySchema() -> str:
return _load_schema("CorpUserKey")
@functools.lru_cache(maxsize=None)
def getDashboardKeySchema() -> str:
return _load_schema("DashboardKey")
@functools.lru_cache(maxsize=None)
def getDataFlowKeySchema() -> str:
return _load_schema("DataFlowKey")
@functools.lru_cache(maxsize=None)
def getDataHubPolicyKeySchema() -> str:
return _load_schema("DataHubPolicyKey")
@functools.lru_cache(maxsize=None)
def getDataJobKeySchema() -> str:
return _load_schema("DataJobKey")
@functools.lru_cache(maxsize=None)
def getDataPlatformKeySchema() -> str:
return _load_schema("DataPlatformKey")
@functools.lru_cache(maxsize=None)
def getDataProcessKeySchema() -> str:
return _load_schema("DataProcessKey")
@functools.lru_cache(maxsize=None)
def getDatasetKeySchema() -> str:
return _load_schema("DatasetKey")
@functools.lru_cache(maxsize=None)
def getGlossaryNodeKeySchema() -> str:
return _load_schema("GlossaryNodeKey")
@functools.lru_cache(maxsize=None)
def getGlossaryTermKeySchema() -> str:
return _load_schema("GlossaryTermKey")
@functools.lru_cache(maxsize=None)
def getMLFeatureKeySchema() -> str:
return _load_schema("MLFeatureKey")
@functools.lru_cache(maxsize=None)
def getMLFeatureTableKeySchema() -> str:
return _load_schema("MLFeatureTableKey")
@functools.lru_cache(maxsize=None)
def getMLModelDeploymentKeySchema() -> str:
return _load_schema("MLModelDeploymentKey")
@functools.lru_cache(maxsize=None)
def getMLModelGroupKeySchema() -> str:
return _load_schema("MLModelGroupKey")
@functools.lru_cache(maxsize=None)
def getMLModelKeySchema() -> str:
return _load_schema("MLModelKey")
@functools.lru_cache(maxsize=None)
def getMLPrimaryKeyKeySchema() -> str:
return _load_schema("MLPrimaryKeyKey")
@functools.lru_cache(maxsize=None)
def getSchemaFieldKeySchema() -> str:
return _load_schema("SchemaFieldKey")
@functools.lru_cache(maxsize=None)
def getTagKeySchema() -> str:
return _load_schema("TagKey")
@functools.lru_cache(maxsize=None)
def getCaveatsAndRecommendationsSchema() -> str:
return _load_schema("CaveatsAndRecommendations")
@functools.lru_cache(maxsize=None)
def getEthicalConsiderationsSchema() -> str:
return _load_schema("EthicalConsiderations")
@functools.lru_cache(maxsize=None)
def getEvaluationDataSchema() -> str:
return _load_schema("EvaluationData")
@functools.lru_cache(maxsize=None)
def getIntendedUseSchema() -> str:
return _load_schema("IntendedUse")
@functools.lru_cache(maxsize=None)
def getMLFeaturePropertiesSchema() -> str:
return _load_schema("MLFeatureProperties")
@functools.lru_cache(maxsize=None)
def getMLFeatureTablePropertiesSchema() -> str:
return _load_schema("MLFeatureTableProperties")
@functools.lru_cache(maxsize=None)
def getMLHyperParamSchema() -> str:
return _load_schema("MLHyperParam")
@functools.lru_cache(maxsize=None)
def getMLMetricSchema() -> str:
return _load_schema("MLMetric")
@functools.lru_cache(maxsize=None)
def getMLModelDeploymentPropertiesSchema() -> str:
return _load_schema("MLModelDeploymentProperties")
@functools.lru_cache(maxsize=None)
def getMLModelFactorPromptsSchema() -> str:
return _load_schema("MLModelFactorPrompts")
@functools.lru_cache(maxsize=None)
def getMLModelGroupPropertiesSchema() -> str:
return _load_schema("MLModelGroupProperties")
@functools.lru_cache(maxsize=None)
def getMLModelPropertiesSchema() -> str:
return _load_schema("MLModelProperties")
@functools.lru_cache(maxsize=None)
def getMLPrimaryKeyPropertiesSchema() -> str:
return _load_schema("MLPrimaryKeyProperties")
@functools.lru_cache(maxsize=None)
def getMetricsSchema() -> str:
return _load_schema("Metrics")
@functools.lru_cache(maxsize=None)
def getQuantitativeAnalysesSchema() -> str:
return _load_schema("QuantitativeAnalyses")
@functools.lru_cache(maxsize=None)
def getSourceCodeSchema() -> str:
return _load_schema("SourceCode")
@functools.lru_cache(maxsize=None)
def getTrainingDataSchema() -> str:
return _load_schema("TrainingData")
@functools.lru_cache(maxsize=None)
def getDataHubPolicyInfoSchema() -> str:
return _load_schema("DataHubPolicyInfo")
@functools.lru_cache(maxsize=None)
def getEditableSchemaMetadataSchema() -> str:
return _load_schema("EditableSchemaMetadata")
@functools.lru_cache(maxsize=None)
def getSchemaMetadataSchema() -> str:
return _load_schema("SchemaMetadata")
@functools.lru_cache(maxsize=None)
def getTagPropertiesSchema() -> str:
return _load_schema("TagProperties")
# fmt: on
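
These getters return the raw .avsc text, cached after the first disk read by lru_cache. A consumption sketch, assuming fastavro is available for parsing and that this module lives at datahub/metadata/schemas/__init__.py (the relative _load_schema logic suggests so, but the filename was stripped from this diff):

import json

import fastavro

from datahub.metadata.schemas import getMetadataChangeEventSchema

# parse_schema validates the JSON and returns a resolved schema dict;
# record names come back fully qualified with their namespace.
mce_schema = fastavro.parse_schema(json.loads(getMetadataChangeEventSchema()))
print(mce_schema["name"])  # com.linkedin.pegasus2avro.mxe.MetadataChangeEvent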