Merge remote-tracking branch 'upstream/branch0101' into branch0101

# Conflicts:
#	python/knext/chain/base.py
#	python/knext/client/builder.py
#	python/knext/operator/builtin/auto_prompt.py
#	python/knext/rest/models/builder/pipeline/config/user_defined_extract_node_config.py
This commit is contained in:
Qu 2023-12-15 17:35:13 +08:00
commit a853d3642c
43 changed files with 992 additions and 254 deletions

View File

@ -37,7 +37,6 @@ import lombok.Getter;
* </ul>
*/
@Getter
@AllArgsConstructor
public abstract class BasePhysicalNode implements Comparable<BasePhysicalNode> {
/** ID of the physical node. */

View File

@ -13,13 +13,20 @@
package com.antgroup.openspg.builder.core.physical.process;
import com.antgroup.openspg.builder.model.pipeline.config.CheckNodeConfig;
import com.antgroup.openspg.builder.model.pipeline.config.BaseNodeConfig;
import com.antgroup.openspg.builder.model.pipeline.enums.NodeTypeEnum;
import com.antgroup.openspg.builder.model.record.BaseRecord;
import com.antgroup.openspg.builder.model.record.BaseSPGRecord;
import com.antgroup.openspg.builder.model.record.SPGRecordTypeEnum;
import java.util.List;
public class CheckProcessor extends BaseProcessor<CheckNodeConfig> {
public class CheckProcessor extends BaseProcessor<CheckProcessor.CheckNodeConfig> {
/** Config for the CHECK node; carries no options beyond its node type. */
public static class CheckNodeConfig extends BaseNodeConfig {

  /** Tags this config with {@link NodeTypeEnum#CHECK}. */
  public CheckNodeConfig() {
    super(NodeTypeEnum.CHECK);
  }
}
private static final String PROCESSOR_NAME = "CHECK";

View File

@ -0,0 +1,10 @@
package com.antgroup.openspg.builder.core.reason;
import com.antgroup.openspg.builder.model.record.BaseSPGRecord;
import com.antgroup.openspg.core.schema.model.semantic.BaseConceptSemantic;
import java.util.List;
/**
 * Applies one concept semantic to a batch of SPG records.
 *
 * @param <T> the concrete concept-semantic type (e.g. taxonomy or causation) this reasoner
 *     understands
 */
public interface ConceptReasoner<T extends BaseConceptSemantic> {

  /**
   * Runs the reasoning logic bound to {@code conceptSemantic} over {@code records}.
   *
   * @param records input SPG records
   * @param conceptSemantic the concept semantic driving the reasoning
   * @return the resulting record list (implementations may mutate and return the input)
   */
  List<BaseSPGRecord> reason(List<BaseSPGRecord> records, T conceptSemantic);
}

View File

@ -0,0 +1,106 @@
package com.antgroup.openspg.builder.core.reason;
import com.antgroup.kg.reasoner.catalog.impl.OpenKgCatalog;
import com.antgroup.kg.reasoner.common.graph.vertex.IVertexId;
import com.antgroup.kg.reasoner.graphstate.GraphState;
import com.antgroup.kg.reasoner.graphstate.impl.CloudExtGraphState;
import com.antgroup.kg.reasoner.lube.catalog.Catalog;
import com.antgroup.openspg.builder.core.physical.process.BaseProcessor;
import com.antgroup.openspg.builder.core.reason.impl.CausalConceptReasoner;
import com.antgroup.openspg.builder.core.reason.impl.InductiveConceptReasoner;
import com.antgroup.openspg.builder.model.pipeline.config.BaseNodeConfig;
import com.antgroup.openspg.builder.model.pipeline.enums.NodeTypeEnum;
import com.antgroup.openspg.builder.model.record.BaseAdvancedRecord;
import com.antgroup.openspg.builder.model.record.BaseRecord;
import com.antgroup.openspg.builder.model.record.BaseSPGRecord;
import com.antgroup.openspg.core.schema.model.semantic.DynamicTaxonomySemantic;
import com.antgroup.openspg.core.schema.model.semantic.LogicalCausationSemantic;
import com.antgroup.openspg.core.schema.model.type.ConceptList;
import com.antgroup.openspg.server.common.model.datasource.connection.GraphStoreConnectionInfo;
import com.google.common.collect.Lists;
import java.util.*;
/**
 * Processor that runs concept reasoning over advanced records: first inductive belongTo
 * classification, then causal leadTo propagation.
 */
@SuppressWarnings({"unchecked", "rawtypes"})
public class ReasonProcessor extends BaseProcessor<ReasonProcessor.ReasonerNodeConfig> {

  /** Config for the REASON node; carries no options beyond its node type. */
  public static class ReasonerNodeConfig extends BaseNodeConfig {
    public ReasonerNodeConfig() {
      super(NodeTypeEnum.REASON);
    }
  }

  // Performs belongTo (dynamic taxonomy) classification.
  private final InductiveConceptReasoner inductiveConceptReasoner;
  // Performs leadTo (logical causation) propagation; delegates back to the inductive reasoner.
  private final CausalConceptReasoner causalConceptReasoner;

  public ReasonProcessor() {
    super("", "", null);
    // NOTE(review): `context` (inherited from BaseProcessor) is read here in the constructor,
    // right after super("", "", null) — confirm it is already populated at construction time.
    Catalog catalog = buildCatalog();
    GraphState<IVertexId> graphState =
        buildGraphState(context.getCatalog().getGraphStoreConnInfo());
    this.inductiveConceptReasoner = new InductiveConceptReasoner();
    this.inductiveConceptReasoner.setCatalog(catalog);
    this.inductiveConceptReasoner.setGraphState(graphState);
    this.causalConceptReasoner = new CausalConceptReasoner();
    this.causalConceptReasoner.setCatalog(catalog);
    this.causalConceptReasoner.setBuilderCatalog(context.getCatalog());
    this.causalConceptReasoner.setGraphState(graphState);
    this.causalConceptReasoner.setInductiveConceptReasoner(inductiveConceptReasoner);
  }

  /**
   * Reasons over every advanced record in {@code inputs} whose type has an associated concept
   * list; records of other kinds, or without a concept list, are skipped (not passed through).
   *
   * @param inputs records coming out of the previous pipeline stage
   * @return records produced by reasoning (may be empty)
   */
  @Override
  public List<BaseRecord> process(List<BaseRecord> inputs) {
    List<BaseRecord> results = new ArrayList<>();
    for (BaseRecord baseRecord : inputs) {
      if (!(baseRecord instanceof BaseAdvancedRecord)) {
        continue;
      }
      BaseAdvancedRecord advancedRecord = (BaseAdvancedRecord) baseRecord;
      // now only supports single classification of one entity type
      ConceptList conceptList =
          ReasonerProcessorUtils.getConceptList(advancedRecord, context.getCatalog());
      if (conceptList == null) {
        continue;
      }
      // perform inductive and causal reasoning logic on the input advancedRecord
      results.addAll(reasoning(advancedRecord, conceptList));
    }
    return results;
  }

  /** Runs belongTo classification first, then leadTo propagation, over a single record. */
  private List<BaseSPGRecord> reasoning(BaseAdvancedRecord record, ConceptList conceptList) {
    // run the inductive reasoning logic
    List<BaseSPGRecord> spgRecords = Lists.newArrayList(record);
    for (DynamicTaxonomySemantic belongTo : conceptList.getDynamicTaxonomyList()) {
      spgRecords = inductiveConceptReasoner.reason(spgRecords, belongTo);
    }
    // then run causal reasoning logic
    for (LogicalCausationSemantic leadTo : conceptList.getLogicalCausation()) {
      spgRecords = causalConceptReasoner.reason(spgRecords, leadTo);
    }
    return spgRecords;
  }

  /** Builds the reasoner catalog from the project schema held by the builder catalog. */
  private Catalog buildCatalog() {
    OpenKgCatalog catalog =
        new OpenKgCatalog(context.getProjectId(), null, context.getCatalog().getProjectSchema());
    catalog.init();
    return catalog;
  }

  /** Builds a cloudext-backed graph state from the graph-store connection info. */
  private GraphState<IVertexId> buildGraphState(GraphStoreConnectionInfo connInfo) {
    CloudExtGraphState cloudExtGraphState = new CloudExtGraphState();
    Map<String, Object> params = new HashMap<>();
    params.put("cloudext.graphstore.schema", connInfo.getScheme());
    params.putAll(connInfo.getParams());
    cloudExtGraphState.init((Map) Collections.unmodifiableMap(params));
    return cloudExtGraphState;
  }

  @Override
  public void close() throws Exception {}
}

View File

@ -0,0 +1,131 @@
/*
* Copyright 2023 Ant Group CO., Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied.
*/
package com.antgroup.openspg.builder.core.reason;
import com.antgroup.kg.reasoner.common.graph.edge.IEdge;
import com.antgroup.kg.reasoner.common.graph.property.IProperty;
import com.antgroup.kg.reasoner.common.graph.vertex.IVertex;
import com.antgroup.kg.reasoner.common.graph.vertex.IVertexId;
import com.antgroup.kg.reasoner.local.model.LocalReasonerResult;
import com.antgroup.openspg.builder.core.runtime.BuilderCatalog;
import com.antgroup.openspg.builder.model.record.BaseAdvancedRecord;
import com.antgroup.openspg.builder.model.record.BaseSPGRecord;
import com.antgroup.openspg.builder.model.record.RelationRecord;
import com.antgroup.openspg.builder.model.record.property.SPGPropertyRecord;
import com.antgroup.openspg.builder.model.record.property.SPGPropertyValue;
import com.antgroup.openspg.cloudext.interfaces.graphstore.adapter.record.impl.convertor.EdgeRecordConvertor;
import com.antgroup.openspg.cloudext.interfaces.graphstore.adapter.record.impl.convertor.VertexRecordConvertor;
import com.antgroup.openspg.common.util.CollectionsUtils;
import com.antgroup.openspg.core.schema.model.identifier.RelationIdentifier;
import com.antgroup.openspg.core.schema.model.identifier.SPGTypeIdentifier;
import com.antgroup.openspg.core.schema.model.predicate.Property;
import com.antgroup.openspg.core.schema.model.predicate.Relation;
import com.antgroup.openspg.core.schema.model.semantic.SystemPredicateEnum;
import com.antgroup.openspg.core.schema.model.type.BaseSPGType;
import com.antgroup.openspg.core.schema.model.type.ConceptList;
import com.antgroup.openspg.core.schema.model.type.SPGTypeRef;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.collections4.CollectionUtils;
/** Static helpers shared by the reasoning processors. */
public class ReasonerProcessorUtils {

  /**
   * Writes a belongTo property onto {@code advancedRecord} from the first edge of a reasoner
   * result; a result with no edges is a no-op.
   *
   * @throws IllegalStateException if the record's SPG type declares no belongTo property
   */
  public static void setBelongToProperty(
      LocalReasonerResult result, BaseAdvancedRecord advancedRecord) {
    if (CollectionUtils.isEmpty(result.getEdgeList())) {
      return;
    }
    Property belongToProperty =
        advancedRecord.getSpgType().getPredicateProperty(SystemPredicateEnum.BELONG_TO);
    if (belongToProperty == null) {
      throw new IllegalStateException(
          String.format("spgType=%s has not belongTo property", advancedRecord.getName()));
    }
    // Only the first edge is consulted; its target biz id becomes the belongTo value.
    IEdge<IVertexId, IProperty> edge = result.getEdgeList().get(0);
    SPGPropertyRecord propertyRecord =
        new SPGPropertyRecord(
            belongToProperty, new SPGPropertyValue(edge.getTargetId().getBizId()));
    advancedRecord.mergePropertyValue(propertyRecord);
  }

  /**
   * Converts a reasoner result into SPG records: each vertex becomes an advanced record and
   * each edge becomes a relation record, resolved against the builder catalog.
   */
  public static List<BaseSPGRecord> toSpgRecords(
      LocalReasonerResult result, BuilderCatalog catalog) {
    List<IVertex<IVertexId, IProperty>> vertices =
        CollectionsUtils.defaultEmpty(result.getVertexList());
    List<IEdge<IVertexId, IProperty>> edges = CollectionsUtils.defaultEmpty(result.getEdgeList());
    List<BaseSPGRecord> results = new ArrayList<>(vertices.size() + edges.size());
    vertices.forEach(
        vertex -> {
          IVertexId vertexId = vertex.getId();
          Map<String, String> properties = toProps(vertex.getValue());
          BaseSPGType spgType = catalog.getSPGType(SPGTypeIdentifier.parse(vertexId.getType()));
          BaseAdvancedRecord advancedRecord =
              VertexRecordConvertor.toAdvancedRecord(spgType, vertexId.getBizId(), properties);
          results.add(advancedRecord);
        });
    edges.forEach(
        edge -> {
          Relation relationType = catalog.getRelation(RelationIdentifier.parse(edge.getType()));
          Map<String, String> properties = toProps(edge.getValue());
          RelationRecord relationRecord =
              EdgeRecordConvertor.toRelationRecord(
                  relationType,
                  edge.getSourceId().getBizId(),
                  edge.getTargetId().getBizId(),
                  properties);
          results.add(relationRecord);
        });
    return results;
  }

  /** Flattens a graph property bag into a string map, dropping null values. */
  private static Map<String, String> toProps(IProperty property) {
    Collection<String> keySet = property.getKeySet();
    Map<String, String> properties = new HashMap<>(keySet.size());
    for (String key : keySet) {
      Object value = property.get(key);
      if (value != null) {
        properties.put(key, value.toString());
      }
    }
    return properties;
  }

  /**
   * Resolves the concept list attached to a record's belongTo object type, or null when the
   * record is not an advanced record, has no belongTo property, or its object type is not a
   * concept type.
   */
  public static ConceptList getConceptList(BaseSPGRecord spgRecord, BuilderCatalog catalog) {
    if (!(spgRecord instanceof BaseAdvancedRecord)) {
      return null;
    }
    BaseAdvancedRecord advancedRecord = (BaseAdvancedRecord) spgRecord;
    Property belongToProperty =
        advancedRecord.getSpgType().getPredicateProperty(SystemPredicateEnum.BELONG_TO);
    if (belongToProperty == null) {
      return null;
    }
    SPGTypeRef objectTypeRef = belongToProperty.getObjectTypeRef();
    if (!objectTypeRef.isConceptType()) {
      return null;
    }
    return catalog.getConceptList(objectTypeRef.getBaseSpgIdentifier());
  }
}

View File

@ -0,0 +1,112 @@
package com.antgroup.openspg.builder.core.reason.impl;
import com.antgroup.kg.reasoner.common.graph.vertex.IVertexId;
import com.antgroup.kg.reasoner.graphstate.GraphState;
import com.antgroup.kg.reasoner.local.KGReasonerLocalRunner;
import com.antgroup.kg.reasoner.local.model.LocalReasonerResult;
import com.antgroup.kg.reasoner.local.model.LocalReasonerTask;
import com.antgroup.kg.reasoner.lube.catalog.Catalog;
import com.antgroup.openspg.builder.core.reason.ConceptReasoner;
import com.antgroup.openspg.builder.core.reason.ReasonerProcessorUtils;
import com.antgroup.openspg.builder.core.runtime.BuilderCatalog;
import com.antgroup.openspg.builder.model.record.BaseAdvancedRecord;
import com.antgroup.openspg.builder.model.record.BaseSPGRecord;
import com.antgroup.openspg.builder.model.record.property.SPGPropertyRecord;
import com.antgroup.openspg.builder.model.record.property.SPGPropertyValue;
import com.antgroup.openspg.core.schema.model.semantic.DynamicTaxonomySemantic;
import com.antgroup.openspg.core.schema.model.semantic.LogicalCausationSemantic;
import com.antgroup.openspg.core.schema.model.semantic.SystemPredicateEnum;
import com.antgroup.openspg.core.schema.model.type.ConceptList;
import com.google.common.collect.Lists;
import java.util.ArrayList;
import java.util.List;
import lombok.Setter;
import org.apache.commons.collections4.CollectionUtils;
import scala.Tuple2;
/**
 * Reasoner for leadTo (logical causation) semantics: events belonging to the semantic's
 * subject concept are propagated to derived events, which are in turn classified (belongTo)
 * and recursively propagated along further leadTo rules.
 */
public class CausalConceptReasoner implements ConceptReasoner<LogicalCausationSemantic> {

  // Used to classify newly derived events before they are propagated further.
  @Setter private InductiveConceptReasoner inductiveConceptReasoner;
  @Setter private BuilderCatalog builderCatalog;
  @Setter private Catalog catalog;
  @Setter private GraphState<IVertexId> graphState;

  /**
   * Returns the input records plus every record transitively derived by causal propagation.
   */
  @Override
  public List<BaseSPGRecord> reason(
      List<BaseSPGRecord> records, LogicalCausationSemantic conceptSemantic) {
    List<BaseSPGRecord> results = new ArrayList<>(records);
    propagate(records, conceptSemantic, results);
    return results;
  }

  /**
   * One round of propagation: select the records whose belongTo value matches the semantic's
   * subject concept, run the leadTo rule on each, classify the derived events, and recurse on
   * their own leadTo rules. Derived records are accumulated into {@code results}.
   */
  private void propagate(
      List<BaseSPGRecord> spgRecords,
      LogicalCausationSemantic conceptSemantic,
      List<BaseSPGRecord> results) {
    List<BaseAdvancedRecord> toPropagated = new ArrayList<>();
    for (BaseSPGRecord spgRecord : spgRecords) {
      if (!(spgRecord instanceof BaseAdvancedRecord)) {
        continue;
      }
      BaseAdvancedRecord advancedRecord = (BaseAdvancedRecord) spgRecord;
      SPGPropertyRecord belongToPropertyRecord =
          advancedRecord.getPredicateProperty(SystemPredicateEnum.BELONG_TO);
      if (belongToPropertyRecord == null) {
        // if it does not belong to a concept, then it is not propagated
        // because propagation is based on concepts.
        continue;
      }
      SPGPropertyValue propertyValue = belongToPropertyRecord.getValue();
      if (propertyValue == null
          || !propertyValue.getRaw().contains(conceptSemantic.getSubjectIdentifier().getId())) {
        // If the concept of belonging is not the starting point of the current leadTo,
        // then propagation is not carried out.
        // NOTE(review): this is a substring match on the raw value — confirm it cannot
        // false-positive when one concept id is a prefix of another.
        continue;
      }
      toPropagated.add(advancedRecord);
    }
    // initiating this round of event propagation based on toPropagated.
    for (BaseAdvancedRecord advancedRecord : toPropagated) {
      List<BaseSPGRecord> leadToRecords = leadTo(advancedRecord, conceptSemantic);
      if (CollectionUtils.isEmpty(leadToRecords)) {
        continue;
      }
      results.addAll(leadToRecords);
      // Determine the belongTo of the events propagated out.
      for (BaseSPGRecord leadToRecord : leadToRecords) {
        ConceptList conceptList =
            ReasonerProcessorUtils.getConceptList(leadToRecord, builderCatalog);
        if (conceptList == null) {
          continue;
        }
        List<BaseSPGRecord> nextSpgRecords = Lists.newArrayList(leadToRecord);
        for (DynamicTaxonomySemantic belongTo :
            conceptList.getDynamicTaxonomyList(conceptSemantic.getObjectIdentifier())) {
          nextSpgRecords = inductiveConceptReasoner.reason(nextSpgRecords, belongTo);
        }
        for (LogicalCausationSemantic nextLeadTo :
            conceptList.getLogicalCausation(conceptSemantic.getObjectIdentifier())) {
          propagate(nextSpgRecords, nextLeadTo, results);
        }
      }
    }
  }

  /** Runs one leadTo DSL rule with {@code record} as the start vertex and converts the result. */
  private List<BaseSPGRecord> leadTo(BaseAdvancedRecord record, LogicalCausationSemantic leadTo) {
    LocalReasonerTask reasonerTask = new LocalReasonerTask();
    reasonerTask.setCatalog(catalog);
    reasonerTask.setGraphState(graphState);
    reasonerTask.setDsl(leadTo.getLogicalRule().getContent());
    reasonerTask.setStartIdList(Lists.newArrayList(Tuple2.apply(record.getId(), record.getName())));
    KGReasonerLocalRunner runner = new KGReasonerLocalRunner();
    LocalReasonerResult reasonerResult = runner.run(reasonerTask);
    return ReasonerProcessorUtils.toSpgRecords(reasonerResult, builderCatalog);
  }
}

View File

@ -0,0 +1,47 @@
package com.antgroup.openspg.builder.core.reason.impl;
import com.antgroup.kg.reasoner.common.graph.vertex.IVertexId;
import com.antgroup.kg.reasoner.graphstate.GraphState;
import com.antgroup.kg.reasoner.local.KGReasonerLocalRunner;
import com.antgroup.kg.reasoner.local.model.LocalReasonerResult;
import com.antgroup.kg.reasoner.local.model.LocalReasonerTask;
import com.antgroup.kg.reasoner.lube.catalog.Catalog;
import com.antgroup.openspg.builder.core.reason.ConceptReasoner;
import com.antgroup.openspg.builder.core.reason.ReasonerProcessorUtils;
import com.antgroup.openspg.builder.model.record.BaseAdvancedRecord;
import com.antgroup.openspg.builder.model.record.BaseRecord;
import com.antgroup.openspg.builder.model.record.BaseSPGRecord;
import com.antgroup.openspg.core.schema.model.semantic.DynamicTaxonomySemantic;
import com.google.common.collect.Lists;
import java.util.List;
import lombok.Setter;
import scala.Tuple2;
/**
 * Reasoner for belongTo (dynamic taxonomy) semantics: evaluates the taxonomy's DSL rule for
 * each record and stamps the resulting belongTo property onto it in place.
 */
public class InductiveConceptReasoner implements ConceptReasoner<DynamicTaxonomySemantic> {

  @Setter private Catalog catalog;
  @Setter private GraphState<IVertexId> graphState;

  /**
   * Classifies every record against {@code conceptSemantic}'s rule; records are mutated in
   * place and the same list instance is returned.
   */
  @Override
  public List<BaseSPGRecord> reason(
      List<BaseSPGRecord> records, DynamicTaxonomySemantic conceptSemantic) {
    for (BaseRecord record : records) {
      BaseAdvancedRecord advanced = (BaseAdvancedRecord) record;
      LocalReasonerResult result =
          new KGReasonerLocalRunner().run(newTask(conceptSemantic, advanced));
      ReasonerProcessorUtils.setBelongToProperty(result, advanced);
    }
    return records;
  }

  /** Builds a reasoning task for the semantic's DSL with {@code record} as the start vertex. */
  private LocalReasonerTask newTask(
      DynamicTaxonomySemantic conceptSemantic, BaseAdvancedRecord record) {
    LocalReasonerTask task = new LocalReasonerTask();
    task.setCatalog(catalog);
    task.setGraphState(graphState);
    task.setDsl(conceptSemantic.getLogicalRule().getContent());
    task.setStartIdList(
        Lists.newArrayList(Tuple2.apply(record.getId(), record.getName())));
    return task;
  }
}

View File

@ -4,18 +4,24 @@ import com.antgroup.openspg.core.schema.model.identifier.RelationIdentifier;
import com.antgroup.openspg.core.schema.model.identifier.SPGTypeIdentifier;
import com.antgroup.openspg.core.schema.model.predicate.Relation;
import com.antgroup.openspg.core.schema.model.type.BaseSPGType;
import com.antgroup.openspg.core.schema.model.type.ConceptList;
import com.antgroup.openspg.core.schema.model.type.ProjectSchema;
import com.antgroup.openspg.server.common.model.datasource.connection.GraphStoreConnectionInfo;
import com.antgroup.openspg.server.common.model.datasource.connection.SearchEngineConnectionInfo;
import java.io.Serializable;
public interface BuilderCatalog extends Serializable {
ProjectSchema getProjectSchema();
boolean isSpreadable(SPGTypeIdentifier identifier);
BaseSPGType getSPGType(SPGTypeIdentifier identifier);
Relation getRelation(RelationIdentifier identifier);
ConceptList getConceptList(SPGTypeIdentifier conceptType);
SearchEngineConnectionInfo getSearchEngineConnInfo();
GraphStoreConnectionInfo getGraphStoreConnInfo();

View File

@ -34,4 +34,5 @@ public class BuilderContext implements Serializable {
private int batchSize = 1;
private int parallelism = 1;
private boolean enableLeadTo;
}

View File

@ -5,6 +5,7 @@ import com.antgroup.openspg.core.schema.model.identifier.RelationIdentifier;
import com.antgroup.openspg.core.schema.model.identifier.SPGTypeIdentifier;
import com.antgroup.openspg.core.schema.model.predicate.Relation;
import com.antgroup.openspg.core.schema.model.type.BaseSPGType;
import com.antgroup.openspg.core.schema.model.type.ConceptList;
import com.antgroup.openspg.core.schema.model.type.ProjectSchema;
import com.antgroup.openspg.server.common.model.datasource.connection.GraphStoreConnectionInfo;
import com.antgroup.openspg.server.common.model.datasource.connection.SearchEngineConnectionInfo;
@ -14,9 +15,17 @@ import java.util.Map;
public class DefaultBuilderCatalog implements BuilderCatalog {
private final ProjectSchema projectSchema;
private final Map<SPGTypeIdentifier, ConceptList> conceptLists;
public DefaultBuilderCatalog(ProjectSchema projectSchema) {
public DefaultBuilderCatalog(
ProjectSchema projectSchema, Map<SPGTypeIdentifier, ConceptList> conceptLists) {
this.projectSchema = projectSchema;
this.conceptLists = conceptLists;
}
@Override
public ProjectSchema getProjectSchema() {
return projectSchema;
}
@Override
@ -34,6 +43,11 @@ public class DefaultBuilderCatalog implements BuilderCatalog {
return projectSchema.getByName(identifier);
}
@Override
public ConceptList getConceptList(SPGTypeIdentifier conceptType) {
return conceptLists.get(conceptType);
}
@Override
public GraphStoreConnectionInfo getGraphStoreConnInfo() {
// "graphstore:tugraph://127.0.0.1:9090/default?timeout=60000&accessId=admin&accessKey=73@TuGraph";

View File

@ -13,7 +13,10 @@
package com.antgroup.openspg.builder.model.pipeline;
import com.antgroup.openspg.builder.model.pipeline.config.BaseMappingNodeConfig;
import com.antgroup.openspg.core.schema.model.identifier.BaseSPGIdentifier;
import com.antgroup.openspg.server.common.model.base.BaseValObj;
import java.util.ArrayList;
import java.util.List;
import lombok.AllArgsConstructor;
import lombok.Getter;
@ -31,4 +34,15 @@ public class Pipeline extends BaseValObj {
/** The list of edge in the pipeline. */
private final List<Edge> edges;
/**
 * Collects the schema identifiers referenced by this pipeline's mapping nodes.
 *
 * @return identifiers declared by every mapping-node config, in node order (not de-duplicated)
 */
public List<BaseSPGIdentifier> schemaUsed() {
  List<BaseSPGIdentifier> used = new ArrayList<>();
  for (Node node : nodes) {
    // Only mapping nodes carry schema identifiers; other node kinds are skipped.
    if (!(node.getNodeConfig() instanceof BaseMappingNodeConfig)) {
      continue;
    }
    used.addAll(((BaseMappingNodeConfig) node.getNodeConfig()).getIdentifiers());
  }
  return used;
}
}

View File

@ -1,6 +1,7 @@
package com.antgroup.openspg.builder.model.pipeline.config;
import com.antgroup.openspg.builder.model.pipeline.enums.NodeTypeEnum;
import com.antgroup.openspg.core.schema.model.identifier.BaseSPGIdentifier;
import java.util.List;
import lombok.AllArgsConstructor;
import lombok.Getter;
@ -26,6 +27,8 @@ public abstract class BaseMappingNodeConfig extends BaseNodeConfig {
private final PropertyNormalizerConfig normalizerConfig;
}
public abstract List<BaseSPGIdentifier> getIdentifiers();
public List<MappingConfig> getMappingConfigs() {
throw new UnsupportedOperationException();
}

View File

@ -1,23 +0,0 @@
/*
* Copyright 2023 Ant Group CO., Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied.
*/
package com.antgroup.openspg.builder.model.pipeline.config;
import com.antgroup.openspg.builder.model.pipeline.enums.NodeTypeEnum;
/** Config for the CHECK node; carries no options beyond its node type. */
public class CheckNodeConfig extends BaseNodeConfig {

  /** Tags this config with {@link NodeTypeEnum#CHECK}. */
  public CheckNodeConfig() {
    super(NodeTypeEnum.CHECK);
  }
}

View File

@ -2,6 +2,10 @@ package com.antgroup.openspg.builder.model.pipeline.config;
import com.antgroup.openspg.builder.model.pipeline.enums.NodeTypeEnum;
import java.util.List;
import com.antgroup.openspg.core.schema.model.identifier.BaseSPGIdentifier;
import com.antgroup.openspg.core.schema.model.identifier.RelationIdentifier;
import com.google.common.collect.Lists;
import lombok.Getter;
@Getter
@ -20,4 +24,9 @@ public class RelationMappingNodeConfig extends BaseMappingNodeConfig {
this.mappingFilters = mappingFilters;
this.mappingConfigs = mappingConfigs;
}
/** Returns the single relation identifier this mapping targets, parsed from {@code relation}. */
@Override
public List<BaseSPGIdentifier> getIdentifiers() {
  return Lists.newArrayList(RelationIdentifier.parse(relation));
}
}

View File

@ -1,6 +1,9 @@
package com.antgroup.openspg.builder.model.pipeline.config;
import com.antgroup.openspg.builder.model.pipeline.enums.NodeTypeEnum;
import com.antgroup.openspg.core.schema.model.identifier.BaseSPGIdentifier;
import com.antgroup.openspg.core.schema.model.identifier.SPGTypeIdentifier;
import com.google.common.collect.Lists;
import java.util.List;
import lombok.Getter;
@ -20,4 +23,9 @@ public class SPGTypeMappingNodeConfig extends BaseMappingNodeConfig {
this.mappingFilters = mappingFilters;
this.mappingConfigs = mappingConfigs;
}
/** Returns the single SPG type identifier this mapping targets, parsed from {@code spgType}. */
@Override
public List<BaseSPGIdentifier> getIdentifiers() {
  return Lists.newArrayList(SPGTypeIdentifier.parse(spgType));
}
}

View File

@ -1,7 +1,9 @@
package com.antgroup.openspg.builder.model.pipeline.config;
import com.antgroup.openspg.builder.model.pipeline.enums.NodeTypeEnum;
import com.antgroup.openspg.core.schema.model.identifier.BaseSPGIdentifier;
import java.util.List;
import java.util.stream.Collectors;
import lombok.Getter;
@Getter
@ -13,4 +15,12 @@ public class SubGraphMappingNodeConfig extends BaseMappingNodeConfig {
super(NodeTypeEnum.SUBGRAPH_MAPPING);
this.childrenNodeConfigs = childrenNodeConfigs;
}
/** Returns the de-duplicated union of identifiers declared by all child mapping configs. */
@Override
public List<BaseSPGIdentifier> getIdentifiers() {
  return childrenNodeConfigs.stream()
      .flatMap(x -> x.getIdentifiers().stream())
      .distinct()
      .collect(Collectors.toList());
}
}

View File

@ -26,8 +26,9 @@ public enum NodeTypeEnum {
USER_DEFINED_EXTRACT,
LLM_BASED_EXTRACT,
/** CHECK Component */
/** OTHER Component */
CHECK,
REASON,
/** SINK Component */
GRAPH_SINK,

View File

@ -68,6 +68,19 @@ public abstract class BaseAdvancedRecord extends BaseSPGRecord
.collect(Collectors.toList());
}
/**
 * Folds {@code otherRecord} into this record's properties: if a record with the same property
 * definition already exists, the incoming value is merged into it; otherwise the record is
 * appended as a new property.
 *
 * @param otherRecord property record to merge into this advanced record
 */
public void mergePropertyValue(SPGPropertyRecord otherRecord) {
  for (SPGPropertyRecord existRecord : getSpgProperties()) {
    if (otherRecord.getProperty().equals(existRecord.getProperty())) {
      existRecord.getValue().merge(otherRecord.getValue());
      // Stop at the first match. The previous code kept scanning, which would merge the same
      // value into every duplicate entry for the property and waste the remaining iterations.
      return;
    }
  }
  addSpgProperties(otherRecord);
}
@Override
public String toString() {
String spgType = getSpgType().getName();

View File

@ -27,7 +27,7 @@ public class SPGPropertyValue extends BaseValObj {
protected static final String SEPARATOR = ",";
/** 原始的属性值 */
private final String raw;
private String raw;
/** 当该属性类型是非基础类型时该字段指向属性类型的实例id由于属性可能是多值所以这里定义为list */
private List<String> ids;
@ -47,4 +47,12 @@ public class SPGPropertyValue extends BaseValObj {
stds = new ArrayList<>(1);
stds.add(std);
}
/**
 * Appends {@code otherValue}'s raw text to this value, joined with {@link #SEPARATOR}; if this
 * value has no raw text yet, it simply adopts the other's. A null incoming raw is ignored so a
 * merge can never turn an existing value into "x,null".
 *
 * @param otherValue value whose raw text is folded into this one
 */
public void merge(SPGPropertyValue otherValue) {
  String otherRaw = otherValue.getRaw();
  if (otherRaw == null) {
    return; // nothing to merge
  }
  // Use the shared SEPARATOR constant instead of the hard-coded "," the old code used, so the
  // join format stays consistent with the rest of this class.
  raw = (raw == null) ? otherRaw : raw + SEPARATOR + otherRaw;
}
}

View File

@ -14,13 +14,24 @@ import com.antgroup.openspg.builder.model.exception.PipelineConfigException;
import com.antgroup.openspg.builder.model.pipeline.Pipeline;
import com.antgroup.openspg.builder.model.record.RecordAlterOperationEnum;
import com.antgroup.openspg.common.util.StringUtils;
import com.antgroup.openspg.core.schema.model.identifier.BaseSPGIdentifier;
import com.antgroup.openspg.core.schema.model.identifier.SPGTypeIdentifier;
import com.antgroup.openspg.core.schema.model.predicate.Property;
import com.antgroup.openspg.core.schema.model.type.BaseSPGType;
import com.antgroup.openspg.core.schema.model.type.ConceptList;
import com.antgroup.openspg.core.schema.model.type.ProjectSchema;
import com.antgroup.openspg.core.schema.model.type.SPGTypeRef;
import com.antgroup.openspg.server.api.facade.ApiResponse;
import com.antgroup.openspg.server.api.facade.client.ConceptFacade;
import com.antgroup.openspg.server.api.facade.client.SchemaFacade;
import com.antgroup.openspg.server.api.facade.dto.schema.request.ConceptRequest;
import com.antgroup.openspg.server.api.facade.dto.schema.request.ProjectSchemaRequest;
import com.antgroup.openspg.server.api.http.client.HttpConceptFacade;
import com.antgroup.openspg.server.api.http.client.HttpSchemaFacade;
import com.antgroup.openspg.server.api.http.client.util.ConnectionInfo;
import com.antgroup.openspg.server.api.http.client.util.HttpClientBootstrap;
import java.util.HashMap;
import java.util.Map;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.cli.*;
import org.slf4j.LoggerFactory;
@ -37,6 +48,7 @@ public class LocalBuilderMain {
private static final String PARALLELISM_OPTION = "parallelism";
private static final String ALTER_OPERATION_OPTION = "alterOperation";
private static final String LOG_FILE_OPTION = "logFile";
private static final String LEAD_TO_OPTION = "leadTo";
public static void main(String[] args) {
CommandLine commandLine = parseArgs(args);
@ -62,6 +74,7 @@ public class LocalBuilderMain {
options.addOption(
ALTER_OPERATION_OPTION, ALTER_OPERATION_OPTION, true, "alter operation, upsert or delete");
options.addOption(LOG_FILE_OPTION, LOG_FILE_OPTION, true, "log file");
options.addOption(LEAD_TO_OPTION, LEAD_TO_OPTION, false, "enable leadTo");
CommandLine commandLine = null;
HelpFormatter helper = new HelpFormatter();
@ -95,15 +108,20 @@ public class LocalBuilderMain {
String alterOperation = commandLine.getOptionValue(ALTER_OPERATION_OPTION);
RecordAlterOperationEnum alterOperationEnum = RecordAlterOperationEnum.valueOf(alterOperation);
boolean enableLeadTo = commandLine.hasOption(LEAD_TO_OPTION);
ProjectSchema projectSchema = getProjectSchema(projectId, schemaUrl);
Map<SPGTypeIdentifier, ConceptList> conceptLists =
getConceptLists(enableLeadTo, projectSchema, pipeline);
BuilderContext builderContext =
new BuilderContext()
.setProjectId(projectId)
.setJobName(jobName)
.setCatalog(new DefaultBuilderCatalog(projectSchema))
.setCatalog(new DefaultBuilderCatalog(projectSchema, conceptLists))
.setPythonExec(pythonExec)
.setPythonPaths(pythonPaths)
.setOperation(alterOperationEnum);
.setOperation(alterOperationEnum)
.setEnableLeadTo(enableLeadTo);
LocalBuilderRunner runner = new LocalBuilderRunner(parallelism);
runner.init(pipeline, builderContext);
@ -128,6 +146,36 @@ public class LocalBuilderMain {
throw new PipelineConfigException("");
}
/**
 * Pre-fetches the concept list for every concept-typed property object of the SPG types the
 * pipeline uses, keyed by concept type identifier.
 *
 * @param enableLeadTo whether leadTo reasoning is enabled; when false no lookups are made
 * @param projectSchema schema used to resolve the SPG types referenced by the pipeline
 * @param pipeline pipeline whose mapping nodes determine which schema elements are used
 * @return concept lists by concept type; empty when leadTo is disabled or nothing matches
 */
private static Map<SPGTypeIdentifier, ConceptList> getConceptLists(
    boolean enableLeadTo, ProjectSchema projectSchema, Pipeline pipeline) {
  Map<SPGTypeIdentifier, ConceptList> results = new HashMap<>();
  if (!enableLeadTo) {
    // Return an empty map rather than null (as the old code did) so that
    // DefaultBuilderCatalog#getConceptList cannot NPE if queried while leadTo is disabled.
    return results;
  }
  ConceptFacade conceptFacade = new HttpConceptFacade();
  for (BaseSPGIdentifier identifier : pipeline.schemaUsed()) {
    if (!(identifier instanceof SPGTypeIdentifier)) {
      continue;
    }
    BaseSPGType spgType = projectSchema.getByName((SPGTypeIdentifier) identifier);
    for (Property property : spgType.getProperties()) {
      SPGTypeRef objectTypeRef = property.getObjectTypeRef();
      if (!objectTypeRef.isConceptType()) {
        continue;
      }
      ApiResponse<ConceptList> response =
          conceptFacade.queryConcept(
              new ConceptRequest().setConceptTypeName(objectTypeRef.getName()));
      // Best-effort: a failed lookup just means no concept list is cached for this type.
      if (response.isSuccess()) {
        results.put(objectTypeRef.getBaseSpgIdentifier(), response.getData());
      }
    }
  }
  return results;
}
private static void setUpLogFile(String logFileName) {
LoggerContext loggerContext = (LoggerContext) LoggerFactory.getILoggerFactory();
loggerContext.reset();

View File

@ -2,6 +2,7 @@ package com.antgroup.openspg.builder.runner.local;
import com.antgroup.openspg.builder.core.logical.LogicalPlan;
import com.antgroup.openspg.builder.core.physical.PhysicalPlan;
import com.antgroup.openspg.builder.core.reason.ReasonProcessor;
import com.antgroup.openspg.builder.core.runtime.BuilderContext;
import com.antgroup.openspg.builder.core.runtime.BuilderExecutor;
import com.antgroup.openspg.builder.core.runtime.BuilderRunner;
@ -33,6 +34,7 @@ public class LocalBuilderRunner implements BuilderRunner {
private BuilderExecutor builderExecutor = null;
private BaseSourceReader<?> sourceReader = null;
private BaseSinkWriter<?> sinkWriter = null;
private ReasonProcessor reasonProcessor = null;
private BuilderMetric builderMetric = null;
private final int parallelism;
@ -64,6 +66,10 @@ public class LocalBuilderRunner implements BuilderRunner {
// 构建指标统计并将构建指标输出到log
builderMetric = new BuilderMetric(context.getJobName());
builderMetric.reportToLog();
if (context.isEnableLeadTo()) {
reasonProcessor = new ReasonProcessor();
}
}
@Override
@ -89,6 +95,7 @@ public class LocalBuilderRunner implements BuilderRunner {
if (CollectionUtils.isNotEmpty(results)) {
sinkWriter.write(results);
}
reason(results);
records = Collections.unmodifiableList(sourceReader.read());
}
},
@ -101,6 +108,20 @@ public class LocalBuilderRunner implements BuilderRunner {
failFast(futures, joint);
}
/**
 * Runs the optional reasoning pass after records have been written to graph storage, and sinks
 * any records the reasoning derives. Note: if the underlying graph store does not make writes
 * immediately visible, the reasoning will not work well.
 */
private void reason(List<BaseRecord> records) {
  if (reasonProcessor == null || CollectionUtils.isEmpty(records)) {
    return;
  }
  List<BaseRecord> derived = reasonProcessor.process(records);
  if (CollectionUtils.isNotEmpty(derived)) {
    sinkWriter.write(derived);
  }
}
private static <T> void failFast(List<CompletableFuture<T>> futures, CompletableFuture<T> joint)
throws Exception {
while (true) {

View File

@ -1,13 +1,5 @@
from knext.operator.op import BaseOp, LinkOp, ExtractOp, FuseOp, PromptOp
from knext.operator.builtin.auto_prompt import SPOPrompt
__all__ = [
"BaseOp",
"ExtractOp",
"LinkOp",
"FuseOp",
"PromptOp",
"SPOPrompt"
]
__all__ = ["BaseOp", "ExtractOp", "LinkOp", "FuseOp", "PromptOp", "SPOPrompt"]

View File

@ -3,7 +3,6 @@ from abc import ABC
class Client(ABC):
def __init__(self, host_addr: str = None, project_id: int = None):
self._host_addr = host_addr or os.environ.get("KNEXT_HOST_ADDR")
self._project_id = project_id or os.environ.get("KNEXT_PROJECT_ID")

View File

@ -32,7 +32,10 @@ class OperatorClient(Client):
def __init__(self, host_addr: str = None, project_id: int = None):
super().__init__(host_addr, project_id)
if "KNEXT_ROOT_PATH" in os.environ and "KNEXT_BUILDER_OPERATOR_DIR" in os.environ:
if (
"KNEXT_ROOT_PATH" in os.environ
and "KNEXT_BUILDER_OPERATOR_DIR" in os.environ
):
self._builder_operator_path = os.path.join(
os.environ["KNEXT_ROOT_PATH"], os.environ["KNEXT_BUILDER_OPERATOR_DIR"]
)

View File

@ -47,11 +47,12 @@ def execute_job(job_names):
for job in job_list:
builder_job = BuilderJob.by_name(job)()
builder_chain = builder_job.build()
inst = client.execute(builder_chain,
job_name=job,
parallelism=builder_job.parallelism,
alter_operation=builder_job.alter_operation
)
inst = client.execute(
builder_chain,
job_name=job,
parallelism=builder_job.parallelism,
alter_operation=builder_job.alter_operation,
)
click.secho(
f"BuilderJob [{job}] has been successfully submitted."
f" Use ` knext builder get --id {inst.building_job_inst_id} ` to check job status.",

View File

@ -9,6 +9,7 @@ from knext.operator.op import PromptOp, ExtractOp
# try:
from nn4k.invoker.base import NNInvoker # noqa: F403
# except ImportError:
# pass

View File

@ -108,14 +108,22 @@ class SPGTypeMapping(Mapping):
for tgt_name, src_name in self.mapping.items():
link_strategy = self.link_strategies.get(tgt_name, None)
if isinstance(link_strategy, LinkOp):
property_normalizer = rest.OperatorPropertyNormalizerConfig(config=link_strategy.to_rest())
property_normalizer = rest.OperatorPropertyNormalizerConfig(
config=link_strategy.to_rest()
)
elif link_strategy == LinkStrategyEnum.IDEquals:
property_normalizer = rest.IdEqualsPropertyNormalizerConfig()
elif not link_strategy:
property_normalizer = None
else:
raise ValueError(f"Invalid link_strategy {link_strategy}")
mapping_configs.append(rest.MappingConfig(source=src_name, target=tgt_name, normalizer_config=property_normalizer))
mapping_configs.append(
rest.MappingConfig(
source=src_name,
target=tgt_name,
normalizer_config=property_normalizer,
)
)
config = rest.SpgTypeMappingNodeConfig(
spg_type=self.spg_type_name,

View File

@ -1,4 +1,3 @@
from knext import rest
from knext.component.builder.base import SinkWriter
from knext.operator.spg_record import SPGRecord

View File

@ -29,7 +29,5 @@ class CertLinkerOperator(LinkOp):
query = {"match": {"certNum": has_cert}}
recall_certs = self.search_client.search(query, start=0, size=10)
if recall_certs is not None and len(recall_certs) > 0:
return [
SPGRecord('RiskMining.Cert', {'id': recall_certs[0].doc_id})
]
return [SPGRecord('RiskMining.Cert', {'id': property})]
return [SPGRecord("RiskMining.Cert", {"id": recall_certs[0].doc_id})]
return [SPGRecord("RiskMining.Cert", {"id": property})]

View File

@ -193,11 +193,27 @@ from knext.rest.models.schema.type.spg_type_ref import SpgTypeRef
from knext.rest.models.schema.type.spg_type_ref_basic_info import SpgTypeRefBasicInfo
from knext.rest.models.schema.type.standard_type import StandardType
from knext.rest.models.schema.type.standard_type_basic_info import StandardTypeBasicInfo
from knext.rest.models.builder.pipeline.config.id_equals_property_normalizer_config import IdEqualsPropertyNormalizerConfig
from knext.rest.models.builder.pipeline.config.llm_based_extract_node_config import LlmBasedExtractNodeConfig
from knext.rest.models.builder.pipeline.config.operator_property_normalizer_config import OperatorPropertyNormalizerConfig
from knext.rest.models.builder.pipeline.config.property_normalizer_config import PropertyNormalizerConfig
from knext.rest.models.builder.pipeline.config.relation_mapping_node_config import RelationMappingNodeConfig
from knext.rest.models.builder.pipeline.config.spg_type_mapping_node_config import SpgTypeMappingNodeConfig
from knext.rest.models.builder.pipeline.config.sub_graph_mapping_node_config import SubGraphMappingNodeConfig
from knext.rest.models.builder.pipeline.config.user_defined_extract_node_config import UserDefinedExtractNodeConfig
from knext.rest.models.builder.pipeline.config.id_equals_property_normalizer_config import (
IdEqualsPropertyNormalizerConfig,
)
from knext.rest.models.builder.pipeline.config.llm_based_extract_node_config import (
LlmBasedExtractNodeConfig,
)
from knext.rest.models.builder.pipeline.config.operator_property_normalizer_config import (
OperatorPropertyNormalizerConfig,
)
from knext.rest.models.builder.pipeline.config.property_normalizer_config import (
PropertyNormalizerConfig,
)
from knext.rest.models.builder.pipeline.config.relation_mapping_node_config import (
RelationMappingNodeConfig,
)
from knext.rest.models.builder.pipeline.config.spg_type_mapping_node_config import (
SpgTypeMappingNodeConfig,
)
from knext.rest.models.builder.pipeline.config.sub_graph_mapping_node_config import (
SubGraphMappingNodeConfig,
)
from knext.rest.models.builder.pipeline.config.user_defined_extract_node_config import (
UserDefinedExtractNodeConfig,
)

View File

@ -170,11 +170,27 @@ from knext.rest.models.schema.type.spg_type_ref import SpgTypeRef
from knext.rest.models.schema.type.spg_type_ref_basic_info import SpgTypeRefBasicInfo
from knext.rest.models.schema.type.standard_type import StandardType
from knext.rest.models.schema.type.standard_type_basic_info import StandardTypeBasicInfo
from knext.rest.models.builder.pipeline.config.id_equals_property_normalizer_config import IdEqualsPropertyNormalizerConfig
from knext.rest.models.builder.pipeline.config.llm_based_extract_node_config import LlmBasedExtractNodeConfig
from knext.rest.models.builder.pipeline.config.operator_property_normalizer_config import OperatorPropertyNormalizerConfig
from knext.rest.models.builder.pipeline.config.property_normalizer_config import PropertyNormalizerConfig
from knext.rest.models.builder.pipeline.config.relation_mapping_node_config import RelationMappingNodeConfig
from knext.rest.models.builder.pipeline.config.spg_type_mapping_node_config import SpgTypeMappingNodeConfig
from knext.rest.models.builder.pipeline.config.sub_graph_mapping_node_config import SubGraphMappingNodeConfig
from knext.rest.models.builder.pipeline.config.user_defined_extract_node_config import UserDefinedExtractNodeConfig
from knext.rest.models.builder.pipeline.config.id_equals_property_normalizer_config import (
IdEqualsPropertyNormalizerConfig,
)
from knext.rest.models.builder.pipeline.config.llm_based_extract_node_config import (
LlmBasedExtractNodeConfig,
)
from knext.rest.models.builder.pipeline.config.operator_property_normalizer_config import (
OperatorPropertyNormalizerConfig,
)
from knext.rest.models.builder.pipeline.config.property_normalizer_config import (
PropertyNormalizerConfig,
)
from knext.rest.models.builder.pipeline.config.relation_mapping_node_config import (
RelationMappingNodeConfig,
)
from knext.rest.models.builder.pipeline.config.spg_type_mapping_node_config import (
SpgTypeMappingNodeConfig,
)
from knext.rest.models.builder.pipeline.config.sub_graph_mapping_node_config import (
SubGraphMappingNodeConfig,
)
from knext.rest.models.builder.pipeline.config.user_defined_extract_node_config import (
UserDefinedExtractNodeConfig,
)

View File

@ -93,7 +93,15 @@ class BaseNodeConfig(object):
raise ValueError(
"Invalid value for `type`, must not be `None`"
) # noqa: E501
allowed_values = ["CSV_SOURCE", "SPG_TYPE_MAPPING", "RELATION_MAPPING", "SUBGRAPH_MAPPING", "USER_DEFINED_EXTRACT", "LLM_BASED_EXTRACT", "GRAPH_SINK"] # noqa: E501
allowed_values = [
"CSV_SOURCE",
"SPG_TYPE_MAPPING",
"RELATION_MAPPING",
"SUBGRAPH_MAPPING",
"USER_DEFINED_EXTRACT",
"LLM_BASED_EXTRACT",
"GRAPH_SINK",
] # noqa: E501
if (
self.local_vars_configuration.client_side_validation
and type not in allowed_values

View File

@ -108,7 +108,15 @@ class CsvSourceNodeConfig(object):
raise ValueError(
"Invalid value for `type`, must not be `None`"
) # noqa: E501
allowed_values = ["CSV_SOURCE", "SPG_TYPE_MAPPING", "RELATION_MAPPING", "SUBGRAPH_MAPPING", "USER_DEFINED_EXTRACT", "LLM_BASED_EXTRACT", "GRAPH_SINK"] # noqa: E501
allowed_values = [
"CSV_SOURCE",
"SPG_TYPE_MAPPING",
"RELATION_MAPPING",
"SUBGRAPH_MAPPING",
"USER_DEFINED_EXTRACT",
"LLM_BASED_EXTRACT",
"GRAPH_SINK",
] # noqa: E501
if (
self.local_vars_configuration.client_side_validation
and type not in allowed_values

View File

@ -82,7 +82,15 @@ class GraphStoreSinkNodeConfig(object):
raise ValueError(
"Invalid value for `type`, must not be `None`"
) # noqa: E501
allowed_values = ["CSV_SOURCE", "SPG_TYPE_MAPPING", "RELATION_MAPPING", "SUBGRAPH_MAPPING", "USER_DEFINED_EXTRACT", "LLM_BASED_EXTRACT", "GRAPH_SINK"] # noqa: E501
allowed_values = [
"CSV_SOURCE",
"SPG_TYPE_MAPPING",
"RELATION_MAPPING",
"SUBGRAPH_MAPPING",
"USER_DEFINED_EXTRACT",
"LLM_BASED_EXTRACT",
"GRAPH_SINK",
] # noqa: E501
if (
self.local_vars_configuration.client_side_validation
and type not in allowed_values

View File

@ -43,15 +43,13 @@ class IdEqualsPropertyNormalizerConfig(object):
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'normalizer_type': 'str'
}
openapi_types = {"normalizer_type": "str"}
attribute_map = {
'normalizer_type': 'normalizerType'
}
attribute_map = {"normalizer_type": "normalizerType"}
def __init__(self, normalizer_type='ID_EQUALS', local_vars_configuration=None): # noqa: E501
def __init__(
self, normalizer_type="ID_EQUALS", local_vars_configuration=None
): # noqa: E501
"""IdEqualsPropertyNormalizerConfig - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
@ -80,13 +78,22 @@ class IdEqualsPropertyNormalizerConfig(object):
:param normalizer_type: The normalizer_type of this IdEqualsPropertyNormalizerConfig. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and normalizer_type is None: # noqa: E501
raise ValueError("Invalid value for `normalizer_type`, must not be `None`") # noqa: E501
allowed_values = ["OPERATOR", "ID_EQUALS"] # noqa: E501
if self.local_vars_configuration.client_side_validation and normalizer_type not in allowed_values: # noqa: E501
if (
self.local_vars_configuration.client_side_validation
and normalizer_type is None
): # noqa: E501
raise ValueError(
"Invalid value for `normalizer_type` ({0}), must be one of {1}" # noqa: E501
.format(normalizer_type, allowed_values)
"Invalid value for `normalizer_type`, must not be `None`"
) # noqa: E501
allowed_values = ["OPERATOR", "ID_EQUALS"] # noqa: E501
if (
self.local_vars_configuration.client_side_validation
and normalizer_type not in allowed_values
): # noqa: E501
raise ValueError(
"Invalid value for `normalizer_type` ({0}), must be one of {1}".format( # noqa: E501
normalizer_type, allowed_values
)
)
self._normalizer_type = normalizer_type
@ -98,18 +105,20 @@ class IdEqualsPropertyNormalizerConfig(object):
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
result[attr] = list(
map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value)
)
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
result[attr] = dict(
map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict")
else item,
value.items(),
)
)
else:
result[attr] = value

View File

@ -43,15 +43,13 @@ class LlmBasedExtractNodeConfig(object):
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'type': 'str'
}
openapi_types = {"type": "str"}
attribute_map = {
'type': 'type'
}
attribute_map = {"type": "type"}
def __init__(self, type='LLM_BASED_EXTRACT', local_vars_configuration=None): # noqa: E501
def __init__(
self, type="LLM_BASED_EXTRACT", local_vars_configuration=None
): # noqa: E501
"""LlmBasedExtractNodeConfig - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
@ -80,13 +78,26 @@ class LlmBasedExtractNodeConfig(object):
:param type: The type of this LlmBasedExtractNodeConfig. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and type is None: # noqa: E501
raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501
allowed_values = ["CSV_SOURCE", "GRAPH_SINK", "MAPPING", "EXTRACT"] # noqa: E501
if self.local_vars_configuration.client_side_validation and type not in allowed_values: # noqa: E501
if (
self.local_vars_configuration.client_side_validation and type is None
): # noqa: E501
raise ValueError(
"Invalid value for `type` ({0}), must be one of {1}" # noqa: E501
.format(type, allowed_values)
"Invalid value for `type`, must not be `None`"
) # noqa: E501
allowed_values = [
"CSV_SOURCE",
"GRAPH_SINK",
"MAPPING",
"EXTRACT",
] # noqa: E501
if (
self.local_vars_configuration.client_side_validation
and type not in allowed_values
): # noqa: E501
raise ValueError(
"Invalid value for `type` ({0}), must be one of {1}".format( # noqa: E501
type, allowed_values
)
)
self._type = type
@ -98,18 +109,20 @@ class LlmBasedExtractNodeConfig(object):
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
result[attr] = list(
map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value)
)
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
result[attr] = dict(
map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict")
else item,
value.items(),
)
)
else:
result[attr] = value

View File

@ -44,18 +44,24 @@ class MappingConfig(object):
and the value is json key in definition.
"""
openapi_types = {
'source': 'str',
'target': 'str',
'normalizer_config': 'PropertyNormalizerConfig'
"source": "str",
"target": "str",
"normalizer_config": "PropertyNormalizerConfig",
}
attribute_map = {
'source': 'source',
'target': 'target',
'normalizer_config': 'normalizerConfig'
"source": "source",
"target": "target",
"normalizer_config": "normalizerConfig",
}
def __init__(self, source=None, target=None, normalizer_config=None, local_vars_configuration=None): # noqa: E501
def __init__(
self,
source=None,
target=None,
normalizer_config=None,
local_vars_configuration=None,
): # noqa: E501
"""MappingConfig - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
@ -143,18 +149,20 @@ class MappingConfig(object):
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
result[attr] = list(
map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value)
)
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
result[attr] = dict(
map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict")
else item,
value.items(),
)
)
else:
result[attr] = value

View File

@ -44,18 +44,24 @@ class OperatorPropertyNormalizerConfig(object):
and the value is json key in definition.
"""
openapi_types = {
'normalizer_type': 'str',
'config': 'OperatorConfig',
'params': 'str'
"normalizer_type": "str",
"config": "OperatorConfig",
"params": "str",
}
attribute_map = {
'normalizer_type': 'normalizerType',
'config': 'config',
'params': 'params'
"normalizer_type": "normalizerType",
"config": "config",
"params": "params",
}
def __init__(self, normalizer_type=None, config=None, params=None, local_vars_configuration=None): # noqa: E501
def __init__(
self,
normalizer_type=None,
config=None,
params=None,
local_vars_configuration=None,
): # noqa: E501
"""OperatorPropertyNormalizerConfig - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
@ -88,13 +94,22 @@ class OperatorPropertyNormalizerConfig(object):
:param normalizer_type: The normalizer_type of this OperatorPropertyNormalizerConfig. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and normalizer_type is None: # noqa: E501
raise ValueError("Invalid value for `normalizer_type`, must not be `None`") # noqa: E501
allowed_values = ["OPERATOR", "ID_EQUALS"] # noqa: E501
if self.local_vars_configuration.client_side_validation and normalizer_type not in allowed_values: # noqa: E501
if (
self.local_vars_configuration.client_side_validation
and normalizer_type is None
): # noqa: E501
raise ValueError(
"Invalid value for `normalizer_type` ({0}), must be one of {1}" # noqa: E501
.format(normalizer_type, allowed_values)
"Invalid value for `normalizer_type`, must not be `None`"
) # noqa: E501
allowed_values = ["OPERATOR", "ID_EQUALS"] # noqa: E501
if (
self.local_vars_configuration.client_side_validation
and normalizer_type not in allowed_values
): # noqa: E501
raise ValueError(
"Invalid value for `normalizer_type` ({0}), must be one of {1}".format( # noqa: E501
normalizer_type, allowed_values
)
)
self._normalizer_type = normalizer_type
@ -117,8 +132,12 @@ class OperatorPropertyNormalizerConfig(object):
:param config: The config of this OperatorPropertyNormalizerConfig. # noqa: E501
:type: OperatorConfig
"""
if self.local_vars_configuration.client_side_validation and config is None: # noqa: E501
raise ValueError("Invalid value for `config`, must not be `None`") # noqa: E501
if (
self.local_vars_configuration.client_side_validation and config is None
): # noqa: E501
raise ValueError(
"Invalid value for `config`, must not be `None`"
) # noqa: E501
self._config = config
@ -140,8 +159,12 @@ class OperatorPropertyNormalizerConfig(object):
:param params: The params of this OperatorPropertyNormalizerConfig. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and params is None: # noqa: E501
raise ValueError("Invalid value for `params`, must not be `None`") # noqa: E501
if (
self.local_vars_configuration.client_side_validation and params is None
): # noqa: E501
raise ValueError(
"Invalid value for `params`, must not be `None`"
) # noqa: E501
self._params = params
@ -152,18 +175,20 @@ class OperatorPropertyNormalizerConfig(object):
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
result[attr] = list(
map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value)
)
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
result[attr] = dict(
map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict")
else item,
value.items(),
)
)
else:
result[attr] = value

View File

@ -43,15 +43,13 @@ class PropertyNormalizerConfig(object):
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'normalizer_type': 'str'
}
openapi_types = {"normalizer_type": "str"}
attribute_map = {
'normalizer_type': 'normalizerType'
}
attribute_map = {"normalizer_type": "normalizerType"}
def __init__(self, normalizer_type=None, local_vars_configuration=None): # noqa: E501
def __init__(
self, normalizer_type=None, local_vars_configuration=None
): # noqa: E501
"""PropertyNormalizerConfig - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
@ -80,13 +78,22 @@ class PropertyNormalizerConfig(object):
:param normalizer_type: The normalizer_type of this PropertyNormalizerConfig. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and normalizer_type is None: # noqa: E501
raise ValueError("Invalid value for `normalizer_type`, must not be `None`") # noqa: E501
allowed_values = ["OPERATOR", "ID_EQUALS"] # noqa: E501
if self.local_vars_configuration.client_side_validation and normalizer_type not in allowed_values: # noqa: E501
if (
self.local_vars_configuration.client_side_validation
and normalizer_type is None
): # noqa: E501
raise ValueError(
"Invalid value for `normalizer_type` ({0}), must be one of {1}" # noqa: E501
.format(normalizer_type, allowed_values)
"Invalid value for `normalizer_type`, must not be `None`"
) # noqa: E501
allowed_values = ["OPERATOR", "ID_EQUALS"] # noqa: E501
if (
self.local_vars_configuration.client_side_validation
and normalizer_type not in allowed_values
): # noqa: E501
raise ValueError(
"Invalid value for `normalizer_type` ({0}), must be one of {1}".format( # noqa: E501
normalizer_type, allowed_values
)
)
self._normalizer_type = normalizer_type
@ -98,18 +105,20 @@ class PropertyNormalizerConfig(object):
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
result[attr] = list(
map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value)
)
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
result[attr] = dict(
map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict")
else item,
value.items(),
)
)
else:
result[attr] = value

View File

@ -44,20 +44,27 @@ class RelationMappingNodeConfig(object):
and the value is json key in definition.
"""
openapi_types = {
'type': 'str',
'relation': 'str',
'mapping_filters': 'list[MappingFilter]',
'mapping_configs': 'list[MappingConfig]'
"type": "str",
"relation": "str",
"mapping_filters": "list[MappingFilter]",
"mapping_configs": "list[MappingConfig]",
}
attribute_map = {
'type': 'type',
'relation': 'relation',
'mapping_filters': 'mappingFilters',
'mapping_configs': 'mappingConfigs'
"type": "type",
"relation": "relation",
"mapping_filters": "mappingFilters",
"mapping_configs": "mappingConfigs",
}
def __init__(self, type='RELATION_MAPPING', relation=None, mapping_filters=None, mapping_configs=None, local_vars_configuration=None): # noqa: E501
def __init__(
self,
type="RELATION_MAPPING",
relation=None,
mapping_filters=None,
mapping_configs=None,
local_vars_configuration=None,
): # noqa: E501
"""RelationMappingNodeConfig - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
@ -95,13 +102,29 @@ class RelationMappingNodeConfig(object):
:param type: The type of this RelationMappingNodeConfig. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and type is None: # noqa: E501
raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501
allowed_values = ["CSV_SOURCE", "SPG_TYPE_MAPPING", "RELATION_MAPPING", "SUBGRAPH_MAPPING", "USER_DEFINED_EXTRACT", "LLM_BASED_EXTRACT", "GRAPH_SINK"] # noqa: E501
if self.local_vars_configuration.client_side_validation and type not in allowed_values: # noqa: E501
if (
self.local_vars_configuration.client_side_validation and type is None
): # noqa: E501
raise ValueError(
"Invalid value for `type` ({0}), must be one of {1}" # noqa: E501
.format(type, allowed_values)
"Invalid value for `type`, must not be `None`"
) # noqa: E501
allowed_values = [
"CSV_SOURCE",
"SPG_TYPE_MAPPING",
"RELATION_MAPPING",
"SUBGRAPH_MAPPING",
"USER_DEFINED_EXTRACT",
"LLM_BASED_EXTRACT",
"GRAPH_SINK",
] # noqa: E501
if (
self.local_vars_configuration.client_side_validation
and type not in allowed_values
): # noqa: E501
raise ValueError(
"Invalid value for `type` ({0}), must be one of {1}".format( # noqa: E501
type, allowed_values
)
)
self._type = type
@ -176,18 +199,20 @@ class RelationMappingNodeConfig(object):
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
result[attr] = list(
map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value)
)
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
result[attr] = dict(
map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict")
else item,
value.items(),
)
)
else:
result[attr] = value

View File

@ -44,20 +44,27 @@ class SpgTypeMappingNodeConfig(object):
and the value is json key in definition.
"""
openapi_types = {
'type': 'str',
'spg_type': 'str',
'mapping_filters': 'list[MappingFilter]',
'mapping_configs': 'list[MappingConfig]'
"type": "str",
"spg_type": "str",
"mapping_filters": "list[MappingFilter]",
"mapping_configs": "list[MappingConfig]",
}
attribute_map = {
'type': 'type',
'spg_type': 'spgType',
'mapping_filters': 'mappingFilters',
'mapping_configs': 'mappingConfigs'
"type": "type",
"spg_type": "spgType",
"mapping_filters": "mappingFilters",
"mapping_configs": "mappingConfigs",
}
def __init__(self, type='SPG_TYPE_MAPPING', spg_type=None, mapping_filters=None, mapping_configs=None, local_vars_configuration=None): # noqa: E501
def __init__(
self,
type="SPG_TYPE_MAPPING",
spg_type=None,
mapping_filters=None,
mapping_configs=None,
local_vars_configuration=None,
): # noqa: E501
"""SpgTypeMappingNodeConfig - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
@ -95,13 +102,29 @@ class SpgTypeMappingNodeConfig(object):
:param type: The type of this SpgTypeMappingNodeConfig. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and type is None: # noqa: E501
raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501
allowed_values = ["CSV_SOURCE", "SPG_TYPE_MAPPING", "RELATION_MAPPING", "SUBGRAPH_MAPPING", "USER_DEFINED_EXTRACT", "LLM_BASED_EXTRACT", "GRAPH_SINK"] # noqa: E501
if self.local_vars_configuration.client_side_validation and type not in allowed_values: # noqa: E501
if (
self.local_vars_configuration.client_side_validation and type is None
): # noqa: E501
raise ValueError(
"Invalid value for `type` ({0}), must be one of {1}" # noqa: E501
.format(type, allowed_values)
"Invalid value for `type`, must not be `None`"
) # noqa: E501
allowed_values = [
"CSV_SOURCE",
"SPG_TYPE_MAPPING",
"RELATION_MAPPING",
"SUBGRAPH_MAPPING",
"USER_DEFINED_EXTRACT",
"LLM_BASED_EXTRACT",
"GRAPH_SINK",
] # noqa: E501
if (
self.local_vars_configuration.client_side_validation
and type not in allowed_values
): # noqa: E501
raise ValueError(
"Invalid value for `type` ({0}), must be one of {1}".format( # noqa: E501
type, allowed_values
)
)
self._type = type
@ -176,18 +199,20 @@ class SpgTypeMappingNodeConfig(object):
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
result[attr] = list(
map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value)
)
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
result[attr] = dict(
map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict")
else item,
value.items(),
)
)
else:
result[attr] = value

View File

@ -43,17 +43,16 @@ class SubGraphMappingNodeConfig(object):
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'type': 'str',
'children_node_configs': 'list[BaseNodeConfig]'
}
openapi_types = {"type": "str", "children_node_configs": "list[BaseNodeConfig]"}
attribute_map = {
'type': 'type',
'children_node_configs': 'childrenNodeConfigs'
}
attribute_map = {"type": "type", "children_node_configs": "childrenNodeConfigs"}
def __init__(self, type='SUBGRAPH_MAPPING', children_node_configs=None, local_vars_configuration=None): # noqa: E501
def __init__(
self,
type="SUBGRAPH_MAPPING",
children_node_configs=None,
local_vars_configuration=None,
): # noqa: E501
"""SubGraphMappingNodeConfig - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
@ -85,13 +84,29 @@ class SubGraphMappingNodeConfig(object):
:param type: The type of this SubGraphMappingNodeConfig. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and type is None: # noqa: E501
raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501
allowed_values = ["CSV_SOURCE", "SPG_TYPE_MAPPING", "RELATION_MAPPING", "SUBGRAPH_MAPPING", "USER_DEFINED_EXTRACT", "LLM_BASED_EXTRACT", "GRAPH_SINK"] # noqa: E501
if self.local_vars_configuration.client_side_validation and type not in allowed_values: # noqa: E501
if (
self.local_vars_configuration.client_side_validation and type is None
): # noqa: E501
raise ValueError(
"Invalid value for `type` ({0}), must be one of {1}" # noqa: E501
.format(type, allowed_values)
"Invalid value for `type`, must not be `None`"
) # noqa: E501
allowed_values = [
"CSV_SOURCE",
"SPG_TYPE_MAPPING",
"RELATION_MAPPING",
"SUBGRAPH_MAPPING",
"USER_DEFINED_EXTRACT",
"LLM_BASED_EXTRACT",
"GRAPH_SINK",
] # noqa: E501
if (
self.local_vars_configuration.client_side_validation
and type not in allowed_values
): # noqa: E501
raise ValueError(
"Invalid value for `type` ({0}), must be one of {1}".format( # noqa: E501
type, allowed_values
)
)
self._type = type
@ -124,18 +139,20 @@ class SubGraphMappingNodeConfig(object):
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
result[attr] = list(
map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value)
)
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
result[attr] = dict(
map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict")
else item,
value.items(),
)
)
else:
result[attr] = value

View File

@ -1,5 +1,10 @@
from knext.client.model.builder_job import BuilderJob
from knext.component.builder import CsvSourceReader, SPGTypeMapping, LLMBasedExtractor, KGSinkWriter
from knext.component.builder import (
CsvSourceReader,
SPGTypeMapping,
LLMBasedExtractor,
KGSinkWriter,
)
class Disease(BuilderJob):