This is an automated email from the ASF dual-hosted git repository.
yangjiaqi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-hugegraph-ai.git
The following commit(s) were added to refs/heads/main by this push:
new 2fcd471 fix(llm): use a proper template for default case (#57)
2fcd471 is described below
commit 2fcd471ee896c3586e45eb63174eaa055f06ce43
Author: imbajin <[email protected]>
AuthorDate: Thu Aug 1 21:59:12 2024 +0800
fix(llm): use a proper template for default case (#57)
* fix: use a proper template for default case
---
.asf.yaml | 2 +-
hugegraph-llm/src/hugegraph_llm/config/config.py | 13 +--
.../src/hugegraph_llm/demo/rag_web_demo.py | 41 ++++----
.../operators/common_op/check_schema.py | 36 +++----
.../operators/hugegraph_op/commit_to_hugegraph.py | 16 ++--
.../operators/llm_op/disambiguate_data.py | 2 +-
.../hugegraph_llm/operators/llm_op/info_extract.py | 2 +-
.../operators/llm_op/property_graph_extract.py | 103 ++++++++-------------
.../tests/operators/common_op/test_check_schema.py | 39 ++++----
9 files changed, 119 insertions(+), 135 deletions(-)
diff --git a/.asf.yaml b/.asf.yaml
index 5d923b0..c113d64 100644
--- a/.asf.yaml
+++ b/.asf.yaml
@@ -41,7 +41,7 @@ github:
main:
required_status_checks:
# strict means "Require branches to be up-to-date before merging".
- strict: false
+ strict: true
# contexts are the names of checks that must pass (now only enable the basic check)
#contexts:
#- CodeQL
diff --git a/hugegraph-llm/src/hugegraph_llm/config/config.py b/hugegraph-llm/src/hugegraph_llm/config/config.py
index 4ec162f..62d41d4 100644
--- a/hugegraph-llm/src/hugegraph_llm/config/config.py
+++ b/hugegraph-llm/src/hugegraph_llm/config/config.py
@@ -35,22 +35,22 @@ class Config:
# env_path: Optional[str] = ".env"
llm_type: Literal["openai", "ollama", "qianfan_wenxin", "zhipu"] = "openai"
embedding_type: Optional[Literal["openai", "ollama", "qianfan_wenxin", "zhipu"]] = "openai"
- # OpenAI settings
+ # 1. OpenAI settings
openai_api_base: Optional[str] = os.environ.get("OPENAI_BASE_URL", "https://api.openai.com/v1")
openai_api_key: Optional[str] = os.environ.get("OPENAI_API_KEY")
openai_language_model: Optional[str] = "gpt-4o-mini"
openai_embedding_model: Optional[str] = "text-embedding-3-small"
openai_max_tokens: int = 4096
- # Ollama settings
+ # 2. Ollama settings
ollama_host: Optional[str] = "127.0.0.1"
ollama_port: Optional[int] = 11434
ollama_language_model: Optional[str] = None
ollama_embedding_model: Optional[str] = None
- # QianFan/WenXin settings
+ # 3. QianFan/WenXin settings
qianfan_api_key: Optional[str] = None
qianfan_secret_key: Optional[str] = None
qianfan_access_token: Optional[str] = None
- ## url settings
+ # 3.1 url settings
qianfan_url_prefix: Optional[str] = (
"https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop"
)
@@ -59,14 +59,15 @@ class Config:
qianfan_embed_url: Optional[str] = qianfan_url_prefix + "/embeddings/"
# https://cloud.baidu.com/doc/WENXINWORKSHOP/s/alj562vvu
qianfan_embedding_model: Optional[str] = "embedding-v1"
- # Zhipu settings
+ # 4. ZhiPu(GLM) settings
zhipu_api_key: Optional[str] = None
zhipu_language_model: Optional[str] = "glm-4"
zhipu_embedding_model: Optional[str] = "embedding-2"
+
"""HugeGraph settings"""
graph_ip: Optional[str] = "127.0.0.1"
graph_port: Optional[int] = 8080
- # graph_space: Optional[str] = "DEFAU LT"
+ # graph_space: Optional[str] = "DEFAULT"
graph_name: Optional[str] = "hugegraph"
graph_user: Optional[str] = "admin"
graph_pwd: Optional[str] = "xxx"
diff --git a/hugegraph-llm/src/hugegraph_llm/demo/rag_web_demo.py b/hugegraph-llm/src/hugegraph_llm/demo/rag_web_demo.py
index 3de7ba4..08a0d2d 100644
--- a/hugegraph-llm/src/hugegraph_llm/demo/rag_web_demo.py
+++ b/hugegraph-llm/src/hugegraph_llm/demo/rag_web_demo.py
@@ -339,33 +339,36 @@ if __name__ == "__main__":
)
SCHEMA = """{
- "vertices": [
+ "vertexlabels": [
{
- "vertex_label": "person",
- "properties": [
- "name",
- "age",
- "occupation"]
+ "id":1,
+ "name": "person",
+ "id_strategy": "PRIMARY_KEY",
+ "primary_keys":["name"],
+ "properties": ["name","age","occupation"]
},
{
- "vertex_label": "webpage",
- "properties": [
- "name",
- "url"]
+ "id":2,
+ "name": "webpage",
+ "id_strategy":"PRIMARY_KEY",
+ "primary_keys":["name"],
+ "properties": ["name","url"]
}
],
- "edges": [
+ "edgelabels": [
{
- "edge_label": "roommate",
- "source_vertex_label": "person",
- "target_vertex_label": "person",
- "properties": {}
+ "id": 1,
+ "name": "roommate",
+ "source_label": "person",
+ "target_label": "person",
+ "properties": ["date"]
},
{
- "edge_label": "link",
- "source_vertex_label": "webpage",
- "target_vertex_label": "person",
- "properties": {}
+ "id": 2,
+ "name": "link",
+ "source_label": "webpage",
+ "target_label": "person",
+ "properties": []
}
]
}"""
diff --git a/hugegraph-llm/src/hugegraph_llm/operators/common_op/check_schema.py b/hugegraph-llm/src/hugegraph_llm/operators/common_op/check_schema.py
index 137d048..616c7b1 100644
--- a/hugegraph-llm/src/hugegraph_llm/operators/common_op/check_schema.py
+++ b/hugegraph-llm/src/hugegraph_llm/operators/common_op/check_schema.py
@@ -29,17 +29,17 @@ class CheckSchema:
schema = self.data or schema
if not isinstance(schema, dict):
raise ValueError("Input data is not a dictionary.")
- if "vertices" not in schema or "edges" not in schema:
- raise ValueError("Input data does not contain 'vertices' or
'edges'.")
- if not isinstance(schema["vertices"], list) or not
isinstance(schema["edges"], list):
- raise ValueError("'vertices' or 'edges' in input data is not a
list.")
- for vertex in schema["vertices"]:
+ if "vertexlabels" not in schema or "edgelabels" not in schema:
+ raise ValueError("Input data does not contain 'vertexlabels' or
'edgelabels'.")
+ if not isinstance(schema["vertexlabels"], list) or not
isinstance(schema["edgelabels"], list):
+ raise ValueError("'vertexlabels' or 'edgelabels' in input data is
not a list.")
+ for vertex in schema["vertexlabels"]:
if not isinstance(vertex, dict):
raise ValueError("Vertex in input data is not a dictionary.")
- if "vertex_label" not in vertex:
- raise ValueError("Vertex in input data does not contain
'vertex_label'.")
- if not isinstance(vertex["vertex_label"], str):
- raise ValueError("'vertex_label' in vertex is not of correct
type.")
+ if "name" not in vertex:
+ raise ValueError("Vertex in input data does not contain
'name'.")
+ if not isinstance(vertex["name"], str):
+ raise ValueError("'name' in vertex is not of correct type.")
if "properties" not in vertex:
raise ValueError("Vertex in input data does not contain
'properties'.")
properties = vertex["properties"]
@@ -71,25 +71,25 @@ class CheckSchema:
else:
new_nullable_keys.append(key)
vertex["nullable_keys"] = new_nullable_keys
- for edge in schema["edges"]:
+ for edge in schema["edgelabels"]:
if not isinstance(edge, dict):
raise ValueError("Edge in input data is not a dictionary.")
if (
- "edge_label" not in edge
- or "source_vertex_label" not in edge
- or "target_vertex_label" not in edge
+ "name" not in edge
+ or "source_label" not in edge
+ or "target_label" not in edge
):
raise ValueError(
"Edge in input data does not contain "
- "'edge_label', 'source_vertex_label',
'target_vertex_label'."
+ "'name', 'source_label', 'target_label'."
)
if (
- not isinstance(edge["edge_label"], str)
- or not isinstance(edge["source_vertex_label"], str)
- or not isinstance(edge["target_vertex_label"], str)
+ not isinstance(edge["name"], str)
+ or not isinstance(edge["source_label"], str)
+ or not isinstance(edge["target_label"], str)
):
raise ValueError(
- "'edge_label', 'source_vertex_label',
'target_vertex_label' "
+ "'name', 'source_label', 'target_label' "
"in edge is not of correct type."
)
return {"schema": schema}
diff --git a/hugegraph-llm/src/hugegraph_llm/operators/hugegraph_op/commit_to_hugegraph.py b/hugegraph-llm/src/hugegraph_llm/operators/hugegraph_op/commit_to_hugegraph.py
index 2c65952..b4ad640 100644
--- a/hugegraph-llm/src/hugegraph_llm/operators/hugegraph_op/commit_to_hugegraph.py
+++ b/hugegraph-llm/src/hugegraph_llm/operators/hugegraph_op/commit_to_hugegraph.py
@@ -48,8 +48,8 @@ class CommitToKg:
def init_graph(self, vertices, edges, schema):
key_map = {}
- for vertex in schema["vertices"]:
- key_map[vertex["vertex_label"]] = vertex
+ for vertex in schema["vertexlabels"]:
+ key_map[vertex["name"]] = vertex
for vertex in vertices:
label = vertex["label"]
properties = vertex["properties"]
@@ -78,11 +78,11 @@ class CommitToKg:
print(e)
def init_schema(self, schema):
- vertices = schema["vertices"]
- edges = schema["edges"]
+ vertices = schema["vertexlabels"]
+ edges = schema["edgelabels"]
for vertex in vertices:
- vertex_label = vertex["vertex_label"]
+ vertex_label = vertex["name"]
properties = vertex["properties"]
nullable_keys = vertex["nullable_keys"]
primary_keys = vertex["primary_keys"]
@@ -92,9 +92,9 @@ class CommitToKg:
*nullable_keys
).usePrimaryKeyId().primaryKeys(*primary_keys).ifNotExist().create()
for edge in edges:
- edge_label = edge["edge_label"]
- source_vertex_label = edge["source_vertex_label"]
- target_vertex_label = edge["target_vertex_label"]
+ edge_label = edge["name"]
+ source_vertex_label = edge["source_label"]
+ target_vertex_label = edge["target_label"]
properties = edge["properties"]
for prop in properties:
self.schema.propertyKey(prop).asText().ifNotExist().create()
diff --git a/hugegraph-llm/src/hugegraph_llm/operators/llm_op/disambiguate_data.py b/hugegraph-llm/src/hugegraph_llm/operators/llm_op/disambiguate_data.py
index a0511d4..e34b637 100644
--- a/hugegraph-llm/src/hugegraph_llm/operators/llm_op/disambiguate_data.py
+++ b/hugegraph-llm/src/hugegraph_llm/operators/llm_op/disambiguate_data.py
@@ -52,5 +52,5 @@ class DisambiguateData:
llm_output = self.llm.generate(prompt=prompt)
data["triples"] = []
extract_triples_by_regex(llm_output, data)
- print(f"LLM input:{prompt} \n output: {llm_output} \n data:
{data}")
+ print(f"LLM {self.__class__.__name__} input:{prompt} \n output:
{llm_output} \n data: {data}")
return data
diff --git a/hugegraph-llm/src/hugegraph_llm/operators/llm_op/info_extract.py b/hugegraph-llm/src/hugegraph_llm/operators/llm_op/info_extract.py
index d001f5e..6f0e3af 100644
--- a/hugegraph-llm/src/hugegraph_llm/operators/llm_op/info_extract.py
+++ b/hugegraph-llm/src/hugegraph_llm/operators/llm_op/info_extract.py
@@ -152,7 +152,7 @@ class InfoExtract:
for sentence in chunks:
proceeded_chunk = self.extract_triples_by_llm(schema, sentence)
- log.debug("[LLM] input: %s \n output:%s", sentence,
proceeded_chunk)
+ log.debug("[LLM] %s input: %s \n output:%s",
self.__class__.__name__, sentence, proceeded_chunk)
if schema:
extract_triples_by_regex_with_schema(schema, proceeded_chunk,
context)
else:
diff --git a/hugegraph-llm/src/hugegraph_llm/operators/llm_op/property_graph_extract.py b/hugegraph-llm/src/hugegraph_llm/operators/llm_op/property_graph_extract.py
index be44c7a..077ee43 100644
--- a/hugegraph-llm/src/hugegraph_llm/operators/llm_op/property_graph_extract.py
+++ b/hugegraph-llm/src/hugegraph_llm/operators/llm_op/property_graph_extract.py
@@ -24,73 +24,48 @@ from hugegraph_llm.models.llms.base import BaseLLM
from hugegraph_llm.document.chunk_split import ChunkSplitter
from hugegraph_llm.utils.log import log
-SCHEMA_EXAMPLE_PROMPT = """Main Task
-Given the following graph schema and a piece of text, your task is to analyze the text and extract information that fits into the schema’s structure, formatting the information into vertices and edges as specified.
+SCHEMA_EXAMPLE_PROMPT = """## Main Task
+Given the following graph schema and a piece of text, your task is to analyze the text and extract information that fits into the schema's structure, formatting the information into vertices and edges as specified.
-Basic Rules
-Schema Format
+## Basic Rules
+### Schema Format
Graph Schema:
+- Vertices: [List of vertex labels and their properties]
+- Edges: [List of edge labels, their source and target vertex labels, and properties]
-Vertices: [List of vertex labels and their properties]
-Edges: [List of edge labels, their source and target vertex labels, and properties]
-Content Rule
+### Content Rule
Please read the provided text carefully and identify any information that corresponds to the vertices and edges defined in the schema. For each piece of information that matches a vertex or edge, format it according to the following JSON structures:
-
-Vertex Format:
-{“label”:“vertexLabel”,“type”:“vertex”,“properties”:{“propertyName”:“propertyValue”,…}}
-
-Edge Format:
-{“label”:“edgeLabel”,“type”:“edge”,“outV”:“sourceVertexId”,“outVLabel”:“sourceVertexLabel”,“inV”:“targetVertexId”,“inVLabel”:“targetVertexLabel”,“properties”:{“propertyName”:“propertyValue”,…}}
-
-Also follow the rules:
-
-Don’t extract attribute/property fields that do not exist in the given schema
-Ensure the extract property is in the same type as the schema (like ‘age’ should be a number)
-Translate the given schema filed into Chinese if the given text is Chinese but the schema is in English (Optional)
-Your output should be a list of such JSON objects, each representing either a vertex or an edge, extracted and formatted based on the text and the provided schema.
-PrimaryKey ID Generate Rule
-
-vertexLabel的id生成策略为:id:primaryKey1!primaryKey2
-
-Example
-Input example:
-text
-道路交通事故认定书
-鱼公交认字[2013]第00478号
-天气:小雨
-交通事故时间:2013车11月24日18时09分
-交通事故地点:251省避清河菜市场路口
-当事人、车辆、道路和交通环境等基本情况:
-1、当事人基本情况:
-张小虎,男,1972年1月3日出生,山东省鱼台县清河镇清河村62号,系驾驶鲁H72886号小型轿车,驾驶证号:370827197201032316,档案号:370800767691,准驾车型:C1E,电话:15606376419.
-于海洋,男,1952年3月12日出生,山东省鱼台县号清河镇于屯村77号、身份证:370827195203122316,步行,电话:15092699426。
-2、车辆情况:
-鲁H7Z886小型轿车,入户车主:谢彪。有交通事故责任强制保险。保险单号:PDZA20133708T000075766,保险公司:中国人民产保险股份有限公司济宁市分公司。
-3、道路和咬通环境等基本情况:
-事故现场位于251省道鱼台县清河镇菜市场路口,251省道呈南北走向,道路平坦,沥青路面,视线一般,有交通标志、标线,有中心隔离带,两侧为商业店铺
-道路交通事故发生经过:
-2013年日月24日18时09分,张小虎驾驶鲁H72886号小型斩车,沿251省道自北向南行业至鱼台县清河镇菜市场路口处时与自西向东步行过公路的于海洋相撞,致于海洋受伤入院,经鱼台县人民医院抢教无效,于洋于2013车11月27日死亡,车辆损坏造成道路交通事故。张小虎肇事后驾车逃逸。
-道略交通事故证据及事故形成原因分折:
-根据现场勘查、当事人陈述证实:张小虎因观察不够,措施不当违反《中华人民共和国道路交通安全法》第三十八条“车辆、行人应当按照交通信号通行:遇有交通警察现场指挥时,应当按照交道警察的指挥通行:在没有交通信号的道路上,应当在确保安全、畅通的原则下通行。”之规定,因酒后驾车,违反《中华人民共和国道路交通安全法》第二十二条第二款“饮酒,服用国家管制的精神药品或者醉药品,或者患有妨碍安全驾驶杭动车的疾病,或者过度劳影响安全驾驶的,不得买驶机动车,”是事故发生的原因,且肇事后驾车逸逸。
-当事人导致交通事故的过错及责任或者意外原因:
-根据《中华人民共和国道路交通全法实施条例》第九十二条第一款和《道路交通事故处理程序规定》第四十六条的规定,认定当事人张小虎担地次事教的全部贡任。当事人于海洋无责任。
-交通警察:
-刘爱军HZ402
-二0一四年一月二日
-
-
-graph schema
-{"vertexLabels":[{"id":3,"name":"法条","id_strategy":"PRIMARY_KEY","primary_keys":["法典名","法条索引"],"nullable_keys":["法章名","法条内容"],"properties":["法典名","法章名","法条索引","法条内容"]},{"id":7,"name":"事故","id_strategy":"PRIMARY_KEY","primary_keys":["事故认定书编号","事故认定书单位"],"nullable_keys":[],"properties":["事故发生时间","事故认定书编号","事故认定书单位"]},{"id":11,"name":"发生地点","id_strategy":"PRIMARY_KEY","primary_keys":["城市","所属路段"],"nullable_keys":["走向","材质","路面情况","道路状况"],"properties":["城市","走向","材质","路面情况","道路状况","所属路段"]},{
[...]
-
-Output example:
-[{"label":"事故","type":"vertex","properties":{"事故发生时间":"2013-11-24
18:09:00.000","事故认定书编号":"鱼公交认字[2013]第00478号","事故认定书单位":"道路交通事故认定书"}},{"label":"发生地点","type":"vertex","properties":{"城市":"山东省鱼台县","所属路段":"251省道清河菜市场路口","走向":"南北","材质":"沥青","路面情况":"平坦","道路状况":"视线一般"}},{"label":"当事人","type":"vertex","properties":{"身份证号":"370827197201032316","姓名":"张小虎","性别":"男","年龄":"1972-01-03","驾照":"C1E"}},{"label":"当事人","type":"vertex","properties":{"身份证号":"370827195203122316","姓名":"于海洋","性别":"男","年龄":"1952
[...]
+#### Vertex Format:
+{"id":"vertexLabelID:entityName","label":"vertexLabel","type":"vertex","properties":{"propertyName":"propertyValue",
+...}}
+
+#### Edge Format:
+{"label":"edgeLabel","type":"edge","outV":"sourceVertexId","outVLabel":"sourceVertexLabel","inV":"targetVertexId","inVLabel":"targetVertexLabel","properties":{"propertyName":"propertyValue",...}}
+
+Also follow the rules:
+1. Don't extract property fields that do not exist in the given schema
+2. Ensure the extract property is in the same type as the schema (like 'age' should be a number)
+3. If there are multiple primarykeys provided, then the generating strategy of VID is: vertexlabelID:pk1!pk2!pk3 (pk means primary key, and '!' is the separator, no extra space between them)
+4. Your output should be a list of such JSON objects, each representing either a vertex or an edge, extracted and formatted based on the text and the provided schema.
+5. Translate the given schema filed into Chinese if the given text is Chinese but the schema is in English (Optional)
+
+
+## Example
+### Input example:
+#### text
+Meet Sarah, a 30-year-old attorney, and her roommate, James, whom she's shared a home with since 2010. James, in his professional life, works as a journalist.
+#### graph schema
+{"vertices":[{"vertex_label":"person","properties":["name","age","occupation"]}],
"edges":[{"edge_label":"roommate",
"source_vertex_label":"person","target_vertex_label":"person","properties":["date"]]}
+
+### Output example:
+[{"id":"1:Sarah","label":"person","type":"vertex","properties":{"name":"Sarah","age":30,"occupation":"attorney"}},{"id":"1:James","label":"person","type":"vertex","properties":{"name":"James","occupation":"journalist"}},{"label":"roommate","type":"edge","outV":"1:Sarah","outVLabel":"person","inV":"1:James","inVLabel":"person","properties":{"date":"2010"}}]
"""
def generate_extract_property_graph_prompt(text, schema=None) -> str:
return f"""---
-请根据上面的完整指令, 尝试根据下面给定的 schema, 提取下面的文本, 只需要输出 json 结果:
+Following the full instructions above, try to extract the following text from the given schema, output the JSON result:
## Text:
{text}
## Graph schema:
@@ -123,7 +98,7 @@ class PropertyGraphExtract:
items = []
for chunk in chunks:
proceeded_chunk = self.extract_property_graph_by_llm(schema, chunk)
- log.debug("[LLM] input: %s \n output:%s", chunk, proceeded_chunk)
+ log.debug("[LLM] %s input: %s \n output:%s",
self.__class__.__name__, chunk, proceeded_chunk)
items.extend(self._extract_and_filter_label(schema,
proceeded_chunk))
items = self.filter_item(schema, items)
for item in items:
@@ -149,8 +124,8 @@ class PropertyGraphExtract:
items = []
try:
property_graph = json.loads(longest_json_str)
- vertex_label_set = {vertex["vertex_label"] for vertex in
schema["vertices"]}
- edge_label_set = {edge["edge_label"] for edge in schema["edges"]}
+ vertex_label_set = {vertex["name"] for vertex in
schema["vertexlabels"]}
+ edge_label_set = {edge["name"] for edge in schema["edgelabels"]}
for item in property_graph:
if not isinstance(item, dict):
log.warning("Invalid property graph item type %s.",
type(item))
@@ -175,14 +150,14 @@ class PropertyGraphExtract:
# filter vertex and edge with invalid properties
filtered_items = []
properties_map = {"vertex": {}, "edge": {}}
- for vertex in schema["vertices"]:
- properties_map["vertex"][vertex["vertex_label"]] = {
+ for vertex in schema["vertexlabels"]:
+ properties_map["vertex"][vertex["name"]] = {
"primary_keys": vertex["primary_keys"],
"nullable_keys": vertex["nullable_keys"],
"properties": vertex["properties"]
}
- for edge in schema["edges"]:
- properties_map["edge"][edge["edge_label"]] = {
+ for edge in schema["edgelabels"]:
+ properties_map["edge"][edge["name"]] = {
"properties": edge["properties"]
}
log.info("properties_map: %s", properties_map)
diff --git a/hugegraph-llm/src/tests/operators/common_op/test_check_schema.py b/hugegraph-llm/src/tests/operators/common_op/test_check_schema.py
index 6d002ff..d20a198 100644
--- a/hugegraph-llm/src/tests/operators/common_op/test_check_schema.py
+++ b/hugegraph-llm/src/tests/operators/common_op/test_check_schema.py
@@ -26,17 +26,22 @@ class TestCheckSchema(unittest.TestCase):
def test_schema_check_with_valid_input(self):
data = {
- "vertices": [{"vertex_label": "person"}],
- "edges": [
+ "vertexlabels": [
{
- "edge_label": "knows",
- "source_vertex_label": "person",
- "target_vertex_label": "person",
+ "name": "person",
+ "properties": ["name", "age", "occupation"]
+ }
+ ],
+ "edgelabels": [
+ {
+ "name": "knows",
+ "source_label": "person",
+ "target_label": "person",
}
],
}
check_schema = CheckSchema(data)
- self.assertEqual(check_schema.run(), data)
+ self.assertEqual(check_schema.run(), {'schema': data})
def test_schema_check_with_invalid_input(self):
data = "invalid input"
@@ -46,11 +51,11 @@ class TestCheckSchema(unittest.TestCase):
def test_schema_check_with_missing_vertices(self):
data = {
- "edges": [
+ "edgelabels": [
{
- "edge_label": "knows",
- "source_vertex_label": "person",
- "target_vertex_label": "person",
+ "name": "knows",
+ "source_label": "person",
+ "target_label": "person",
}
]
}
@@ -59,19 +64,19 @@ class TestCheckSchema(unittest.TestCase):
check_schema.run()
def test_schema_check_with_missing_edges(self):
- data = {"vertices": [{"vertex_label": "person"}]}
+ data = {"vertexlabels": [{"name": "person"}]}
check_schema = CheckSchema(data)
with self.assertRaises(ValueError):
check_schema.run()
def test_schema_check_with_invalid_vertices(self):
data = {
- "vertices": "invalid vertices",
- "edges": [
+ "vertexlabels": "invalid vertices",
+ "edgelabels": [
{
- "edge_label": "knows",
- "source_vertex_label": "person",
- "target_vertex_label": "person",
+ "name": "knows",
+ "source_label": "person",
+ "target_label": "person",
}
],
}
@@ -80,7 +85,7 @@ class TestCheckSchema(unittest.TestCase):
check_schema.run()
def test_schema_check_with_invalid_edges(self):
- data = {"vertices": [{"vertex_label": "person"}], "edges": "invalid
edges"}
+ data = {"vertexlabels": [{"name": "person"}], "edgelabels": "invalid
edges"}
check_schema = CheckSchema(data)
with self.assertRaises(ValueError):
check_schema.run()
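
To exercise the updated assertions locally, a minimal runner sketch with the standard unittest loader (start_dir assumed from the paths in this diff; the package must be importable):

import unittest

suite = unittest.defaultTestLoader.discover(
    start_dir="hugegraph-llm/src/tests/operators/common_op",
    pattern="test_check_schema.py",
)
unittest.TextTestRunner(verbosity=2).run(suite)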