Skip to content

Commit 1c3766a

Browse files
AB-938 Perficient: add citations for the knowledge graph (#1266)
* feat: add option to include citations in ask graph question block
* feat: add option to include citations in Ask Graph Question block

---------

Co-authored-by: Neelasha Bhattacharjee <[email protected]>
1 parent 2340667 commit 1c3766a

File tree

2 files changed

+39
-8
lines changed

2 files changed

+39
-8
lines changed

pyproject.toml

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -114,4 +114,3 @@ indent-style = "space"
114114
skip-magic-trailing-comma = false
115115
line-ending = "auto"
116116

117-

src/writer/blocks/writeraskkg.py

Lines changed: 39 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -53,6 +53,15 @@ def register(cls, type: str):
5353
"validator": {
5454
"type": "boolean",
5555
},
56+
},
57+
"graphCitations": {
58+
"name": "Add inline graph citations",
59+
"type": "Boolean",
60+
"desc": "Shows what specific graph sources were used to answer the question.",
61+
"default": "yes",
62+
"validator": {
63+
"type": "boolean",
64+
},
5665
}
5766
},
5867
"outs": {
@@ -93,31 +102,54 @@ def run(self):
93102
"A state element must be provided when using streaming.")
94103
subqueries = self._get_field(
95104
"subqueries", default_field_value="yes") == "yes"
96-
97-
answer_so_far = ""
105+
graph_citations = self._get_field(
106+
"graphCitations", default_field_value="yes") == "yes"
98107

99108
response = client.graphs.question(
100109
graph_ids=graph_ids,
101110
question=question,
102111
stream=use_streaming,
103-
subqueries=subqueries
112+
subqueries=subqueries,
113+
query_config= {
114+
"inline_citations": graph_citations
115+
}
104116
)
117+
118+
answer_so_far = ""
119+
result_dict = {}
120+
citations_so_far = []
121+
105122
if use_streaming:
106123
for chunk in response:
107124
try:
108-
delta = chunk.model_extra.get("answer", "")
109-
answer_so_far += delta
110-
self._set_state(state_element, answer_so_far)
125+
delta_answer = chunk.model_extra.get("answer", "")
126+
answer_so_far += delta_answer
127+
result_dict["answer"] = answer_so_far
128+
129+
if graph_citations:
130+
delta_sources = chunk.model_extra.get("sources", "")
131+
citations_so_far.extend(delta_sources)
132+
result_dict["citations"] = citations_so_far
133+
134+
self._set_state(state_element, result_dict)
135+
111136
except json.JSONDecodeError:
112137
logging.error(
113138
"Could not parse stream chunk from graph.question")
139+
114140
else:
115141
answer_so_far = response.answer
116-
self._set_state(state_element, answer_so_far)
142+
result_dict["answer"] = answer_so_far
117143

144+
if graph_citations:
145+
citations_so_far = response.sources or []
146+
result_dict["citations"] = citations_so_far
147+
148+
self._set_state(state_element, result_dict)
118149
self.result = answer_so_far
119150
self.outcome = "success"
120151

121152
except BaseException as e:
122153
self.outcome = "error"
123154
raise e
155+

0 commit comments

Comments (0)