Deploying to gh-pages from @ microsoft/graphrag@931e96fdb3 🚀

This commit is contained in:
AlonsoGuevara 2024-06-03 22:26:53 +00:00
parent e78ffa7ba2
commit 649af4a17f
22 changed files with 81 additions and 70 deletions

Binary file not shown.

View File

@@ -242,12 +242,12 @@ a {
<a href="/graphrag/posts/query/overview/">Query</a>
<ul><li>
<a href="/graphrag/posts/query/0-global_search/">Global Search</a>
</li><li>
<a href="/graphrag/posts/query/1-local_search/">Local Search</a>
</li><li>
<a href="/graphrag/posts/query/2-question_generation/">Question Generation</a>
</li><li>
<a href="/graphrag/posts/query/0-global_search/">Global Search</a>
</li><li>
<a href="/graphrag/posts/query/3-cli/">CLI</a>
</li><li>

View File

@ -242,12 +242,12 @@ a {
<a href="/graphrag/posts/query/overview/">Query</a>
<ul><li>
<a href="/graphrag/posts/query/0-global_search/" class="is-active" aria-current="page">Global Search</a>
</li><li>
<a href="/graphrag/posts/query/1-local_search/">Local Search</a>
</li><li>
<a href="/graphrag/posts/query/2-question_generation/">Question Generation</a>
</li><li>
<a href="/graphrag/posts/query/0-global_search/" class="is-active" aria-current="page">Global Search</a>
</li><li>
<a href="/graphrag/posts/query/3-cli/">CLI</a>
</li><li>
@@ -275,7 +275,7 @@ a {
<p>However, with GraphRAG we can answer such questions, because the structure of the LLM-generated knowledge graph tells us about the structure (and thus themes) of the dataset as a whole. This allows the private dataset to be organized into meaningful semantic clusters that are pre-summarized. Using our <a href="https://github.com/microsoft/graphrag/blob/main//graphrag/query/structured_search/global_search/">global search</a> method, the LLM uses these clusters to summarize these themes when responding to a user query.</p>
<h2>Methodology</h2>
<pre class="mermaid">---&#10;title: Global Search Dataflow&#10;---&#10;%%{ init: { &#39;flowchart&#39;: { &#39;curve&#39;: &#39;step&#39; } } }%%&#10;flowchart LR&#10;&#10; uq[User Query] --- .1&#10; ch1[Conversation History] --- .1&#10;&#10; subgraph RIR&#10; direction TB&#10; ri1[Rated Intermediate&lt;br/&gt;Response 1]~~~ri2[Rated Intermediate&lt;br/&gt;Response 2] -.&quot;{1..N}&quot;.-rin[Rated Intermediate&lt;br/&gt;Response N]&#10; end&#10;&#10; .1--Shuffled Community&lt;br/&gt;Report Batch 1--&gt;RIR&#10; .1--Shuffled Community&lt;br/&gt;Report Batch 2--&gt;RIR---.2&#10; .1--Shuffled Community&lt;br/&gt;Report Batch N--&gt;RIR&#10;&#10; .2--Ranking +&lt;br/&gt;Filtering--&gt;agr[Aggregated Intermediate&lt;br/&gt;Responses]--&gt;res[Response]&#10;&#10;&#10;&#10; classDef green fill:#26B653,stroke:#333,stroke-width:2px,color:#fff;&#10; classDef turquoise fill:#19CCD3,stroke:#333,stroke-width:2px,color:#fff;&#10; classDef rose fill:#DD8694,stroke:#333,stroke-width:2px,color:#fff;&#10; classDef orange fill:#F19914,stroke:#333,stroke-width:2px,color:#fff;&#10; classDef purple fill:#B356CD,stroke:#333,stroke-width:2px,color:#fff;&#10; classDef invisible fill:#fff,stroke:#fff,stroke-width:0px,color:#fff, width:0px;&#10; class uq,ch1 turquoise;&#10; class ri1,ri2,rin rose;&#10; class agr orange;&#10; class res purple;&#10; class .1,.2 invisible;&#10;&#10;</pre>
<p>Given a user query and, optionally, the conversation history, the global search method uses a collection of LLM-generated community reports from a specified level of the graph's community hierarchy as context data to generate a response in a map-reduce manner. At the <code>map</code> step, community reports are segmented into text chunks of pre-defined size. Each text chunk is then used to produce an intermediate response to the user query, accompanied by a numerical rating indicating the response's usefulness. At the <code>reduce</code> step, a filtered set of the most useful intermediate responses is aggregated and used as the context to generate the final response.</p>
<p>Given a user query and, optionally, the conversation history, the global search method uses a collection of LLM-generated community reports from a specified level of the graph's community hierarchy as context data to generate a response in a map-reduce manner. At the <code>map</code> step, community reports are segmented into text chunks of pre-defined size. Each text chunk is then used to produce an intermediate response containing a list of points, each of which is accompanied by a numerical rating indicating the importance of the point. At the <code>reduce</code> step, a filtered set of the most important points from the intermediate responses is aggregated and used as the context to generate the final response.</p>
<p>The quality of the global search's response can be heavily influenced by the level of the community hierarchy chosen for sourcing community reports. Lower hierarchy levels, with their detailed reports, tend to yield more thorough responses, but may also increase the time and LLM resources needed to generate the final response due to the volume of reports.</p>
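<p>The map and reduce steps can be sketched in a few lines of plain Python. This is only an illustration of the dataflow described above, not the GraphRAG implementation: <code>map_llm</code> and <code>reduce_llm</code> are hypothetical callables standing in for the prompted LLM calls performed by the <code>GlobalSearch</code> class.</p>
<pre class="language-python"><code class="language-python"># Illustrative sketch of global search's map-reduce dataflow (not the GraphRAG API).
# `map_llm` and `reduce_llm` are hypothetical callables standing in for the real LLM calls.
import random
from typing import Callable, List, Tuple

def global_search_sketch(
    query: str,
    community_reports: List[str],
    map_llm: Callable[[str, List[str]], List[Tuple[float, str]]],  # returns (rating, point) pairs
    reduce_llm: Callable[[str, List[str]], str],                   # returns the final response
    batch_size: int = 8,
    top_k: int = 20,
) -> str:
    # map step: shuffle reports, split them into batches, and collect rated intermediate points
    reports = community_reports[:]
    random.shuffle(reports)
    rated_points: List[Tuple[float, str]] = []
    for i in range(0, len(reports), batch_size):
        rated_points.extend(map_llm(query, reports[i : i + batch_size]))
    # reduce step: rank the points, keep the most important ones, and generate the final response
    top_points = [text for _, text in sorted(rated_points, reverse=True)[:top_k]]
    return reduce_llm(query, top_points)</code></pre>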
<h2>Configuration</h2>
<p>Below are the key parameters of the <a href="https://github.com/microsoft/graphrag/blob/main//graphrag/query/structured_search/global_search/search.py">GlobalSearch class</a>:</p>
@@ -285,6 +285,8 @@ a {
<li><code>map_system_prompt</code>: prompt template used in the <code>map</code> stage. Default template can be found at <a href="https://github.com/microsoft/graphrag/blob/main//graphrag/query/structured_search/global_search/map_system_prompt.py">map_system_prompt</a></li>
<li><code>reduce_system_prompt</code>: prompt template used in the <code>reduce</code> stage. Default template can be found at <a href="https://github.com/microsoft/graphrag/blob/main//graphrag/query/structured_search/global_search/reduce_system_prompt.py">reduce_system_prompt</a></li>
<li><code>response_type</code>: free-form text describing the desired response type and format (e.g., <code>Multiple Paragraphs</code>, <code>Multi-Page Report</code>)</li>
<li><code>allow_general_knowledge</code>: setting this to True adds instructions to the <code>reduce_system_prompt</code> that encourage the LLM to incorporate relevant real-world knowledge from outside the dataset. Note that this may increase hallucinations, but it can be useful for certain scenarios. Default is False</li>
<li><code>general_knowledge_inclusion_prompt</code>: instruction added to the <code>reduce_system_prompt</code> if <code>allow_general_knowledge</code> is enabled. The default instruction can be found at <a href="https://github.com/microsoft/graphrag/blob/main//graphrag/query/structured_search/global_search/reduce_system_prompt.py">general_knowledge_instruction</a></li>
<li><code>max_data_tokens</code>: token budget for the context data</li>
<li><code>map_llm_params</code>: a dictionary of additional parameters (e.g., temperature, max_tokens) to be passed to the LLM call at the <code>map</code> stage</li>
<li><code>reduce_llm_params</code>: a dictionary of additional parameters (e.g., temperature, max_tokens) to be passed to the LLM call at the <code>reduce</code> stage</li>
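<p>As a rough sketch only (argument values below are placeholders, and <code>llm</code>, <code>context_builder</code>, and <code>token_encoder</code> are assumed to be set up as in the notebook example later on this site), these parameters are passed directly to the <code>GlobalSearch</code> constructor:</p>
<pre class="language-python"><code class="language-python"># Hedged example: wiring the parameters listed above into GlobalSearch.
# CUSTOM_MAP_PROMPT / CUSTOM_REDUCE_PROMPT are placeholder template strings;
# omit them to fall back to the default templates linked above.
search_engine = GlobalSearch(
    llm=llm,
    context_builder=context_builder,
    token_encoder=token_encoder,
    map_system_prompt=CUSTOM_MAP_PROMPT,
    reduce_system_prompt=CUSTOM_REDUCE_PROMPT,
    response_type="Multiple Paragraphs",
    allow_general_knowledge=False,  # True adds the general-knowledge instruction (may increase hallucinations)
    max_data_tokens=12_000,
    map_llm_params={"max_tokens": 1000, "temperature": 0.0},
    reduce_llm_params={"max_tokens": 2000, "temperature": 0.0},
)</code></pre>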

View File

@@ -242,12 +242,12 @@ a {
<a href="/graphrag/posts/query/overview/">Query</a>
<ul><li>
<a href="/graphrag/posts/query/0-global_search/">Global Search</a>
</li><li>
<a href="/graphrag/posts/query/1-local_search/" class="is-active" aria-current="page">Local Search</a>
</li><li>
<a href="/graphrag/posts/query/2-question_generation/">Question Generation</a>
</li><li>
<a href="/graphrag/posts/query/0-global_search/">Global Search</a>
</li><li>
<a href="/graphrag/posts/query/3-cli/">CLI</a>
</li><li>

View File

@@ -242,12 +242,12 @@ a {
<a href="/graphrag/posts/query/overview/">Query</a>
<ul><li>
<a href="/graphrag/posts/query/0-global_search/">Global Search</a>
</li><li>
<a href="/graphrag/posts/query/1-local_search/">Local Search</a>
</li><li>
<a href="/graphrag/posts/query/2-question_generation/" class="is-active" aria-current="page">Question Generation</a>
</li><li>
<a href="/graphrag/posts/query/0-global_search/">Global Search</a>
</li><li>
<a href="/graphrag/posts/query/3-cli/">CLI</a>
</li><li>

View File

@@ -242,12 +242,12 @@ a {
<a href="/graphrag/posts/query/overview/">Query</a>
<ul><li>
<a href="/graphrag/posts/query/0-global_search/">Global Search</a>
</li><li>
<a href="/graphrag/posts/query/1-local_search/">Local Search</a>
</li><li>
<a href="/graphrag/posts/query/2-question_generation/">Question Generation</a>
</li><li>
<a href="/graphrag/posts/query/0-global_search/">Global Search</a>
</li><li>
<a href="/graphrag/posts/query/3-cli/" class="is-active" aria-current="page">CLI</a>
</li><li>

View File

@@ -242,12 +242,12 @@ a {
<a href="/graphrag/posts/query/overview/">Query</a>
<ul><li>
<a href="/graphrag/posts/query/0-global_search/">Global Search</a>
</li><li>
<a href="/graphrag/posts/query/1-local_search/">Local Search</a>
</li><li>
<a href="/graphrag/posts/query/2-question_generation/">Question Generation</a>
</li><li>
<a href="/graphrag/posts/query/0-global_search/">Global Search</a>
</li><li>
<a href="/graphrag/posts/query/3-cli/">CLI</a>
</li><li>
@@ -283,20 +283,17 @@ a {
<div style="position: relative">
<pre class="language-python"><code id="code-4" class="language-python"><span class="token keyword">import</span> os
<span class="token keyword">from</span> pathlib <span class="token keyword">import</span> Path
<span class="token keyword">import</span> pandas <span class="token keyword">as</span> pd
<span class="token keyword">import</span> tiktoken
<span class="token keyword">from</span> graphrag<span class="token punctuation">.</span>query<span class="token punctuation">.</span>indexer_adapters <span class="token keyword">import</span> read_indexer_reports
<span class="token keyword">from</span> graphrag<span class="token punctuation">.</span>query<span class="token punctuation">.</span>indexer_adapters <span class="token keyword">import</span> read_indexer_entities<span class="token punctuation">,</span> read_indexer_reports
<span class="token keyword">from</span> graphrag<span class="token punctuation">.</span>query<span class="token punctuation">.</span>llm<span class="token punctuation">.</span>oai<span class="token punctuation">.</span>chat_openai <span class="token keyword">import</span> ChatOpenAI
<span class="token keyword">from</span> graphrag<span class="token punctuation">.</span>query<span class="token punctuation">.</span>llm<span class="token punctuation">.</span>oai<span class="token punctuation">.</span>typing <span class="token keyword">import</span> OpenaiApiType
<span class="token keyword">from</span> graphrag<span class="token punctuation">.</span>query<span class="token punctuation">.</span>structured_search<span class="token punctuation">.</span>global_search<span class="token punctuation">.</span>community_context <span class="token keyword">import</span> <span class="token punctuation">(</span>
GlobalCommunityContext<span class="token punctuation">,</span>
<span class="token punctuation">)</span>
<span class="token keyword">from</span> graphrag<span class="token punctuation">.</span>query<span class="token punctuation">.</span>structured_search<span class="token punctuation">.</span>global_search<span class="token punctuation">.</span>search <span class="token keyword">import</span> GlobalSearch
<span class="token keyword">print</span><span class="token punctuation">(</span>Path<span class="token punctuation">.</span>cwd<span class="token punctuation">(</span><span class="token punctuation">)</span><span class="token punctuation">)</span></code></pre>
<span class="token keyword">from</span> graphrag<span class="token punctuation">.</span>query<span class="token punctuation">.</span>structured_search<span class="token punctuation">.</span>global_search<span class="token punctuation">.</span>search <span class="token keyword">import</span> GlobalSearch</code></pre>
<button class="code-copy " data-clipboard-target="#code-4" style="position: absolute; top: 7.5px; right: 6px; padding-top: 3px; cursor: pointer; outline: none; opacity: 0.8;" title="Copy">
<span style="display:inline-block;background:url(https://api.iconify.design/mdi/content-copy.svg) no-repeat center center / contain;width: 16px; height: 16px;" class=""></span>
@@ -325,63 +322,73 @@ token_encoder <span class="token operator">=</span> tiktoken<span class="token p
</div>
<h3>Load community reports as context for global search</h3>
<ul>
<li>Load all community reports from the <strong>create_final_community_reports</strong> table produced by the indexing engine.</li>
<li>Load all community reports in the <code>create_final_community_reports</code> table from the indexing engine, to be used as context data for global search.</li>
<li>Load entities from the <code>create_final_nodes</code> and <code>create_final_entities</code> tables from the indexing engine, to be used for calculating community weights for context ranking. Note that this is optional (if no entities are provided, we will not calculate community weights and will only use the <code>rank</code> attribute in the community reports table for context ranking)</li>
</ul>
<div style="position: relative">
<pre class="language-python"><code id="code-25" class="language-python"><span class="token comment"># parquet files generated from indexing pipeline</span>
<pre class="language-python"><code id="code-30" class="language-python"><span class="token comment"># parquet files generated from indexing pipeline</span>
INPUT_DIR <span class="token operator">=</span> <span class="token string">"./inputs/operation dulce"</span>
COMMUNITY_REPORT_TABLE <span class="token operator">=</span> <span class="token string">"create_final_community_reports"</span>
ENTITY_TABLE <span class="token operator">=</span> <span class="token string">"create_final_nodes"</span>
ENTITY_EMBEDDING_TABLE <span class="token operator">=</span> <span class="token string">"create_final_entities"</span>
<span class="token comment"># community level in the Leiden community hierarchy from which we will load the community reports</span>
<span class="token comment"># higher value means we use reports on smaller communities (and thus will have more reports to query aga</span>
<span class="token comment"># higher value means we use reports from more fine-grained communities (at the cost of higher computation cost)</span>
COMMUNITY_LEVEL <span class="token operator">=</span> <span class="token number">2</span></code></pre>
<button class="code-copy " data-clipboard-target="#code-25" style="position: absolute; top: 7.5px; right: 6px; padding-top: 3px; cursor: pointer; outline: none; opacity: 0.8;" title="Copy">
<button class="code-copy " data-clipboard-target="#code-30" style="position: absolute; top: 7.5px; right: 6px; padding-top: 3px; cursor: pointer; outline: none; opacity: 0.8;" title="Copy">
<span style="display:inline-block;background:url(https://api.iconify.design/mdi/content-copy.svg) no-repeat center center / contain;width: 16px; height: 16px;" class=""></span>
</button>
</div>
<div style="position: relative">
<pre class="language-python"><code id="code-26" class="language-python">entity_df <span class="token operator">=</span> pd<span class="token punctuation">.</span>read_parquet<span class="token punctuation">(</span><span class="token string-interpolation"><span class="token string">f"</span><span class="token interpolation"><span class="token punctuation">{</span>INPUT_DIR<span class="token punctuation">}</span></span><span class="token string">/</span><span class="token interpolation"><span class="token punctuation">{</span>ENTITY_TABLE<span class="token punctuation">}</span></span><span class="token string">.parquet"</span></span><span class="token punctuation">)</span>
<pre class="language-python"><code id="code-31" class="language-python">entity_df <span class="token operator">=</span> pd<span class="token punctuation">.</span>read_parquet<span class="token punctuation">(</span><span class="token string-interpolation"><span class="token string">f"</span><span class="token interpolation"><span class="token punctuation">{</span>INPUT_DIR<span class="token punctuation">}</span></span><span class="token string">/</span><span class="token interpolation"><span class="token punctuation">{</span>ENTITY_TABLE<span class="token punctuation">}</span></span><span class="token string">.parquet"</span></span><span class="token punctuation">)</span>
report_df <span class="token operator">=</span> pd<span class="token punctuation">.</span>read_parquet<span class="token punctuation">(</span><span class="token string-interpolation"><span class="token string">f"</span><span class="token interpolation"><span class="token punctuation">{</span>INPUT_DIR<span class="token punctuation">}</span></span><span class="token string">/</span><span class="token interpolation"><span class="token punctuation">{</span>COMMUNITY_REPORT_TABLE<span class="token punctuation">}</span></span><span class="token string">.parquet"</span></span><span class="token punctuation">)</span>
entity_embedding_df <span class="token operator">=</span> pd<span class="token punctuation">.</span>read_parquet<span class="token punctuation">(</span><span class="token string-interpolation"><span class="token string">f"</span><span class="token interpolation"><span class="token punctuation">{</span>INPUT_DIR<span class="token punctuation">}</span></span><span class="token string">/</span><span class="token interpolation"><span class="token punctuation">{</span>ENTITY_EMBEDDING_TABLE<span class="token punctuation">}</span></span><span class="token string">.parquet"</span></span><span class="token punctuation">)</span>
reports <span class="token operator">=</span> read_indexer_reports<span class="token punctuation">(</span>report_df<span class="token punctuation">,</span> entity_df<span class="token punctuation">,</span> COMMUNITY_LEVEL<span class="token punctuation">)</span>
entities <span class="token operator">=</span> read_indexer_entities<span class="token punctuation">(</span>entity_df<span class="token punctuation">,</span> entity_embedding_df<span class="token punctuation">,</span> COMMUNITY_LEVEL<span class="token punctuation">)</span>
<span class="token keyword">print</span><span class="token punctuation">(</span><span class="token string-interpolation"><span class="token string">f"Report records: </span><span class="token interpolation"><span class="token punctuation">{</span><span class="token builtin">len</span><span class="token punctuation">(</span>report_df<span class="token punctuation">)</span><span class="token punctuation">}</span></span><span class="token string">"</span></span><span class="token punctuation">)</span>
report_df<span class="token punctuation">.</span>head<span class="token punctuation">(</span><span class="token punctuation">)</span></code></pre>
<button class="code-copy " data-clipboard-target="#code-26" style="position: absolute; top: 7.5px; right: 6px; padding-top: 3px; cursor: pointer; outline: none; opacity: 0.8;" title="Copy">
<button class="code-copy " data-clipboard-target="#code-31" style="position: absolute; top: 7.5px; right: 6px; padding-top: 3px; cursor: pointer; outline: none; opacity: 0.8;" title="Copy">
<span style="display:inline-block;background:url(https://api.iconify.design/mdi/content-copy.svg) no-repeat center center / contain;width: 16px; height: 16px;" class=""></span>
</button>
</div>
<h4>Build global context based on community reports</h4>
<div style="position: relative">
<pre class="language-python"><code id="code-30" class="language-python">context_builder <span class="token operator">=</span> GlobalCommunityContext<span class="token punctuation">(</span>
community_reports<span class="token operator">=</span>reports<span class="token punctuation">,</span> token_encoder<span class="token operator">=</span>token_encoder
<pre class="language-python"><code id="code-35" class="language-python">context_builder <span class="token operator">=</span> GlobalCommunityContext<span class="token punctuation">(</span>
community_reports<span class="token operator">=</span>reports<span class="token punctuation">,</span>
entities<span class="token operator">=</span>entities<span class="token punctuation">,</span> <span class="token comment"># default to None if you don't want to use community weights for ranking</span>
token_encoder<span class="token operator">=</span>token_encoder<span class="token punctuation">,</span>
<span class="token punctuation">)</span></code></pre>
<button class="code-copy " data-clipboard-target="#code-30" style="position: absolute; top: 7.5px; right: 6px; padding-top: 3px; cursor: pointer; outline: none; opacity: 0.8;" title="Copy">
<button class="code-copy " data-clipboard-target="#code-35" style="position: absolute; top: 7.5px; right: 6px; padding-top: 3px; cursor: pointer; outline: none; opacity: 0.8;" title="Copy">
<span style="display:inline-block;background:url(https://api.iconify.design/mdi/content-copy.svg) no-repeat center center / contain;width: 16px; height: 16px;" class=""></span>
</button>
</div>
<h4>Perform global search</h4>
<div style="position: relative">
<pre class="language-python"><code id="code-34" class="language-python">context_builder_params <span class="token operator">=</span> <span class="token punctuation">{</span>
<pre class="language-python"><code id="code-39" class="language-python">context_builder_params <span class="token operator">=</span> <span class="token punctuation">{</span>
<span class="token string">"use_community_summary"</span><span class="token punctuation">:</span> <span class="token boolean">False</span><span class="token punctuation">,</span> <span class="token comment"># False means using full community reports. True means using community short summaries.</span>
<span class="token string">"shuffle_data"</span><span class="token punctuation">:</span> <span class="token boolean">True</span><span class="token punctuation">,</span>
<span class="token string">"include_community_rank"</span><span class="token punctuation">:</span> <span class="token boolean">True</span><span class="token punctuation">,</span>
<span class="token string">"min_community_rank"</span><span class="token punctuation">:</span> <span class="token number">0</span><span class="token punctuation">,</span>
<span class="token string">"community_rank_name"</span><span class="token punctuation">:</span> <span class="token string">"rank"</span><span class="token punctuation">,</span>
<span class="token string">"include_community_weight"</span><span class="token punctuation">:</span> <span class="token boolean">True</span><span class="token punctuation">,</span>
<span class="token string">"community_weight_name"</span><span class="token punctuation">:</span> <span class="token string">"occurrence weight"</span><span class="token punctuation">,</span>
<span class="token string">"normalize_community_weight"</span><span class="token punctuation">:</span> <span class="token boolean">True</span><span class="token punctuation">,</span>
<span class="token string">"max_tokens"</span><span class="token punctuation">:</span> <span class="token number">12_000</span><span class="token punctuation">,</span> <span class="token comment"># change this based on the token limit you have on your model (if you are using a model with 8k limit, a good setting could be 5000)</span>
<span class="token string">"context_name"</span><span class="token punctuation">:</span> <span class="token string">"Reports"</span><span class="token punctuation">,</span>
<span class="token punctuation">}</span>
map_llm_params <span class="token operator">=</span> <span class="token punctuation">{</span>
<span class="token string">"max_tokens"</span><span class="token punctuation">:</span> <span class="token number">500</span><span class="token punctuation">,</span>
<span class="token string">"max_tokens"</span><span class="token punctuation">:</span> <span class="token number">1000</span><span class="token punctuation">,</span>
<span class="token string">"temperature"</span><span class="token punctuation">:</span> <span class="token number">0.0</span><span class="token punctuation">,</span>
<span class="token string">"response_format"</span><span class="token punctuation">:</span> <span class="token punctuation">{</span><span class="token string">"type"</span><span class="token punctuation">:</span> <span class="token string">"json_object"</span><span class="token punctuation">}</span><span class="token punctuation">,</span>
<span class="token punctuation">}</span>
reduce_llm_params <span class="token operator">=</span> <span class="token punctuation">{</span>
@@ -389,55 +396,57 @@ reduce_llm_params <span class="token operator">=</span> <span class="token punct
<span class="token string">"temperature"</span><span class="token punctuation">:</span> <span class="token number">0.0</span><span class="token punctuation">,</span>
<span class="token punctuation">}</span></code></pre>
<button class="code-copy " data-clipboard-target="#code-34" style="position: absolute; top: 7.5px; right: 6px; padding-top: 3px; cursor: pointer; outline: none; opacity: 0.8;" title="Copy">
<button class="code-copy " data-clipboard-target="#code-39" style="position: absolute; top: 7.5px; right: 6px; padding-top: 3px; cursor: pointer; outline: none; opacity: 0.8;" title="Copy">
<span style="display:inline-block;background:url(https://api.iconify.design/mdi/content-copy.svg) no-repeat center center / contain;width: 16px; height: 16px;" class=""></span>
</button>
</div>
<div style="position: relative">
<pre class="language-python"><code id="code-35" class="language-python">search_engine <span class="token operator">=</span> GlobalSearch<span class="token punctuation">(</span>
<pre class="language-python"><code id="code-40" class="language-python">search_engine <span class="token operator">=</span> GlobalSearch<span class="token punctuation">(</span>
llm<span class="token operator">=</span>llm<span class="token punctuation">,</span>
context_builder<span class="token operator">=</span>context_builder<span class="token punctuation">,</span>
token_encoder<span class="token operator">=</span>token_encoder<span class="token punctuation">,</span>
max_data_tokens<span class="token operator">=</span><span class="token number">16_000</span><span class="token punctuation">,</span> <span class="token comment"># change this based on the token limit you have on your model (if you are using a model with 8k limit, a good setting could be 5000)</span>
max_data_tokens<span class="token operator">=</span><span class="token number">12_000</span><span class="token punctuation">,</span> <span class="token comment"># change this based on the token limit you have on your model (if you are using a model with 8k limit, a good setting could be 5000)</span>
map_llm_params<span class="token operator">=</span>map_llm_params<span class="token punctuation">,</span>
reduce_llm_params<span class="token operator">=</span>reduce_llm_params<span class="token punctuation">,</span>
allow_general_knowledge<span class="token operator">=</span><span class="token boolean">False</span><span class="token punctuation">,</span> <span class="token comment"># setting this to True will add instructions encouraging the LLM to incorporate general knowledge in the response, which may increase hallucinations, but could be useful in some use cases.</span>
json_mode<span class="token operator">=</span><span class="token boolean">True</span><span class="token punctuation">,</span> <span class="token comment"># set this to False if your LLM model does not support JSON mode.</span>
context_builder_params<span class="token operator">=</span>context_builder_params<span class="token punctuation">,</span>
concurrent_coroutines<span class="token operator">=</span><span class="token number">32</span><span class="token punctuation">,</span>
response_type<span class="token operator">=</span><span class="token string">"multiple paragraphs"</span><span class="token punctuation">,</span> <span class="token comment"># free form text describing the response type and format, can be anything, e.g. prioritized list, single paragraph, multiple paragraphs, multiple-page report</span>
<span class="token punctuation">)</span></code></pre>
<button class="code-copy " data-clipboard-target="#code-35" style="position: absolute; top: 7.5px; right: 6px; padding-top: 3px; cursor: pointer; outline: none; opacity: 0.8;" title="Copy">
<button class="code-copy " data-clipboard-target="#code-40" style="position: absolute; top: 7.5px; right: 6px; padding-top: 3px; cursor: pointer; outline: none; opacity: 0.8;" title="Copy">
<span style="display:inline-block;background:url(https://api.iconify.design/mdi/content-copy.svg) no-repeat center center / contain;width: 16px; height: 16px;" class=""></span>
</button>
</div>
<div style="position: relative">
<pre class="language-python"><code id="code-36" class="language-python">result <span class="token operator">=</span> <span class="token keyword">await</span> search_engine<span class="token punctuation">.</span>asearch<span class="token punctuation">(</span>
<pre class="language-python"><code id="code-41" class="language-python">result <span class="token operator">=</span> <span class="token keyword">await</span> search_engine<span class="token punctuation">.</span>asearch<span class="token punctuation">(</span>
<span class="token string">"What is the major conflict in this story and who are the protagonist and antagonist?"</span>
<span class="token punctuation">)</span>
<span class="token keyword">print</span><span class="token punctuation">(</span>result<span class="token punctuation">.</span>response<span class="token punctuation">)</span></code></pre>
<button class="code-copy " data-clipboard-target="#code-36" style="position: absolute; top: 7.5px; right: 6px; padding-top: 3px; cursor: pointer; outline: none; opacity: 0.8;" title="Copy">
<button class="code-copy " data-clipboard-target="#code-41" style="position: absolute; top: 7.5px; right: 6px; padding-top: 3px; cursor: pointer; outline: none; opacity: 0.8;" title="Copy">
<span style="display:inline-block;background:url(https://api.iconify.design/mdi/content-copy.svg) no-repeat center center / contain;width: 16px; height: 16px;" class=""></span>
</button>
</div>
<div style="position: relative">
<pre class="language-python"><code id="code-37" class="language-python"><span class="token comment"># inspect the data used to build the context for the LLM responses</span>
<pre class="language-python"><code id="code-42" class="language-python"><span class="token comment"># inspect the data used to build the context for the LLM responses</span>
result<span class="token punctuation">.</span>context_data<span class="token punctuation">[</span><span class="token string">"reports"</span><span class="token punctuation">]</span></code></pre>
<button class="code-copy " data-clipboard-target="#code-37" style="position: absolute; top: 7.5px; right: 6px; padding-top: 3px; cursor: pointer; outline: none; opacity: 0.8;" title="Copy">
<button class="code-copy " data-clipboard-target="#code-42" style="position: absolute; top: 7.5px; right: 6px; padding-top: 3px; cursor: pointer; outline: none; opacity: 0.8;" title="Copy">
<span style="display:inline-block;background:url(https://api.iconify.design/mdi/content-copy.svg) no-repeat center center / contain;width: 16px; height: 16px;" class=""></span>
</button>
</div>
<div style="position: relative">
<pre class="language-python"><code id="code-38" class="language-python"><span class="token comment"># inspect number of LLM calls and tokens</span>
<pre class="language-python"><code id="code-43" class="language-python"><span class="token comment"># inspect number of LLM calls and tokens</span>
<span class="token keyword">print</span><span class="token punctuation">(</span><span class="token string-interpolation"><span class="token string">f"LLM calls: </span><span class="token interpolation"><span class="token punctuation">{</span>result<span class="token punctuation">.</span>llm_calls<span class="token punctuation">}</span></span><span class="token string">. LLM tokens: </span><span class="token interpolation"><span class="token punctuation">{</span>result<span class="token punctuation">.</span>prompt_tokens<span class="token punctuation">}</span></span><span class="token string">"</span></span><span class="token punctuation">)</span></code></pre>
<button class="code-copy " data-clipboard-target="#code-38" style="position: absolute; top: 7.5px; right: 6px; padding-top: 3px; cursor: pointer; outline: none; opacity: 0.8;" title="Copy">
<button class="code-copy " data-clipboard-target="#code-43" style="position: absolute; top: 7.5px; right: 6px; padding-top: 3px; cursor: pointer; outline: none; opacity: 0.8;" title="Copy">
<span style="display:inline-block;background:url(https://api.iconify.design/mdi/content-copy.svg) no-repeat center center / contain;width: 16px; height: 16px;" class=""></span>
</button>
</div>

View File

@@ -242,12 +242,12 @@ a {
<a href="/graphrag/posts/query/overview/" class="is-active" aria-current="page">Query</a>
<ul><li>
<a href="/graphrag/posts/query/0-global_search/">Global Search</a>
</li><li>
<a href="/graphrag/posts/query/1-local_search/">Local Search</a>
</li><li>
<a href="/graphrag/posts/query/2-question_generation/">Question Generation</a>
</li><li>
<a href="/graphrag/posts/query/0-global_search/">Global Search</a>
</li><li>
<a href="/graphrag/posts/query/3-cli/">CLI</a>
</li><li>