From ced825c40fdf0cd1f402d00100a8858b531778d0 Mon Sep 17 00:00:00 2001
From: Tim
Date: Tue, 11 Feb 2025 16:29:31 -0500
Subject: [PATCH] Update Quickstart notebook with API changes introduced in
 83316b2 (#239)

---
 notebooks/1-Quickstart.ipynb | 28 +++++++++++++++++-----------
 1 file changed, 17 insertions(+), 11 deletions(-)

diff --git a/notebooks/1-Quickstart.ipynb b/notebooks/1-Quickstart.ipynb
index 3de761c..dec8622 100644
--- a/notebooks/1-Quickstart.ipynb
+++ b/notebooks/1-Quickstart.ipynb
@@ -138,7 +138,7 @@
    "source": [
     "def upload_files(\n",
     "    file_directory: str,\n",
-    "    storage_name: str,\n",
+    "    container_name: str,\n",
     "    batch_size: int = 100,\n",
     "    overwrite: bool = True,\n",
     "    max_retries: int = 5,\n",
@@ -147,10 +147,10 @@
     "    Upload files to a blob storage container.\n",
     "\n",
     "    Args:\n",
-    "    file_directory - a local directory of .txt files to upload. All files must have utf-8 encoding.\n",
-    "    storage_name - a unique name for the Azure storage blob container.\n",
+    "    file_directory - a local directory of .txt files to upload. All files must be in utf-8 encoding.\n",
+    "    container_name - a unique name for the Azure storage container.\n",
     "    batch_size - the number of files to upload in a single batch.\n",
-    "    overwrite - whether or not to overwrite files if they already exist in the storage blob container.\n",
+    "    overwrite - whether or not to overwrite files if they already exist in the storage container.\n",
     "    max_retries - the maximum number of times to retry uploading a batch of files if the API is busy.\n",
     "\n",
     "    NOTE: Uploading files may sometimes fail if the blob container was recently deleted\n",
@@ -159,13 +159,13 @@
     "    url = endpoint + \"/data\"\n",
     "\n",
     "    def upload_batch(\n",
-    "        files: list, storage_name: str, overwrite: bool, max_retries: int\n",
+    "        files: list, container_name: str, overwrite: bool, max_retries: int\n",
     "    ) -> requests.Response:\n",
     "        for _ in range(max_retries):\n",
     "            response = requests.post(\n",
     "                url=url,\n",
     "                files=files,\n",
-    "                params={\"storage_name\": storage_name, \"overwrite\": overwrite},\n",
+    "                params={\"container_name\": container_name, \"overwrite\": overwrite},\n",
     "                headers=headers,\n",
     "            )\n",
     "            # API may be busy, retry\n",
@@ -194,20 +194,20 @@
     "        )\n",
     "        # upload batch of files\n",
     "        if len(batch_files) == batch_size:\n",
-    "            response = upload_batch(batch_files, storage_name, overwrite, max_retries)\n",
+    "            response = upload_batch(batch_files, container_name, overwrite, max_retries)\n",
     "            # if response is not ok, return early\n",
     "            if not response.ok:\n",
     "                return response\n",
     "            batch_files.clear()\n",
     "    # upload remaining files\n",
     "    if len(batch_files) > 0:\n",
-    "        response = upload_batch(batch_files, storage_name, overwrite, max_retries)\n",
+    "        response = upload_batch(batch_files, container_name, overwrite, max_retries)\n",
     "    return response\n",
     "\n",
     "\n",
     "response = upload_files(\n",
     "    file_directory=file_directory,\n",
-    "    storage_name=storage_name,\n",
+    "    container_name=storage_name,\n",
     "    batch_size=100,\n",
     "    overwrite=True,\n",
     ")\n",
@@ -240,8 +240,14 @@
     "    This function kicks off a job that builds a knowledge graph index from files located in a blob storage container.\n",
     "    \"\"\"\n",
     "    url = endpoint + \"/index\"\n",
-    "    request = {\"storage_name\": storage_name, \"index_name\": index_name}\n",
-    "    return requests.post(url, params=request, headers=headers)\n",
+    "    return requests.post(\n",
+    "        url,\n",
+    "        params={\n",
+    "            \"index_container_name\": index_name,\n",
+    "            \"storage_container_name\": storage_name,\n",
+    "        },\n",
+    "        headers=headers,\n",
+    "    )\n",
     "\n",
     "\n",
     "response = build_index(storage_name=storage_name, index_name=index_name)\n",