diff --git a/datastore/factory.py b/datastore/factory.py
index 577a6f1b1..f8a770203 100644
--- a/datastore/factory.py
+++ b/datastore/factory.py
@@ -68,7 +68,7 @@ async def get_datastore() -> DataStore:
)
return ElasticsearchDataStore()
- case "mongodb":
+ case "mongodb-atlas":
from datastore.providers.mongodb_atlas_datastore import (
MongoDBAtlasDataStore,
)
@@ -77,5 +77,5 @@ async def get_datastore() -> DataStore:
case _:
raise ValueError(
f"Unsupported vector database: {datastore}. "
- f"Try one of the following: llama, elasticsearch, pinecone, weaviate, milvus, zilliz, redis, azuresearch, or qdrant"
+ f"Try one of the following: mongodb-atlas, llama, elasticsearch, pinecone, weaviate, milvus, zilliz, redis, azuresearch, or qdrant"
)
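
For reviewers skimming the diff: the only behavioral change in factory.py is the case label, so the MongoDB Atlas provider is now selected by the string "mongodb-atlas". Below is a minimal sketch of that dispatch, assuming (as the setup table further down indicates) that the selector comes from the DATASTORE environment variable; _get_datastore_sketch is a hypothetical stand-in, not the repository's exact factory code.

    import os

    async def _get_datastore_sketch():
        # Hypothetical stand-in for get_datastore(); assumes the selector
        # string is read from the DATASTORE environment variable.
        datastore = os.environ.get("DATASTORE", "").lower()
        match datastore:
            case "mongodb-atlas":  # was "mongodb" before this change
                from datastore.providers.mongodb_atlas_datastore import (
                    MongoDBAtlasDataStore,
                )
                return MongoDBAtlasDataStore()
            case _:
                raise ValueError(
                    f"Unsupported vector database: {datastore}. "
                    f"Try one of the following: mongodb-atlas, llama, elasticsearch, "
                    f"pinecone, weaviate, milvus, zilliz, redis, azuresearch, or qdrant"
                )
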
diff --git a/docs/providers/mongodb/setup.md b/docs/providers/mongodb_atlas/setup.md
similarity index 99%
rename from docs/providers/mongodb/setup.md
rename to docs/providers/mongodb_atlas/setup.md
index a1df10da3..cf70310a1 100644
--- a/docs/providers/mongodb/setup.md
+++ b/docs/providers/mongodb_atlas/setup.md
@@ -89,7 +89,7 @@ You can confirm that the required ones have been set like this: `assert "MONGOD
| `MONGODB_DATABASE` | Database name | SQUAD |
| `MONGODB_COLLECTION` | Collection name | Beyonce |
| `MONGODB_INDEX` | Search index name | vector_index |
-| `DATASTORE` | Datastore name | [must be] mongodb |
+| `DATASTORE` | Datastore name | [must be] mongodb-atlas |
| `EMBEDDING_MODEL` | OpenAI Embedding Model | text-embedding-3-small |
| `EMBEDDING_DIMENSION` | Length of Embedding Vectors | 1536 |
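
To make the rename concrete on the configuration side, here is a hedged sketch of the environment the table above describes, reusing the table's example values; the MONGODB_URI value is an illustrative placeholder, not a working connection string.

    import os

    # Example values taken from the table above; replace MONGODB_URI with
    # your own Atlas connection string before running anything.
    os.environ.update({
        "DATASTORE": "mongodb-atlas",  # must be mongodb-atlas (formerly mongodb)
        "MONGODB_URI": "mongodb+srv://<user>:<password>@<cluster>.mongodb.net",
        "MONGODB_DATABASE": "SQUAD",
        "MONGODB_COLLECTION": "Beyonce",
        "MONGODB_INDEX": "vector_index",
        "EMBEDDING_MODEL": "text-embedding-3-small",
        "EMBEDDING_DIMENSION": "1536",
    })
    assert os.environ["DATASTORE"] == "mongodb-atlas"
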
diff --git a/examples/providers/mongodb/semantic-search.ipynb b/examples/providers/mongodb_atlas/semantic-search.ipynb
similarity index 99%
rename from examples/providers/mongodb/semantic-search.ipynb
rename to examples/providers/mongodb_atlas/semantic-search.ipynb
index 548f60820..c015167a4 100644
--- a/examples/providers/mongodb/semantic-search.ipynb
+++ b/examples/providers/mongodb_atlas/semantic-search.ipynb
@@ -23,7 +23,7 @@
" * Create a MongoDB Atlas cluster.\n",
" * Add a Vector Index Search to it.
\n",
"\n",
- " Begin by following the detailed steps in **[setup.md](https://github.com/caseyclements/chatgpt-retrieval-plugin/blob/mongodb/docs/providers/mongodb/setup.md)**.\n",
+ " Begin by following the detailed steps in **[setup.md](https://github.com/caseyclements/chatgpt-retrieval-plugin/blob/mongodb/docs/providers/mongodb_atlas/setup.md)**.\n",
" Once completed, you will have a running Cluster, with a Database, a Collection, and a Vector Search Index attached to it.\n",
"\n",
" You will also have a number of required environment variables. These need to be available to run this example.\n",
@@ -93,7 +93,7 @@
"import os\n",
"required_vars = {'BEARER_TOKEN', 'OPENAI_API_KEY', 'DATASTORE', 'EMBEDDING_DIMENSION', 'EMBEDDING_MODEL',\n",
" 'MONGODB_COLLECTION', 'MONGODB_DATABASE', 'MONGODB_INDEX', 'MONGODB_URI'}\n",
- "assert os.environ[\"DATASTORE\"] == 'mongodb'\n",
+ "assert os.environ[\"DATASTORE\"] == 'mongodb-atlas'\n",
"missing = required_vars - set(os.environ)\n",
"if missing:\n",
" print(f\"It is strongly recommended to set these additional environment variables. {missing}=\")"
@@ -418,7 +418,7 @@
"\n",
"Posting an `upsert` request to the ChatGPT Retrieval Plugin API performs two tasks on the backend. First, it inserts into (or updates) your data in the MONGODB_COLLECTION in the MongoDB Cluster that you setup. Second, Atlas asynchronously begins populating a Vector Search Index on the embedding key. \n",
"\n",
- "If you have already created the Collection and a Vector Search Index through the Atlas UI while Setting up MongoDB Atlas Cluster in [setup.md](https://github.com/caseyclements/chatgpt-retrieval-plugin/blob/main/docs/providers/mongodb/setup.md), then indexing will begin immediately.\n",
+ "If you have already created the Collection and a Vector Search Index through the Atlas UI while Setting up MongoDB Atlas Cluster in [setup.md](https://github.com/caseyclements/chatgpt-retrieval-plugin/blob/main/docs/providers/mongodb_atlas/setup.md), then indexing will begin immediately.\n",
"\n",
"If you haven't set up the Atlas Vector Search yet, no problem. `upsert` will insert the data. To start indexing, simply go back to the Atlas UI and add a Search Index. This will trigger indexing. Once complete, we can begin semantic queries!\n",
"\n",
diff --git a/tests/datastore/providers/mongodb_atlas/test_integration.py b/tests/datastore/providers/mongodb_atlas/test_integration.py
index cea67678c..dd5ce80eb 100644
--- a/tests/datastore/providers/mongodb_atlas/test_integration.py
+++ b/tests/datastore/providers/mongodb_atlas/test_integration.py
@@ -1,7 +1,7 @@
"""Integration Tests of ChatGPT Retrieval Plugin
with MongoDB Atlas Vector Datastore and OPENAI Embedding model.
-As described in docs/providers/mongodb/setup.md, to run this, one must
+As described in docs/providers/mongodb_atlas/setup.md, to run this, one must
have a running MongoDB Atlas Cluster, and
provide a valid OPENAI_API_KEY.
"""
@@ -104,7 +104,7 @@ def test_required_vars() -> None:
"""Confirm that the environment has all it needs"""
required_vars = {'BEARER_TOKEN', 'OPENAI_API_KEY', 'DATASTORE', 'EMBEDDING_DIMENSION', 'EMBEDDING_MODEL',
'MONGODB_COLLECTION', 'MONGODB_DATABASE', 'MONGODB_INDEX', 'MONGODB_URI'}
- assert os.environ["DATASTORE"] == 'mongodb'
+ assert os.environ["DATASTORE"] == 'mongodb-atlas'
missing = required_vars - set(os.environ)
assert len(missing) == 0
diff --git a/tests/datastore/providers/mongodb_atlas/test_mongodb_datastore.py b/tests/datastore/providers/mongodb_atlas/test_mongodb_datastore.py
index ef585c7e3..847718c7b 100644
--- a/tests/datastore/providers/mongodb_atlas/test_mongodb_datastore.py
+++ b/tests/datastore/providers/mongodb_atlas/test_mongodb_datastore.py
@@ -2,10 +2,10 @@
Integration tests of MongoDB Atlas Datastore.
These tests require one to have a running Cluster, Database, Collection and Atlas Search Index
-as described in docs/providers/mongodb/setup.md.
+as described in docs/providers/mongodb_atlas/setup.md.
One will also have to set the same environment variables. Although one CAN
-use we the same collection and index used in examples/providers/mongodb/semantic-search.ipynb,
+use the same collection and index used in examples/providers/mongodb_atlas/semantic-search.ipynb,
these tests will make changes to the data, so you may wish to create another collection.
If you have run the example notebook, you can reuse them with the following.