10 changes: 5 additions & 5 deletions llama-index-integrations/llms/llama-index-llms-sarvam/README.md
@@ -1,17 +1,17 @@
-# LlamaIndex Llms Integration: Servam
+# LlamaIndex Llms Integration: Sarvam

-This is the Servam integration for LlamaIndex. Visit [Servam](https://docs.sarvam.ai/api-reference-docs/chat/completions) for information on how to get an API key and which models are supported.
+This is the Sarvam integration for LlamaIndex. Visit [Sarvam](https://docs.sarvam.ai/api-reference-docs/chat/completions) for information on how to get an API key and which models are supported.

 ## Installation

 ```bash
-pip install llama-index-llms-servam
+pip install llama-index-llms-sarvam
 ```

 ## Usage

 ```python
-from llama_index.llms.servam import Servam
+from llama_index.llms.sarvam import Sarvam

-llm = Servam(model="servam-m", api_key="your-api-key")
+llm = Sarvam(model="sarvam-m", api_key="your-api-key")
 ```
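For reviewers trying out the renamed package, here is a hedged usage sketch (not part of this PR) covering chat and streaming, assuming the standard LlamaIndex LLM interface that `Sarvam` inherits from `OpenAILike`; the model name and API key are placeholders.

```python
from llama_index.core.llms import ChatMessage
from llama_index.llms.sarvam import Sarvam

# Placeholder credentials; in practice the key can also come from SARVAM_API_KEY.
llm = Sarvam(model="sarvam-m", api_key="your-api-key")

# Single-turn chat using the generic ChatMessage type from llama-index core.
response = llm.chat([ChatMessage(role="user", content="Hello!")])
print(response)

# Streaming completion: each chunk exposes the incremental text as `.delta`.
for chunk in llm.stream_complete("Write one sentence about Sarvam."):
    print(chunk.delta, end="", flush=True)
```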
@@ -0,0 +1,3 @@
+from llama_index.llms.sarvam.base import Sarvam
+
+__all__ = ["Sarvam"]
@@ -11,39 +11,39 @@
 from llama_index.llms.openai_like import OpenAILike

 DEFAULT_API_BASE = "https://api.sarvam.ai/v1"
-DEFAULT_MODEL = "servam-m"
+DEFAULT_MODEL = "sarvam-m"


-class Servam(OpenAILike):
+class Sarvam(OpenAILike):
     """
-    Servam LLM.
+    Sarvam LLM.

-    To instantiate the `Servam` class, you will need to provide an API key. You can set the API key either as an environment variable `SERVAM_API_KEY` or directly in the class
+    To instantiate the `Sarvam` class, you will need to provide an API key. You can set the API key either as an environment variable `SARVAM_API_KEY` or directly in the class
     constructor. If setting it in the class constructor, it would look like this:

-    If you haven't signed up for an API key yet, you can do so on the Servam website at (https://servam.ai). Once you have your API key, you can use the `Servam` class to interact
+    If you haven't signed up for an API key yet, you can do so on the Sarvam website at (https://sarvam.ai). Once you have your API key, you can use the `Sarvam` class to interact
     with the LLM for tasks like chatting, streaming, and completing prompts.

     Examples:
-        `pip install llama-index-llms-servam`
+        `pip install llama-index-llms-sarvam`

         ```python
-        from llama_index.llms.servam import Servam
+        from llama_index.llms.sarvam import Sarvam

-        llm = Servam(
+        llm = Sarvam(
             api_key="<your-api-key>",
             max_tokens=256,
             context_window=4096,
-            model="servam-m",
+            model="sarvam-m",
         )

         response = llm.complete("Hello World!")
-        print(str(response))
+        print(response)
         ```

     """

-    model: str = Field(description="The Servam model to use.")
+    model: str = Field(description="The Sarvam model to use.")
     context_window: int = Field(
         default=DEFAULT_CONTEXT_WINDOW,
         description="The maximum number of context tokens for the model.",
@@ -67,8 +67,8 @@ def __init__(
     ) -> None:
         additional_kwargs = additional_kwargs or {}

-        api_base = get_from_param_or_env("api_base", api_base, "SERVAM_API_BASE")
-        api_key = get_from_param_or_env("api_key", api_key, "SERVAM_API_KEY")
+        api_base = get_from_param_or_env("api_base", api_base, "SARVAM_API_BASE")
+        api_key = get_from_param_or_env("api_key", api_key, "SARVAM_API_KEY")

         super().__init__(
             model=model,
@@ -83,4 +83,4 @@ def __init__(

     @classmethod
     def class_name(cls) -> str:
-        return "Servam_LLM"
+        return "Sarvam_LLM"

This file was deleted.

@@ -1,7 +1,7 @@
 from llama_index.core.base.llms.base import BaseLLM
-from llama_index.llms.servam import Servam
+from llama_index.llms.sarvam import Sarvam


 def test_llm_class():
-    names_of_base_classes = [b.__name__ for b in Servam.__mro__]
+    names_of_base_classes = [b.__name__ for b in Sarvam.__mro__]
     assert BaseLLM.__name__ in names_of_base_classes
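A possible follow-up test (a sketch only, not part of this PR), assuming the constructor accepts explicit `api_key`/`api_base` so no environment variables or network calls are needed, and that `model` defaults to `DEFAULT_MODEL` ("sarvam-m") from base.py:

```python
from llama_index.llms.sarvam import Sarvam


def test_class_name_and_default_model():
    # Dummy credentials; instantiation alone should not hit the API.
    llm = Sarvam(api_key="fake-key", api_base="https://api.sarvam.ai/v1")
    assert llm.class_name() == "Sarvam_LLM"
    # Assumes the default model is DEFAULT_MODEL from base.py.
    assert llm.model == "sarvam-m"
```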