Skip to content

Commit

Permalink
Merge pull request #1164 from You-OpenSource/you/chat-apis
Browse files Browse the repository at this point in the history
feat(you-llm): Integrate You.com conversational APIs
  • Loading branch information
arnavsinghvi11 committed Jun 18, 2024
2 parents 09993d2 + dcb71f0 commit 05a4923
Show file tree
Hide file tree
Showing 5 changed files with 135 additions and 1 deletion.
46 changes: 46 additions & 0 deletions docs/api/language_model_clients/You.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
---
sidebar_position: 13
---

# dspy.You
Wrapper around [You.com's conversational Smart and Research APIs](https://documentation.you.com/api-reference/).

Each API endpoint is designed to generate conversational
responses to a variety of query types, including inline citations
and web results when relevant.

Smart Mode:
- Quick, reliable answers for a variety of questions
- Cites the entire web page URL

Research Mode:
- In-depth answers with extensive citations for a variety of questions
- Cites the specific web page snippet relevant to the claim

For more information, check out the documentation at
https://documentation.you.com/api-reference/.


### Constructor
```python
You(
endpoint: Literal["smart", "research"] = "smart",
ydc_api_key: Optional[str] = None,
)
```

**Parameters:**
- `endpoint`: You.com conversational endpoints. Choose from "smart" or "research"
- `ydc_api_key`: You.com API key, if `YDC_API_KEY` is not set in the environment

### Usage
Obtain a You.com API key from https://api.you.com/.

Export this key to an environment variable `YDC_API_KEY`.

```python
import dspy

# The API key is inferred from the `YDC_API_KEY` environment variable
lm = dspy.You(endpoint="smart")
```
1 change: 1 addition & 0 deletions dsp/modules/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,3 +27,4 @@
from .snowflake import *
from .tensorrt_llm import TensorRTModel
from .watsonx import *
from .you import You
14 changes: 13 additions & 1 deletion dsp/modules/lm.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,8 @@ def inspect_history(self, n: int = 1, skip: int = 0):
printed.append((prompt, x["response"].choices))
elif provider == "ibm":
printed.append((prompt, x))
elif provider == "you.com":
printed.append((prompt, x["response"]["answer"]))
else:
printed.append((prompt, x["response"]["choices"]))

Expand All @@ -87,7 +89,17 @@ def inspect_history(self, n: int = 1, skip: int = 0):
printing_value += prompt

text = ""
if provider in ("cohere", "Bedrock", "Sagemaker", "clarifai", "claude", "ibm", "premai", "tensorrt_llm"):
if provider in (
"cohere",
"Bedrock",
"Sagemaker",
"clarifai",
"claude",
"ibm",
"premai",
"you.com",
"tensorrt_llm",
):
text = choices
elif provider == "openai" or provider == "ollama":
text = " " + self._get_choice_text(choices[0]).strip()
Expand Down
73 changes: 73 additions & 0 deletions dsp/modules/you.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
import os
from typing import Any, Literal, Optional

import requests

from dsp.modules.lm import LM

# You.com conversational API endpoints; the active one is selected by the
# `endpoint` constructor argument (see https://documentation.you.com/api-reference/).
SMART_ENDPOINT = "https://chat-api.you.com/smart"
RESEARCH_ENDPOINT = "https://chat-api.you.com/research"


class You(LM):
    """Wrapper around You.com's conversational Smart and Research APIs.

    Each API endpoint is designed to generate conversational
    responses to a variety of query types, including inline citations
    and web results when relevant.

    Smart Mode:
    - Quick, reliable answers for a variety of questions
    - Cites the entire web page URL

    Research Mode:
    - In-depth answers with extensive citations for a variety of questions
    - Cites the specific web page snippet relevant to the claim

    Connecting to the You.com API requires an API key, which
    you can get at https://api.you.com.

    For more information, check out the documentation at
    https://documentation.you.com/api-reference/.

    Args:
        endpoint: You.com conversational endpoints. Choose from "smart" or "research"
        ydc_api_key: You.com API key, if `YDC_API_KEY` is not set in the environment
    """

    # Upper bound on a single HTTP request, in seconds. Without a timeout,
    # `requests` waits indefinitely and a stalled endpoint would hang the caller.
    REQUEST_TIMEOUT = 60

    def __init__(
        self,
        endpoint: Literal["smart", "research"] = "smart",
        ydc_api_key: Optional[str] = None,
    ):
        super().__init__(model="you.com")
        # Explicit key wins; otherwise require YDC_API_KEY in the environment
        # (raises KeyError immediately rather than failing on the first request).
        self.ydc_api_key = ydc_api_key or os.environ["YDC_API_KEY"]
        self.endpoint = endpoint

        # Mandatory DSPy attributes to inspect LLM call history
        self.history = []
        self.provider = "you.com"

    def basic_request(self, prompt, **kwargs) -> dict[str, Any]:
        """POST `prompt` to the selected endpoint and return the parsed JSON response.

        Raises:
            requests.HTTPError: if the API responds with a non-2xx status.
            requests.Timeout: if the request exceeds REQUEST_TIMEOUT seconds.
        """
        headers = {"x-api-key": self.ydc_api_key}
        params = {"query": prompt}  # DSPy `kwargs` are ignored as they are not supported by the API

        response = requests.post(
            self.request_endpoint,
            headers=headers,
            json=params,
            timeout=self.REQUEST_TIMEOUT,  # fail fast instead of hanging on a stalled endpoint
        )
        response.raise_for_status()

        data = response.json()

        # Update history
        self.history.append({"prompt": prompt, "response": data, "endpoint": self.endpoint})

        return data

    @property
    def request_endpoint(self) -> str:
        # Map the user-facing mode name to the concrete API URL.
        if self.endpoint == "smart":
            return SMART_ENDPOINT
        return RESEARCH_ENDPOINT

    def __call__(self, prompt, only_completed: bool = True, return_sorted: bool = False, **kwargs) -> list[str]:
        """Query the API and return the single conversational answer as a one-element list."""
        response = self.request(prompt, **kwargs)
        return [response["answer"]]
2 changes: 2 additions & 0 deletions dspy/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,5 +49,7 @@
Watsonx = dsp.Watsonx
PremAI = dsp.PremAI

You = dsp.You

configure = settings.configure
context = settings.context

0 comments on commit 05a4923

Please sign in to comment.