"""Custom LangChain LLM wrappers around the Anthropic completions API,
plus small text/URL helper functions."""

import os
from typing import Any, List, Mapping, Optional
from urllib.parse import urlparse

from anthropic import Anthropic, AI_PROMPT, HUMAN_PROMPT
from langchain.llms.base import LLM

class ClaudeLLM(LLM):
    """LangChain LLM wrapper around Anthropic's claude-instant-v1-100k model."""

    @property
    def _llm_type(self) -> str:
        return "custom"

    def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
        # The Anthropic client defaults to os.environ.get("ANTHROPIC_API_KEY");
        # it is passed explicitly here for clarity.
        client = Anthropic(api_key=os.environ.get("ANTHROPIC_API_KEY"))

        # Wrap the raw prompt in Anthropic's required Human/Assistant turn markers.
        prompt_formatted = f"{HUMAN_PROMPT}{prompt}\n{AI_PROMPT}"

        response = client.completions.create(
            model="claude-instant-v1-100k",
            prompt=prompt_formatted,
            # Always stop at the next Human turn, plus any stop sequences passed by LangChain.
            stop_sequences=(stop or []) + [HUMAN_PROMPT],
            max_tokens_to_sample=100000,
            temperature=0,
        )
        return response.completion

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        return {}


class ClaudeLLM2(LLM):
    """LangChain LLM wrapper around Anthropic's claude-2 model."""

    @property
    def _llm_type(self) -> str:
        return "custom"

    def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
        # The Anthropic client defaults to os.environ.get("ANTHROPIC_API_KEY");
        # it is passed explicitly here for clarity.
        client = Anthropic(api_key=os.environ.get("ANTHROPIC_API_KEY"))

        # Wrap the raw prompt in Anthropic's required Human/Assistant turn markers.
        prompt_formatted = f"{HUMAN_PROMPT}{prompt}\n{AI_PROMPT}"

        response = client.completions.create(
            model="claude-2",
            prompt=prompt_formatted,
            # Always stop at the next Human turn, plus any stop sequences passed by LangChain.
            stop_sequences=(stop or []) + [HUMAN_PROMPT],
            max_tokens_to_sample=100000,
            temperature=0,
        )
        return response.completion

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        return {}


def remove_numbers(question):
    """Strip all digit characters from the question text."""
    return question.translate(str.maketrans('', '', '0123456789'))


def extract_website_name(url):
    """Return the bare site name from a URL, e.g. 'https://www.example.com/x' -> 'example'."""
    parsed_url = urlparse(url)
    if parsed_url.netloc.startswith("www."):
        return parsed_url.netloc.split("www.")[1].split(".")[0]
    return parsed_url.netloc.split(".")[0]
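

# Usage sketch: one way the wrappers and helpers above might be wired together.
# Assumes ANTHROPIC_API_KEY is set in the environment; the URL and question below
# are hypothetical placeholders, not values taken from this module.
if __name__ == "__main__":
    llm = ClaudeLLM()

    url = "https://www.example.com/article/123"
    question = "Question 1: what does page 42 of this site say?"

    site = extract_website_name(url)            # -> "example"
    clean_question = remove_numbers(question)   # digits stripped from the question

    # LangChain LLM subclasses are callable on a plain prompt string.
    answer = llm(f"Regarding the {site} website: {clean_question}")
    print(answer)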