import abc
import os
from dataclasses import dataclass
from typing import List, Optional, Union

from .artifact import Artifact
from .operator import PackageRequirementsMixin
from .settings_utils import get_settings


class InferenceEngine(abc.ABC, Artifact):
    """Abstract base class for inference."""

    @abc.abstractmethod
    def infer(self, dataset):
        """Perform inference on the input dataset."""
        pass

    @staticmethod
    def _assert_allow_passing_data_to_remote_api(remote_api_label: str):
        assert get_settings().allow_passing_data_to_remote_api, (
            f"LlmAsJudge metric cannot run send data to remote APIs ({remote_api_label}) when"
            f" unitxt.settings.allow_passing_data_to_remote_api=False."
            f" Set UNITXT_ALLOW_PASSING_DATA_TO_REMOTE_API environment variable, if you want to allow this. "
        )
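

# A minimal sketch of a concrete engine (hypothetical, for illustration only):
# a subclass just has to implement infer() over the instances' "source" fields.
#
#     class EchoInferenceEngine(InferenceEngine):
#         def infer(self, dataset):
#             return [instance["source"] for instance in dataset]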


class HFPipelineBasedInferenceEngine(InferenceEngine, PackageRequirementsMixin):
    model_name: str
    max_new_tokens: int
    _requirement = {
        "transformers": "Install huggingface package using 'pip install --upgrade transformers"
    }

    def prepare(self):
        from transformers import pipeline

        # Let transformers infer the task (e.g. text generation) from the model config.
        self.model = pipeline(model=self.model_name)

    def infer(self, dataset):
        return [
            output["generated_text"]
            for output in self.model(
                [instance["source"] for instance in dataset],
                max_new_tokens=self.max_new_tokens,
            )
        ]
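

# Usage sketch (the model name is illustrative only): instances carry their
# prompt under "source", and infer() returns the generated strings. This
# assumes, as with other unitxt artifacts, that prepare() runs on construction.
#
#     engine = HFPipelineBasedInferenceEngine(
#         model_name="google/flan-t5-small", max_new_tokens=32
#     )
#     predictions = engine.infer([{"source": "Translate to French: Hello"}])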


@dataclass
class IbmGenAiInferenceEngineParams:
    decoding_method: Optional[str] = None
    max_new_tokens: Optional[int] = None
    min_new_tokens: Optional[int] = None
    random_seed: Optional[int] = None
    repetition_penalty: Optional[float] = None
    stop_sequences: Optional[List[str]] = None
    temperature: Optional[float] = None
    top_k: Optional[int] = None
    top_p: Optional[float] = None
    typical_p: Optional[float] = None


class IbmGenAiInferenceEngine(InferenceEngine, PackageRequirementsMixin):
    label: str = "ibm_genai"
    model_name: str
    parameters: IbmGenAiInferenceEngineParams = IbmGenAiInferenceEngineParams()
    _requirement = {
        "genai": "Install ibm-genai package using 'pip install --upgrade ibm-generative-ai"
    }

    def prepare(self):
        from genai import Client, Credentials

        api_key_env_var_name = "GENAI_KEY"
        api_key = os.environ.get(api_key_env_var_name)
        assert api_key is not None, (
            f"Error while trying to run IbmGenAiInferenceEngine."
            f" Please set the environment param '{api_key_env_var_name}'."
        )
        # The endpoint has its own environment variable; None falls back to the SDK default.
        api_endpoint = os.environ.get("GENAI_API")
        credentials = Credentials(api_key=api_key, api_endpoint=api_endpoint)
        self.client = Client(credentials=credentials)

        self._assert_allow_passing_data_to_remote_api(self.label)

    def infer(self, dataset):
        from genai.schema import TextGenerationParameters

        # Map the dataclass fields directly onto the SDK's generation parameters.
        genai_params = TextGenerationParameters(**self.parameters.__dict__)
        return list(
            self.client.text.generation.create(
                model_id=self.model_name,
                inputs=[instance["source"] for instance in dataset],
                parameters=genai_params,
            )
        )
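

# Usage sketch (the model name is illustrative only): requires GENAI_KEY (and
# optionally GENAI_API) in the environment, plus
# unitxt.settings.allow_passing_data_to_remote_api=True. Note that infer()
# returns the SDK's response objects, from which the text can be extracted:
#
#     engine = IbmGenAiInferenceEngine(
#         model_name="google/flan-t5-xl",
#         parameters=IbmGenAiInferenceEngineParams(max_new_tokens=32),
#     )
#     responses = engine.infer(dataset)
#     texts = [response.results[0].generated_text for response in responses]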


@dataclass
class OpenAiInferenceEngineParams:
    frequency_penalty: Optional[float] = None
    presence_penalty: Optional[float] = None
    max_tokens: Optional[int] = None
    seed: Optional[int] = None
    stop: Optional[Union[str, List[str]]] = None
    temperature: Optional[float] = None
    top_p: Optional[float] = None


class OpenAiInferenceEngine(InferenceEngine, PackageRequirementsMixin):
    label: str = "openai"
    model_name: str
    parameters: OpenAiInferenceEngineParams = OpenAiInferenceEngineParams()
    _requirement = {
        "openai": "Install openai package using 'pip install --upgrade openai"
    }

    def prepare(self):
        from openai import OpenAI

        api_key_env_var_name = "OPENAI_API_KEY"
        api_key = os.environ.get(api_key_env_var_name)
        assert api_key is not None, (
            f"Error while trying to run OpenAiInferenceEngine."
            f" Please set the environment param '{api_key_env_var_name}'."
        )

        self.client = OpenAI(api_key=api_key)
        self._assert_allow_passing_data_to_remote_api(self.label)

    def infer(self, dataset):
        return [
            self.client.chat.completions.create(
                messages=[
                    {
                        "role": "user",
                        "content": instance["source"],
                    }
                ],
                model=self.model_name,
                frequency_penalty=self.parameters.frequency_penalty,
                presence_penalty=self.parameters.presence_penalty,
                max_tokens=self.parameters.max_tokens,
                seed=self.parameters.seed,
                stop=self.parameters.stop,
                temperature=self.parameters.temperature,
                top_p=self.parameters.top_p,
            )
            for instance in dataset
        ]
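

# Usage sketch (the model name is illustrative only): requires OPENAI_API_KEY
# in the environment and unitxt.settings.allow_passing_data_to_remote_api=True.
# Unlike HFPipelineBasedInferenceEngine, infer() here returns the raw chat
# completion objects; the generated text lives on each completion's choices:
#
#     engine = OpenAiInferenceEngine(
#         model_name="gpt-3.5-turbo",
#         parameters=OpenAiInferenceEngineParams(max_tokens=32),
#     )
#     completions = engine.infer(dataset)
#     texts = [c.choices[0].message.content for c in completions]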