Yarik committed on
Commit
a02b5d4
1 Parent(s): 780c0fd

update file

Files changed (2)
  1. .gitignore +2 -1
  2. apis/chat_api.py +10 -16
.gitignore CHANGED
@@ -1,3 +1,4 @@
 secrets.json
 .env
-__pycache__
+__pycache__
+post.py
apis/chat_api.py CHANGED
@@ -86,15 +86,16 @@ class ChatAPIApp:
         }
         return self.available_models
 
-    def extract_api_key(self, credentials: HTTPAuthorizationCredentials = Depends(HTTPBearer(auto_error=False))):
-        api_key = None
+    def extract_token(self, credentials: HTTPAuthorizationCredentials = Depends(HTTPBearer(auto_error=False))):
+        token = None
         if credentials:
-            api_key = credentials.credentials
+            token = credentials.credentials
         else:
-            api_key = os.getenv("XCHE_TOKEN")
-        if api_key:
-            if api_key.startswith("hf_"):
-                return api_key
+            token = os.getenv("XCHE_TOKEN")
+
+        if token:
+            if token.startswith("hf_"):
+                return token
             else:
                 logger.warn(f"Invalid HF Token!")
         else:
@@ -110,7 +111,7 @@ class ChatAPIApp:
         use_cache: bool = Field(default=False, description="(bool) Use cache")
         stream: bool = Field(default=True, description="(bool) Stream")
 
-    def chat_completions(self, item: ChatCompletionsPostItem, api_key: str = Depends(extract_api_key)):
+    def chat_completions(self, item: ChatCompletionsPostItem, token: str = Depends(extract_token)):
         streamer = MessageStreamer(model=item.model)
         composer = MessageComposer(model=item.model)
         composer.merge(messages=item.messages)
@@ -120,7 +121,7 @@ class ChatAPIApp:
             temperature=item.temperature,
             top_p=item.top_p,
             max_new_tokens=item.max_tokens,
-            api_key=api_key,
+            api_key=token,
             use_cache=item.use_cache,
         )
         if item.stream:
@@ -135,13 +136,6 @@ class ChatAPIApp:
             data_response = streamer.chat_return_dict(stream_response)
             return data_response
 
-    def get_readme(self):
-        readme_path = Path(__file__).parents[1] / "README.md"
-        with open(readme_path, "r", encoding="utf-8") as rf:
-            readme_str = rf.read()
-        readme_html = markdown2.markdown(readme_str, extras=["table", "fenced-code-blocks", "highlightjs-lang"])
-        return readme_html
-
     def setup_routes(self):
         for prefix in ["", "/v1", "/api", "/api/v1"]:
             if prefix in ["/api/v1"]:
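
For quick manual testing of the renamed bearer-token dependency, a client sketch along these lines should work. The host, port, route prefix, and model name below are placeholder assumptions for illustration (they depend on how setup_routes registers the handler and how the server is launched), not values confirmed by this commit; only the token handling mirrors the diff above.

# Hypothetical smoke test for the updated chat_completions endpoint.
# URL, port, and model name are placeholders, not from this commit.
import os
import requests

# extract_token accepts a token starting with "hf_"; if the request
# carries no Authorization header, the server falls back to its own
# XCHE_TOKEN environment variable.
token = os.getenv("HF_TOKEN", "hf_xxxxxxxx")

resp = requests.post(
    "http://127.0.0.1:23333/api/v1/chat/completions",
    headers={"Authorization": f"Bearer {token}"},
    json={
        "model": "mixtral-8x7b",
        "messages": [{"role": "user", "content": "Hello!"}],
        "stream": False,
    },
    timeout=60,
)
print(resp.json())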