feat: bigquery logging
- qa_engine/logger.py +78 -4
- requirements.txt +1 -0
qa_engine/logger.py
CHANGED
@@ -1,14 +1,88 @@
 import logging
+import os
+import io
+import json
+from google.cloud import bigquery
+from google.oauth2 import service_account
+from google.api_core.exceptions import GoogleAPIError
+
+job_config = bigquery.LoadJobConfig(
+    schema=[
+        bigquery.SchemaField("timestamp", "TIMESTAMP", mode="REQUIRED"),
+        bigquery.SchemaField("log_entry", "STRING", mode="REQUIRED"),
+    ],
+    write_disposition="WRITE_APPEND",
+)
+
+
+class BigQueryLoggingHandler(logging.Handler):
+    def __init__(self):
+        super().__init__()
+        try:
+            project_id = os.getenv("BIGQUERY_PROJECT_ID")
+            dataset_id = os.getenv("BIGQUERY_DATASET_ID")
+            table_id = os.getenv("BIGQUERY_TABLE_ID")
+            print(f"project_id: {project_id}")
+            print(f"dataset_id: {dataset_id}")
+            print(f"table_id: {table_id}")
+            service_account_info = json.loads(
+                os.getenv("GOOGLE_SERVICE_ACCOUNT_JSON")
+                .replace('"', "")
+                .replace("'", '"')
+            )
+            print(f"service_account_info: {service_account_info}")
+            print(f"service_account_info type: {type(service_account_info)}")
+            print(f"service_account_info keys: {service_account_info.keys()}")
+            credentials = service_account.Credentials.from_service_account_info(
+                service_account_info
+            )
+            self.client = bigquery.Client(credentials=credentials, project=project_id)
+            self.table_ref = self.client.dataset(dataset_id).table(table_id)
+        except Exception as e:
+            print(f"Error: {e}")
+            self.handleError(e)
+
+    def emit(self, record):
+        try:
+            recordstr = f"{self.format(record)}"
+            body = io.BytesIO(recordstr.encode("utf-8"))
+            job = self.client.load_table_from_file(
+                body, self.table_ref, job_config=job_config
+            )
+            job.result()
+        except GoogleAPIError as e:
+            self.handleError(e)
+        except Exception as e:
+            self.handleError(e)
+
+    def handleError(self, record):
+        """
+        Handle errors associated with logging.
+        This method prevents logging-related exceptions from propagating.
+        Optionally, implement more sophisticated error handling here.
+        """
+        if isinstance(record, logging.LogRecord):
+            super().handleError(record)
+        else:
+            print(f"Logging error: {record}")
 
 
 logger = logging.getLogger(__name__)
 
+
 def setup_logger() -> None:
     """
     Logger setup.
     """
     logger.setLevel(logging.DEBUG)
-
-
-
-
+
+    stream_formatter = logging.Formatter(
+        "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
+    )
+    stream_handler = logging.StreamHandler()
+    stream_handler.setFormatter(stream_formatter)
+    logger.addHandler(stream_handler)
+
+    bq_handler = BigQueryLoggingHandler()
+    bq_handler.setFormatter(stream_formatter)
+    logger.addHandler(bq_handler)
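The handler is configured entirely through the four environment variables read in __init__ (BIGQUERY_PROJECT_ID, BIGQUERY_DATASET_ID, BIGQUERY_TABLE_ID, GOOGLE_SERVICE_ACCOUNT_JSON). A rough smoke test of the new code path might look like the sketch below; the placeholder values and the test message are illustrative only and are not part of this commit.

# Sketch only: exercises the new handler, assuming the placeholders below are
# replaced with a real project, dataset, table, and service-account key.
import os

os.environ["BIGQUERY_PROJECT_ID"] = "my-project"     # placeholder
os.environ["BIGQUERY_DATASET_ID"] = "my_dataset"     # placeholder
os.environ["BIGQUERY_TABLE_ID"] = "my_log_table"     # placeholder
# GOOGLE_SERVICE_ACCOUNT_JSON must contain the service-account key JSON.

from qa_engine.logger import logger, setup_logger

setup_logger()
logger.info("BigQuery logging smoke test")

Note that emit() runs one blocking load job per record (job.result()), so a chatty logger will wait on BigQuery for every message; streaming inserts via Client.insert_rows_json would be a lighter-weight alternative if that becomes a bottleneck.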
requirements.txt
CHANGED
@@ -26,3 +26,4 @@ InstructorEmbedding==1.0.0
 faiss_cpu==1.7.3
 uvicorn==0.22.0
 pytest==7.3.1
+google-cloud-bigquery==3.17.2
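The new dependency pins the BigQuery client used by the handler. Its load job declares a two-column schema (timestamp, log_entry); if you prefer to provision the destination table up front, a one-off sketch could look like the following, where the fully qualified table name is a placeholder rather than a value from this repo.

# Sketch only: creates the destination table with the schema used by
# job_config in qa_engine/logger.py. The table name below is a placeholder.
from google.cloud import bigquery

client = bigquery.Client(project="my-project")  # assumes ambient credentials
table = bigquery.Table(
    "my-project.my_dataset.my_log_table",
    schema=[
        bigquery.SchemaField("timestamp", "TIMESTAMP", mode="REQUIRED"),
        bigquery.SchemaField("log_entry", "STRING", mode="REQUIRED"),
    ],
)
client.create_table(table, exists_ok=True)  # no-op if the table already exists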