BloodyInside committed · Commit cdc95a6 · 1 Parent(s): 43a9eda
backend/__init__.py CHANGED
@@ -0,0 +1 @@
+import backend.tests
backend/__pycache__/__init__.cpython-312.pyc CHANGED
Binary files a/backend/__pycache__/__init__.cpython-312.pyc and b/backend/__pycache__/__init__.cpython-312.pyc differ
 
backend/__pycache__/tests.cpython-312.pyc ADDED
Binary file (178 Bytes).
 
backend/__pycache__/urls.cpython-312.pyc CHANGED
Binary files a/backend/__pycache__/urls.cpython-312.pyc and b/backend/__pycache__/urls.cpython-312.pyc differ
 
backend/invoke_worker/__init__.py CHANGED
@@ -2,10 +2,8 @@ from core.settings import DEBUG
 import sys
 
 if len(sys.argv) > 1 and not (sys.argv[1] in ['migrate', "makemigrations", "clear_all_sessions"]):
-    print("[Worker] Starting Thread...")
     from backend.invoke_worker import (
         session,
         chapter_queue,
     )
-    print("[Worker] Thread Started!")
 
backend/invoke_worker/__pycache__/__init__.cpython-312.pyc CHANGED
Binary files a/backend/invoke_worker/__pycache__/__init__.cpython-312.pyc and b/backend/invoke_worker/__pycache__/__init__.cpython-312.pyc differ
 
backend/invoke_worker/__pycache__/chapter_queue.cpython-312.pyc CHANGED
Binary files a/backend/invoke_worker/__pycache__/chapter_queue.cpython-312.pyc and b/backend/invoke_worker/__pycache__/chapter_queue.cpython-312.pyc differ
 
backend/invoke_worker/chapter_queue.py CHANGED
@@ -13,6 +13,7 @@ from asgiref.sync import async_to_sync
 
 from django.db.models import Count
 
+
 import requests, environ, os, subprocess, shutil, zipfile, uuid, sys
 
 env = environ.Env()
@@ -100,19 +101,16 @@ class Job(Thread):
             job = web_scrap.source_control[source].get_chapter.scrap(comic_id=comic_id,chapter_id=chapter_id,output_dir=input_dir)
             if job.get("status") == "success":
 
-                # with open(os.path.join(LOG_DIR,"image_translator_output.log"), "w") as file:
-                result = subprocess.run(
-                    script,
-                    cwd=os.path.join(BASE_DIR, "backend", "module", "utils", "image_translator"),
-                    check=True,
-                    shell=False if sys.platform.startswith('linux') else True,
-                    # stdout=file,
-                    # stderr=file,
-                    text=True,
-                )
-
-
-
+                with open(os.path.join(LOG_DIR,"image_translator_output.log"), "w") as file:
+                    result = subprocess.run(
+                        script,
+                        cwd=os.path.join(BASE_DIR, "backend", "module", "utils", "image_translator"),
+                        check=True,
+                        shell=False if sys.platform.startswith('linux') else True,
+                        stdout=file,
+                        stderr=file,
+                        text=True,
+                    )
                 if result.returncode != 0: raise Exception("Image Translator Execution error!")
                 os.makedirs(managed_output_dir,exist_ok=True)
                 shutil.rmtree(input_dir)
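Note: the new hunk redirects the image-translator subprocess's stdout/stderr into a log file instead of discarding them. A minimal sketch of the same pattern, assuming placeholder paths and a placeholder command (the repository's LOG_DIR, BASE_DIR and script values are defined elsewhere and not shown here):

import os, subprocess, sys

LOG_DIR = "/tmp/logs"                                             # placeholder, not the repo's actual setting
script = [sys.executable, "-c", "print('translator finished')"]   # placeholder command

os.makedirs(LOG_DIR, exist_ok=True)
with open(os.path.join(LOG_DIR, "image_translator_output.log"), "w") as file:
    # check=True already raises CalledProcessError on a non-zero exit,
    # so the later returncode check is a second line of defence.
    result = subprocess.run(
        script,
        check=True,
        shell=False,    # the commit toggles this to True on non-Linux platforms
        stdout=file,    # both streams end up in the log file
        stderr=file,
        text=True,
    )
print(result.returncode)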
backend/module/web_scrap/ColaManga/__pycache__/get.cpython-312.pyc CHANGED
Binary files a/backend/module/web_scrap/ColaManga/__pycache__/get.cpython-312.pyc and b/backend/module/web_scrap/ColaManga/__pycache__/get.cpython-312.pyc differ
 
backend/module/web_scrap/ColaManga/__pycache__/get_chapter.cpython-312.pyc CHANGED
Binary files a/backend/module/web_scrap/ColaManga/__pycache__/get_chapter.cpython-312.pyc and b/backend/module/web_scrap/ColaManga/__pycache__/get_chapter.cpython-312.pyc differ
 
backend/module/web_scrap/ColaManga/__pycache__/get_list.cpython-312.pyc CHANGED
Binary files a/backend/module/web_scrap/ColaManga/__pycache__/get_list.cpython-312.pyc and b/backend/module/web_scrap/ColaManga/__pycache__/get_list.cpython-312.pyc differ
 
backend/module/web_scrap/ColaManga/__pycache__/search.cpython-312.pyc CHANGED
Binary files a/backend/module/web_scrap/ColaManga/__pycache__/search.cpython-312.pyc and b/backend/module/web_scrap/ColaManga/__pycache__/search.cpython-312.pyc differ
 
backend/module/web_scrap/ColaManga/get.py CHANGED
@@ -33,7 +33,7 @@ def scrap(id:int=1):
     cover_url = driver.find_element(By.CLASS_NAME, "fed-list-pics").get_attribute("data-original")
     cover_url_split = cover_url.split("/")
     cover_id = cover_url_split[len(cover_url_split)-2]
-    DATA["cover"] = f"/api/web_scrap/colamanga/get_cover/{id}/{cover_id}/"
+    DATA["cover"] = f"/api/web_scrap/get_cover/colamanga/{id}/{cover_id}/"
 
 
     content_info_element = driver.find_element(By.CLASS_NAME, "fed-deta-content")
backend/module/web_scrap/ColaManga/get_chapter.py CHANGED
@@ -17,36 +17,24 @@ scraper = None
 
 def __scrollToBottom(driver:object=None):
     if not driver: raise ValueError("The 'driver' argument is required.")
-    timeout = 10
-    interval = [0]
-
-    def timer(interval,timeout):
-        while True:
-            time.sleep(1)
-            interval[0] = interval[0]+1
-            if interval[0] >= timeout: break
-    t = threading.Thread(target=timer, args=[interval,timeout])
-    t.daemon = True
-    t.start()
+
+    timeout = date_utils.utc_time().add(60,'second').get()
 
     previous_height = 0
     scrolledY = 0
     while True:
+        if date_utils.utc_time().get() >= timeout: raise Exception("[Get Chapter] Finding lastest element Timed out!")
         # Scroll to the bottom of the page
         driver.execute_script(f"window.scrollBy(0, {scrolledY});")
-        time.sleep(0.25)
 
         current_height = driver.execute_script("return document.documentElement.scrollHeight")
 
         if current_height > previous_height:
             previous_height = current_height
-            interval[0] = 0
         else:
             parent_div = driver.find_element(By.CLASS_NAME, "mh_mangalist")
             child_elements = parent_div.find_elements(By.XPATH, "./*")
-            if child_elements[-1].tag_name != 'script' and child_elements[-1].get_attribute('text') != '__cad.read_periodical();':
-                if interval[0] >= timeout: break
-                else: interval[0] = 0
+            if child_elements[-1].get_attribute('text') != '__cad.read_periodical();': break
         scrolledY += 50
 
 
@@ -71,24 +59,28 @@ def scrap(comic_id:str="",chapter_id:str="",output_dir:str=""):
     child_list = parent_element.find_elements(By.CLASS_NAME, "mh_comicpic")
 
     blob_list = []
-
+    is_no_more = False
     for child in child_list:
-        image_element = child.find_element(By.TAG_NAME, "img")
-        url = image_element.get_attribute("src")
-        if not url: continue
-        if url.split(":")[0] == "blob":
-            timeout = 0
-            while True:
-                if timeout >= MAX_TIMEOUT: raise Exception('#1 Timed out!')
+        if is_no_more: break
+        timeout = date_utils.utc_time().add(10,'second').get()
+        while True:
+            if date_utils.utc_time().get() >= timeout:
+                is_no_more = True
+                break
+            image_element = child.find_element(By.TAG_NAME, "img")
+            driver.execute_script("arguments[0].scrollIntoView({ behavior: 'smooth', block: 'center' });", image_element)
+
+            url = image_element.get_attribute("src")
+            if url:
                 is_image_loaded = driver.execute_script(
                     "return arguments[0].complete",
                     image_element
                 )
-                if is_image_loaded: break
+                if is_image_loaded:
+                    blob_list.append(url)
+                    break
 
-                timeout += 1
-                time.sleep(1)
-        blob_list.append(url)
 
 
 def process_browser_log_entry(entry):
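Note: both hunks above replace counter/background-thread timers with wall-clock deadlines built from date_utils.utc_time(). The date_utils helper itself is not part of this commit, so the sketch below shows the same deadline pattern with the standard library only; wait_until is an illustrative name, not a project function:

import time
from datetime import datetime, timedelta, timezone

def utc_now():
    return datetime.now(timezone.utc)

def wait_until(predicate, timeout_s=10, poll_s=0.25):
    # Poll predicate() until it is truthy, or raise once the deadline passes,
    # mirroring the "if date_utils.utc_time().get() >= timeout" checks in the diff.
    deadline = utc_now() + timedelta(seconds=timeout_s)
    while True:
        if predicate():
            return True
        if utc_now() >= deadline:
            raise Exception("Timed out!")
        time.sleep(poll_s)

# usage (hypothetical condition): wait_until(lambda: image_is_loaded(), timeout_s=10)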
backend/module/web_scrap/ColaManga/get_list.py CHANGED
@@ -40,7 +40,7 @@ def scrap(orderBy:str="monthlyCount",page:int=1):
     object["id"] = id
     cover_link_split = li.find("a", {"class": "fed-list-pics"}).get("data-original").split("/")
     cover_id = cover_link_split[len(cover_link_split)-2]
-    object["cover"] = f"/api/web_scrap/colamanga/get_cover/{id}/{cover_id}/"
+    object["cover"] = f"/api/web_scrap/get_cover/colamanga/{id}/{cover_id}/"
 
     DATA.append(object)
 
backend/module/web_scrap/ColaManga/search.py CHANGED
@@ -40,7 +40,7 @@ def scrap(search:str="",page:int=1):
 
     cover_link_split = a.get("data-original").split("/")
     cover_id = cover_link_split[len(cover_link_split)-2]
-    object["cover"] = f"/api/web_scrap/colamanga/get_cover/{id}/{cover_id}/"
+    object["cover"] = f"/api/web_scrap/get_cover/colamanga/{id}/{cover_id}/"
 
     dd = dl.find("dd",{"class": "fed-deta-content"})
 
backend/module/web_scrap/__pycache__/utils.cpython-312.pyc CHANGED
Binary files a/backend/module/web_scrap/__pycache__/utils.cpython-312.pyc and b/backend/module/web_scrap/__pycache__/utils.cpython-312.pyc differ
 
backend/module/web_scrap/utils.py CHANGED
@@ -13,7 +13,7 @@ import time, threading
 class SeleniumScraper:
     def __init__(self):
         options = Options()
-        options.add_argument("--headless")
+        # options.add_argument("--headless")
         options.add_argument('--no-sandbox')
         options.add_argument("--no-quit")
         options.add_argument('--disable-extensions')
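Note: headless mode is disabled here by commenting the flag out. A hedged alternative sketch, assuming one wants to keep it switchable without editing the file (the HEADLESS env var is hypothetical and not defined by this repository):

import os
from selenium.webdriver.chrome.options import Options  # assumed import; the file's own imports are outside this hunk

options = Options()
if os.environ.get("HEADLESS", "0") == "1":
    options.add_argument("--headless=new")   # opt back into headless via environment variable
options.add_argument('--no-sandbox')
options.add_argument('--disable-extensions')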
backend/socket/__pycache__/queue.cpython-312.pyc CHANGED
Binary files a/backend/socket/__pycache__/queue.cpython-312.pyc and b/backend/socket/__pycache__/queue.cpython-312.pyc differ
 
backend/socket/queue.py CHANGED
@@ -26,7 +26,7 @@ class RequestChapter(WebsocketConsumer):
 
         SocketRequestChapterQueueCache.objects.filter(socket_id=user_socket_id).update(channel_name = self.channel_name)
 
-        print(f"User: ({user_socket_id}) connected to socket room: ({self.room_id})")
+
         self.accept()
 
         self.send(text_data=json.dumps({
@@ -46,6 +46,5 @@ class RequestChapter(WebsocketConsumer):
     def disconnect(self, close_code):
         user_socket_id = self.scope['url_route']['kwargs']['socket_id']
 
-        print(f"User: ({user_socket_id}) disconnected from socket room: ({self.room_id})")
 
 
backend/tests.py CHANGED
@@ -1,3 +1,7 @@
 from django.test import TestCase
-
-# Create your tests here.
+# from backend.module.utils import date_utils
+
+
+# current_time = date_utils.utc_time().get()
+# print(current_time > date_utils.utc_time().add(-10,'second').get())
+
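Note: the new tests.py only carries this probe as comments. A minimal sketch of the same check as an actual Django test case, assuming date_utils exposes the utc_time().add(...).get() chain used elsewhere in this commit:

from django.test import TestCase
from backend.module.utils import date_utils  # import path taken from the commented-out line above


class DateUtilsTest(TestCase):
    def test_now_is_later_than_ten_seconds_ago(self):
        current_time = date_utils.utc_time().get()
        ten_seconds_ago = date_utils.utc_time().add(-10, 'second').get()
        # "now" must always come after a timestamp ten seconds in the past
        self.assertGreater(current_time, ten_seconds_ago)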
 
 
backend/urls.py CHANGED
@@ -17,7 +17,7 @@ urlpatterns = [
     path('web_scrap/get_list/', web_scrap.get_list),
     path('web_scrap/search/', web_scrap.search),
    path('web_scrap/get/', web_scrap.get),
-    path('web_scrap/<str:source>/get_cover/<str:id>/<str:cover_id>/', web_scrap.get_cover),
+    path('web_scrap/get_cover/<str:source>/<str:id>/<str:cover_id>/', web_scrap.get_cover),
     path('web_scrap/get_chapter/', web_scrap.get_chapter),
 
 
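Note: the reordered route now matches the cover URLs built in get.py, get_list.py and search.py (/api/web_scrap/get_cover/colamanga/...). A minimal sketch of how the captured segments reach the view; only the signature follows from the route, the body is illustrative:

from django.http import HttpResponse
from django.urls import path

def get_cover(request, source, id, cover_id):
    # /api/web_scrap/get_cover/colamanga/<id>/<cover_id>/ arrives here as
    # source="colamanga", id="<id>", cover_id="<cover_id>"
    return HttpResponse(f"{source}/{id}/{cover_id}")

urlpatterns = [
    path('web_scrap/get_cover/<str:source>/<str:id>/<str:cover_id>/', get_cover),
]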
 
core/__pycache__/middleware.cpython-312.pyc CHANGED
Binary files a/core/__pycache__/middleware.cpython-312.pyc and b/core/__pycache__/middleware.cpython-312.pyc differ
 
frontend/app/index.tsx CHANGED
@@ -9,7 +9,7 @@ const Index = () => {
   const pathname = usePathname()
 
   if (pathname === "/" || pathname === "") return (
-    <Redirect href="/view/colamanga/manga-id90484" />
+    <Redirect href="/read/colamanga/manga-nf048578?idx=1" />
   )
 
 }
frontend/app/read/[source]/[comic_id]/index.tsx CHANGED
@@ -110,7 +110,7 @@ const Index = ({}:any) => {
     const new_chapter_idx = CHAPTER_IDX.current+1
     if (imageKeys.slice(-1)[0]?.type !== "no-chapter-banner"){
       const next_stored_chapter = await ChapterStorage.getByIdx(`${SOURCE}-${COMIC_ID}`,new_chapter_idx, {exclude_fields:["data"]})
-      if (next_stored_chapter.data_state === "completed"){
+      if (next_stored_chapter?.data_state === "completed"){
         if (temp_image_keys.current.hasOwnProperty(new_chapter_idx)){
           delete temp_image_keys.current[new_chapter_idx-2]
           const pre_image_keys = imageKeys.filter((data:any) => {
frontend/app/read/components/chapter_image.tsx CHANGED
@@ -38,6 +38,7 @@ const ChapterImage = ({item, zoom, showOptions,setShowOptions}:any)=>{
         display:"flex",
         width:"100%",
         height:"auto",
+        borderWidth:0,
         alignItems:"center",
       }}
     >
frontend/app/read/modules/get_chapter.tsx CHANGED
@@ -25,7 +25,7 @@ export const get_chapter = async (
         continue; // Skip directories
       }
       const fileData = await zipContent.files[fileName].async('base64');
-      file_keys.push({type:"image", idx:CHAPTER_IDX, value: `${CHAPTER_IDX}-${fileName}`})
+      file_keys.push({type:"image", idx:CHAPTER_IDX, id:Number(fileName.split(".")[0]), value: `${CHAPTER_IDX}-${fileName}`})
       files[`${CHAPTER_IDX}-${fileName}`] = {
         layout: await getImageLayout("data:image/png;base64," + fileData),
         data: "data:image/png;base64," + fileData
@@ -46,7 +46,7 @@ export const get_chapter = async (
         continue; // Skip directories
       }
       const fileData = await zipContent.files[fileName].async('base64');
-      file_keys.push({type:"image", idx:CHAPTER_IDX, value: `${CHAPTER_IDX}-${fileName}`})
+      file_keys.push({type:"image", idx:CHAPTER_IDX, id:Number(fileName.split(".")[0]), value: `${CHAPTER_IDX}-${fileName}`})
       files[`${CHAPTER_IDX}-${fileName}`] = {
         layout: await getImageLayout("data:image/png;base64," + fileData),
         data: "data:image/png;base64," + fileData
@@ -54,7 +54,8 @@ export const get_chapter = async (
       }
     }
 
-    file_keys.sort((a, b) => parseInt(a, 10) - parseInt(b, 10))
+    file_keys.sort((a, b) => Number(a.id - b.id))
+    console.log("HUH?",file_keys)
     return {file_keys: file_keys,files:files}
   }
 }
frontend/components/Image.tsx CHANGED
@@ -1,4 +1,4 @@
-import { useState, useEffect, useContext, useCallback } from "react"
+import { useState, useEffect, useContext, useCallback, useRef } from "react"
 import { Image as _Image } from 'expo-image';
 import { View } from "react-native"
 import ImageCacheStorage from "@/constants/module/image_cache_storage";
@@ -11,7 +11,8 @@ import { useFocusEffect } from "expo-router";
 
 
 const Image = ({source, style, onError, contentFit, transition, onLoad, onLoadEnd}:any) => {
-  const [imageData, setImageData]:any = useState(null)
+  const [imageLoaded, setImageLoaded]:any = useState(false)
+  const imageData:any = useRef(null)
   const [isError, setIsError]:any = useState(false)
   const {showCloudflareTurnstileContext, setShowCloudflareTurnstileContext}:any = useContext(CONTEXT)
   const controller = new AbortController();
@@ -20,28 +21,37 @@ const Image = ({source, style, onError, contentFit, transition, onLoad, onLoadEnd}:any) => {
     (async ()=>{
       if(source.hasOwnProperty("type")){
         if (source.type === "blob"){
-          setImageData({uri:await blobToBase64(source.data)})
+          imageData.current = {uri:await blobToBase64(source.data)}
+          setImageLoaded(true)
         }else if (source.type === "base64"){
-          setImageData({uri:source.data})
+          imageData.current = {uri:source.data}
+          setImageLoaded(true)
        }else if (source.type === "file_path"){
-          setImageData({uri:source.data})
+          imageData.current = {uri:source.data}
+          setImageLoaded(true)
        }else{
           setIsError(true)
+          setImageLoaded(false)
        }
       } else if (source.hasOwnProperty("uri")){
         const result:any = await ImageCacheStorage.get(setShowCloudflareTurnstileContext,source.uri,signal);
         if (result.type === "blob"){
-          setImageData({uri:await blobToBase64(result.data)})
+          imageData.current = {uri:await blobToBase64(result.data)}
+          setImageLoaded(true)
        }else if (result.type === "base64"){
-          setImageData({uri:result.data})
+          imageData.current = {uri:result.data}
+          setImageLoaded(true)
        }else if (result.type === "file_path"){
-          setImageData({uri:result.data})
+          imageData.current = {uri:result.data}
+          setImageLoaded(true)
        }else{
           setIsError(true)
+          setImageLoaded(false)
        }
 
       }else{
-        setImageData(source)
+        imageData.current = source
+        setImageLoaded(true)
       }
 
 
@@ -73,12 +83,12 @@ const Image = ({source, style, onError, contentFit, transition, onLoad, onLoadEnd}:any) => {
             <Icon source={"refresh-circle"} size={25} color={"yellow"}/>
           </Button>
         </View>
-        : <>{imageData
+        : <>{imageLoaded
 
 
           ? <_Image
               onError={onError}
-              source={imageData}
+              source={imageData.current}
               style={style}
              contentFit={contentFit}
              transition={transition}