import gradio as gr
import requests
import json
from PIL import Image
import urllib.request

main_directory = "https://services.swpc.noaa.gov/"

# Link template; PAGE_LINK is swapped for the actual URL when rendered.
html = """
<a href="PAGE_LINK" target="_blank">PAGE_LINK</a>
""" css=""" .img_box{ display: flex; flex-direction: column; flex-wrap: wrap; height: 20000px; } .img_class{ background: #ffffff; max-width: 48%; font-family: monospace; border-top: #9300ff; border-style: inset; margin-top: 5px; } .img_class_raw{ background: #ffffff; width: 100%; font-family: monospace; border-top: #9300ff; border-style: inset; margin-top: 5px; display:flex; flex-direction:column; } """ def load_json(url1="",url2="",url3="",url4=""): get_url=f'{main_directory}{url1}{url2}{url3}{url4}' print(f'{get_url}') get_url=get_url.split("None")[0] print(f'{get_url}') get_url=get_url.split("[]")[0] print(f'{get_url}') if get_url.endswith('.json'): feed1 = requests.get(get_url) return None, feed1.text elif get_url.endswith(".png") or get_url.endswith(".gif") or get_url.endswith(".jpg"): html_out=f"
" html_out+=f'
{get_url}
' return html_out, None return None,None def make_tree(url1="",url2="",url3="",url4=""): link_box=[] html_out="" get_url=f'{main_directory}{url1}{url2}{url3}{url4}' print(f'######### :: {get_url}') if not get_url.endswith('.json'): feed1 = requests.get(get_url) spl = feed1.text.split("href=") for line in spl: spl2 = line.split(">")[0] print(spl2) if spl2.endswith('/"') or spl2.endswith('.json"') or spl2.endswith('.png"') or spl2.endswith('.gif"') or spl2.endswith('.jpg"'): fin=line.split(">")[0].strip('""') link_box.append(fin) #html_out=html_out+html.replace("PAGE_LINK",fin) return gr.update(choices=[l for l in link_box],interactive=True) else: return None def get_images(): html_out=f"
" get_url=f'{main_directory}images/geospace/' feed1 = requests.get(get_url) spl = feed1.text.split("href=") for line in spl: spl2 = line.split(">")[0].strip('""') if spl2.endswith(".png") or spl2.endswith(".gif") or spl2.endswith(".jpg"): print(spl2) html_out+=f'
{get_url}{spl2}
' else: print(spl2) get_url2=f'{main_directory}images/' feed2 = requests.get(get_url2) spl = feed2.text.split("href=") for line2 in spl: spl2 = line2.split(">")[0].strip('""') if spl2.endswith(".png") or spl2.endswith(".gif") or spl2.endswith(".jpg"): print(spl2) html_out+=f'
{get_url2}{spl2}
' else: print(spl2) html_out+="
" return html_out def make_animation(): html_out=f"
" get_url=f'{main_directory}images/animations/' feed1 = requests.get(get_url) spl = feed1.text.split("href=") gif_box=[] for line in spl: spl2 = line.split(">")[0].strip('""') if spl2.endswith("/"): feed2 = requests.get(f'{get_url}{spl2}') spl3 = feed2.text.split("href=") for line2 in spl3: spl3 = line2.split(">")[0].strip('""') if spl3.endswith(".png") or spl3.endswith(".gif") or spl3.endswith(".jpg"): gif_box.append(f'{get_url}{spl2}{spl3}') #frames = [Image.open(image) for image in glob.glob(f"{frame_folder}/*.JPG")] frames = [] for i,ea in enumerate(gif_box): urllib.request.urlretrieve(ea,f'tmp{i}.png') frames.append(Image.open("gfg.png")) frame_one = frames[0] filename=f'{spl3.split(".")[0]}.gif' frame_one.save(filename, format="GIF", append_images=frames, save_all=True, duration=100, loop=0) html_out+=f'
' html_out+='
' return html_out def run(): out=make_tree() im_html=get_images() return out, im_html with gr.Blocks() as app: with gr.Tab("Images"): html_im=gr.HTML() with gr.Tab("Animations"): anim_btn=gr.Button() anim_out=gr.HTML() with gr.Tab("Raw"): with gr.Row(): drop1=gr.Dropdown() drop2=gr.Dropdown() drop3=gr.Dropdown() drop4=gr.Dropdown() load_btn=gr.Button("Load") html_raw=gr.HTML() links=gr.JSON() ###### Images ########## load_btn.click(load_json,[drop1,drop2,drop3,drop4],[html_raw,links]) anim_btn.click(make_animation,None,[anim_out]) ####### Raw ############ drop1.change(make_tree,drop1,[drop2]) drop2.change(make_tree,[drop1,drop2],[drop3]) drop3.change(make_tree,[drop1,drop2,drop3],[drop4]) ###################### app.load(run,None,[drop1,html_im]) app.launch()