Update app.py
app.py
CHANGED
@@ -1,5 +1,15 @@
-import
+import appStore.target as target_extraction
+import appStore.netzero as netzero
+import appStore.sector as sector
+import appStore.adapmit as adapmit
+import appStore.ghg as ghg
+import appStore.policyaction as policyaction
+import appStore.conditional as conditional
+import appStore.indicator as indicator
+import appStore.doc_processing as processing
 from utils.uploadAndExample import add_upload
+from PIL import Image
+import streamlit as st
 
 ####################################### Dashboard ######################################################
 
@@ -43,22 +53,22 @@ with st.expander("ℹ️ - About this app", expanded=False):
 #image = Image.open('docStore/img/flow.jpg')
 #st.image(image)
 #with c3:
-st.write("""
-What happens in the background?
+#st.write("""
+# What happens in the background?
 
-- Step 1: Once the document is provided to app, it undergoes *Pre-processing*.\
-In this step the document is broken into smaller paragraphs \
-(based on word/sentence count).
-- Step 2: The paragraphs are fed to **Target Classifier** which detects if
-the paragraph contains any *Target* related information or not.
-- Step 3: The paragraphs which are detected containing some target \
-related information are then fed to multiple classifier to enrich the
-Information Extraction.
+# - Step 1: Once the document is provided to app, it undergoes *Pre-processing*.\
+# In this step the document is broken into smaller paragraphs \
+# (based on word/sentence count).
+# - Step 2: The paragraphs are fed to **Target Classifier** which detects if
+# the paragraph contains any *Target* related information or not.
+# - Step 3: The paragraphs which are detected containing some target \
+# related information are then fed to multiple classifier to enrich the
+# Information Extraction.
 
-The Step 2 and 3 are repated then similarly for Action and Policies & Plans.
-""")
+# The Step 2 and 3 are repated then similarly for Action and Policies & Plans.
+# """)
 
-st.write("")
+#st.write("")
 
 
 apps = [processing.app, target_extraction.app, netzero.app, ghg.app,
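
The new imports and the apps list at the end of the diff point to a multi-page layout in which each appStore module exposes an app callable that renders one page. The snippet below is a minimal sketch of how such a list is commonly wired to a sidebar selector in Streamlit; the page names, the radio label, and run_selected_app are assumptions for illustration, since the actual dispatch code is cut off after the apps assignment.

import streamlit as st

# Hypothetical dispatcher: the real selection logic is not visible in this diff.
def run_selected_app(apps, app_names):
    choice = st.sidebar.radio("Go to", app_names)  # assumed sidebar navigation
    apps[app_names.index(choice)]()                # call the chosen page's app()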
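
The commented-out block in the second hunk describes the app's processing flow: the uploaded document is first split into smaller paragraphs, each paragraph is run through a Target Classifier, and the paragraphs flagged as target-related are passed on to further classifiers, with the same pattern repeated for Actions and Policies & Plans. A rough sketch of that flow, under stated assumptions, is given below; split_into_paragraphs, is_target, and the enrichers mapping are placeholders that do not appear in this commit, which wires the real classifiers through the appStore modules.

# Hypothetical outline of the described pipeline. The helpers passed in are
# placeholders for the pre-processing and classifier steps provided elsewhere
# in the repository; only the control flow mirrors the commented description.
def extract_information(document_text, split_into_paragraphs, is_target, enrichers):
    paragraphs = split_into_paragraphs(document_text)            # Step 1: pre-processing
    target_paragraphs = [p for p in paragraphs if is_target(p)]  # Step 2: target detection
    results = []
    for paragraph in target_paragraphs:                          # Step 3: enrichment by further classifiers
        results.append({name: classify(paragraph) for name, classify in enrichers.items()})
    return results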