ok
Browse files- app.py +90 -0
- requirements.txt +1 -0
app.py
ADDED
@@ -0,0 +1,90 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import json
|
2 |
+
import streamlit as st
|
3 |
+
import os
|
4 |
+
from huggingface_hub import HfApi, login
|
5 |
+
from streamlit_cookies_manager import EncryptedCookieManager
|
6 |
+
|
7 |
+
# Configure the page; must be the first Streamlit command executed in the script.
st.set_page_config(
    page_title="Holistic AI - ML Verticals papers",
    page_icon="👋",
    layout='wide'
)
|
12 |
+
|
13 |
+
def program():
    """Render the paper browser.

    Downloads the grouped-recommendations dataset from the Hugging Face Hub,
    lets the user pick a namespace, a paper type (Metrics/Mitigators) and a
    task in the sidebar, then renders each recommendation group as an HTML
    card with numbered source links.

    Side effects: Streamlit UI rendering; logs in to the Hub with HF_TOKEN
    and downloads (or reuses a cached) dataset snapshot.
    """
    st.title("Papers")

    dataset_name = "holistic-ai/mitigation_ml_bias_strategies"
    # Token for the (presumably private) dataset repo; set in the environment.
    token = os.getenv("HF_TOKEN")

    api = HfApi()
    login(token)

    # Snapshot is cached locally by huggingface_hub, so reruns are cheap.
    repo_path = api.snapshot_download(repo_id=dataset_name, repo_type="dataset")
    # Top-level dataset dirs are the selectable namespaces; skip dotfiles (.gitattributes etc.).
    dirnames = [dirname for dirname in os.listdir(repo_path) if not dirname.startswith(".")]
    dirnames = sorted(dirnames, key=len)  # shortest names first

    st.sidebar.title("Namespaces")
    selected_namespace = st.sidebar.selectbox("Select Namespace", dirnames)

    selected_paper_type = st.sidebar.selectbox("Select Paper Type", ['Metrics', "Mitigators"])

    if selected_namespace:
        # One mapping instead of duplicated open/load branches; also guarantees
        # `data` is always bound for the two possible selectbox values.
        paper_files = {
            'Metrics': 'grouped_metrics.json',
            'Mitigators': 'grouped_mitigators.json',
        }
        with open(f'{repo_path}/{selected_namespace}/{paper_files[selected_paper_type]}') as file:
            data = json.load(file)

        task_names = list(data.keys())

        st.sidebar.title("Tasks")
        selected_task = st.sidebar.selectbox("Select a Task", task_names)

        if selected_task:
            st.header(selected_task)
            results = data[selected_task]
            # Index recommendations by their (stringified) id for lookup below.
            rec = {str(r['id']): r for r in results['recommendations']}
            for group in results['groups']:
                # `ids` is a comma-separated string of recommendation ids.
                ids = [i.strip() for i in group['ids'].split(",")]
                # NOTE(review): assumes metadata['id'] holds the paper URL — confirm against dataset schema.
                rec2html = '\n'.join([f"""<p>[{i+1}] <a href="{rec[idx]['metadata']['id']}" target="_blank">{rec[idx]['title']}</a></p>""" for i, idx in enumerate(ids)])
                # Drop the "<prefix>: " part of the title if present; [-1] (unlike
                # the original [1]) does not raise IndexError when there is no colon.
                title = group['title'].split(':', 1)[-1].strip()
                st.markdown(f"""
                <div style="border:1px solid #ccc; padding:10px; margin:10px 0; border-radius:5px;">
                <p><b>{title}</b></p>
                <p>{group['recommendation']}</p>
                <p>Sources:</p>
                {rec2html}
                </div>
                """, unsafe_allow_html=True)
|
61 |
+
|
62 |
+
# Shared password that gates access to the app; supplied via the environment
# (e.g. Space secrets). If unset, os.getenv returns None and no login can succeed.
SECRET_KEY = os.getenv('SECRET_KEY')

# Encrypted browser cookies used to remember a successful login across reruns.
cookies = EncryptedCookieManager(
    prefix="login",
    # Key used to encrypt the cookie payload — must also be set in the environment.
    password=os.getenv('COOKIES_PASSWORD')
)

# The cookie component needs a round-trip to the browser before values are
# readable; halt this script run until it is ready (Streamlit reruns automatically).
if not cookies.ready():
    st.stop()
|
71 |
+
|
72 |
+
def main():
    """Entry point: show the password gate, then the paper browser once authenticated.

    Authentication state is kept in the encrypted "authenticated" cookie so a
    logged-in user survives Streamlit reruns and page reloads.
    """
    # Application title
    st.title("Holistic AI - ML Papers")

    if not cookies.get("authenticated"):
        # Prompt for the shared secret
        user_key = st.text_input("Password:", type="password")

        if st.button("Login"):
            # Check whether the entered key matches the configured secret
            if user_key == SECRET_KEY:
                # Plain item assignment instead of calling __setitem__ directly.
                cookies["authenticated"] = "True"
                # Persist the cookie to the browser before rerunning; without
                # save() the value may be lost on the rerun (per
                # streamlit-cookies-manager docs).
                cookies.save()
                st.experimental_rerun()
            else:
                st.error("Access not granted. Incorrect Password.")
    else:
        program()


if __name__ == "__main__":
    main()
|
requirements.txt
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
streamlit-cookies-manager
|