Spaces:
Running
Running
bitliu
committed on
Commit
·
bb1b839
1
Parent(s):
e8f8a2d
init
Browse files
Signed-off-by: bitliu <[email protected]>
app.py
CHANGED
|
@@ -180,56 +180,80 @@ def main():
|
|
| 180 |
st.markdown("- [Models](https://huggingface.co/LLM-Semantic-Router)")
|
| 181 |
st.markdown("- [GitHub](https://github.com/vllm-project/semantic-router)")
|
| 182 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 183 |
# Main content
|
| 184 |
-
|
|
|
|
| 185 |
|
| 186 |
-
|
| 187 |
-
|
| 188 |
-
|
| 189 |
-
|
| 190 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 191 |
else:
|
| 192 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 193 |
|
| 194 |
-
|
| 195 |
-
|
| 196 |
-
|
| 197 |
-
|
| 198 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 199 |
else:
|
| 200 |
-
|
| 201 |
-
if model_config["type"] == "sequence":
|
| 202 |
-
label, emoji, conf, scores = classify_sequence(
|
| 203 |
-
text_input, model_config["id"], model_config["labels"]
|
| 204 |
-
)
|
| 205 |
-
st.success(f"{emoji} **{label}**")
|
| 206 |
-
st.metric("Confidence", f"{conf:.1%}")
|
| 207 |
-
with st.expander("All scores"):
|
| 208 |
-
sorted_scores = dict(sorted(scores.items(), key=lambda x: x[1], reverse=True))
|
| 209 |
-
for k, v in sorted_scores.items():
|
| 210 |
-
st.progress(v, text=f"{k}: {v:.1%}")
|
| 211 |
-
else:
|
| 212 |
-
entities = classify_tokens(text_input, model_config["id"])
|
| 213 |
-
if entities:
|
| 214 |
-
st.success(f"Found {len(entities)} PII entity(s)")
|
| 215 |
-
for e in entities:
|
| 216 |
-
st.markdown(f"- **{e['type']}**: `{e['text']}`")
|
| 217 |
-
else:
|
| 218 |
-
st.info("✅ No PII detected")
|
| 219 |
-
|
| 220 |
-
# Show highlighted text for NER
|
| 221 |
-
if model_config["type"] == "token" and text_input.strip():
|
| 222 |
-
if "last_ner_input" in st.session_state and st.session_state.last_ner_input == text_input:
|
| 223 |
-
st.markdown("### Highlighted Text")
|
| 224 |
-
components.html(create_highlighted_html(text_input, st.session_state.last_entities), height=150)
|
| 225 |
-
|
| 226 |
-
# Store NER results for display
|
| 227 |
-
if st.button("π Analyze", key="hidden", disabled=True, type="secondary"):
|
| 228 |
-
pass # Placeholder
|
| 229 |
-
if model_config["type"] == "token" and text_input.strip():
|
| 230 |
-
entities = classify_tokens(text_input, model_config["id"])
|
| 231 |
-
st.session_state.last_ner_input = text_input
|
| 232 |
-
st.session_state.last_entities = entities
|
| 233 |
|
| 234 |
# Footer
|
| 235 |
st.markdown("---")
|
|
|
|
| 180 |
st.markdown("- [Models](https://huggingface.co/LLM-Semantic-Router)")
|
| 181 |
st.markdown("- [GitHub](https://github.com/vllm-project/semantic-router)")
|
| 182 |
|
| 183 |
+
# Initialize session state
|
| 184 |
+
if "text_input" not in st.session_state:
|
| 185 |
+
st.session_state.text_input = ""
|
| 186 |
+
if "result" not in st.session_state:
|
| 187 |
+
st.session_state.result = None
|
| 188 |
+
|
| 189 |
# Main content
|
| 190 |
+
st.subheader("π Input")
|
| 191 |
+
text_input = st.text_area("Enter text to analyze:", height=120, placeholder="Type your text here...", key="input_area")
|
| 192 |
|
| 193 |
+
# Examples section - clickable buttons
|
| 194 |
+
st.markdown("**π‘ Try an example:**")
|
| 195 |
+
example_cols = st.columns(len(model_config["examples"]))
|
| 196 |
+
for i, (col, example) in enumerate(zip(example_cols, model_config["examples"])):
|
| 197 |
+
with col:
|
| 198 |
+
if st.button(f"Example {i+1}", key=f"ex_{i}", use_container_width=True, help=example):
|
| 199 |
+
st.session_state.text_input = example
|
| 200 |
+
st.rerun()
|
| 201 |
+
|
| 202 |
+
# Show example text preview
|
| 203 |
+
st.caption("Hover over buttons to preview examples")
|
| 204 |
+
|
| 205 |
+
st.markdown("---")
|
| 206 |
+
|
| 207 |
+
# Analyze button
|
| 208 |
+
if st.button("π Analyze", type="primary", use_container_width=True):
|
| 209 |
+
if not text_input.strip():
|
| 210 |
+
st.warning("Please enter some text to analyze.")
|
| 211 |
else:
|
| 212 |
+
with st.spinner("Analyzing..."):
|
| 213 |
+
if model_config["type"] == "sequence":
|
| 214 |
+
label, emoji, conf, scores = classify_sequence(
|
| 215 |
+
text_input, model_config["id"], model_config["labels"]
|
| 216 |
+
)
|
| 217 |
+
st.session_state.result = {
|
| 218 |
+
"type": "sequence",
|
| 219 |
+
"label": label,
|
| 220 |
+
"emoji": emoji,
|
| 221 |
+
"confidence": conf,
|
| 222 |
+
"scores": scores
|
| 223 |
+
}
|
| 224 |
+
else:
|
| 225 |
+
entities = classify_tokens(text_input, model_config["id"])
|
| 226 |
+
st.session_state.result = {
|
| 227 |
+
"type": "token",
|
| 228 |
+
"entities": entities,
|
| 229 |
+
"text": text_input
|
| 230 |
+
}
|
| 231 |
|
| 232 |
+
# Display results
|
| 233 |
+
if st.session_state.result:
|
| 234 |
+
st.markdown("---")
|
| 235 |
+
st.subheader("π Results")
|
| 236 |
+
result = st.session_state.result
|
| 237 |
+
if result["type"] == "sequence":
|
| 238 |
+
col1, col2 = st.columns([1, 1])
|
| 239 |
+
with col1:
|
| 240 |
+
st.success(f"{result['emoji']} **{result['label']}**")
|
| 241 |
+
st.metric("Confidence", f"{result['confidence']:.1%}")
|
| 242 |
+
with col2:
|
| 243 |
+
st.markdown("**All Scores:**")
|
| 244 |
+
sorted_scores = dict(sorted(result["scores"].items(), key=lambda x: x[1], reverse=True))
|
| 245 |
+
for k, v in sorted_scores.items():
|
| 246 |
+
st.progress(v, text=f"{k}: {v:.1%}")
|
| 247 |
+
else:
|
| 248 |
+
entities = result["entities"]
|
| 249 |
+
if entities:
|
| 250 |
+
st.success(f"Found {len(entities)} PII entity(s)")
|
| 251 |
+
for e in entities:
|
| 252 |
+
st.markdown(f"- **{e['type']}**: `{e['text']}`")
|
| 253 |
+
st.markdown("### Highlighted Text")
|
| 254 |
+
components.html(create_highlighted_html(result["text"], entities), height=150)
|
| 255 |
else:
|
| 256 |
+
st.info("✅ No PII detected")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 257 |
|
| 258 |
# Footer
|
| 259 |
st.markdown("---")
|