import requests
import streamlit as st
import time
from transformers import pipeline
import os
from .utils import query

HF_AUTH_TOKEN = os.getenv('HF_AUTH_TOKEN')
headers = {"Authorization": f"Bearer {HF_AUTH_TOKEN}"}
def write():
    st.markdown("# Natural Language Inference")
    st.sidebar.header("Natural Language Inference")
    st.write(
        """Here, you can perform Natural Language Inference using the fine-tuned TURNA NLI model. """
    )
    # Default examples are in Turkish, the language the TURNA NLI model works on.
    # Hypothesis: "There are many artists in Temple Bar."
    hypothesis = st.text_area(label='Hypothesis: ', height=50,
                              value="Temple Bar'da çok sanatçı var.")
    # Premise: "Temple Bar is completely devoid of any musicians or artists."
    premise = st.text_area(label='Premise: ', height=50,
                           value="Temple Bar herhangi bir müzisyen veya sanatçıdan tamamen yoksundur.")
    url = "https://api-inference.huggingface.co/models/boun-tabi-LMG/turna_nli_nli_tr"
    params = {"max_new_tokens": 8}
    if st.button("Generate"):
        with st.spinner('Generating...'):
            # "hipotez" and "önerme" are Turkish for "hypothesis" and "premise".
            output = query(f"hipotez: {hypothesis} önerme: {premise}", url, params)
            st.success(output)
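# Note: `query` is imported from .utils and is not defined in this file. A
# minimal sketch of what such a helper might look like, assuming it POSTs the
# input text to the Hugging Face Inference API endpoint with the bearer-token
# headers defined above (the actual helper in .utils may differ in signature
# and payload shape):
#
#     def query(text, url, params):
#         payload = {"inputs": text, "parameters": params}
#         response = requests.post(url, headers=headers, json=payload)
#         return response.json()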