Jonathan Bejarano committed
Commit c2d48e1 · 1 Parent(s): a58fa96

Deploy this

Files changed (1)
  1. app.py +6 -0
app.py CHANGED
@@ -73,6 +73,11 @@ def respond(
     """
     For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
     """
+    # Check if user is logged in
+    if not hf_token or not hf_token.token:
+        yield "Please log in with your HuggingFace account to play the geography game!"
+        return
+
     client = InferenceClient(token=hf_token.token, model="meta-llama/Llama-3.2-3B-Instruct")

     # Generate a new system message with random country for new conversations
@@ -128,6 +133,7 @@ chatbot = gr.ChatInterface(
         ["Is this country located on an island?"],
         ["Is the currency the Euro?"],
     ],
+    cache_examples=False,  # Disable caching to prevent login errors during deployment
     additional_inputs=[
         gr.Textbox(visible=False, value="Geography game placeholder - will be replaced with random country", label="System message"),
         gr.Slider(visible=False, minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
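For context, the login guard added above relies on Gradio's Hugging Face OAuth support: when a chat function declares a parameter annotated as gr.OAuthToken, Gradio injects the logged-in user's token (or None for anonymous visitors). The sketch below is a simplified illustration of that wiring, assuming a gr.LoginButton on the Space and a trimmed-down respond signature (the real app also takes the hidden system-message and max-tokens inputs shown in the diff); it is not the Space's actual app.py.

# Sketch only: assumes Hugging Face OAuth is enabled on the Space via gr.LoginButton.
import gradio as gr
from huggingface_hub import InferenceClient


def respond(message, history, hf_token: gr.OAuthToken | None = None):
    # Gradio fills hf_token for logged-in users and leaves it None otherwise,
    # so the guard added in this commit short-circuits before any API call.
    if not hf_token or not hf_token.token:
        yield "Please log in with your HuggingFace account to play the geography game!"
        return

    client = InferenceClient(token=hf_token.token, model="meta-llama/Llama-3.2-3B-Instruct")
    response = ""
    # Stream the reply back to the ChatInterface (history ignored in this sketch).
    for chunk in client.chat_completion(
        [{"role": "user", "content": message}], max_tokens=512, stream=True
    ):
        response += chunk.choices[0].delta.content or ""
        yield response


with gr.Blocks() as demo:
    gr.LoginButton()
    gr.ChatInterface(respond, cache_examples=False)

if __name__ == "__main__":
    demo.launch()

Disabling cache_examples also fits this picture: example caching runs respond at startup, before any user has logged in, so cached runs would hit the new login guard or fail for lack of a token, which is presumably the "login errors during deployment" the commit comment refers to.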