jfelipenc committed on
Commit
37638df
โ€ข
1 Parent(s): b16001c

Altered token

Browse files
Files changed (1) hide show
  1. app.py +3 -37
app.py CHANGED
@@ -1,5 +1,4 @@
1
- import random
2
- import time
3
  import torch
4
  import gradio as gr
5
  import requests
@@ -9,7 +8,8 @@ from textwrap import wrap, fill
9
 
10
  ## using Falcon 7b Instruct
11
  Falcon_API_URL = "https://api-inference.huggingface.co/models/tiiuae/falcon-7b-instruct"
12
- HEADERS = {"Authorization": "Bearer <HF TOKEN>"}
 
13
  def falcon_query(payload):
14
  response = requests.post(Falcon_API_URL, headers=HEADERS, json=payload)
15
  return response.json()
@@ -33,40 +33,6 @@ def wrap_text(text, width=90):
33
  wrapped_text = '\n'.join(wrapped_lines)
34
  return wrapped_text
35
 
36
- def multimodal_prompt(user_input, system_prompt):
37
- """
38
- Generates text using a large language model, given a user input and a system prompt.
39
- Args:
40
- user_input: The user's input text to generate a response for.
41
- system_prompt: Optional system prompt.
42
- Returns:
43
- A string containing the generated text in the Falcon-like format.
44
- """
45
- # Combine user input and system prompt
46
- formatted_input = f"{{{{ {system_prompt} }}}}\nUser: {user_input}\nFalcon:"
47
-
48
- # Encode the input text
49
- encodeds = tokenizer(formatted_input, return_tensors="pt", add_special_tokens=False)
50
- model_inputs = encodeds.to(device)
51
-
52
- # Generate a response using the model
53
- output = peft_model.generate(
54
- **model_inputs,
55
- max_length=500,
56
- use_cache=True,
57
- early_stopping=False,
58
- bos_token_id=peft_model.config.bos_token_id,
59
- eos_token_id=peft_model.config.eos_token_id,
60
- pad_token_id=peft_model.config.eos_token_id,
61
- temperature=0.4,
62
- do_sample=True
63
- )
64
-
65
- # Decode the response
66
- response_text = tokenizer.decode(output[0], skip_special_tokens=True)
67
-
68
- return response_text
69
-
70
  class ChatbotInterface():
71
  def __init__(self, name, system_prompt="You are an expert medical analyst that helps users with any medical related information."):
72
  self.name = name
 
1
+ import os
 
2
  import torch
3
  import gradio as gr
4
  import requests
 
8
 
9
  ## using Falcon 7b Instruct
10
  Falcon_API_URL = "https://api-inference.huggingface.co/models/tiiuae/falcon-7b-instruct"
11
# Read the Hugging Face API token from the environment (never hard-code it).
hf_token = os.getenv("HUGGINGFACE_TOKEN")
# Bug fix: the original used a plain string, so the literal text
# "Bearer {hf_token}" was sent as the Authorization header and every
# request failed auth. An f-string is required to interpolate the token.
HEADERS = {"Authorization": f"Bearer {hf_token}"}
13
def falcon_query(payload):
    """POST *payload* to the Falcon-7B-Instruct inference endpoint and return the parsed JSON.

    Args:
        payload: JSON-serializable dict for the HF Inference API,
            e.g. {"inputs": "..."}.

    Returns:
        The decoded JSON response (dict or list, per the Inference API).

    Raises:
        requests.Timeout: if the endpoint does not respond within 60 s.
    """
    # A timeout is mandatory: requests.post without one can hang the app
    # indefinitely if the hosted inference endpoint stalls.
    response = requests.post(Falcon_API_URL, headers=HEADERS, json=payload, timeout=60)
    return response.json()
 
33
  wrapped_text = '\n'.join(wrapped_lines)
34
  return wrapped_text
35
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
36
  class ChatbotInterface():
37
  def __init__(self, name, system_prompt="You are an expert medical analyst that helps users with any medical related information."):
38
  self.name = name