Upload app (5).py
app (5).py
ADDED
from telegram import Update, ChatAction
from telegram.ext import Updater, CommandHandler, CallbackContext, MessageHandler, Filters
import requests
import os


def hello(update: Update, context: CallbackContext) -> None:
    """Reply to /start with a short introduction."""
    intro_text = """
🤖 Greetings human! \n
🤗 I'm a bot hosted on Hugging Face Spaces. \n
🦾 I can query the mighty GPT-J-6B model and send you a response here. Try me.\n
✉️ Send me a text to start and I shall generate a response to complete your text!\n\n
‼️ PS: Responses are not my own (everything's from GPT-J-6B). I'm not conscious (yet).\n

Blog post: https://dicksonneoh.com/portfolio/deploy_gpt_hf_models_on_telegram/
"""
    update.message.reply_text(intro_text)


def get_gpt_response(text):
    # Forward the user's text to the GPT-J-6B Space API and return the generated completion.
    r = requests.post(
        url="https://hf.space/embed/dnth/gpt-j-6B/+/api/predict/",
        json={"data": [text]},
    )
    response = r.json()
    return response["data"][0]


def respond_to_user(update: Update, context: CallbackContext):
    # Show a "typing..." indicator while the model generates a reply.
    update.message.chat.send_action(action=ChatAction.TYPING)
    response_text = get_gpt_response(update.message.text)
    update.message.reply_text(response_text)


# Wire up the bot (python-telegram-bot v13-style API): /start shows the intro,
# any other non-command text message is completed by GPT-J-6B.
updater = Updater(os.environ['telegram_token'])
updater.dispatcher.add_handler(CommandHandler("start", hello))
updater.dispatcher.add_handler(MessageHandler(Filters.text & ~Filters.command, respond_to_user))
updater.start_polling()
updater.idle()
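For reference, a quick standalone sanity check of the Space endpoint that get_gpt_response() posts to. This is a sketch to run separately, not part of app (5).py; the prompt string and the 60-second timeout are illustrative assumptions, while the URL and payload shape are taken from the code above.

import requests

# Same endpoint and payload shape as get_gpt_response() above.
r = requests.post(
    "https://hf.space/embed/dnth/gpt-j-6B/+/api/predict/",
    json={"data": ["The quick brown fox"]},
    timeout=60,  # assumed value; the GPT-J-6B Space can take a while to respond
)
print(r.json()["data"][0])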