from flask import Flask, request, jsonify
from transformers import AutoTokenizer, AutoModelForCausalLM
from flask_cors import CORS

app = Flask(__name__)
CORS(app)  # Enable CORS for all routes (you can restrict this if needed)

# Load BloomZ model and tokenizer
tokenizer = AutoTokenizer.from_pretrained("bigscience/bloomz-1b1")
model = AutoModelForCausalLM.from_pretrained("bigscience/bloomz-1b1")

@app.route('/send_message', methods=['POST'])
def send_message():
    try:
        # Get the incoming message from the request
        data = request.get_json()
        user_message = data['message']

        # Tokenize the input message
        inputs = tokenizer(user_message, return_tensors="pt")

        # Generate response from the model
        outputs = model.generate(inputs['input_ids'], max_length=50, num_return_sequences=1)

        # Decode only the newly generated tokens so the reply does not echo the user's prompt
        bot_reply = tokenizer.decode(outputs[0][inputs['input_ids'].shape[-1]:], skip_special_tokens=True)

        # Return the response as a JSON
        return jsonify({'response': bot_reply})
    except Exception as e:
        return jsonify({'error': str(e)}), 500


if __name__ == "__main__":
    app.run(host="0.0.0.0", port=5000)
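
# Example request (a sketch, assuming the server is running locally on port 5000;
# adjust the host/port to match your deployment):
#
#   curl -X POST http://localhost:5000/send_message \
#        -H "Content-Type: application/json" \
#        -d '{"message": "Hello, how are you?"}'
#
# The endpoint responds with JSON of the form {"response": "<model reply>"}.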