"""Minimal Flask API serving text generation from the BloomZ-1b1 model.

POST /send_message with a JSON body {"message": "..."} and receive
{"response": "..."} containing the model's generated text.
"""

from flask import Flask, request, jsonify
from flask_cors import CORS
from transformers import AutoTokenizer, AutoModelForCausalLM

app = Flask(__name__)
CORS(app)  # Enable CORS for all routes (restrict origins in production if needed)

# Load BloomZ model and tokenizer once at import time — loading is expensive,
# and the objects are shared (read-only) across all requests.
tokenizer = AutoTokenizer.from_pretrained("bigscience/bloomz-1b1")
model = AutoModelForCausalLM.from_pretrained("bigscience/bloomz-1b1")


@app.route('/send_message', methods=['POST'])
def send_message():
    """Generate a model reply for the request's 'message' field.

    Expects: JSON body with a string under the key 'message'.
    Returns: 200 with {'response': <generated text>} on success,
             400 with {'error': ...} for a missing/invalid payload,
             500 with {'error': ...} if generation fails.
    """
    # Validate the payload explicitly so malformed client input yields a
    # 400 client error instead of an unhandled KeyError -> 500.
    data = request.get_json(silent=True)
    if not data or 'message' not in data:
        return jsonify({'error': "JSON body with a 'message' field is required"}), 400
    user_message = data['message']

    try:
        # Tokenize the input message.
        inputs = tokenizer(user_message, return_tensors="pt")

        # Generate a continuation. Pass attention_mask explicitly, and use
        # max_new_tokens (tokens to *add*) rather than max_length (total cap,
        # which silently truncates replies for prompts near 50 tokens).
        outputs = model.generate(
            inputs['input_ids'],
            attention_mask=inputs['attention_mask'],
            max_new_tokens=50,
            num_return_sequences=1,
        )

        # Decode the generated token ids back into text.
        bot_reply = tokenizer.decode(outputs[0], skip_special_tokens=True)
        return jsonify({'response': bot_reply})
    except Exception as e:  # service boundary: report failure as JSON, not a traceback
        return jsonify({'error': str(e)}), 500


if __name__ == "__main__":
    app.run(host="0.0.0.0", port=5000)