from flask import Flask, request, jsonify
import groq_LPU_inference

app = Flask(__name__)

@app.route('/chat', methods=['GET'])  # Route path assumed from the function name; the original snippet omitted the decorator
def chat():
    query = request.args.get('query')  # Query text is read from the URL query string
    model = str(request.args.get('model', "mistral"))  # Optional parameter with default value
    max_tokens = int(request.args.get('max_tokens', 300))  # Optional parameter with default value
    temperature = float(request.args.get('temperature', 0.7))  # Optional parameter with default value
    assistant = str(request.args.get('assistant', ""))  # Optional parameter with default value
    system = str(request.args.get('system', "Be Helpful and Friendly. Keep your response straightforward, short and concise"))  # Optional parameter with default value

    # Developer information
    developer_info = {
        'developer': 'Devs Do Code',
        'contact': {
            'Telegram': 'https://t.me/devsdocode',
            'YouTube Channel': 'https://www.youtube.com/@DevsDoCode',
            'Discord Server': 'https://discord.gg/ehwfVtsAts',
            'Instagram': {
                'Personal': 'https://www.instagram.com/sree.shades_/',
                'Channel': 'https://www.instagram.com/devsdocode_/'
            }
        }
    }

    if query:
        response = groq_LPU_inference.Groq_Inference(query, model=model, system=system, assistant=assistant, temp=temperature, max_tokens=max_tokens)
        return jsonify([{'response': response}, {'developer_info': developer_info}])
    else:
        error_message = {
            'developer_contact': {
                'telegram': 'https://t.me/DevsDoCode',
                'instagram': 'https://www.instagram.com/sree.shades_/',
                'discord': 'https://discord.gg/ehwfVtsAts',
                'linkedin': 'https://www.linkedin.com/in/developer-sreejan/',
                'twitter': 'https://twitter.com/Anand_Sreejan'
            },
            'error': 'Oops! Something went wrong. Please contact the developer Devs Do Code.'
        }
        return jsonify(error_message), 400

if __name__ == '__main__':
    app.run(debug=True)
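
Once the server is running, the endpoint can be exercised with a GET request carrying the query and optional parameters. The sketch below is a minimal usage example, not part of the original app: it assumes Flask's default local address (http://127.0.0.1:5000) and the /chat route path added above, which was not present in the original snippet.

# Minimal usage sketch. Assumptions: the app runs locally on Flask's default
# port, and the route is registered as /chat (not confirmed by the source).
import requests

params = {
    'query': 'Explain LPUs in one sentence.',
    'model': 'mistral',      # optional, defaults to "mistral"
    'max_tokens': 300,       # optional, defaults to 300
    'temperature': 0.7,      # optional, defaults to 0.7
}

resp = requests.get('http://127.0.0.1:5000/chat', params=params)
print(resp.status_code)
print(resp.json())  # on success: [{'response': ...}, {'developer_info': ...}]

Omitting the query parameter returns the error payload with HTTP status 400, matching the else branch above.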