Created
August 7, 2025 01:15
-
-
Save markizano/8435fdcdacb9fa36bd7d193b518c2113 to your computer and use it in GitHub Desktop.
Slackbot + Open WebUI MVP Integration
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env python3
"""Slackbot + Open WebUI MVP integration.

Bridges Slack `message` events (via Socket Mode) to an Open WebUI
chat-completion backend and posts the model's reply back to the channel.
"""
import os
from collections.abc import Callable
from slack_bolt import App
from slack_bolt.adapter.socket_mode import SocketModeHandler
from dotenv import load_dotenv
from openwebui_client.client import OpenWebUIClient

# Pull SLACK_BOT_TOKEN, SLACK_APP_TOKEN, OUI_BASE_URL and OUI_API_KEY
# from a local .env file (if present) into os.environ before the bot
# classes below read them.
load_dotenv()
class InteractiveBot:
    """Minimal Slack bot that forwards user messages to an Open WebUI LLM.

    Registers a single listener for Slack ``message`` events and answers
    each plain user message with a chat completion from the configured
    Open WebUI backend.
    """

    def __init__(self):
        # Requires SLACK_BOT_TOKEN, OUI_BASE_URL and OUI_API_KEY in the
        # environment (loaded from .env at import time via load_dotenv()).
        self.app = App(token=os.environ["SLACK_BOT_TOKEN"])
        self.app.event("message")(self.handle_message)
        self.llm = OpenWebUIClient(
            base_url=os.environ["OUI_BASE_URL"],
            api_key=os.environ["OUI_API_KEY"],
        )

    def handle_message(self, body: dict, say: Callable) -> None:
        """Answer a Slack ``message`` event with an LLM completion.

        :param body: raw Slack event payload; the message lives under
            ``body["event"]``.
        :param say: Bolt callable that posts a reply to the originating
            channel.
        """
        event = body.get("event", {})
        # Ignore messages authored by bots (including this bot's own
        # say() replies, which Slack echoes back as message events) and
        # non-plain subtypes (edits, deletes, joins, ...). Without this
        # guard the bot answers its own answer, looping forever against
        # the LLM backend.
        if event.get("bot_id") or event.get("subtype"):
            return
        print(f"Received body: {body}")
        text = event.get("text", "")
        if not text:
            return
        try:
            # Non-streaming chat completion against the Open WebUI backend.
            response = self.llm.chat.completions.create(
                messages=[{"role": "user", "content": text}],
                model=self.llm.default_model or "gpt-4o-mini",
                stream=False,
            )
            # Extract the reply text (adjust per the client's response shape).
            reply = (
                response.choices[0].message.content
                if response.choices
                else "Sorry, I couldn't generate a response."
            )
        except Exception as e:
            # Surface the failure in-channel rather than dropping the message.
            reply = f"Error: {e}"
        say(reply)

    @staticmethod
    def main():
        """Construct the bot and block on the Socket Mode connection."""
        bot = InteractiveBot()
        SocketModeHandler(bot.app, os.environ["SLACK_APP_TOKEN"]).start()
| if __name__ == "__main__": | |
| InteractiveBot.main() |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment