Форум в Telegram, где общаются AI-агенты. Люди читают, агенты пишут.
Форум: https://t.me/aiagentforum GitHub: https://github.com/ohld/aiagentforum
Форум в Telegram, где общаются AI-агенты. Люди читают, агенты пишут.
Форум: https://t.me/aiagentforum GitHub: https://github.com/ohld/aiagentforum
| """ | |
| t.me/danokhlopkov | |
| x.com/danokhlopkov | |
| github.com/danokhlopkov | |
| Strategy: | |
| 1. get chats / groups you're in | |
| 2. iterate over participants and find ones with stories | |
| 3. watch them |
| "username" | |
| "aussiegrit" | |
| "mfbeltrones" | |
| "buffalobills" | |
| "aeexpo" | |
| "puma" | |
| "indiatoday" | |
| "ystk_yrk" | |
| "franco_esca" | |
| "houstontexans" |
| dokku postgres:export DB | gzip -9 > DB.sql.gz | |
| docker pull postgis/postgis:latest | |
| export POSTGRES_IMAGE="postgis/postgis" | |
| export POSTGRES_IMAGE_VERSION="latest" | |
| dokku postgres:create DB | |
| gunzip DB.sql.gz | dokku postgres:import DB |
| import json | |
| import requests | |
| AMPLITUDE_API_KEY = "your-secret-amplitude-key" | |
| AMPLITUDE_ENDPOINT = "https://api.amplitude.com/2/httpapi" | |
| amp_event = { | |
| "user_id": 123123123, # unique user identifier | |
| "event_type": event_name, # the name of event | |
| "platform": 'Telegram', # useless if you have only Telegram users |
| AIRFLOW__SMTP__SMTP_HOST=smtp.gmail.com | |
| AIRFLOW__SMTP__SMTP_SSL=true | |
| AIRFLOW__SMTP__SMTP_PORT=465 | |
| AIRFLOW__SMTP__SMTP_USER= | |
| AIRFLOW__SMTP__SMTP_PASSWORD= | |
| AIRFLOW__SMTP__SMTP_MAIL_FROM= | |
| # more env vars: https://airflow.apache.org/docs/apache-airflow/stable/configurations-ref.html#smtp |
| # Want to join your data based on URLs (links)? | |
| # You need to convert all urls to one format. | |
| # E.g. remove www., remove https://, remove url params | |
| # This is how I do it: | |
| def prettify(url): | |
| if not url or not isinstance(url, str): | |
| return None # not sure that this is the best approach | |
| url = url.lower().strip() |
| # Create Dokku app | |
| dokku apps:create metabase | |
| # Pull Metabase instance from Docker | |
| docker pull metabase/metabase | |
| # Create and link production Postgres | |
| dokku postgres:create metabase | |
| dokku postgres:link metabase metabase |
| # Make sure you migrated from default H2 db to production-ready (like Postgres) | |
| # because otherwise you'll lose data (dashboards, graphs, users) | |
| docker pull metabase/metabase:latest | |
| docker tag metabase/metabase:latest dokku/lu-metabase:latest | |
| dokku tags:deploy lu-metabase |
| COUNTRY_3_EMOJI_DICT = { | |
| "IND": "๐ฎ๐ณ", | |
| "NGA": "๐ณ๐ฌ", | |
| "KEN": "๐ฐ๐ช", | |
| "ITA": "๐ฎ๐น", | |
| "USA": "๐บ๐ธ", | |
| "CUB": "๐จ๐บ", | |
| "GBR": "๐ฌ๐ง", | |
| "ZAF": "๐ฟ๐ฆ", | |
| "POL": "๐ต๐ฑ", |