diff --git a/Dockerfile b/Dockerfile index 0db331a..75d6ef9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,25 +1,8 @@ FROM python:3.7-alpine RUN apk add --no-cache gcc musl-dev RUN apk update && apk upgrade && \ - apk add git alpine-sdk bash python -RUN mkdir /usr/informer -WORKDIR /usr/informer -COPY . /usr/informer + apk add git alpine-sdk bash python3 +COPY app /usr/local/app +WORKDIR /usr/local/app +RUN pip3 install -r requirements.txt -# Lets set the environment variable in the container -ENV GAE_INSTANCE=prod - -RUN pip install -I Jinja2==2.10.3 -RUN pip install -I SQLAlchemy==1.3.11 -RUN pip install -I Werkzeug==0.16.0 -RUN pip install -I pytz==2019.3 -RUN pip install -I sqlalchemy-migrate==0.13.0 -RUN pip install -I requests==2.7.0 -RUN pip install -I Flask==1.1.1 -RUN pip install -I Telethon==1.10.8 -RUN pip install -I mysql-connector-python==8.0.18 -RUN pip install -I gspread==3.1.0 -RUN pip install -I oauth2client==4.1.3 - -# Comment this out if you plan to run the script inside docker with ENTRYPOINT. Replace 1234567 with your Telegram API user ID -CMD ["python","bot.py","1234567"] diff --git a/README.md b/README.md index 264cf0a..ecb7036 100644 --- a/README.md +++ b/README.md @@ -1,11 +1,19 @@ -![image](https://raw.githubusercontent.com/paulpierre/informer/master/screenshots/informer-logo.gif) +![image](https://raw.githubusercontent.com/paulpierre/informer/master/github/screenshots/informer-logo.gif) # Informer - Telegram Mass Surveillance + +## Update 08-23-2021 +* Updated to latest Telethon 1.23.0 +* Fixed database issues by migrating to docker-compose +* Made Google Spreadsheets optional in setup +* Secure ENV files for setup +* Easier setup + ## About **Informer (TGInformer) is a bot library that allows you to masquerade as multiple REAL users on telegram** and spy on 500+ Telegram channels **per account**. Details are logged to a MySQL database, a private Google Sheet and your own private channel for analysis. This is a functioning proof-of-concept project with known bugs. Feel free to fork, share and drop me a line. -![image](https://raw.githubusercontent.com/paulpierre/informer/master/screenshots/13.png) +![image](https://raw.githubusercontent.com/paulpierre/informer/master/github/screenshots/13.png) ## Potential Business Applications @@ -20,27 +28,27 @@ This is a functioning proof-of-concept project with known bugs. Feel free to for ## Features * Run all your bots in the cloud while you sleep. 
Support for Google App Engine Flexible Environment and Docker -![image](https://raw.githubusercontent.com/paulpierre/informer/master/screenshots/14.png) +![image](https://raw.githubusercontent.com/paulpierre/informer/master/github/screenshots/14.png) * Write all notifications to private Google Sheet -![image](https://raw.githubusercontent.com/paulpierre/informer/master/screenshots/9.png) +![image](https://raw.githubusercontent.com/paulpierre/informer/master/github/screenshots/9.png) * Supports regular expressions for keyword filtering * SQLAlchemy for agnostic data persistence -![image](https://raw.githubusercontent.com/paulpierre/informer/master/screenshots/8.png) +![image](https://raw.githubusercontent.com/paulpierre/informer/master/github/screenshots/8.png) * Logging contextual message and channel data to a private channel and database * Stores meta information about sender of message, channel, number of participants in the channel -![image](https://raw.githubusercontent.com/paulpierre/informer/master/screenshots/7.png) +![image](https://raw.githubusercontent.com/paulpierre/informer/master/github/screenshots/7.png) * Auto-joins channels from CSV list containing Telegram channel URLs -![image](https://raw.githubusercontent.com/paulpierre/informer/master/screenshots/10.png) +![image](https://raw.githubusercontent.com/paulpierre/informer/master/github/screenshots/10.png) * Persists session of channels joined @@ -48,7 +56,7 @@ This is a functioning proof-of-concept project with known bugs. Feel free to for * Join up to 500 channels per account -![image](https://raw.githubusercontent.com/paulpierre/informer/master/screenshots/4.png) +![image](https://raw.githubusercontent.com/paulpierre/informer/master/github/screenshots/4.png) * Uses REAL accounts avoiding bot detection, **THIS IS NOT A TELEGRAM BOT** but a real automated user account. This is an important distinction because the official bot API is limited and bots are often restricted in public channels. @@ -61,76 +69,157 @@ This is a functioning proof-of-concept project with known bugs. Feel free to for * Burner app ### Python packages -* Jinja2 (2.10.3) * SQLAlchemy (1.3.11) -* Werkzeug (0.16.0) -* pytz (2019.3) * sqlalchemy-migrate (0.13.0) -* requests (2.7.0) -* Flask (1.1.1) * Telethon (1.10.8) * mysql-connector-python (8.0.18) * gspread (3.1.0) * oauth2client (4.1.3) +## Quick Start + +### Setup your ENV vars +Edit the file informer.env which contains all the required environmental variables for informer + +You can retrieve the necessary Telegram-related information here: + +### Setup Your Telegram App + +1. Head over to `http://my.telegram.com/auth` to authenticate your account by providing a phone number and the confirmation code sent to your phone number (or Telegram) +![image](https://raw.githubusercontent.com/paulpierre/informer/master/github/screenshots/1-2.png) + +![image](https://raw.githubusercontent.com/paulpierre/informer/master/github/screenshots/1-5.png) + + +2. Once you are authenticated, click on "API Development Tools" +![image](https://raw.githubusercontent.com/paulpierre/informer/master/github/screenshots/1-3.png) + +3. Go ahead and create a New Application by filling out the form shown below +![image](https://raw.githubusercontent.com/paulpierre/informer/master/github/screenshots/1-4.png) + +4. 
You should now have the necessary parameter values for the `informer.env` file fields `TELEGRAM_API_HASH` and `TELEGRAM_API_APP_ID` + +![image](https://raw.githubusercontent.com/paulpierre/informer/master/github/screenshots/1-6.png) + +5. Go ahead and replace the values, including `TELEGRAM_ACCOUNT_PHONE_NUMBER` and move on to the next section + +![image](https://raw.githubusercontent.com/paulpierre/informer/master/github/screenshots/1-7.png) + +### Getting your Telegram ID + +So far we have what we need for Telethon and Informer to access the Telegram APIs, next we need to acquire the indentifiers for your bot's account. + +1. Open Telegram and search for the user `userinfobot`. + +![image](https://raw.githubusercontent.com/paulpierre/informer/master/github/screenshots/2-1.png) + +2. You will see multiple, make sure you select the correctly spelled account. + +![image](https://raw.githubusercontent.com/paulpierre/informer/master/github/screenshots/2-2.png) + +3. Click on the user and you should see a dialog option at the bottom that says "Start". Click on this. + +![image](https://raw.githubusercontent.com/paulpierre/informer/master/github/screenshots/2-3.png) + +4. The bot has a single purpose, to reflect back to you your current Telegram account's information. + +You should receive your Telegram username and your Telegram account ID. This is important + +![image](https://raw.githubusercontent.com/paulpierre/informer/master/github/screenshots/1-8.png) + +5. Go ahead and edit the `informer.env` file and fill in the values for `TELEGRAM_ACCOUNT_ID` which +should be your Telegram account ID from the previous step and `TELEGRAM_ACCOUNT_USER_NAME`. + +You can optionally fill in `TELEGRAM_NOTIFICATIONS_CHANNEL_ID` with your user name or a channel ID. + +6. Make sure you have `TELEGRAM_ACCOUNT_PHONE_NUMBER` filled out as this is key to generating the session. For creating multiple accounts, please check out the Burner App below. + +### Initialize and authenticate session + +Make sure you are running python 3 and simply run `./quick_start.sh` in the directory. + +You must run this first so that you can authenticate with Telegram on the first run and generate a local session file + +You can later copy the files for the different accounts in app/session and mount them via Docker should you choose to do so. + +![image](https://raw.githubusercontent.com/paulpierre/informer/master/github/screenshots/15.png) -## Getting Started -### Run locally without Docker -If you’re not interested in kicking the tires and want to light some fires instead, you can run the Informer bot locally and not in a docker instance. A licky boom boom down. +You will be prompted to enter in the authentication code you received either via Telegram if you have logged in before, or via SMS -1. Create a virtual environment in the local directory +![image](https://raw.githubusercontent.com/paulpierre/informer/master/github/screenshots/1-5.png) -`virtualenv venv` +Hit enter and your session should be generated in the folder `app/session` with the file name as the international phone number you provided with a `.session` extension. -2. Install the depencies in requirements.txt +Continue to the next section where we use Docker Compose to setup a database. -`pip install -r requirements.txt` -3. Use the instructions below to retrieve your Telegram user API ID and API hash and supply this information in `build_database.py` +### Setup a Notification Channel -4. Create a MySQL database locally and supply the credentials in the bot.py. 
MySQL comes with MacOS. You can also install the latest version for your OS and follow the instructions here: +This step is optional, but if you would like to create a private group channel and would like to acquire the group ID do the following: - https://dev.mysql.com/doc/mysql-getting-started/en/ +* Create a group (or channel) and set it as private or public +* Be sure to get the Telegram URL +![image](https://raw.githubusercontent.com/paulpierre/informer/master/github/screenshots/3-1.png) -5. Run `python3 build_database.py` . This will create the models in `models.py` inside your new MySQL database and ensure it is unicode safe for those fun eye-bleeding emojis on TG. +Set the URL in the `informer.env` file under the parameter `TELEGRAM_NOTIFICATIONS_CHANNEL_URL` -It will also setup some default values for keywords to monitor and channels to join supplied in channels.csv. +To get the channel ID simply run `python3 bot.py ` in the `app` directory where `` is the account ID you previously generated. -6. Create a group or channel on Telegram and retrieve its channel ID. This will be the channel where your snitching bot will drop all its notifications of keywords mentioned in other channels. Provide this value in `tg_notifications_channel_id=` inside `bot.py` +When the script loads, it will display all the channels you are in, simply copy this value and put it in the `TELEGRAM_NOTIFICATIONS_CHANNEL_ID` parameter of the `informer.env` file and kill the script. You're now ready to run Informer. -![image](https://raw.githubusercontent.com/paulpierre/informer/master/screenshots/2.png) -7. If all is well we can go ahead and fire up Informer by running -`python3 bot.py ` which will take your configuration and spin up an instance of the `TGInformer` class and begin surveillance. -You will need to provide the API ID you generated from the instructions below as an argument so the bot knows which account to log into. +### Running Docker Compose +After running `quick_start.sh` you can run docker compose by: + +* running `./start.sh` to build the Docker containers which include the MySQL database + +![image](https://raw.githubusercontent.com/paulpierre/informer/master/github/screenshots/16.png) + + +* Run `./stop.sh` to stop the containers + +* Run `./clean.sh` to remove an dangling containers and volumes. ** NOTE ** this will RESET the database and you will lose all your data. + +A few things to note: + +Before you were required to run your own MySQL instance and this created some issues with connection string compatability and versioning. In this update, it is just created for you and persisted on disk. + +Additionally Dozzle is provided so that you may view logs in your browser, simply go to http://localhost:9999 and click on the `app_informer` container. -**NOTE:** If this is your first time logging in, it will send you an authorization code via SMS or via the Telegram app. You will need to enter this to authenticate the bot and log into the Telegram servers. You will only need to do this once as a session file will be generated locally to persist all sessions. This entire process is handled by the Telethon Telegram client SDK. ### Create a telegram account with Burner App +If you do not want to use your own phone number and want to run the Informer bot with some degree of anonymity you can use the Burner App available on iOS and Android. + 1. 
Install the app Burner * Android - https://play.google.com/store/apps/details?id=org.thunderdog.challegram&hl=en_US * iOS - https://apps.apple.com/us/app/telegram-x/id898228810 -![image](https://raw.githubusercontent.com/paulpierre/informer/master/screenshots/3.png) +![image](https://raw.githubusercontent.com/paulpierre/informer/master/github/screenshots/3.png) -2. You first will need to create Telegram API credentials by providing a phone number here: +2. Follow the same steps as above by providing the new phone number here: https://my.telegram.org/auth 3. Validate with Burner. You will be sent an authcode via SMS, you will need to provide -![image](https://raw.githubusercontent.com/paulpierre/informer/master/screenshots/1.png) +![image](https://raw.githubusercontent.com/paulpierre/informer/master/github/screenshots/1.png) 5. Log into Telegram 6. Attempt to login with the app by running -`python3 bot.py ` +`python3 bot.py ` in the `app` directory. -7. Since you are logging in with Telethon it will ask you for your authcode in the terminal. This was sent via Telegram message or SMS. Provide this and it will save your session credentials in the session file mentioned below. You will no longer need to authenticate so long as you have the session file saved. +7. Since you are logging in with Telethon it will ask you for your authcode in the terminal like earlier. -Sessions are saved in the `session/` folder as `.session` +This was sent via Telegram message or SMS. + +Provide this and it will save your session credentials in the session file mentioned below. You will no longer need to authenticate so long as you have the session file saved. + +Sessions are saved in the `app/session/` folder as `.session` + +Rinse and repeat until you have all the necessary session files and simply mount them in Docker. ## Scaling Telegram accounts @@ -171,57 +260,11 @@ file when you instantiate the Telethon client: Telegram’s 2FA . Note that you will need to login for a first time and authenticate when you first use the API. - -### Docker -If you want to run the bot as a containerized instance on a server with AWS, GCP or Digital ocean you can. - -You will need to create an account with a container registry service, available on most enterprise cloud providers but Docker Hub will do (https://hub.docker.com/signup) - -1. Create a Docker repository, instructions here: https://docs.docker.com/docker-hub/repos/ - -2. Build the Docker image. We’re running on a lean Alpine Python 3.7 image. - - `docker build -t //informer:latest .` - -**NOTE:** You will want an entry point to run bot.py and provide it a Telegram API user ID. There are a few ways to approach this: - -* You can comment out and include the CMD instruction and provide the API user ID via environment variable: -`CMD [“python”,”bot.py”,”${SHILLOMATIC_ACCOUNT_ID}”]` -You will need to set the environment variable -`SHILLOMATIC_ACCOUNT_ID` to your Telegram accounts API user ID -inside your Cloud Provider’s console or export it in your shell -environment with `export SHILLOMATIC_ACCOUNT_ID=“1234567”` - -* Or you can set or over-ride the entry point in your cloud provider just make sure you provide the Telegram API user ID as an argument: - -`python3 bot.py 1234567` - -* Or you can run the bot inside the shell environment with Docker: - - 1. SSH into your remote shell environment - - 2. Pull the Docker image from the remote repository: - - `docker pull //informer:latest` - - 3. Get the Docker container ID with: - - `docker container ls` - - 4. 
Run the Docker image and script in interactive mode: - - `docker run -ti python3 bot.py 1234567` - Where 1234567 is your Telegram API user ID. - -3. Push the Docker image to your remote repository: - -`docker push //informer:latest` - -4. Assuming some entry point was set either in the Docker file, your cloud provider container dashboard, or manually in the shell with `docker run` you can open Telegram and login with the same account as above. +## Managing Multiple Bot Accounts As the bot runs and joins channel, you will see your client update in real time and display the new channels you have joined. -![image](https://raw.githubusercontent.com/paulpierre/informer/master/screenshots/4.png) +![image](https://raw.githubusercontent.com/paulpierre/informer/master/github/screenshots/4.png) TIP: TelegramX is by far the better client to use for these purposes as it supports multiple login. Download here: @@ -234,6 +277,8 @@ The python library gspread is used for managing io with Google Sheets. You will Instructions are here: https://www.twilio.com/blog/2017/02/an-easy-way-to-read-and-write-to-a-google-spreadsheet-in-python.html +This is optional. + ## Known Bugs * Currently a channel must have already been joined in order to begin monitoring of keywords. It is likely you will need to run the `bot.py` twice, once to let it join channels and another time to monitor them. I’m aware of this glaring bug and will fix it in the next revision. diff --git a/app/bot.py b/app/bot.py new file mode 100644 index 0000000..f190802 --- /dev/null +++ b/app/bot.py @@ -0,0 +1,53 @@ +import sys +import os +import logging +from dotenv import load_dotenv +from pathlib import Path + +# Lets set the logging level +logging.getLogger().setLevel(logging.INFO) + +# ----------------- +# Load the ENV file +# ----------------- +env_file = 'informer.env' if os.path.isfile('informer.env') else '../informer.env' +logging.info(f'env_file: {env_file}') +dotenv_path = Path(env_file) +load_dotenv(dotenv_path=dotenv_path) + +from informer import TGInformer + + +# =========== +# Quick setup +# =========== + +# virtualenv venv +# source venv/bin/activate +# pip install -r requirements.txt +# python3 informer.py + +# Read more: https://github.com/paulpierre/informer/ + +try: + account_id = sys.argv[1] +except: + raise Exception('informer.py - account_id is a required param') + +if not account_id: + raise Exception('Account ID required') + +if __name__ == '__main__': + + informer = TGInformer( + db_database = os.environ['MYSQL_DATABASE'], + db_user = os.environ['MYSQL_USER'], + db_password = os.environ['MYSQL_PASSWORD'], + db_ip_address = os.environ['MYSQL_IP_ADDRESS'], + db_port = os.environ['MYSQL_PORT'], + tg_account_id = os.environ['TELEGRAM_ACCOUNT_ID'], + tg_notifications_channel_id = os.environ['TELEGRAM_NOTIFICATIONS_CHANNEL_ID'], + google_credentials_path = os.environ['GOOGLE_APPLICATION_CREDENTIALS'], + google_sheet_name = os.environ['GOOGLE_SHEET_NAME'] + ) + informer.init() diff --git a/build_database.py b/app/build_database.py similarity index 66% rename from build_database.py rename to app/build_database.py index 1de695b..eecc2a1 100644 --- a/build_database.py +++ b/app/build_database.py @@ -1,13 +1,22 @@ -from models import Account, Channel, ChatUser, Keyword, Message, Monitor, Notification -import sqlalchemy as db + import csv -from datetime import datetime import sys import os import logging +from dotenv import load_dotenv +from pathlib import Path +import sqlalchemy as db +from datetime import datetime from sqlalchemy.orm 
import sessionmaker +from models import Account, Channel, ChatUser, Keyword, Message, Monitor, Notification logging.getLogger().setLevel(logging.INFO) +# ----------------- +# Load the ENV file +# ----------------- +dotenv_path = Path('informer.env') +load_dotenv(dotenv_path=dotenv_path) + Session = None session = None SERVER_MODE = None @@ -19,7 +28,7 @@ def init_db(): global session, SERVER_MODE, engine - logging.info('{}: Initializing the database'.format(sys._getframe().f_code.co_name)) + logging.info(f'{sys._getframe().f_code.co_name}: Initializing the database') Account.metadata.create_all(engine) ChatUser.metadata.create_all(engine) Channel.metadata.create_all(engine) @@ -34,6 +43,7 @@ def init_db(): Lets setup the channels to monitor in the database """ def init_data(): + global session, SERVER_MODE, engine session = Session() init_add_account() @@ -43,22 +53,24 @@ def init_data(): session.close() def init_add_account(): + global session, SERVER_MODE, engine - logging.info('{}: Adding bot account'.format(sys._getframe().f_code.co_name)) - BOT_ACCOUNTS = [ + logging.info(f'{sys._getframe().f_code.co_name}: Adding bot account') + BOT_ACCOUNTS = [ + Account( - account_id=1234567, # Insert your own Telegram API ID here - account_api_id=1234567, # Insert your own Telegram API ID here - account_api_hash='21b277e0daa5911b0f2616b8b669533c', # Insert your own Telegram API Hash here + account_id=os.environ['TELEGRAM_ACCOUNT_ID'], + account_api_id=os.environ['TELEGRAM_API_APP_ID'], + account_api_hash=os.environ['TELEGRAM_API_HASH'], account_is_bot=False, account_is_verified=False, account_is_restricted=False, - account_first_name='Darrin', - account_last_name='OBrien', - account_user_name='informer', - account_phone='+14151234567', # Enter your burner phone number here + account_first_name=os.environ['TELEGRAM_ACCOUNT_FIRST_NAME'], + account_last_name=os.environ['TELEGRAM_ACCOUNT_LAST_NAME'], + account_user_name=os.environ['TELEGRAM_ACCOUNT_USER_NAME'], + account_phone=os.environ['TELEGRAM_ACCOUNT_PHONE_NUMBER'], # Enter your burner phone number here account_is_enabled=True, account_tlogin=datetime.now(), account_tcreate=datetime.now(), @@ -80,15 +92,15 @@ def init_add_channels(): CHANNELS = [ { 'channel_name': 'Informer monitoring', - 'channel_id': 1234567, # Enter your own Telegram channel ID for monitoring here - 'channel_url': 'https://t.me/joinchat/Blahblahblah', - 'channel_is_private': True + 'channel_id': os.environ['TELEGRAM_NOTIFICATIONS_CHANNEL_ID'], # Enter your own Telegram channel ID for monitoring here + 'channel_url': os.environ['TELEGRAM_NOTIFICATIONS_CHANNEL_URL'], + 'channel_is_private': False if os.environ['TELEGRAM_NOTIFICATIONS_CHANNEL_IS_PRIVATE']=='0' else True }, ] # Lets import the CSV with the channel list - with open('channels.csv') as csv_file: + with open(os.environ['TELEGRAM_CHANNEL_MONITOR_LIST']) as csv_file: csv_reader = csv.reader(csv_file, delimiter=',') line_count = 0 for row in csv_reader: @@ -100,10 +112,11 @@ def init_add_channels(): }) line_count += 1 - logging.info('Inserted {} channels to database'.format(line_count)) + + logging.info(f'Inserting {line_count} channels to database') for channel in CHANNELS: - logging.info('{}: Adding channel {} to database'.format(sys._getframe().f_code.co_name, channel['channel_name'])) + logging.info(f"{sys._getframe().f_code.co_name}: Adding channel {channel['channel_name']} to database") channel_url = channel['channel_url'] if 'channel_url' in channel else None channel_id = channel['channel_id'] if 'channel_id' in 
channel else None @@ -159,7 +172,7 @@ def init_add_keywords(): ] for keyword in KEYWORDS: - logging.info('{}: Adding keyword {} to the database'.format(sys._getframe().f_code.co_name, keyword['keyword_description'])) + logging.info(f"{sys._getframe().f_code.co_name}: Adding keyword {keyword['keyword_description']} to the database") session.add(Keyword( keyword_description=keyword['keyword_description'], @@ -184,7 +197,7 @@ def init_add_monitors(): for channel in channels: if account_index in accounts: account = accounts[account_index] - logging.info('{}: Adding monitoring to channel {} with account_id {} to the database'.format(sys._getframe().f_code.co_name, channel.channel_name, account.account_id)) + logging.info(f'{sys._getframe().f_code.co_name}: Adding monitoring to channel {channel.channel_name} with account_id {account.account_id} to the database') session.add(Monitor( channel_id=channel.id, account_id=account.account_id, @@ -200,31 +213,32 @@ def init_add_monitors(): def initialize_db(): global session, SERVER_MODE, engine, Session - DATABASE_NAME = 'informer_db' - - # NOTE: you will have to manually add your own DB string connector below + DATABASE_NAME = os.environ['MYSQL_DATABASE'] - if os.getenv('GAE_INSTANCE'): - SERVER_MODE = 'prod' # prod vs local - MYSQL_CONNECTOR_STRING = 'mysql+mysqlconnector://root:root@YOUR_OWN_IP_HERE:3306' - else: - SERVER_MODE = 'local' - MYSQL_CONNECTOR_STRING = 'mysql+mysqlconnector://root:root@127.0.0.1:3306' + db_database = os.environ['MYSQL_DATABASE'] + db_user = os.environ['MYSQL_USER'] + db_password = os.environ['MYSQL_PASSWORD'] + db_ip_address = os.environ['MYSQL_IP_ADDRESS'] + db_port = os.environ['MYSQL_PORT'] + SERVER_MODE = os.environ['ENV'] + MYSQL_CONNECTOR_STRING = f'mysql+mysqlconnector://{db_user}:{db_password}@{db_ip_address}:{db_port}/{db_database}?charset=utf8mb4&collation=utf8mb4_general_ci' - engine = db.create_engine(MYSQL_CONNECTOR_STRING)#, echo=True) - Session = sessionmaker(bind=engine) - session = None - session = Session() - session.execute("CREATE DATABASE {} CHARACTER SET 'utf8' COLLATE 'utf8_unicode_ci';".format(DATABASE_NAME)) - session.close() - engine = db.create_engine('{}/{}?charset=utf8mb4'.format(MYSQL_CONNECTOR_STRING, DATABASE_NAME)) # , echo=True) # uncomment right most comment if you want to hear all the noise MySQL is making + engine = db.create_engine(MYSQL_CONNECTOR_STRING, echo=True) Session = sessionmaker(bind=engine) session = None session = Session() + session.execute(f"CREATE DATABASE IF NOT EXISTS {DATABASE_NAME} CHARACTER SET 'utf8mb4' COLLATE 'utf8mb4_unicode_ci';") + session.execute('commit') + #session.close() + + # engine = db.create_engine(f'{MYSQL_CONNECTOR_STRING}?charset=utf8mb4', echo=True) + # Session = sessionmaker(bind=engine) + # session = None + # session = Session() # A hack to support unicode for emojis session.execute('SET NAMES "utf8mb4" COLLATE "utf8mb4_unicode_ci"') - session.execute('ALTER DATABASE {} CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci;'.format(DATABASE_NAME)) + session.execute(f'ALTER DATABASE {DATABASE_NAME} CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci;') session.execute('commit') init_db() diff --git a/channels.csv b/app/channels.csv similarity index 100% rename from channels.csv rename to app/channels.csv diff --git a/informer.py b/app/informer.py similarity index 73% rename from informer.py rename to app/informer.py index 91cdd31..279935f 100644 --- a/informer.py +++ b/app/informer.py @@ -1,27 +1,28 @@ -from models import Account, Channel, 
ChatUser, Keyword, Message, Monitor, Notification -import sqlalchemy as db -from datetime import datetime, timedelta -from random import randrange -import build_database import sys import os -import logging import json import re import asyncio +import gspread +import logging +import build_database +import sqlalchemy as db +from datetime import datetime, timedelta +from random import randrange from telethon import utils from sqlalchemy.orm import sessionmaker from sqlalchemy.exc import IntegrityError, InterfaceError, ProgrammingError from telethon.tl.functions.users import GetFullUserRequest from telethon import TelegramClient, events from telethon.tl.types import PeerUser, PeerChat, PeerChannel -from telethon.errors.rpcerrorlist import FloodWaitError, ChannelPrivateError,UserAlreadyParticipantError +from telethon.errors.rpcerrorlist import FloodWaitError, ChannelPrivateError, UserAlreadyParticipantError from telethon.tl.functions.channels import JoinChannelRequest from telethon.tl.functions.messages import ImportChatInviteRequest -import gspread from oauth2client.service_account import ServiceAccountCredentials +from models import Account, Channel, ChatUser, Keyword, Message, Monitor, Notification -""" + +banner = """ -------------------------------------------------- ____ ____ / _/___ / __/___ _________ ___ ___ _____ @@ -30,7 +31,7 @@ /___/_/ /_/_/ \____/_/ /_/ /_/ /_/\___/_/ -------------------------------------------------- - by @paulpierre 11-26-2019 + by @paulpierre updated 2021-08-16 (2019-11-26) https://github.com/paulpierre/informer """ @@ -38,25 +39,20 @@ # Lets set the logging level logging.getLogger().setLevel(logging.INFO) - class TGInformer: def __init__(self, - account_id=None, - db_prod_ip=None, - db_prod_port=None, - db_prod_name=None, - db_prod_user=None, - db_prod_password=None, - db_local_ip=None, - db_local_port=None, - db_local_name=None, - db_local_user=None, - db_local_password=None, - tg_notifications_channel_id=None, - google_credentials_path=None, - google_sheet_name=None, - ): + db_database = os.environ['MYSQL_DATABASE'], + db_user = os.environ['MYSQL_USER'], + db_password = os.environ['MYSQL_PASSWORD'], + db_ip_address = os.environ['MYSQL_IP_ADDRESS'], + db_port = os.environ['MYSQL_PORT'], + tg_account_id = os.environ['TELEGRAM_ACCOUNT_ID'], + tg_notifications_channel_id = os.environ['TELEGRAM_NOTIFICATIONS_CHANNEL_ID'], + google_credentials_path = os.environ['GOOGLE_APPLICATION_CREDENTIALS'], + google_sheet_name = os.environ['GOOGLE_SHEET_NAME'], + tg_phone_number = os.environ['TELEGRAM_ACCOUNT_PHONE_NUMBER'] + ): # ------------------ # Instance variables @@ -65,60 +61,64 @@ def __init__(self, self.channel_list = [] self.channel_meta = {} self.bot_task = None - self.KEYWORD_REFRESH_WAIT = 15 * 60 + self.KEYWORD_REFRESH_WAIT = 15 * 60 # Every 15 minutes self.MIN_CHANNEL_JOIN_WAIT = 30 self.MAX_CHANNEL_JOIN_WAIT = 120 self.bot_uptime = 0 + self.client = None + self.loop = asyncio.get_event_loop() + # -------------- # Display banner # -------------- - print(""" - ____ ____ - / _/___ / __/___ _________ ___ ___ _____ - / // __ \/ /_/ __ \/ ___/ __ `__ \/ _ \/ ___/ - _/ // / / / __/ /_/ / / / / / / / / __/ / - /___/_/ /_/_/ \____/_/ /_/ /_/ /_/\___/_/ - - by @paulpierre 11-26-2019 - """) + print(banner) # ------------------------------------------------ # Check if we're in app engine and set environment # ------------------------------------------------ - if os.getenv('GAE_INSTANCE'): - self.SERVER_MODE = 'prod' # prod vs local - self.MYSQL_CONNECTOR_STRING = 
'mysql+mysqlconnector://{}:{}@{}:{}/{}'.format(db_prod_user, db_prod_password, db_prod_ip, db_prod_port, db_prod_name) - else: - self.SERVER_MODE = 'local' - self.MYSQL_CONNECTOR_STRING = 'mysql+mysqlconnector://{}:{}@{}:{}/{}'.format(db_local_user, db_local_password, db_local_ip, db_local_port, db_local_name) - - logging.info('SERVER_MODE: {} GAE_ENV: {}'.format(self.SERVER_MODE, str(os.getenv('GAE_INSTANCE')))) + self.SERVER_MODE = os.environ['ENV'] + self.MYSQL_CONNECTOR_STRING = f'mysql+mysqlconnector://{db_user}:{db_password}@{db_ip_address}:{db_port}/{db_database}?charset=utf8mb4&collation=utf8mb4_general_ci' + + logging.info(f'Starting Informer SERVER_MODE: {self.SERVER_MODE}\n') # ----------------------------------------- # Set the channel we want to send alerts to # ----------------------------------------- self.monitor_channel = tg_notifications_channel_id - if not account_id: - logging.error('Must specify account_id for bot instance') - return + if not tg_account_id: + raise Exception('Must specify "tg_account_id" in informer.env file for bot instance') # ----------------------- # Initialize Google Sheet # ----------------------- - scope = [ - 'https://www.googleapis.com/auth/spreadsheets', - 'https://www.googleapis.com/auth/drive'] - creds = ServiceAccountCredentials.from_json_keyfile_name(google_credentials_path, scope) - self.gsheet = gspread.authorize(creds) - self.sheet = self.gsheet.open(google_sheet_name).sheet1 + logging.info(f'Attempting to access Google Sheet {google_sheet_name}.sheet1 ...\n') + + # Lets check if the file exists + + try: + if os.path.isfile(google_credentials_path): + + scope = [ + 'https://www.googleapis.com/auth/spreadsheets', + 'https://www.googleapis.com/auth/drive'] + creds = ServiceAccountCredentials.from_json_keyfile_name(google_credentials_path, scope) + + self.gsheet = gspread.authorize(creds) + self.sheet = self.gsheet.open(google_sheet_name).sheet1 + else: + self.gsheet = False + except gspread.exceptions.APIError: + self.gsheet = False # ------------------- # Initialize database # ------------------- + + logging.info(f'Setting up MySQL connector with connector string: {self.MYSQL_CONNECTOR_STRING} ... 
\n') self.engine = db.create_engine(self.MYSQL_CONNECTOR_STRING) # , echo=True self.Session = sessionmaker(bind=self.engine) self.session = self.Session() @@ -126,34 +126,23 @@ def __init__(self, # -------------------- # Load account from DB # -------------------- + logging.info(f'Attempting to load user session from database with account_id {tg_account_id} ...\n') + self.tg_user = None try: - self.account = self.session.query(Account).filter_by(account_id=account_id).first() + self.account = self.session.query(Account).filter_by(account_id=tg_account_id).first() except ProgrammingError as e: - logging.error('Database is not set up, setting it up') + logging.error(f'Received error {e} \n Database is not set up, setting it up') build_database.initialize_db() - self.account = self.session.query(Account).filter_by(account_id=account_id).first() + self.account = self.session.query(Account).filter_by(account_id=tg_account_id).first() if not self.account: - logging.error('Invalid account_id {} for bot instance'.format(account_id)) - sys.exit(0) + raise Exception(f'Invalid account_id {tg_account_id} for bot instance') - # ---------------------- - # Telegram service login - # ---------------------- - logging.info('Logging in with account # {}'.format(self.account.account_phone)) - session_file = 'session/' + self.account.account_phone.replace('+', '') - self.client = TelegramClient(session_file, self.account.account_api_id, self.account.account_api_hash) - - # ----------------------- - # Authorize from terminal - # ----------------------- - # TODO: automate authcode with the Burner API - self.client.connect() - if not self.client.is_user_authorized(): - logging.info('Client is currently not logged in, please sign in!') - self.client.send_code_request(self.account.account_phone) - self.tg_user = self.client.sign_in(self.account.account_phone, input('Enter code: ')) + # ======================= + # Initiate bot async loop + # ======================== + self.loop.run_until_complete(self.bot_interval()) # ============= # Get all users @@ -162,7 +151,7 @@ def get_channel_all_users(self, channel_id): # TODO: this function is not complete channel = self.client.get_entity(PeerChat(channel_id)) users = self.client.get_participants(channel) - print('total users: {}'.format(users.total)) + print(f'total users: {users.total}') for user in users: if user.username is not None and not user.is_self: print(utils.get_display_name(user), user.username, user.id, user.bot, user.verified, user.restricted, user.first_name, user.last_name, user.phone, user.is_self) @@ -193,7 +182,7 @@ def get_channel_info_by_group_id(self, id): # Get channel by channel URL # ========================== async def get_channel_info_by_url(self, url): - logging.info('{}: Getting channel info with url: {}'.format(sys._getframe().f_code.co_name, url)) + logging.info(f'{sys._getframe().f_code.co_name}: Getting channel info with url: {url}') channel_hash = utils.parse_username(url)[0] # ----------------------------------------- @@ -202,10 +191,10 @@ async def get_channel_info_by_url(self, url): try: channel = await self.client.get_entity(channel_hash) except ValueError: - logging.info('{}: Not a valid telegram URL: {}'.format(sys._getframe().f_code.co_name, url)) + logging.info(f'{sys._getframe().f_code.co_name}: Not a valid telegram URL: {url}') return False except FloodWaitError as e: - logging.info('{}: Got a flood wait error for: {}'.format(sys._getframe().f_code.co_name, url)) + logging.info(f'{sys._getframe().f_code.co_name}: Got a flood 
wait error for: {url}') await asyncio.sleep(e.seconds * 2) return { @@ -223,7 +212,7 @@ async def get_user_by_id(self, user_id=None): u = await self.client.get_input_entity(PeerUser(user_id=user_id)) user = await self.client(GetFullUserRequest(u)) - logging.info('{}: User ID {} has data:\n {}\n\n'.format(sys._getframe().f_code.co_name, user_id, user)) + logging.info(f'{sys._getframe().f_code.co_name}: User ID {user_id} has data:\n {user}\n\n') return { 'username': user.user.username, @@ -238,7 +227,8 @@ async def get_user_by_id(self, user_id=None): # ============================== # Initialize keywords to monitor # ============================== - def init_keywords(self): + async def init_keywords(self): + self.keyword_list = [] keywords = self.session.query(Keyword).filter_by(keyword_is_enabled=True).all() for keyword in keywords: @@ -247,7 +237,7 @@ def init_keywords(self): 'name': keyword.keyword_description, 'regex': keyword.keyword_regex }) - logging.info('{}: Monitoring keywords: {}'.format(sys._getframe().f_code.co_name, json.dumps(self.keyword_list, indent=4))) + logging.info(f'{sys._getframe().f_code.co_name}: Monitoring keywords: {json.dumps(self.keyword_list, indent=4)}') # =========================== # Initialize channels to join @@ -278,9 +268,9 @@ async def message_event_handler(event): # Lets add it to the current list of channels we're in current_channels.append(channel_id) - logging.info('id: {} name: {}'.format(dialog.id, dialog.name)) + logging.info(f'id: {dialog.id} name: {dialog.name}') - logging.info('{}: ### Current channels {}'.format(sys._getframe().f_code.co_name, json.dumps(current_channels))) + logging.info(f'{sys._getframe().f_code.co_name}: ### Current channels {json.dumps(current_channels, indent=4)}') # ----------------------------------- # Get the list of channels to monitor @@ -323,7 +313,7 @@ async def message_event_handler(event): # ------------------------------- if channel['channel_id']: self.channel_list.append(channel['channel_id']) - logging.info('Adding channel {} to monitoring w/ ID: {} hash: {}'.format(channel['channel_name'], channel['channel_id'], channel['channel_access_hash'])) + logging.info(f"Adding channel {channel['channel_name']} to monitoring w/ ID: {channel['channel_id']} hash: {channel['channel_access_hash']}") self.channel_meta[channel['channel_id']] = { 'channel_id': channel['channel_id'], @@ -344,18 +334,21 @@ async def message_event_handler(event): # If channel is invalid, ignore # ----------------------------- if o is False: - logging.error('Invalid channel URL: {}'.format(channel['channel_url'])) + logging.error(f"Invalid channel URL: {channel['channel_url']}") continue - logging.info('{}: ### Successfully identified {}'.format(sys._getframe().f_code.co_name, channel['channel_name'])) + + logging.info(f"{sys._getframe().f_code.co_name}: ### Successfully identified {channel['channel_name']}") # ------------------------- # If the channel is a group # ------------------------- elif channel['channel_is_group']: o = await self.get_channel_info_by_group_id(channel['channel_id']) - logging.info('{}: ### Successfully identified {}'.format(sys._getframe().f_code.co_name, channel['channel_name'])) + + logging.info(f"{sys._getframe().f_code.co_name}: ### Successfully identified {channel['channel_name']}") + else: - logging.info('{}: Unable to indentify channel {}'.format(sys._getframe().f_code.co_name, channel['channel_name'])) + logging.info(f"{sys._getframe().f_code.co_name}: Unable to indentify channel {channel['channel_name']}") 
continue channel_obj.channel_id = o['channel_id'] @@ -379,20 +372,20 @@ async def message_event_handler(event): # ------------------------------- channel_is_private = True if (channel['channel_is_private'] or '/joinchat/' in channel['channel_url']) else False if channel_is_private: - logging.info('channel_is_private: {}'.format(channel_is_private)) + logging.info(f'channel_is_private: {channel_is_private}') # ------------------------------------------ # Join if public channel and we're not in it # ------------------------------------------ if channel['channel_is_group'] is False and channel_is_private is False and channel['channel_id'] not in current_channels: - logging.info('{}: Joining channel: {} => {}'.format(sys._getframe().f_code.co_name, channel['channel_id'], channel['channel_name'])) + logging.info(f"{sys._getframe().f_code.co_name}: Joining channel: {channel['channel_id']} => {channel['channel_name']}") try: await self.client(JoinChannelRequest(channel=await self.client.get_entity(channel['channel_url']))) sec = randrange(self.MIN_CHANNEL_JOIN_WAIT, self.MAX_CHANNEL_JOIN_WAIT) - logging.info('sleeping for {} seconds'.format(sec)) + logging.info(f'sleeping for {sec} seconds') await asyncio.sleep(sec) except FloodWaitError as e: - logging.info('Received FloodWaitError, waiting for {} seconds..'.format(e.seconds)) + logging.info(f'Received FloodWaitError, waiting for {e.seconds} seconds..') # Lets wait twice as long as the API tells us for posterity await asyncio.sleep(e.seconds * 2) @@ -405,7 +398,7 @@ async def message_event_handler(event): # ------------------------------------------ elif channel_is_private and channel['channel_id'] not in current_channels: channel_obj.channel_is_private = True - logging.info('{}: Joining private channel: {} => {}'.format(sys._getframe().f_code.co_name, channel['channel_id'], channel['channel_name'])) + logging.info(f"{sys._getframe().f_code.co_name}: Joining private channel: {channel['channel_id']} => {channel['channel_name']}") # ------------------------------------- # Join private channel with secret hash @@ -419,10 +412,10 @@ async def message_event_handler(event): # Counter FloodWaitError # ---------------------- sec = randrange(self.MIN_CHANNEL_JOIN_WAIT, self.MAX_CHANNEL_JOIN_WAIT) - logging.info('sleeping for {} seconds'.format(sec)) + logging.info(f'sleeping for {sec} seconds') await asyncio.sleep(sec) except FloodWaitError as e: - logging.info('Received FloodWaitError, waiting for {} seconds..'.format(e.seconds)) + logging.info(f'Received FloodWaitError, waiting for {e.seconds} seconds..') await asyncio.sleep(e.seconds * 2) except ChannelPrivateError as e: logging.info('Channel is private or we were banned bc we didnt respond to bot') @@ -443,8 +436,8 @@ async def message_event_handler(event): pass self.session.close() - logging.info('{}: Monitoring channels: {}'.format(sys._getframe().f_code.co_name, json.dumps(self.channel_list, indent=4))) - logging.info('Channel METADATA: {}'.format(self.channel_meta)) + logging.info(f"{sys._getframe().f_code.co_name}: Monitoring channels: {json.dumps(self.channel_list, indent=4)}") + logging.info(f'Channel METADATA: {self.channel_meta}') # =========================== @@ -475,10 +468,16 @@ async def filter_message(self, event): # If it matches the regex then voila! 
if re.search(keyword['regex'], message, re.IGNORECASE): logging.info( - 'Filtering: {}\n\nEvent raw text: {} \n\n Data: {}'.format(channel_id, event.raw_text, event)) + f'Filtering: {channel_id}\n\nEvent raw text: {event.raw_text} \n\n Data: {event}') # Lets send the notification with all the pertinent information in the params - await self.send_notification(message_obj=event.message, event=event, sender_id=event.sender_id, channel_id=channel_id, keyword=keyword['name'], keyword_id=keyword['id']) + await self.send_notification( + message_obj=event.message, + event=event, sender_id=event.sender_id, + channel_id=channel_id, + keyword=keyword['name'], + keyword_id=keyword['id'] + ) # ==================== # Handle notifications @@ -521,8 +520,8 @@ async def send_notification(self, sender_id=None, event=None, channel_id=None, k timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S") # Set the message for the notification we're about to send in our monitor channel - message = '⚠️ "{}" mentioned by {} in => "{}" url: {}\n\n Message:\n"{}\ntimestamp: {}'.format(keyword, sender_username, self.channel_meta[channel_id]['channel_title'], self.channel_meta[channel_id]['channel_url'], message_text,timestamp) - logging.info('{} Sending notification {}'.format(sys._getframe().f_code.co_name, message)) + message = f'⚠️ "{keyword}" mentioned by {sender_username} in => "{self.channel_meta[channel_id]["channel_title"]}" url: {self.channel_meta[channel_id]["channel_url"]}\n\n Message:\n"{message_text}\ntimestamp: {timestamp}' + logging.info(f'{sys._getframe().f_code.co_name} Sending notification {message}') # ---------------- # Send the message @@ -532,25 +531,26 @@ async def send_notification(self, sender_id=None, event=None, channel_id=None, k # ------------------------- # Write to the Google Sheet # ------------------------- - self.sheet.append_row([ - sender_id, - sender_username, - channel_id, - self.channel_meta[channel_id]['channel_title'], - self.channel_meta[channel_id]['channel_url'], - keyword, - message_text, - is_mention, - is_scheduled, - is_fwd, - is_reply, - is_bot, - is_channel, - is_group, - is_private, - channel_size, - timestamp - ]) + if self.gsheet is True: + self.sheet.append_row([ + sender_id, + sender_username, + channel_id, + self.channel_meta[channel_id]['channel_title'], + self.channel_meta[channel_id]['channel_url'], + keyword, + message_text, + is_mention, + is_scheduled, + is_fwd, + is_reply, + is_bot, + is_channel, + is_group, + is_private, + channel_size, + timestamp + ]) # -------------- # Add user to DB @@ -625,31 +625,42 @@ async def update_keyword_list(self): logging.info('### updating keyword_list') pass - # =========================== - # Loop we run while we listen - # =========================== - async def bot_interval(self): - self.init_keywords() - await self.init_monitor_channels() - while True: - logging.info('### Running bot interval') - await self.update_keyword_list() - await asyncio.sleep(self.KEYWORD_REFRESH_WAIT) - def stop_bot_interval(self): self.bot_task.cancel() - # =========================== - # Initialize connection to TG - # =========================== - def init(self): - loop = asyncio.get_event_loop() - self.bot_task = loop.create_task(self.bot_interval()) + + # ============== + # Main coroutine + # ============== + async def bot_interval(self): - with self.client: - self.client.run_until_disconnected() - try: - loop.run_until_complete(self.bot_task) - except asyncio.CancelledError: - logging.info('### Async cancelled') - pass \ No newline at end 
of file + # ---------------------- + # Telegram service login + # ---------------------- + logging.info(f'Logging in with account # {self.account.account_phone} ... \n') + session_file = 'session/' + self.account.account_phone.replace('+', '') + self.client = TelegramClient(session_file, self.account.account_api_id, self.account.account_api_hash) + + # ----------------------- + # Authorize from terminal + # ----------------------- + # TODO: automate authcode with the Burner API + await self.client.start(phone=f'{self.account.account_phone}') + + if not await self.client.is_user_authorized(): + logging.info(f'Client is currently not logged in, please sign in! Sending request code to {self.account.account_phone}, please confirm on your mobile device') + await self.client.send_code_request(self.account.account_phone) + self.tg_user = await self.client.sign_in(self.account.account_phone, input('Enter code: ')) + + self.tg_user = await self.client.get_me() + + await self.init_keywords() + await self.init_monitor_channels() + count = 0 + while True: + count +=1 + logging.info('### {count} Running bot interval') + await self.init_keywords() + await asyncio.sleep(self.KEYWORD_REFRESH_WAIT) + + \ No newline at end of file diff --git a/models.py b/app/models.py similarity index 100% rename from models.py rename to app/models.py diff --git a/app/requirements.txt b/app/requirements.txt new file mode 100644 index 0000000..3a2968a --- /dev/null +++ b/app/requirements.txt @@ -0,0 +1,7 @@ +SQLAlchemy==1.3.11 +sqlalchemy-migrate==0.13.0 +Telethon==1.23.0 +mysql-connector-python==8.0.18 +gspread==3.1.0 +oauth2client==4.1.3 +python-dotenv==0.19.0 \ No newline at end of file diff --git a/bot.py b/bot.py deleted file mode 100644 index fb3e7c6..0000000 --- a/bot.py +++ /dev/null @@ -1,40 +0,0 @@ -from informer import TGInformer -import sys -# =========== -# Quick setup -# =========== - -#virtualenv venv -#source venv/bin/activate -#pip install -r requirements.txt - -# Read more: https://github.com/paulpierre/informer/ - -try: - account_id = sys.argv[1] -except: - sys.exit('informer.py - account_id is a required param') - -if not account_id: - sys.exit('Account ID required') - -if __name__ == '__main__': - - informer = TGInformer( - account_id=account_id, - db_prod_ip='INSERT_YOUR_IP_HERE', - db_prod_port=3306, - db_prod_name='informer_db', - db_prod_user='root', - db_prod_password='root', - db_local_ip='127.0.0.1', - db_local_port='3320', - db_local_name='informer_db', - db_local_user='root', - db_local_password='root', - tg_notifications_channel_id=1234567, # Insert your own channel ID here - google_credentials_path='credentials/gcloud_api.json', # You will need download your Google API file here - google_sheet_name='Informer Notifications' - ) - informer.init() - sys.exit(0) diff --git a/clean.sh b/clean.sh new file mode 100755 index 0000000..b203fb6 --- /dev/null +++ b/clean.sh @@ -0,0 +1,3 @@ +#!/bin/bash + +docker volume prune && docker container prune \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..0513070 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,64 @@ +version: '3.5' +services: +# -------------------------------------------------- +# ____ ____ +# / _/___ / __/___ _________ ___ ___ _____ +# / // __ \/ /_/ __ \/ ___/ __ `__ \/ _ \/ ___/ +# _/ // / / / __/ /_/ / / / / / / / / __/ / +# /___/_/ /_/_/ \____/_/ /_/ /_/ /_/\___/_/ v1.1 +# +# -------------------------------------------------- +# by @paulpierre Updated 2021-08-16 +# 
https://github.com/paulpierre/informer +# + + + # ================ + # Database Service + # ================ + db_informer: + env_file: + - informer.env + container_name: db_informer + image: mysql:5.7 + volumes: + - db_data:/var/lib/mysql + ports: + - "3307:3306" + + +# ========================= +# Informer Telegram Service +# ========================= + app_informer: + env_file: + - informer.env + + build: . + command: /bin/bash -c "sleep 15; python3 bot.py ${TELEGRAM_ACCOUNT_ID}" + container_name: app_informer + volumes: + - app/:/usr/local/app + ports: + - "8000:8000" + depends_on: + - db_informer + links: + - db_informer + + # =============== + # Logging service + # =============== + # available at http://localhost:9999 + # NOTE: this is totally optional, feel free to comment out below if prod + app_dozzle: + container_name: dozzle + image: amir20/dozzle:latest + volumes: + - /var/run/docker.sock:/var/run/docker.sock + ports: + - 9999:8080 + +volumes: + db_data: {} + app: {} \ No newline at end of file diff --git a/github/screenshots/1-1.png b/github/screenshots/1-1.png new file mode 100644 index 0000000..4cc55ea Binary files /dev/null and b/github/screenshots/1-1.png differ diff --git a/github/screenshots/1-2.png b/github/screenshots/1-2.png new file mode 100644 index 0000000..8d51d40 Binary files /dev/null and b/github/screenshots/1-2.png differ diff --git a/github/screenshots/1-3.png b/github/screenshots/1-3.png new file mode 100644 index 0000000..f50d5ad Binary files /dev/null and b/github/screenshots/1-3.png differ diff --git a/github/screenshots/1-4.png b/github/screenshots/1-4.png new file mode 100644 index 0000000..5cde562 Binary files /dev/null and b/github/screenshots/1-4.png differ diff --git a/github/screenshots/1-5.png b/github/screenshots/1-5.png new file mode 100644 index 0000000..34321b1 Binary files /dev/null and b/github/screenshots/1-5.png differ diff --git a/github/screenshots/1-6.png b/github/screenshots/1-6.png new file mode 100644 index 0000000..e346aef Binary files /dev/null and b/github/screenshots/1-6.png differ diff --git a/github/screenshots/1-7.png b/github/screenshots/1-7.png new file mode 100644 index 0000000..80604b0 Binary files /dev/null and b/github/screenshots/1-7.png differ diff --git a/github/screenshots/1-8.png b/github/screenshots/1-8.png new file mode 100644 index 0000000..80604b0 Binary files /dev/null and b/github/screenshots/1-8.png differ diff --git a/screenshots/1.png b/github/screenshots/1.png similarity index 100% rename from screenshots/1.png rename to github/screenshots/1.png diff --git a/screenshots/10.png b/github/screenshots/10.png similarity index 100% rename from screenshots/10.png rename to github/screenshots/10.png diff --git a/screenshots/11.png b/github/screenshots/11.png similarity index 100% rename from screenshots/11.png rename to github/screenshots/11.png diff --git a/screenshots/12.png b/github/screenshots/12.png similarity index 100% rename from screenshots/12.png rename to github/screenshots/12.png diff --git a/screenshots/13.png b/github/screenshots/13.png similarity index 100% rename from screenshots/13.png rename to github/screenshots/13.png diff --git a/screenshots/14.png b/github/screenshots/14.png similarity index 100% rename from screenshots/14.png rename to github/screenshots/14.png diff --git a/github/screenshots/15.png b/github/screenshots/15.png new file mode 100644 index 0000000..3deed16 Binary files /dev/null and b/github/screenshots/15.png differ diff --git a/github/screenshots/16.png 
b/github/screenshots/16.png new file mode 100644 index 0000000..65b4e53 Binary files /dev/null and b/github/screenshots/16.png differ diff --git a/github/screenshots/2-1.png b/github/screenshots/2-1.png new file mode 100644 index 0000000..0f2d1f5 Binary files /dev/null and b/github/screenshots/2-1.png differ diff --git a/github/screenshots/2-2.png b/github/screenshots/2-2.png new file mode 100644 index 0000000..8644483 Binary files /dev/null and b/github/screenshots/2-2.png differ diff --git a/github/screenshots/2-3.png b/github/screenshots/2-3.png new file mode 100644 index 0000000..0e99b1e Binary files /dev/null and b/github/screenshots/2-3.png differ diff --git a/github/screenshots/2-4.png b/github/screenshots/2-4.png new file mode 100644 index 0000000..2c8f686 Binary files /dev/null and b/github/screenshots/2-4.png differ diff --git a/screenshots/2.png b/github/screenshots/2.png similarity index 100% rename from screenshots/2.png rename to github/screenshots/2.png diff --git a/github/screenshots/3-1.png b/github/screenshots/3-1.png new file mode 100644 index 0000000..fd87ad6 Binary files /dev/null and b/github/screenshots/3-1.png differ diff --git a/screenshots/3.png b/github/screenshots/3.png similarity index 100% rename from screenshots/3.png rename to github/screenshots/3.png diff --git a/screenshots/4.png b/github/screenshots/4.png similarity index 100% rename from screenshots/4.png rename to github/screenshots/4.png diff --git a/screenshots/5.png b/github/screenshots/5.png similarity index 100% rename from screenshots/5.png rename to github/screenshots/5.png diff --git a/screenshots/6.png b/github/screenshots/6.png similarity index 100% rename from screenshots/6.png rename to github/screenshots/6.png diff --git a/screenshots/7.png b/github/screenshots/7.png similarity index 100% rename from screenshots/7.png rename to github/screenshots/7.png diff --git a/screenshots/8.png b/github/screenshots/8.png similarity index 100% rename from screenshots/8.png rename to github/screenshots/8.png diff --git a/screenshots/9.png b/github/screenshots/9.png similarity index 100% rename from screenshots/9.png rename to github/screenshots/9.png diff --git a/github/screenshots/Screen Shot 2021-08-16 at 4.58.45 PM.png b/github/screenshots/Screen Shot 2021-08-16 at 4.58.45 PM.png new file mode 100644 index 0000000..64ccc3a Binary files /dev/null and b/github/screenshots/Screen Shot 2021-08-16 at 4.58.45 PM.png differ diff --git a/screenshots/informer-logo.gif b/github/screenshots/informer-logo.gif similarity index 100% rename from screenshots/informer-logo.gif rename to github/screenshots/informer-logo.gif diff --git a/informer.env b/informer.env new file mode 100644 index 0000000..6d49748 --- /dev/null +++ b/informer.env @@ -0,0 +1,37 @@ +ENV=local + +# ----------------- +# MySQL credentails +# ----------------- + +MYSQL_ROOT_PASSWORD=root +MYSQL_DATABASE=informer_db +MYSQL_USER=informer +MYSQL_PASSWORD=informer_P455w0rd! 
+MYSQL_IP_ADDRESS=docker.for.mac.localhost
+# It's set to 3307 in case the local env is already running MySQL
+MYSQL_PORT=3307
+
+
+# ---------------
+# Telegram config
+# ---------------
+
+TELEGRAM_ACCOUNT_ID=
+TELEGRAM_API_APP_ID=
+TELEGRAM_NOTIFICATIONS_CHANNEL_ID=
+TELEGRAM_NOTIFICATIONS_CHANNEL_URL=https://t.me/joinchat/
+TELEGRAM_NOTIFICATIONS_CHANNEL_IS_PRIVATE=1
+TELEGRAM_API_HASH=
+TELEGRAM_ACCOUNT_PHONE_NUMBER=
+TELEGRAM_ACCOUNT_USER_NAME=
+TELEGRAM_ACCOUNT_FIRST_NAME=Darrin
+TELEGRAM_ACCOUNT_LAST_NAME=O'Brien
+TELEGRAM_CHANNEL_MONITOR_LIST=channels.csv
+
+# -------------
+# Google config
+# -------------
+
+GOOGLE_APPLICATION_CREDENTIALS=credentials/google_credentials.json
+GOOGLE_SHEET_NAME=Informer Notifications Sheet
diff --git a/quick_start.sh b/quick_start.sh
new file mode 100755
index 0000000..03d31de
--- /dev/null
+++ b/quick_start.sh
@@ -0,0 +1,17 @@
+#!/bin/bash
+
+
+# change to app directory
+cd ./app
+
+# create virtual env
+python3 -m venv venv
+
+# load venv
+source venv/bin/activate
+
+# install requirements in this env
+pip install -r requirements.txt
+
+# run the bot (pass your Telegram account ID as the first argument)
+python3 bot.py "$1"
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index 7c78d0f..0000000
--- a/requirements.txt
+++ /dev/null
@@ -1,11 +0,0 @@
-Jinja2==2.11.3
-SQLAlchemy==1.3.11
-Werkzeug==0.16.0
-pytz==2019.3
-sqlalchemy-migrate==0.13.0
-requests==2.22.0
-Flask==1.1.1
-Telethon==1.10.8
-mysql-connector-python==8.0.18
-gspread==3.1.0
-oauth2client==4.1.3
\ No newline at end of file
diff --git a/start.sh b/start.sh
new file mode 100755
index 0000000..f09c168
--- /dev/null
+++ b/start.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+# Startup all the containers at once
+docker-compose --env-file ./informer.env up --build
diff --git a/stop.sh b/stop.sh
new file mode 100755
index 0000000..40ac14e
--- /dev/null
+++ b/stop.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+# Stop all the containers at once
+docker-compose down
\ No newline at end of file
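
For reference, the first-run login that `quick_start.sh` and `app/bot.py` walk you through can be reduced to a few lines of Telethon. The sketch below is not part of this changeset: it assumes the variable names defined in `informer.env` above, the `app/session/<phone>.session` layout used by `informer.py`, and a hypothetical file name (`generate_session.py`) run from the repository root.

```python
# generate_session.py — hypothetical standalone helper (not in this diff).
# Creates/refreshes the Telethon session file that docker-compose later mounts,
# mirroring what quick_start.sh + app/bot.py do on first run.
import asyncio
import os
from pathlib import Path

from dotenv import load_dotenv
from telethon import TelegramClient

load_dotenv(dotenv_path=Path('informer.env'))

API_ID = int(os.environ['TELEGRAM_API_APP_ID'])
API_HASH = os.environ['TELEGRAM_API_HASH']
PHONE = os.environ['TELEGRAM_ACCOUNT_PHONE_NUMBER']


async def main():
    # Session files are keyed by phone number, matching informer.py's convention
    os.makedirs('app/session', exist_ok=True)
    session_file = 'app/session/' + PHONE.replace('+', '')
    client = TelegramClient(session_file, API_ID, API_HASH)

    # start() prompts for the SMS/Telegram auth code on the first run,
    # then silently reuses the saved .session file on later runs
    await client.start(phone=PHONE)
    me = await client.get_me()
    print(f'Authenticated as @{me.username} (id={me.id}); '
          f'session saved to {session_file}.session')
    await client.disconnect()


if __name__ == '__main__':
    asyncio.run(main())
```

Run it once per account, then copy or mount the resulting `.session` files as described in the README above.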