Skip to content

Commit

Permalink
Added config, updated readme, updated how FB page access tokens are used
Browse files Browse the repository at this point in the history
- Now use config files to set configuration
- Updated readme to include config.ini, how to download JSON files from reddit, how to get API keys of all social media handles
- Uses FB page access tokens directly
- TODO: Remove useless code
  • Loading branch information
prashantsengar committed Mar 10, 2020
1 parent a295193 commit d472879
Show file tree
Hide file tree
Showing 8 changed files with 89 additions and 30 deletions.
2 changes: 1 addition & 1 deletion JSONs.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,4 +7,4 @@ def new_JSON(__JSON):
JSONs.append(__JSON)


JSONs = []
JSONs = ['memes.json']
22 changes: 18 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,17 @@ Get images from Reddit and upload to Instagram and Twitter
## Install requirements
`pip install -r requirements.txt`

- Get Twitter API keys and add them to redins.py
- Add Instagram username and password to redins.py

## Configuration
- Rename config_example.ini to config.ini
- Open config.ini and add the required API keys (see the next section to learn how to get them)
- Set `Instagram = True` if you want to post to Instagram. Similarly, set True or False for other accounts
- Set captions in `cap.py`

## Getting API keys
- To learn how to get Facebook `app_id` and `app_secret`, visit this [page](https://theonetechnologies.com/blog/post/how-to-get-facebook-application-id-and-secret-key)
- To get a `page access token`, see this [page](https://elfsight.com/blog/2017/10/how-to-get-facebook-access-token/)
- To learn how to get Twitter API keys, visit this [page](https://themepacific.com/how-to-generate-api-key-consumer-token-access-key-for-twitter-oauth/994/)

### How to use
- Open Command prompt or terminal
Expand All @@ -18,5 +27,10 @@ Get images from Reddit and upload to Instagram and Twitter
- Type `python redins.py` or `python3 redins.py`
- Enter the number of files to be uploaded

#### How to download JSON of a website
To be filled
#### How to download JSON of a subreddit
- Let us say you want to get data of the subreddit /r/memes
- Go to https://reddit.com/r/memes.json
- Save the web page as memes.json in the directory of the project
- Open `JSONs.py` and write `new_JSON('memes.json')` after line 5

*Note:* Due to recent changes in Instagram's private API, Instagram posting might not work. It will be updated to use the new Graph API
Binary file removed __pycache__/JSONs.cpython-36.pyc
Binary file not shown.
Binary file removed __pycache__/caps.cpython-36.pyc
Binary file not shown.
16 changes: 11 additions & 5 deletions config.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import configparser

# Instagram
user = ""
Expand All @@ -12,7 +13,7 @@
TWITTER = False

# Facebook
user_access_token=""
page_access_token=""
fb_app_id=""
fb_app_secret=""
long_lived_token=""
Expand All @@ -23,7 +24,7 @@ def get_instagram_data(ig_section):
return ig_section['username'], ig_section['password']

def get_fb_data(fb_section):
    """Return the Facebook credentials stored in a config.ini section.

    Args:
        fb_section: mapping-like config section (e.g. ``ConfigParser['Facebook']``)
            holding the keys read below.

    Returns:
        Tuple ``(page_access_token, app_id, app_secret, page_id)``.

    Raises:
        KeyError: if any of the expected keys is missing from the section.
    """
    # 'page_id' must be a quoted string key; the earlier bare name raised NameError.
    return (fb_section['page_access_token'], fb_section['app_id'],
            fb_section['app_secret'], fb_section['page_id'])

def get_twitter_data(t_section):
    """Return the four Twitter OAuth credentials from a config.ini section."""
    wanted = ('consumer_key', 'consumer_secret',
              'access_token_key', 'access_token_secret')
    return tuple(t_section[name] for name in wanted)
Expand All @@ -36,15 +37,20 @@ def get_data():
c = configparser.ConfigParser()
c.read('config.ini')

print(eval(c['Post']['Instagram']))
print(eval(c['Post']['Facebook']))
print(eval(c['Post']['Twitter']))

if eval(c['Post']['Instagram']):
global user, passw, IG
IG = True
user, passw = get_instagram_data()
user, passw = get_instagram_data(c['Instagram'])
if eval(c['Post']['Facebook']):
global user_access_token, fb_app_id, fb_app_secret, fb_page_id, FB
FB = True
user_access_token, fb_app_id, fb_app_secret, fb_page_id = get_fb_data()
print(FB)
user_access_token, fb_app_id, fb_app_secret, fb_page_id = get_fb_data(c['Facebook'])
if eval(c['Post']['Twitter']):
global consumer_key, consumer_secret, access_token_key, access_token_secret, TWITTER
TWITTER = True
consumer_key, consumer_secret, access_token_key, access_token_secret = get_twiiter_data()
consumer_key, consumer_secret, access_token_key, access_token_secret = get_twitter_data(c['Twitter'])
20 changes: 20 additions & 0 deletions config_example.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
[Instagram]
username="@username"
password="password"

[Twitter]
consumer_key="KEY"
consumer_secret = "SECRET"
access_token_key = "TOKEN"
access_token_secret = "TOKEN SECRET"

[Facebook]
page_access_token = "TOKEN"
app_id = "APP ID"
app_secret = "APP SECRET"
page_id = "PAGE ID"

[Post]
Instagram = False
Facebook = True
Twitter = True
1 change: 1 addition & 0 deletions memes.json

Large diffs are not rendered by default.

58 changes: 38 additions & 20 deletions redins.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import config
from config import page_access_token, fb_app_id, fb_app_secret, long_lived_token, fb_page_id
import json
import os as __os
import requests as __requests
Expand Down Expand Up @@ -59,8 +60,8 @@ def write_meme():
def dload(JSON):
get_links(JSON)
write_meme()
file = open('meme.txt')
data = file.read()
with open(f'{JSON}.txt') as file:
data = file.read()
data = json.loads(data)

links = data['data']
Expand Down Expand Up @@ -95,17 +96,17 @@ def dload(JSON):
raise Exception('There has been an error')

def uload_to_twitter(num):
tweet = twitter.Api(consumer_key=consumer_key, consumer_secret=consumer_secret,
access_token_key=access_token_key,
access_token_secret=access_token_secret)
tweet = twitter.Api(consumer_key=config.consumer_key, consumer_secret=config.consumer_secret,
access_token_key=config.access_token_key,
access_token_secret=config.access_token_secret)

dirs = __os.listdir(__os.path.join(curr_dir, 'red_media'))

for j in range(num):
try:
files = __random.choice(dirs)
files = __os.path.join(curr_dir, 'red_media', files)
a.PostUpdate(__random.choice(caps), files)
tweet.PostUpdate(__random.choice(caps), files)
print("Uploaded..")
__os.remove(files)
__time.sleep(10)
Expand All @@ -115,7 +116,7 @@ def uload_to_twitter(num):


def uload_to_ig(num):
i = __ig(user, passw)
i = __ig(config.user, config.passw)
i.login()

# __os.chdir('\\red_media')
Expand Down Expand Up @@ -184,8 +185,9 @@ def get_page_access_token(long_lived_token):
def uload_to_fb(num):

# check_user_token()
long_access_token(long_lived_token,user_access_token,fb_app_id,fb_app_secret)
page_access_token=get_page_access_token(long_lived_token)
## long_access_token(longlivedtoken, user_access_token,fb_app_id,fb_app_secret)
## page_access_token=get_page_access_token(long_lived_token)

graph=facebook.GraphAPI(access_token=page_access_token,version="3.0")

dirs = __os.listdir(__os.path.join(curr_dir, 'red_media'))
Expand All @@ -204,23 +206,39 @@ def uload_to_fb(num):
)
photo.close()
except Exception as e:
print("Error occured {}" .format(str(e)))


if __name__ == '__main__':
print("Error occured: {}" .format(str(e)))

def start_upload():
    """Prompt for a file count and upload that many files to each enabled platform.

    Platform flags (``config.IG`` / ``config.TWITTER`` / ``config.FB``) are set
    by ``config.get_data()`` from the ``[Post]`` section of config.ini.
    """
    print('Starting upload')
    num_of_uload = int(input("Enter the number of files to be uploaded: "))
    # Track whether any platform actually ran; the original `else` was attached
    # only to the FB check, so 'No upload' printed whenever FB was disabled
    # even if IG or Twitter uploads had just run.
    uploaded = False
    if config.IG:
        print('Uploading to IG')
        uload_to_ig(num_of_uload)
        uploaded = True
    if config.TWITTER:
        print('Uploading to Twitter')
        uload_to_twitter(num_of_uload)
        uploaded = True
    if config.FB:
        print('Uploading to FB')
        uload_to_fb(num_of_uload)
        uploaded = True
    if not uploaded:
        print('No upload')

def main():
    """Fetch memes from the configured subreddit JSONs, then upload them.

    If the media folder already contains leftover downloads, skip fetching
    and go straight to uploading.
    """
    if check_folder():
        config.get_data()
        # '> 2' presumably allows for non-media housekeeping entries in the
        # folder -- TODO confirm against check_folder()'s behavior.
        if len(__os.listdir('red_media')) > 2:
            start_upload()
            return

        for j in JSONs:
            # dload() already runs get_links() and write_meme() itself,
            # so calling them separately here would do the work twice.
            dload(j)
        start_upload()
    else:
        print("Error has occured in creating file")


if __name__ == '__main__':
    main()

0 comments on commit d472879

Please sign in to comment.