Archived
1
0
This repository has been archived on 2020-12-10. You can view files and clone it, but cannot push or open issues or pull requests.
old/social_posts.py
2018-11-01 13:20:46 +01:00

101 lines
3.6 KiB
Python

import json
import os
import random
import pymysql
import urllib.request
from datetime import datetime
from io import BytesIO
import requests
import twitter
from PIL import Image
# Shared MySQL connection + cursor used by every function in this script.
# SECURITY(review): credentials are hard-coded in source — move them to
# environment variables or a config file outside version control.
db = pymysql.connect('localhost', 'kingofdog', 'XrE#513*IOC&tA*B', 'kingofdog')
cur = db.cursor()
def insertIntoDatabase(platform, title, url, author, authorUrl, published, image, originalID):
    """Insert one social-media post into social_posts, or refresh it on duplicate key.

    Parameters map 1:1 onto the table columns (note: the column is spelled
    'post_plattform' in the schema). On a duplicate key only post_content and
    post_img_source are updated. Commits on success; rolls back on error.
    """
    try:
        cur.execute(
            "INSERT INTO social_posts (post_plattform, post_content, post_url, post_author, post_author_url, post_date, post_img_source, post_original_id) VALUES (%s, %s, %s, %s, %s, %s, %s, %s) ON DUPLICATE KEY UPDATE post_content = %s, post_img_source = %s",
            (platform, title, url, author, authorUrl, published, image, originalID, title, image))
        db.commit()
    except Exception as exc:
        # Was a bare `except:` that printed a joke message and hid the actual
        # error; keep the best-effort behavior but surface what went wrong.
        print('insertIntoDatabase failed:', exc)
        db.rollback()
def generateImageName():
    """Produce a random 128-bit integer used as a (practically unique) image name."""
    random_bits = random.getrandbits(128)
    return random_bits
def uploadImage(imageUrl, originalName):
    """Download an image, save it under files/userContent/ as JPEG, and register it.

    Registers the stored file in the `files` table and returns its public
    '/f/<name>' path. Raises on HTTP failure instead of feeding an error page
    to Pillow; DB registration is best-effort (rolled back on error).
    """
    response = requests.get(imageUrl)
    response.raise_for_status()  # fail fast on 4xx/5xx instead of parsing HTML as an image
    img = Image.open(BytesIO(response.content))
    # The target path forces JPEG, which cannot store alpha/palette modes —
    # saving an RGBA/P image would raise in Pillow, so normalize to RGB first.
    if img.mode != 'RGB':
        img = img.convert('RGB')
    name = '%032x' % generateImageName()
    path = 'files/userContent/%s.jpg' % name
    img.save(path)
    fileSize = os.path.getsize(path)
    try:
        cur.execute(
            "INSERT INTO files (name, original_name, type, size, path, isUserData) VALUES (%s, %s, 'image/jpeg', %s, %s, 1)",
            (name, originalName, fileSize, path))
        db.commit()
    except Exception as exc:
        # Was a bare `except:` hiding the real error; keep best-effort semantics.
        print('uploadImage: DB insert failed:', exc)
        db.rollback()
    return '/f/' + name
def getYouTubeVideos():
    """Fetch up to 20 items from the channel's uploads playlist and store each post.

    Uses the YouTube Data API v3 playlistItems endpoint and hands every video
    to insertIntoDatabase (originalID is always 0 for YouTube posts).
    """
    # SECURITY(review): the API key is hard-coded in source — move to env/config.
    apiUrl = 'https://www.googleapis.com/youtube/v3/playlistItems?part=snippet%2CcontentDetails&maxResults=20&playlistId=UUEDHiXaIhm2VFu-hi6CcOWw&key=AIzaSyDAZ_TwVMZeiKDQxgWM2OYRq3YskEpY9yw'
    with urllib.request.urlopen(apiUrl) as url:
        data = json.loads(url.read().decode())
    for item in data['items']:
        snippet = item['snippet']
        # NOTE(review): assumes publishedAt always carries '.000Z' milliseconds;
        # the API also emits plain '...Z' timestamps — confirm against live data.
        published = datetime.strptime(snippet['publishedAt'], '%Y-%m-%dT%H:%M:%S.000Z').timestamp()
        author = snippet['channelTitle']
        authorUrl = 'https://youtube.com/channel/' + snippet['channelId']
        title = snippet['title']
        url = 'https://youtu.be/' + snippet['resourceId']['videoId']
        # BUGFIX: 'maxres' only exists for HD uploads; the old code indexed
        # snippet['thumbnails']['maxres'] unconditionally and raised KeyError
        # before its truthiness check could ever fall back to 'standard'.
        thumbnails = snippet['thumbnails']
        best = thumbnails.get('maxres') or thumbnails['standard']
        thumbnail = best['url']
        insertIntoDatabase('YouTube', title, url, author, authorUrl, published, thumbnail, 0)
def getTwitterPosts():
    """Fetch the @kingofdogtv timeline and store each tweet as a social post.

    Tweets with media get their first image mirrored locally via uploadImage;
    the stored post then references the local '/f/<name>' copy.
    """
    # SECURITY(review): OAuth credentials are hard-coded in source — move to
    # env/config and rotate these keys, since they are now in VCS history.
    api = twitter.Api(consumer_key='TsUzd4stukv9Ix7TGG7RdYq4k',
                      consumer_secret='sTRq4WcELJZuciTrkNUttGgWhEiGaUkuqNhISgaG4uHRFgzm0B',
                      access_token_key='1880071790-Nij2RaBDVRGVWoWW2PSJUwAvuLAOaQFAAr5tAtC',
                      access_token_secret='ldhLg0SP3ycrrdIqhNcddj0042pdGY9vmZMKQJRClmDkD')
    tweets = api.GetUserTimeline(screen_name='kingofdogtv')
    for tweet in tweets:
        content = tweet.text
        published = datetime.strptime(tweet.created_at, '%a %b %d %H:%M:%S %z %Y').timestamp()
        author = tweet.user.name
        authorUrl = 'https://twitter.com/' + tweet.user.screen_name
        url = authorUrl + '/status/' + tweet.id_str
        originalID = tweet.id
        image = ''
        if tweet.media:
            # BUGFIX: the original discarded uploadImage's return value and
            # stored the remote Twitter URL, defeating the local mirror.
            # uploadImage returns the local '/f/<name>' path — store that.
            image = uploadImage(tweet.media[0].media_url, tweet.id_str)
        insertIntoDatabase('Twitter', content, url, author, authorUrl, published, image, originalID)
# Sync both feeds; close the shared DB connection even if a fetch fails
# (previously an exception in either call leaked the open connection).
try:
    getYouTubeVideos()
    getTwitterPosts()
finally:
    db.close()