# Snapshot of old/social_posts.py from a repository archived on 2020-12-10
# (read-only: the upstream project no longer accepts pushes or issues).
import json
import os
import random
import re
import urllib.request
from datetime import datetime
from io import BytesIO

import pymysql
import requests
import twitter
from PIL import Image
# Shared MySQL connection and cursor used by every function in this script.
# SECURITY(review): credentials are hard-coded in source (a previous revision
# even kept the production password in a comment here — removed; rotate it).
# Move host/user/password/database to environment variables or a config file
# that is excluded from version control.
db = pymysql.connect('localhost', 'kingofdog', '123456', 'kingofdog')
cur = db.cursor()
def insertIntoDatabase(platform, title, url, author, authorUrl, published, image, originalID):
    """Insert one social-media post into `social_posts`, or, when a row with
    the same unique key already exists, refresh its content and image.

    Parameters
    ----------
    platform : str   -- e.g. 'YouTube' or 'Twitter'
    title : str      -- post text / video title (stored as post_content)
    url : str        -- canonical link to the post
    author : str     -- display name of the author
    authorUrl : str  -- link to the author's profile/channel
    published : float -- unix timestamp of publication
    image : str      -- relative image path ('' or '/f/<name>')
    originalID : int -- the platform's own post id (0 when not applicable)

    On any database error the transaction is rolled back and the error is
    reported; nothing is raised to the caller (best-effort semantics).
    """
    try:
        # NOTE: 'post_plattform' is spelled this way in the live schema.
        cur.execute(
            "INSERT INTO social_posts (post_plattform, post_content, post_url, post_author, post_author_url, post_date, post_img_source, post_original_id) VALUES (%s, %s, %s, %s, %s, %s, %s, %s) ON DUPLICATE KEY UPDATE post_content = %s, post_img_source = %s",
            (platform, title, url, author, authorUrl, published, image, originalID, title, image))
        db.commit()
    except Exception as exc:
        # Previously a bare `except:` silently discarded every error
        # (including KeyboardInterrupt); report the cause before rolling back.
        print('insertIntoDatabase failed for %s post %r: %s' % (platform, url, exc))
        db.rollback()
def generateImageName():
    """Return a random 128-bit integer, later hex-formatted as a file name."""
    random_bits = random.getrandbits(128)
    return random_bits


# Reads image from entered url and uploads it to the own server while
# creating a corresponding entry in the database
def uploadImage(imageUrl, originalName):
    """Download *imageUrl*, store it as a JPEG under a random name, register
    it in the `files` table, and return the relative display path '/f/<name>'.

    *originalName* is a stable per-post identifier (e.g. 'twitter_<id>'):
    re-uploading the same post reuses the previous random name and replaces
    the old database row instead of accumulating duplicate files.
    """
    response = requests.get(imageUrl)
    # Fail early with a clear error instead of handing an HTML error page
    # (or other garbage) to PIL.
    response.raise_for_status()
    # NOTE(review): saving as .jpg assumes the source decodes to an RGB
    # image; an RGBA PNG would make img.save() raise — confirm sources.
    img = Image.open(BytesIO(response.content))
    name = '%032x' % generateImageName()
    # Check whether an image with the same original name was uploaded before.
    # If so, reuse its random name and delete the stale database row.
    try:
        cur.execute("SELECT * FROM files WHERE original_name = %s", (originalName,))
        result = cur.fetchone()
        # BUG FIX: fetchone() returns None when there is no match; the old
        # code indexed it unconditionally and relied on a bare `except` to
        # recover, which also masked genuine database errors.
        if result is not None:
            name = result[1]
            cur.execute("DELETE FROM files WHERE ID = %s", (result[0],))
            db.commit()
    except Exception as exc:
        print('uploadImage: could not replace previous upload %r: %s' % (originalName, exc))
        db.rollback()
    # Write the image to its final location and measure it.
    path = 'files/userContent/%s.jpg' % name
    img.save(path)
    fileSize = os.path.getsize(path)
    # Record the new file in the database.
    try:
        cur.execute(
            "INSERT INTO files (name, original_name, type, size, path, isUserData) VALUES (%s, %s, 'image/jpeg', %s, %s, 1)",
            (name, originalName, fileSize, path))
        db.commit()
    except Exception as exc:
        print('uploadImage: could not register file %r: %s' % (name, exc))
        db.rollback()
    # Relative URL path for displaying the image later on.
    return '/f/' + name
def getYouTubeVideos():
    """Fetch the channel's recent uploads from the YouTube Data API and store
    each as a 'YouTube' row via insertIntoDatabase()."""
    # SECURITY(review): the API key is hard-coded; move it to configuration
    # outside version control.
    apiUrl = 'https://www.googleapis.com/youtube/v3/playlistItems?part=snippet%2CcontentDetails&maxResults=20&playlistId=UUEDHiXaIhm2VFu-hi6CcOWw&key=AIzaSyDAZ_TwVMZeiKDQxgWM2OYRq3YskEpY9yw'
    with urllib.request.urlopen(apiUrl) as resp:
        data = json.loads(resp.read().decode())
    for item in data['items']:
        snippet = item['snippet']
        # NOTE(review): assumes publishedAt always ends in '.000Z' — a
        # non-zero fractional second would raise ValueError here; confirm.
        published = datetime.strptime(snippet['publishedAt'], '%Y-%m-%dT%H:%M:%S.000Z').timestamp()
        author = snippet['channelTitle']
        authorUrl = 'https://youtube.com/channel/' + snippet['channelId']
        title = snippet['title']
        videoId = snippet['resourceId']['videoId']
        url = 'https://youtu.be/' + videoId
        # BUG FIX: 'maxres' is absent for many videos, so indexing it
        # directly raised KeyError and the 'standard' fallback was never
        # reached. Use .get() chains and fall through gracefully.
        thumbnails = snippet['thumbnails']
        thumbnail = (thumbnails.get('maxres', {}).get('url')
                     or thumbnails.get('standard', {}).get('url'))
        if thumbnail:
            thumbnail = uploadImage(thumbnail, 'youtube_' + videoId)
        insertIntoDatabase('YouTube', title, url, author, authorUrl, published, thumbnail, 0)
def getTwitterPosts():
    """Fetch the account's recent tweets and store each as a 'Twitter' row
    via insertIntoDatabase()."""
    # SECURITY(review): API credentials are hard-coded in source; move them
    # to configuration outside version control and rotate the exposed keys.
    api = twitter.Api(consumer_key='TsUzd4stukv9Ix7TGG7RdYq4k',
                      consumer_secret='sTRq4WcELJZuciTrkNUttGgWhEiGaUkuqNhISgaG4uHRFgzm0B',
                      access_token_key='1880071790-Nij2RaBDVRGVWoWW2PSJUwAvuLAOaQFAAr5tAtC',
                      access_token_secret='ldhLg0SP3ycrrdIqhNcddj0042pdGY9vmZMKQJRClmDkD')
    tweets = api.GetUserTimeline(screen_name='kingofdogtv')
    for tweet in tweets:
        # Strip the trailing t.co link Twitter appends for media/quotes.
        # BUG FIX: re.sub returns a new string; the old code discarded the
        # result, so the link was never actually removed. Also escape the
        # dots so they only match literal '.'.
        content = re.sub(r' https://t\.co/.+', '', tweet.text)
        published = datetime.strptime(tweet.created_at, '%a %b %d %H:%M:%S %z %Y').timestamp()
        author = tweet.user.name
        authorUrl = 'https://twitter.com/' + tweet.user.screen_name
        url = authorUrl + '/status/' + tweet.id_str
        originalID = tweet.id
        image = ''
        if tweet.media:
            # Mirror the first attached image onto our own server.
            image = uploadImage(tweet.media[0].media_url, 'twitter_' + tweet.id_str)
        insertIntoDatabase('Twitter', content, url, author, authorUrl, published, image, originalID)
# Entry point: refresh the stored posts from each platform, then release the
# shared database connection. The YouTube sync is currently disabled.
# getYouTubeVideos()
getTwitterPosts()

db.close()