This commit is contained in:
soju6jan
2022-10-02 20:18:05 +09:00
parent b9c3aac91f
commit 29930fdef7
150 changed files with 53982 additions and 0 deletions

17
lib/support/__init__.py Normal file
View File

@@ -0,0 +1,17 @@
def d(data):
    """Return a log-friendly representation of *data*.

    dicts and lists are rendered as indented JSON preceded by a newline
    (so the dump starts on its own line in the log); anything else is
    simply str()-ed.
    """
    if isinstance(data, (dict, list)):  # was: type(data) in [type({}), type([])]
        import json
        return '\n' + json.dumps(data, indent=4, ensure_ascii=False)
    return str(data)
from .logger import get_logger
logger = get_logger()  # module-level default logger, created on first import
def set_logger(l):
    """Replace the module-level ``logger`` with an externally supplied one."""
    global logger
    logger = l
# Also intended for plain CLI use.
# If this module is imported without going through set_logger(), a log file
# is created at the default path, so take care that the import does not
# happen before set_logger() is called.

View File

@@ -0,0 +1,13 @@
from support import logger
"""
from support import d, get_logger, logger
from .discord import SupportDiscord
from .ffmpeg import SupportFfmpeg
from .file import SupportFile
from .image import SupportImage
from .process import SupportProcess
from .string import SupportString
from .util import SupportUtil, pt, default_headers, SingletonClass, AlchemyEncoder
from .aes import SupportAES
from .yaml import SupportYaml
"""

43
lib/support/base/aes.py Normal file
View File

@@ -0,0 +1,43 @@
import os, base64, traceback
from Crypto.Cipher import AES
from Crypto import Random
from . import logger
BS = 16  # AES block size in bytes

def pad(s):
    """PKCS#7-pad *s* up to a multiple of BS.

    The padding is appended as str characters, so callers pass str here;
    bytes are encoded after padding (see SupportAES.encrypt).
    """
    n = BS - len(s) % BS
    return s + n * chr(n)

def unpad(s):
    """Strip PKCS#7 padding from *s* (bytes): the last byte is the pad length."""
    return s[0:-s[-1]]

# SECURITY NOTE(review): hard-coded default AES key shipped in source — any
# data encrypted with it is readable by anyone holding this file.
key = b'140b41b22a29beb4061bda66b6747e14'
class SupportAES(object):
    """AES-CBC helpers with base64 transport encoding.

    Uses the module-level hard-coded ``key`` unless *mykey* is supplied.
    """
    @classmethod
    def encrypt(cls, raw, mykey=None):
        """Encrypt *raw* (str) and return base64(iv + ciphertext) as str."""
        # Re-seed the PyCrypto RNG after fork; may fail elsewhere, which is harmless.
        try:
            Random.atfork()
        except Exception as exception:
            logger.error('Exception:%s', exception)
            logger.error(traceback.format_exc())
        # NOTE(review): pad() appends str characters, so this line raises
        # TypeError if *raw* is bytes — only str input survives; confirm callers.
        raw = pad(raw)
        if type(raw) == type(''):
            raw = raw.encode()
        if mykey is not None and type(mykey) == type(''):
            mykey = mykey.encode()
        # Fresh random IV per message, prepended to the ciphertext below.
        iv = Random.new().read( AES.block_size )
        cipher = AES.new(key if mykey is None else mykey, AES.MODE_CBC, iv )
        try:
            tmp = cipher.encrypt( raw )
        except Exception as exception:
            logger.error('Exception:%s', exception)
            logger.error(traceback.format_exc())
            # Legacy fallback: retry with the encoded form (Py2-era remnant).
            tmp = cipher.encrypt( raw.encode() )
        ret = base64.b64encode( iv + tmp )
        ret = ret.decode()
        return ret
    @classmethod
    def decrypt(cls, enc, mykey=None):
        """Decrypt base64 ciphertext produced by encrypt(); returns str."""
        enc = base64.b64decode(enc)
        iv = enc[:16]
        # NOTE(review): a too-short input gets a *random* IV, so the first
        # block decrypts to garbage instead of raising — confirm this is intended.
        if len(iv) != 16:
            iv = os.urandom(16)
        cipher = AES.new(key if mykey is None else mykey, AES.MODE_CBC, iv )
        return unpad(cipher.decrypt( enc[16:] )).decode()

211
lib/support/base/discord.py Normal file
View File

@@ -0,0 +1,211 @@
import os, io, traceback, time, random, requests
try:
    from discord_webhook import DiscordWebhook, DiscordEmbed
except:
    # NOTE(review): auto-installing at import time via os.system is fragile
    # (no venv awareness, silent failure) — consider failing loudly instead.
    os.system('pip3 install discord-webhook')
    from discord_webhook import DiscordWebhook, DiscordEmbed
from . import logger
# SECURITY NOTE(review): live webhook URLs are credentials — anyone with this
# source can post to these channels. They should be externalized and rotated.
webhook_list = [
    #'https://discord.com/api/webhooks/933908493612744705/DGPWBQN8LiMnt2cnCSNVy6rCc5Gi_vj98QpJ3ZEeihohzsfOsCWvcixJU1A2fQuepGFq', # 1
    #'https://discord.com/api/webhooks/932754078839234731/R2iFzQ7P8IKV-MGWp820ToWX07s5q8X-st-QsUJs7j3JInUj6ZlI4uDYKeR_cwIi98mf', # 2
    #'https://discord.com/api/webhooks/932754171835351131/50RLrYa_B69ybk4BWoLruNqU7YlZ3pl3gpPr9bwuankWyTIGtRGbgf0CJ9ExJWJmvXwo', # 3
    'https://discord.com/api/webhooks/794661043863027752/A9O-vZSHIgfQ3KX7wO5_e2xisqpLw5TJxg2Qs1stBHxyd5PK-Zx0IJbAQXmyDN1ixZ-n', # 4
    'https://discord.com/api/webhooks/810373348776476683/h_uJLBBlHzD0w_CG0nUajFO-XEh3fvy-vQofQt1_8TMD7zHiR7a28t3jF-xBCP6EVlow', # 5
    'https://discord.com/api/webhooks/810373405508501534/wovhf-1pqcxW5h9xy7iwkYaf8KMDjHU49cMWuLKtBWjAnj-tzS1_j8RJ7tsMyViDbZCE', # 6
    'https://discord.com/api/webhooks/796558388326039552/k2VV356S1gKQa9ht-JuAs5Dqw5eVkxgZsLUzFoxmFG5lW6jqKl7zCBbbKVhs3pcLOetm', # 7
    'https://discord.com/api/webhooks/810373566452858920/Qf2V8BoLOy2kQzlZGHy5HZ1nTj7lK72ol_UFrR3_eHKEOK5fyR_fQ8Yw8YzVh9EQG54o', # 8
    'https://discord.com/api/webhooks/810373654411739157/SGgdO49OCkTNIlc_BSMSy7IXQwwXVonG3DsVfvBVE6luTCwvgCqEBpEk30WBeMMieCyI', # 9
    'https://discord.com/api/webhooks/810373722341900288/FwcRJ4YxYjpyHpnRwF5f2an0ltEm8JPqcWeZqQi3Qz4QnhEY-kR2sjF9fo_n6stMGnf_', # 10
    'https://discord.com/api/webhooks/931779811691626536/vvwCm1YQvE5tW4QJ4SNKRmXhQQrmOQxbjsgRjbTMMXOSiclB66qipiZaax5giAqqu2IB', # 11
    'https://discord.com/api/webhooks/931779905631420416/VKlDwfxWQPJfIaj94-ww_hM1MNEayRKoMq0adMffCC4WQS60yoAub_nqPbpnfFRR3VU5', # 12
    'https://discord.com/api/webhooks/931779947914231840/22amQuHSOI7wPijSt3U01mXwd5hTo_WHfVkeaowDQMawCo5tXVfeEMd6wAWf1n7CseiG', # 13
    'https://discord.com/api/webhooks/810374294416654346/T3-TEdKIg7rwMZeDzNr46KPDvO7ZF8pRdJ3lfl39lJw2XEZamAG8uACIXagbNMX_B0YN', # 14
    'https://discord.com/api/webhooks/810374337403289641/_esFkQXwlPlhxJWtlqDAdLg2Nujo-LjGPEG3mUmjiRZto69NQpkBJ0F2xtSNrCH4VAgb', # 15
    'https://discord.com/api/webhooks/810374384736534568/mH5-OkBVpi7XqJioaQ8Ma-NiL-bOx7B5nYJpL1gZ03JaJaUaIW4bCHeCt5O_VGLJwAtj', # 16
    'https://discord.com/api/webhooks/810374428604104724/Z1Tdxz3mb0ytWq5LHWi4rG5CeJnr9KWXy5aO_waeD0NcImQnhRXe7h7ra7UrIDRQ2jOg', # 17
    'https://discord.com/api/webhooks/810374475773509643/QCPPN4djNzhuOmbS3DlrGBunK0SVR5Py9vMyCiPL-0T2VPgitFZS4YM6GCLfM2fkrn4-', # 18
    'https://discord.com/api/webhooks/810374527652855819/5ypaKI_r-hYzwmdDlVmgAU6xNgU833L9tFlPnf3nw4ZDaPMSppjt77aYOiFks4KLGQk8', # 19
    'https://discord.com/api/webhooks/810374587917402162/lHrG7CEysGUM_41DMnrxL2Q8eh1-xPjJXstYE68WWfLQbuUAV3rOfsNB9adncJzinYKi', # 20
]
class SupportDiscord(object):
    """Discord-webhook helpers: send messages and use webhook uploads as a
    free image host (upload bytes/file, return the discord CDN/proxy URL)."""

    @classmethod
    def _first_response_json(cls, response):
        """Unwrap discord_webhook's execute() result (single response or list
        of responses) to the JSON body of the first response, or None."""
        if type(response) == type([]):
            if len(response) > 0:
                return response[0].json()
            return None
        return response.json()

    @classmethod
    def send_discord_message(cls, text, image_url=None, webhook_url=None):
        """Post *text* (optionally with an embedded image) to *webhook_url*.

        Returns True on success, False on any error.
        """
        try:
            webhook = DiscordWebhook(url=webhook_url, content=text)
            if image_url is not None:
                embed = DiscordEmbed()
                embed.set_timestamp()
                embed.set_image(url=image_url)
                webhook.add_embed(embed)
            webhook.execute()
            return True
        except Exception as exception:
            logger.error('Exception:%s', exception)
            logger.error(traceback.format_exc())
            return False

    @classmethod
    def discord_proxy_image(cls, image_url, webhook_url=None, retry=True):
        """Re-host *image_url* through a Discord embed and return its proxy_url.

        Retries once on a random other webhook; falls back to the original
        URL when the proxy URL cannot be obtained or is not fetchable.
        """
        if image_url == '' or image_url is None:
            return
        if webhook_url is None or webhook_url == '':
            webhook_url = random.choice(webhook_list)
        try:
            webhook = DiscordWebhook(url=webhook_url, content='')
            embed = DiscordEmbed()
            embed.set_timestamp()
            embed.set_image(url=image_url)
            webhook.add_embed(embed)
            # an (empty) dummy attachment forces discord to materialize the embed
            webhook.add_file(file=io.BytesIO().getvalue(), filename='dummy')
            response = webhook.execute()
            data = cls._first_response_json(response)
            if data is not None and 'embeds' in data:
                target = data['embeds'][0]['image']['proxy_url']
                if requests.get(target).status_code == 200:
                    return target
                return image_url
            raise Exception(str(data))
        except Exception as exception:
            logger.error('Exception:%s', exception)
            logger.error(traceback.format_exc())
            if retry:
                time.sleep(1)
                return cls.discord_proxy_image(image_url, webhook_url=None, retry=False)
            return image_url

    @classmethod
    def discord_proxy_image_localfile(cls, filepath, retry=True):
        """Upload the image file at *filepath* and return its discord CDN URL
        (None when every attempt fails)."""
        webhook_url = random.choice(webhook_list)
        try:
            webhook = DiscordWebhook(url=webhook_url, content='')
            with open(filepath, 'rb') as fh:
                webhook.add_file(file=fh.read(), filename='image.jpg')
            embed = DiscordEmbed()
            embed.set_image(url="attachment://image.jpg")
            response = webhook.execute()
            data = cls._first_response_json(response)
            if data is not None and 'attachments' in data:
                target = data['attachments'][0]['url']
                if requests.get(target).status_code == 200:
                    return target
            if retry:
                time.sleep(1)
                return cls.discord_proxy_image_localfile(filepath, retry=False)
        except Exception as exception:
            logger.error('Exception:%s', exception)
            logger.error(traceback.format_exc())
            if retry:
                time.sleep(1)
                return cls.discord_proxy_image_localfile(filepath, retry=False)

    @classmethod
    def discord_proxy_image_bytes(cls, bytes, retry=True):
        """Upload raw image *bytes* and return the discord CDN URL.

        (The parameter name shadows the builtin ``bytes``; kept unchanged for
        compatibility with existing keyword callers.)
        """
        idx = random.randint(0, len(webhook_list)-1)
        webhook_url = webhook_list[idx]
        try:
            webhook = DiscordWebhook(url=webhook_url, content='')
            webhook.add_file(file=bytes, filename='image.jpg')
            embed = DiscordEmbed()
            embed.set_image(url="attachment://image.jpg")
            response = webhook.execute()
            data = cls._first_response_json(response)
            if data is not None and 'attachments' in data:
                target = data['attachments'][0]['url']
                if requests.get(target).status_code == 200:
                    return target
            logger.error(f"discord webhook error : {webhook_url}")
            logger.error(f"discord webhook error : {idx}")
            if retry:
                time.sleep(1)
                return cls.discord_proxy_image_bytes(bytes, retry=False)
        except Exception as exception:
            logger.error('Exception:%s', exception)
            logger.error(traceback.format_exc())
            if retry:
                time.sleep(1)
                return cls.discord_proxy_image_bytes(bytes, retry=False)

    # Used when RSS subtitle uploads need a public URL.
    @classmethod
    def discord_cdn(cls, byteio=None, filepath=None, filename=None, webhook_url=None, content='', retry=True):
        """Upload *byteio* (or the file at *filepath*) as *filename* and return
        its discord CDN URL; None when every attempt fails.
        """
        if webhook_url is None:
            webhook_url = webhook_list[random.randint(0,9)] # first block of webhooks: sjva channel
        try:
            webhook = DiscordWebhook(url=webhook_url, content=content)
            if byteio is None and filepath is not None:
                with open(filepath, 'rb') as fh:
                    byteio = io.BytesIO(fh.read())
            webhook.add_file(file=byteio.getvalue(), filename=filename)
            response = webhook.execute()
            data = cls._first_response_json(response)
            if data is not None and 'attachments' in data:
                target = data['attachments'][0]['url']
                if requests.get(target).status_code == 200:
                    return target
            if retry:
                time.sleep(1)
                # BUGFIX: the original retried via discord_proxy_image_localfile,
                # which crashes for byteio-only calls and drops filename/content.
                return cls.discord_cdn(byteio=byteio, filepath=filepath, filename=filename, webhook_url=None, content=content, retry=False)
        except Exception as exception:
            logger.error('Exception:%s', exception)
            logger.error(traceback.format_exc())
            if retry:
                time.sleep(1)
                return cls.discord_cdn(byteio=byteio, filepath=filepath, filename=filename, webhook_url=None, content=content, retry=False)

View File

@@ -0,0 +1,47 @@
import os, sys, traceback, subprocess, json, platform, time
import shutil
from . import logger
class SupportFfmpeg(object):
    @classmethod
    def download_m3u8(cls, config):
        """Download an HLS/m3u8 stream to ``config['output_filepath']`` via ffmpeg.

        config keys: ffmpeg_path, url, output_filepath; optional: proxy,
        headers (dict), tmp_dir (defaults to the current working directory).
        The download goes to a temp file first, then is moved into place.
        """
        command = None  # BUGFIX: defined up-front so the except-branch log cannot NameError
        try:
            base = [config.get('ffmpeg_path'), '-y', '-correct_ts_overflow', '0']
            tail = ['-i', config['url'], '-c', 'copy', '-bsf:a', 'aac_adtstoasc']
            if config.get('proxy') == None:
                if config.get('headers') == None:
                    command = base + tail
                else:
                    headers_command = []
                    for key, value in config.get('headers').items():
                        # ffmpeg takes the UA via -user_agent; everything else via -headers
                        if key.lower() == 'user-agent':
                            headers_command.append('-user_agent')
                            headers_command.append(value)
                        else:
                            headers_command.append('-headers')
                            headers_command.append('\'%s:%s\''%(key,value))
                    command = base + headers_command + tail
            else:
                command = base + ['-http_proxy', config.get('proxy')] + tail
            filename = str(int(time.time())) + '.mp4'
            tmp = config.get('tmp_dir')
            if tmp == None:
                tmp = os.getcwd()
            tmp_filepath = os.path.join(tmp, filename)
            command.append(tmp_filepath)
            logger.debug(' '.join(command))
            from . import SupportSubprocess
            # NOTE(review): timeout=10 seconds looks far too short for a real
            # stream download — confirm the intended value.
            ret = SupportSubprocess.execute(command, timeout=10)
            logger.error(ret)
            if os.path.exists(tmp_filepath):
                shutil.move(tmp_filepath, config.get('output_filepath'))
        except Exception as e:
            logger.error(f'Exception:{str(e)}', )
            logger.error(traceback.format_exc())
            logger.error('command : %s', command)

293
lib/support/base/file.py Normal file
View File

@@ -0,0 +1,293 @@
import os, traceback, re, json, codecs
from . import logger
class SupportFile(object):
    """File-system helpers: text/binary/JSON/YAML I/O, download, zip, cleanup."""

    @classmethod
    def read_file(cls, filename):
        """Return the UTF-8 text content of *filename*, or None on error."""
        try:
            with codecs.open(filename, 'r', encoding='utf8') as ifp:
                return ifp.read()
        except Exception as exception:
            logger.error('Exception:%s', exception)
            logger.error(traceback.format_exc())

    @classmethod
    def write_file(cls, filename, data):
        """Write *data* to *filename* as UTF-8 text."""
        try:
            with codecs.open(filename, 'w', encoding='utf8') as ofp:
                ofp.write(data)
        except Exception as exception:
            logger.error('Exception:%s', exception)
            logger.error(traceback.format_exc())

    @classmethod
    def download(cls, url, filepath):
        """Download *url* to *filepath* using browser-like headers.

        Returns True on success, False for an empty body or any error.
        """
        try:
            headers = {
                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36',
                'Accept' : 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
                'Accept-Language' : 'ko-KR,ko;q=0.9,en-US;q=0.8,en;q=0.7',
                'Connection': 'Keep-Alive',
            }
            import requests
            response = requests.get(url, headers=headers)
            if len(response.content) == 0:
                return False
            with open(filepath, "wb") as file_is:
                file_is.write(response.content)
            return True
        except Exception as exception:
            logger.debug('Exception:%s', exception)
            logger.debug(traceback.format_exc())
            return False

    @classmethod
    def write(cls, data, filepath, mode='w'):
        """Write *data* (str or bytes) to *filepath*; returns True on success."""
        try:
            if isinstance(data, bytes) and mode == 'w':
                data = data.decode('utf-8')
            with codecs.open(filepath, mode, encoding='utf8') as ofp:
                ofp.write(data)
            return True
        except Exception as exception:
            logger.debug('Exception:%s', exception)
            logger.debug(traceback.format_exc())
            return False

    @classmethod
    def text_for_filename(cls, text):
        """Sanitize *text* for use as a filename: characters forbidden on
        Windows become spaces and whitespace runs collapse to one space."""
        text = re.sub(r'[\\/:*?\"<>|]', ' ', text).strip()
        text = re.sub(r'\s{2,}', ' ', text)
        return text

    @classmethod
    def size(cls, start_path = '.'):
        """Return the total size in bytes of all regular files under *start_path*."""
        total_size = 0
        for dirpath, dirnames, filenames in os.walk(start_path):
            for f in filenames:
                fp = os.path.join(dirpath, f)
                if not os.path.islink(fp):  # skip symlinks so nothing is double-counted
                    total_size += os.path.getsize(fp)
        return total_size

    @classmethod
    def file_move(cls, source_path, target_dir, target_filename):
        """Move *source_path* into *target_dir*/*target_filename*, appending a
        unix timestamp to the name when the target already exists."""
        try:
            import time, shutil
            if os.path.exists(target_dir) == False:
                os.makedirs(target_dir)
            target_path = os.path.join(target_dir, target_filename)
            if source_path != target_path:
                if os.path.exists(target_path):
                    tmp = os.path.splitext(target_filename)
                    new_target_filename = f"{tmp[0]} {str(time.time()).split('.')[0]}{tmp[1]}"
                    target_path = os.path.join(target_dir, new_target_filename)
                shutil.move(source_path, target_path)
        except Exception as exception:
            logger.debug('Exception:%s', exception)
            logger.debug(traceback.format_exc())

    @classmethod
    def rmtree(cls, folderpath):
        """Recursively delete *folderpath*; falls back to `rm -rf`. Returns bool."""
        import shutil
        try:
            shutil.rmtree(folderpath)
            return True
        except Exception:
            try:
                # BUGFIX: the original lacked the f-prefix, so the literal
                # string "{folderpath}" was passed to rm instead of the path.
                os.system(f"rm -rf '{folderpath}'")
                return True
            except Exception:
                return False

    @classmethod
    def rmtree2(cls, folderpath):
        """Delete the *contents* of *folderpath* (the folder itself remains).

        Returns True on success (BUGFIX: the original fell through and
        returned None, which reads as failure), False on failure.
        """
        import shutil
        try:
            for root, dirs, files in os.walk(folderpath):
                for name in files:
                    os.remove(os.path.join(root, name))
                for name in dirs:
                    shutil.rmtree(os.path.join(root, name))
            return True
        except Exception:
            return False

    @classmethod
    def write_json(cls, filepath, data):
        """Dump *data* to *filepath* as pretty UTF-8 JSON, creating parent dirs."""
        try:
            if os.path.dirname(filepath) != '':
                os.makedirs(os.path.dirname(filepath), exist_ok=True)
            with open(filepath, "w", encoding='utf8') as json_file:
                json.dump(data, json_file, indent=4, ensure_ascii=False)
        except Exception as exception:
            logger.error('Exception:%s', exception)
            logger.error(traceback.format_exc())

    @classmethod
    def read_json(cls, filepath):
        """Load and return JSON from *filepath*, or None on error."""
        try:
            with open(filepath, "r", encoding='utf8') as json_file:
                return json.load(json_file)
        except Exception as exception:
            logger.error('Exception:%s', exception)
            logger.error(traceback.format_exc())

    @classmethod
    def write_binary(cls, filename, data):
        """Write raw *data* bytes to *filename*."""
        try:
            with open(filename, 'wb') as f:
                f.write(data)
        except Exception as exception:
            logger.error('Exception:%s', exception)
            logger.error(traceback.format_exc())

    @classmethod
    def makezip(cls, zip_path, zip_extension='zip', remove_zip_path=True):
        """Zip the top-level files of folder *zip_path* into a sibling archive.

        Returns the archive path on success, True if it already existed,
        False when *zip_path* is missing, or None on error.
        """
        import zipfile, shutil
        try:
            if os.path.exists(zip_path) == False:
                return False
            zipfilepath = os.path.join(os.path.dirname(zip_path), f"{os.path.basename(zip_path)}.{zip_extension}")
            if os.path.exists(zipfilepath):
                return True
            with zipfile.ZipFile(zipfilepath, 'w') as zf:  # renamed: 'zip' shadowed the builtin
                for f in os.listdir(zip_path):
                    src = os.path.join(zip_path, f)
                    zf.write(src, f, compress_type=zipfile.ZIP_DEFLATED)
            if remove_zip_path:
                shutil.rmtree(zip_path)
            return zipfilepath
        except Exception as e:
            logger.error(f'Exception:{str(e)}')
            logger.error(traceback.format_exc())
            return None

    @classmethod
    def write_yaml(cls, filepath, data):
        """Dump *data* to *filepath* as block-style YAML (unicode preserved)."""
        import yaml
        with open(filepath, 'w', encoding='utf8') as f:
            yaml.dump(data, f, default_flow_style=False, allow_unicode=True)

    @classmethod
    def makezip_all(cls, zip_path, zip_filepath=None, zip_extension='zip', remove_zip_path=True):
        """Recursively zip folder *zip_path*, storing paths relative to it.

        BUGFIX: the committed version ignored *zip_filepath* yet used it as a
        branch condition (NameError when supplied) and archived every entry
        twice with inconsistent arcnames. Returns the archive path, False when
        *zip_path* is missing, None on error.
        """
        import zipfile, shutil
        try:
            if os.path.exists(zip_path) == False:
                return False
            if zip_filepath == None:
                zip_filepath = os.path.join(os.path.dirname(zip_path), f"{os.path.basename(zip_path)}.{zip_extension}")
            if os.path.exists(zip_filepath):
                os.remove(zip_filepath)
            with zipfile.ZipFile(zip_filepath, 'w') as zf:
                for path, dirs, files in os.walk(zip_path):
                    for file in files:
                        src = os.path.join(path, file)
                        zf.write(src, os.path.relpath(src, zip_path), compress_type=zipfile.ZIP_DEFLATED)
            if remove_zip_path:
                shutil.rmtree(zip_path)
            return zip_filepath
        except Exception as e:
            logger.error(f'Exception:{str(e)}')
            logger.error(traceback.format_exc())
            return None

25
lib/support/base/image.py Normal file
View File

@@ -0,0 +1,25 @@
import os, sys, traceback, requests
from io import BytesIO
from . import logger
class SupportImage(object):
    @classmethod
    def horizontal_to_vertical(cls, url):
        """Fetch the image at *url*, letterbox it onto a 2:3 portrait canvas,
        and return a Discord-hosted URL for the result (None on error)."""
        try:
            from PIL import Image
            im = Image.open(requests.get(url, stream=True).raw)
            width,height = im.size
            # target portrait canvas: same width, 1.5x height (2:3 poster ratio)
            new_height = int(width * 1.5)
            new_im = Image.new('RGB', (width, new_height))
            # paste the original vertically centered; top/bottom bands stay black
            new_im.paste(im, (0, int((new_height-height)/2)))
            img_byte_arr = BytesIO()
            new_im.save(img_byte_arr, format='PNG')
            img_byte_arr = img_byte_arr.getvalue()
            from . import SupportDiscord
            # upload the PNG bytes through a Discord webhook; returns its CDN URL
            return SupportDiscord.discord_proxy_image_bytes(img_byte_arr)
        except Exception as e:
            logger.error('Exception:%s', e)
            logger.error(traceback.format_exc())

View File

@@ -0,0 +1,48 @@
import os, sys, traceback, subprocess, json, platform
from . import logger
class SupportProcess(object):
    @classmethod
    def execute(cls, command, format=None, shell=False, env=None, timeout=1000):
        """Run *command* and collect its combined stdout/stderr.

        Returns the output as a string, parsed JSON when format='json'
        (None when parsing fails), the string "timeout" when *timeout*
        seconds elapse, or None on any other error.
        """
        logger.debug(command)
        try:
            # Sentinel for iter() below — assigned unconditionally (the original
            # appears to set it only on Windows, a NameError elsewhere).
            iter_arg = ''
            if platform.system() == 'Windows':
                command = ' '.join(command)
            process = subprocess.Popen(command, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True, shell=shell, env=env, encoding='utf8')
            try:
                # NOTE(review): waiting before draining stdout can deadlock if the
                # child fills the pipe buffer — confirm outputs stay small.
                process.wait(timeout=timeout)
            except subprocess.TimeoutExpired:
                # kill the whole process tree on timeout
                import psutil
                process = psutil.Process(process.pid)
                for proc in process.children(recursive=True):
                    proc.kill()
                process.kill()
                return "timeout"
            ret = []
            with process.stdout:
                for line in iter(process.stdout.readline, iter_arg):
                    ret.append(line.strip())
            if format is None:
                ret2 = '\n'.join(ret)
            elif format == 'json':
                try:
                    # skip any leading noise before the first '{' or '['
                    index = 0
                    for idx, tmp in enumerate(ret):
                        if tmp.startswith('{') or tmp.startswith('['):
                            index = idx
                            break
                    ret2 = json.loads(''.join(ret[index:]))
                except Exception:
                    ret2 = None
            return ret2
        except Exception as e:
            logger.error(f'Exception:{str(e)}', )
            logger.error(traceback.format_exc())
            logger.error('command : %s', command)

View File

@@ -0,0 +1,28 @@
import os, traceback, io, re, json, codecs
from . import logger
class SupportString(object):
    @classmethod
    def get_cate_char_by_first(cls, title): # get_first
        """Return a category bucket for *title* based on its first character:
        '0Z' for digits and Latin letters, otherwise a Hangul
        initial-consonant group.

        NOTE(review): the Korean character literals below were lost in an
        encoding round-trip and are now empty strings — ord('') raises
        TypeError, so this method cannot work as committed. They were
        presumably the Hangul syllable-range boundaries (가, 나, 다, ...);
        restore them from upstream before using this.
        """
        value = ord(title[0].upper())
        if value >= ord('0') and value <= ord('9'): return '0Z'
        elif value >= ord('A') and value <= ord('Z'): return '0Z'
        elif value >= ord('') and value < ord(''): return ''
        elif value < ord(''): return ''
        elif value < ord(''): return ''
        elif value < ord(''): return ''
        elif value < ord(''): return ''
        elif value < ord(''): return ''
        elif value < ord(''): return ''
        elif value < ord(''): return ''
        elif value < ord(''): return ''
        elif value < ord(''): return ''
        elif value < ord(''): return ''
        elif value < ord(''): return ''
        elif value < ord(''): return ''
        elif value <= ord(''): return ''
        else: return '0Z'

103
lib/support/base/util.py Normal file
View File

@@ -0,0 +1,103 @@
import os, traceback, io, re, json, codecs
from . import logger
from functools import wraps
import time
def pt(f):
    """Decorator that logs the wall-clock duration of each call to *f*."""
    @wraps(f)
    def timed(*args, **kwds):
        began = time.time()
        outcome = f(*args, **kwds)
        logger.info(f"FUNC END [{f.__name__}] {time.time() - began}")
        return outcome
    return timed
# Browser-like request headers shared by callers making plain HTTP requests.
default_headers = {
    'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
    'accept-language': 'ko-KR,ko;q=0.9,en-US;q=0.8,en;q=0.7',
    'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.110 Safari/537.36',
}
class SupportUtil(object):
    """Miscellaneous helpers."""

    @classmethod
    def sizeof_fmt(cls, num, suffix='Bytes'):
        """Format *num* bytes as a human-readable string, e.g. '1.5KBytes'."""
        for unit in ('', 'K', 'M', 'G', 'T', 'P', 'E', 'Z'):
            if abs(num) < 1024.0:
                return "%3.1f%s%s" % (num, unit, suffix)
            num /= 1024.0
        return "%.1f%s%s" % (num, 'Y', suffix)

    @classmethod
    def is_arm(cls):
        """Best-effort guess whether the host is an ARM Linux machine
        (judged from the platform description string); None on error."""
        try:
            import platform
            if platform.system() != 'Linux':
                return False
            desc = platform.platform()
            if desc.find('86') == -1 and desc.find('64') == -1:
                return True
            return desc.find('arch') != -1 or desc.find('arm') != -1
        except Exception as e:
            logger.error(f"Exception:{str(e)}")
            logger.error(traceback.format_exc())
def dummy_func():
    # reference function: used below only to detect "argument is a plain function"
    pass
# Minimal stand-in for the celery decorator API so '@celery.task'-decorated
# code still runs when celery itself is not wired up.
class celery(object):
    class task(object):
        def __init__(self, *args, **kwargs):
            # '@celery.task' (no parentheses): args[0] is the decorated function
            if len(args) > 0:
                self.f = args[0]
        def __call__(self, *args, **kwargs):
            # '@celery.task(...)' form: the follow-up call receives the function;
            # return it unchanged so it stays directly callable
            if len(args) > 0 and type(args[0]) == type(dummy_func):
                return args[0]
            # otherwise this is an actual invocation of the wrapped function
            self.f(*args, **kwargs)
class SingletonClass(object):
    """Opt-in singleton base: call Subclass.instance(...) to create once and
    reuse the same object on every later call."""
    __instance = None
    @classmethod
    def __getInstance(cls):
        return cls.__instance
    @classmethod
    def instance(cls, *args, **kargs):
        # first call builds the instance, then rebinds 'instance' so that
        # subsequent calls go straight to the cached object
        cls.__instance = cls(*args, **kargs)
        cls.instance = cls.__getInstance
        return cls.__instance
class AlchemyEncoder(json.JSONEncoder):
    """JSONEncoder that serializes SQLAlchemy model instances attribute-by-attribute."""
    def default(self, obj):
        from sqlalchemy.ext.declarative import DeclarativeMeta
        if isinstance(obj.__class__, DeclarativeMeta):
            # an SQLAlchemy class
            fields = {}
            for field in [x for x in dir(obj) if not x.startswith('_') and x != 'metadata']:
                data = obj.__getattribute__(field)
                try:
                    json.dumps(data) # this will fail on non-encodable values, like other classes
                    fields[field] = data
                except TypeError:
                    # non-serializable attribute (relationship, custom object, ...) -> null
                    fields[field] = None
            # a json-encodable dict
            return fields
        return json.JSONEncoder.default(self, obj)

13
lib/support/base/yaml.py Normal file
View File

@@ -0,0 +1,13 @@
import yaml
class SupportYaml(object):
    """Thin YAML read/write helpers (UTF-8, unicode preserved)."""
    @classmethod
    def write_yaml(cls, filepath, data):
        """Dump *data* to *filepath* as block-style YAML."""
        with open(filepath, 'w', encoding='utf8') as f:
            yaml.dump(data, f, default_flow_style=False, allow_unicode=True)
    @classmethod
    def read_yaml(cls, filepath):
        """Parse and return the YAML document at *filepath*.

        (BUGFIX: first parameter of this @classmethod was misnamed ``self``.)
        NOTE(review): FullLoader still constructs arbitrary Python-native
        tags; prefer yaml.safe_load for untrusted files.
        """
        with open(filepath, encoding='utf8') as file:
            return yaml.load(file, Loader=yaml.FullLoader)

80
lib/support/logger.py Normal file
View File

@@ -0,0 +1,80 @@
import os, sys, logging, logging.handlers
from datetime import datetime
from pytz import timezone, utc
"""
ConsoleColor.Black => "\x1B[30m",
ConsoleColor.DarkRed => "\x1B[31m",
ConsoleColor.DarkGreen => "\x1B[32m",
ConsoleColor.DarkYellow => "\x1B[33m",
ConsoleColor.DarkBlue => "\x1B[34m",
ConsoleColor.DarkMagenta => "\x1B[35m",
ConsoleColor.DarkCyan => "\x1B[36m",
ConsoleColor.Gray => "\x1B[37m",
ConsoleColor.Red => "\x1B[1m\x1B[31m",
ConsoleColor.Green => "\x1B[1m\x1B[32m",
ConsoleColor.Yellow => "\x1B[1m\x1B[33m",
ConsoleColor.Blue => "\x1B[1m\x1B[34m",
ConsoleColor.Magenta => "\x1B[1m\x1B[35m",
ConsoleColor.Cyan => "\x1B[1m\x1B[36m",
ConsoleColor.White => "\x1B[1m\x1B[37m",
"""
class CustomFormatter(logging.Formatter):
    """Console formatter that colorizes output according to the record level."""
    grey = "\x1b[38;21m"
    yellow = "\x1b[33;21m"
    red = "\x1b[31;21m"
    bold_red = "\x1b[31;1m"
    reset = "\x1b[0m"
    green = "\x1B[32m"
    # layout template; only the {color} placeholder varies per level
    _template = '[{yellow}%(asctime)s{reset}|{color}%(levelname)s{reset}|{green}%(name)s{reset}|%(pathname)s:%(lineno)s] {color}%(message)s{reset}'
    FORMATS = {}
    for _level, _color in ((logging.DEBUG, grey), (logging.INFO, green),
                           (logging.WARNING, yellow), (logging.ERROR, red),
                           (logging.CRITICAL, bold_red)):
        FORMATS[_level] = _template.format(color=_color, reset=reset, yellow=yellow, green=green)
    del _level, _color
    def format(self, record):
        """Format *record* using the level-appropriate colorized layout."""
        return logging.Formatter(self.FORMATS.get(record.levelno)).format(record)
def get_logger(name=None, log_path=None):
    """Create (or fetch) a configured logger named *name*.

    On first call per name, attaches a rotating UTF-8 file handler (KST
    timestamps) under *log_path* plus a colorized console handler.
    NOTE(review): when *log_path* is None the default ./tmp directory is NOT
    created (the makedirs call is commented out), so the first file write
    fails unless it already exists — confirm this is intentional.
    """
    if name == None:
        name = sys.argv[0].rsplit('.', 1)[0]  # script name without its extension
    logger = logging.getLogger(name)
    if not logger.handlers:  # guard against adding duplicate handlers on repeat calls
        level = logging.DEBUG
        logger.setLevel(level)
        formatter = logging.Formatter(u'[%(asctime)s|%(levelname)s|%(filename)s:%(lineno)s] %(message)s')
        def customTime(*args):
            # render file-log timestamps in Asia/Seoul regardless of host timezone
            utc_dt = utc.localize(datetime.utcnow())
            my_tz = timezone("Asia/Seoul")
            converted = utc_dt.astimezone(my_tz)
            return converted.timetuple()
        formatter.converter = customTime
        file_max_bytes = 1 * 1024 * 1024  # rotate at 1 MiB, keep 5 backups
        if log_path == None:
            log_path = os.path.join(os.getcwd(), 'tmp')
            #os.makedirs(log_path, exist_ok=True)
        else:
            os.makedirs(log_path, exist_ok=True)
        # delay=True: the log file is not created until the first record is written
        fileHandler = logging.handlers.RotatingFileHandler(filename=os.path.join(log_path, f'{name}.log'), maxBytes=file_max_bytes, backupCount=5, encoding='utf8', delay=True)
        streamHandler = logging.StreamHandler()
        fileHandler.setFormatter(formatter)
        streamHandler.setFormatter(CustomFormatter())
        logger.addHandler(fileHandler)
        logger.addHandler(streamHandler)
    return logger

View File

497
lib/support/site/tving.py Normal file
View File

@@ -0,0 +1,497 @@
import os, sys, traceback, time, urllib.parse, requests, json, base64, re, platform
if __name__ == '__main__':
if platform.system() == 'Windows':
sys.path += ["C:\SJVA3\lib2", "C:\SJVA3\data\custom", "C:\SJVA3_DEV"]
else:
sys.path += ["/root/SJVA3/lib2", "/root/SJVA3/data/custom"]
from support import d, logger
apikey = '1e7952d0917d6aab1f0293a063697610'
#apikey = '95a64ebcd8e154aeb96928bf34848826'
class SupportTving:
default_param = f'&screenCode=CSSD0100&networkCode=CSND0900&osCode=CSOD0900&teleCode=CSCD0900&apiKey={apikey}'
headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.110 Safari/537.36',
'Accept' : 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
'Accept-Language' : 'ko-KR,ko;q=0.9,en-US;q=0.8,en;q=0.7',
'Referer' : '',
}
# 같은 코드가 여러군에 있는게 불편하여 그냥 sjva안에서는 ins를 가져와서 사용하는 것으로 한다.
# sjva외에서는 생성해서 사용.
# ins를 만드는 것은 system plugin
ins = None
def __init__(self, token=None, proxy=None, user=None, password=None, deviceid=None, uuid=None):
self.token = token
if self.token and '_tving_token=' in self.token:
self.token = self.token.split('=')[1]
self.proxies = None
self.proxy = proxy
if self.proxy != None:
self.proxies = {"https": proxy, 'http':proxy}
self.user = user
self.password = password
self.deviceid = deviceid
self.uuid = uuid
def do_login(self, user_id, user_pw, login_type):
try:
url = 'https://user.tving.com/user/doLogin.tving'
if login_type == '0':
login_type_value = '10'
else:
login_type_value = '20'
params = {
'userId' : user_id,
'password' : user_pw,
'loginType' : login_type_value
}
res = requests.post(url, data=params)
cookie = res.headers['Set-Cookie']
for c in cookie.split(','):
c = c.strip()
if c.startswith('_tving_token'):
ret = c.split(';')[0]
return ret
except Exception as exception:
logger.error('Exception:%s', exception)
logger.error(traceback.format_exc())
def get_device_list(self):
url = f"http://api.tving.com/v1/user/device/list?{self.default_param[1:]}"
return self.api_get(url)
def get_info(self, mediacode, streamcode):
ts = str(int(time.time()))
try:
tmp_param = self.default_param
if streamcode == 'stream70':
tmp_param = self.default_param.replace('CSSD0100', 'CSSD1200')
url = f"http://api.tving.com/v2/media/stream/info?info=y{tmp_param}&noCache={ts}&mediaCode={mediacode}&streamCode={streamcode}&deviceId={self.deviceid}"
#logger.warning(url)
if self.token != None:
self.headers['Cookie'] = f"_tving_token={self.token}"
info = self.api_get(url)
if streamcode == 'stream70':
for stream in info['content']['info']['stream']:
if stream['code'] == 'stream70':
break
else:
#logger.debug("stream70이 없어서 50으로 재요청")
return self.get_info(mediacode, 'stream50')
#logger.debug(d(self.headers))
#logger.debug(d(info))
#logger.error(mediacode)
if info['result']['code'] == "000":
info['avaliable'] = True
else:
info['avaliable'] = False
return info
#logger.error(info['stream']['drm_yn'])
if 'drm_yn' in info['stream'] and info['stream']['drm_yn'] == 'Y' and '4k_nondrm_url' not in info['stream']['broadcast']:
info['drm'] = True
info['play_info'] = {
'uri' : self.__decrypt2(mediacode, ts, info['stream']['broadcast']['widevine']['broad_url']),
'drm_scheme' : 'widevine',
'drm_license_uri' : 'http://cj.drmkeyserver.com/widevine_license',
'drm_key_request_properties': {
'origin' : 'https://www.tving.com',
'sec-fetch-site' : 'cross-site',
'sec-fetch-mode' : 'cors',
'user-agent' : 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.105 Safari/537.36',
'Host' : 'cj.drmkeyserver.com',
'referer' : 'https://www.tving.com/',
'AcquireLicenseAssertion' : info['stream']['drm_license_assertion'],
}
}
info['url'] = info['play_info']['uri']
#info['play_info']['url'] = info['play_info']['uri']
else:
if '4k_nondrm_url' in info['stream']['broadcast']:
url = info['stream']['broadcast']['4k_nondrm_url']
else:
url = info['stream']['broadcast']['broad_url']
decrypted_url = self.__decrypt2(mediacode, ts, url)
#logger.error(decrypted_url)
#if decrypted_url.find('m3u8') == -1:
# decrypted_url = decrypted_url.replace('rtmp', 'http')
# decrypted_url = decrypted_url.replace('?', '/playlist.m3u8?')
#2020-06-12
# 2022-05-26
# smil/playlist.m3u8 이거 영화만 탐??
#logger.error(decrypted_url)
if decrypted_url.find('smil/playlist.m3u8') != -1 and decrypted_url.find('content_type=VOD') != -1 :
tmps = decrypted_url.split('playlist.m3u8')
r = requests.get(decrypted_url, headers=self.headers, proxies=self.proxies)
lines = r.text.split('\n')
#logger.debug(d(lines))
# 2022-05-26 이전까지는 고화질이 마지막에 나왔을텐데 영화에서 맨 처음에 나온다고 함. 당연히 확인했을테니 마지막이었겠지?
#i = -1
#last = ''
#while len(last) == 0:
# last = lines[i].strip()
# i -= 1
max_bandwidth = 0
max_url = None
while len(lines) > 0: #for line in lines:
line = lines.pop(0)
match = re.search('BANDWIDTH=(?P<bw>\d+)', line)
if match:
bw = int(match.group('bw'))
if bw > max_bandwidth:
max_bandwidth = bw
max_url = lines.pop(0)
decrypted_url = '%s%s' % (tmps[0], max_url)
#logger.debug(f"VOD : {decrypted_url}")
if 'manifest.m3u8' in decrypted_url: #QVOD
r = requests.get(decrypted_url, headers=self.headers, proxies=self.proxies)
lines = r.text.split('\n')
i = -1
last = ''
while len(last) == 0:
last = lines[i].strip()
i -= 1
tmps = decrypted_url.split('//')
tmps2 = tmps[1].split('/', 1)
tmps3 = tmps2[1].rsplit('/', 1)
tmps3[1] = re.sub(r'manifest\.m3u8\?start=(\d|-|:)+&end=(\d|-|:)+', '', tmps3[1])
decrypted_url = f"{tmps[0]}//{tmps2[0]}{last}{tmps3[1]}"
info['broad_url'] = decrypted_url
info['drm'] = False
info['url'] = decrypted_url
info['play_info'] = {
'hls': decrypted_url,
}
if mediacode[0] in ['E', 'M']:
info['filename'] = self.get_filename(info)
#logger.warning(d(info))
return info
except Exception as e:
logger.error(f"Exception:{str(e)}")
logger.error(traceback.format_exc())
# list_type : all, live, vod
def get_live_list(self, list_type='live', order='rating', include_drm=False):
def func(param, page, order='rating', include_drm=True):
has_more = 'N'
try:
result = []
url = f'https://api.tving.com/v2/media/lives?cacheType=main&pageNo={page}&pageSize=20&order={order}&adult=all&free=all&guest=all&scope=all{param}{self.default_param}'
data = self.api_get(url)
#logger.debug(url)
for item in data["result"]:
try:
# 2020-11-10 현재 /v1 에서는 drm채널인지 알려주지않고, 방송이 drm 적용인지 알려줌. 그냥 fix로..
info = {'is_drm':self.is_drm_channel(item['live_code'])}
if include_drm == False and info['is_drm']:
continue
info['id'] = item["live_code"]
info['title'] = item['schedule']['channel']['name']['ko']
info['episode_title'] = ' '
info['img'] = 'http://image.tving.com/upload/cms/caic/CAIC1900/%s.png' % item["live_code"]
if item['schedule']['episode'] is not None:
info['episode_title'] = item['schedule']['episode']['name']['ko']
if info['title'].startswith('CH.') and len(item['schedule']['episode']['image']) > 0:
info['img'] = 'http://image.tving.com' + item['schedule']['episode']['image'][0]['url']
#info['free'] = (item['schedule']['broadcast_url'][0]['broad_url1'].find('drm') == -1)
info['summary'] = info['episode_title']
result.append(info)
except Exception as exception:
logger.error('Exception:%s', exception)
logger.error(traceback.format_exc())
has_more = data["has_more"]
except Exception as exception:
logger.error('Exception:%s', exception)
logger.error(traceback.format_exc())
return has_more, result
ret = []
if list_type == 'live':
params = ['&channelType=CPCS0100,CPCS0400']
elif list_type == 'vod':
params = ['&channelType=CPCS0300']
elif list_type == 'all':
params = ['&channelType=CPCS0100,CPCS0400', '&channelType=CPCS0300']
else:
params = ['&channelType=CPCS0100,CPCS0400']
for param in params:
page = 1
while True:
hasMore, data = func(param, page, order=order, include_drm=include_drm)
ret += data
if hasMore == 'N':
break
page += 1
return ret
def get_vod_list(self, program_code=None, page=1):
url = f'http://api.tving.com/v2/media/episodes?pageNo={page}&pageSize=18&adult=all&guest=all&scope=all&personal=N{self.default_param}'
if program_code is not None:
url += f'&free=all&order=frequencyDesc&programCode={program_code}'
else:
url += "&free=all&lastFrequency=n&order=broadDate"
return self.api_get(url)
def get_vod_list_genre(self, genre, page=1):
url = f'http://api.tving.com/v2/media/episodes?pageNo={page}&pageSize=18&adult=all&guest=all&scope=all&personal=N{self.default_param}'
if genre != None and genre != 'all':
url += f"&free=all&lastFrequency=y&order=broadDate&categoryCode={genre}"
else:
url += "&free=all&lastFrequency=y&order=broadDate"
return self.api_get(url)
def get_movie_list(self, page=1, category='all'):
url = f'https://api.tving.com/v2/media/movies?pageNo={page}&pageSize=24&order=viewDay&free=all&adult=all&guest=all&scope=all&productPackageCode=338723&personal=N&diversityYn=N{self.default_param}'
if category != 'all':
url += f'&multiCategoryCode={category}'
return self.api_get(url)
def get_frequency_programid(self, programid, page=1):
url = f'https://api.tving.com/v2/media/frequency/program/{programid}?pageNo={page}&pageSize=10&order=new&free=all&adult=all&scope=all{self.default_param}'
return self.api_get(url)
def get_schedules(self, code, date, start_time, end_time):
url = f"https://api.tving.com/v2/media/schedules?pageNo=1&pageSize=20&order=chno&scope=all&adult=n&free=all&broadDate={date}&broadcastDate={date}&startBroadTime={start_time}&endBroadTime={end_time}&channelCode={','.join(code)}{self.default_param}"
return self.api_get(url)
def get_program_programid(self, programid):
url = f'https://api.tving.com/v2/media/program/{programid}?pageNo=1&pageSize=10&order=name{self.default_param}'
return self.api_get(url)
def search(self, keyword):
# gubun VODBC, VODMV
try:
import urllib.parse
url = 'https://search.tving.com/search/common/module/getAkc.jsp?kwd=' + urllib.parse.quote(str(keyword))
data = requests.get(url, headers=self.headers).json()
#logger.debug(d(data))
if 'dataList' in data['akcRsb']:
return data['akcRsb']['dataList']
except Exception as exception:
logger.error('Exception:%s', exception)
logger.error(traceback.format_exc())
def api_get(self, url):
try:
if self.token != None:
self.headers['Cookie'] = f"_tving_token={self.token}"
data = requests.get(url, headers=self.headers, proxies=self.proxies).json()
try:
if type(data['body']['result']) == type({}) and data['body']['result']['message'] != None:
logger.debug(f"tving api message : {data['body']['result']['message']}")
except:
pass
if data['header']['status'] == 200:
return data['body']
except Exception as e:
logger.error(f'url: {url}')
logger.error(f"Exception:{str(e)}")
logger.error(traceback.format_exc())
def is_drm_channel(self, code):
# C07381:ocn C05661:디즈니채널 C44441:koon C04601:ocn movie C07382:ocn thrill
return (code in ['C07381', 'C05661', 'C44441', 'C04601', 'C07382'])
    def get_filename(self, episode_data):
        """Build a release-style filename (Title.E01.YYMMDD.1080p-ST.mp4) from a
        tving episode/movie info dict.

        Returns None when no stream quality is marked as selected, or on error.
        """
        try:
            # program title, stripped of characters that are illegal in filenames
            title = episode_data["content"]["program_name"]
            title = title.replace("<", "").replace(">", "").replace("\\", "").replace("/", "").replace(":", "").replace("*", "").replace("\"", "").replace("|", "").replace("?", "").replace(" ", " ").strip()
            # pick the quality code the server marked as selected ('Y')
            currentQuality = None
            if episode_data["stream"]["quality"] is None:
                # no quality list in the response -> fall back to stream40 (720p)
                currentQuality = "stream40"
            else:
                qualityCount = len(episode_data["stream"]["quality"])
                for i in range(qualityCount):
                    if episode_data["stream"]["quality"][i]["selected"] == "Y":
                        currentQuality = episode_data["stream"]["quality"][i]["code"]
                        break
            if currentQuality is None:
                return
            qualityRes = self.__get_quality_to_res(currentQuality)
            # episodes carry a 'frequency' (episode number); movies do not
            if 'frequency' in episode_data["content"]:
                episodeno = episode_data["content"]["frequency"]
                # broadcast_date presumably arrives as YYYYMMDD; keep the YYMMDD tail -- TODO confirm
                airdate = str(episode_data["content"]["info"]["episode"]["broadcast_date"])[2:]
                if episodeno > 0:
                    ret = f"{title}.E{str(episodeno).zfill(2)}.{airdate}.{qualityRes}-ST.mp4"
                else:
                    # frequency 0: specials etc. -> no episode number in the name
                    ret = f"{title}.{airdate}.{qualityRes}-ST.mp4"
            else:
                ret = f"{title}.{qualityRes}-ST.mp4"
            #if episode_data['drm']:
            #    ret = ret.replace('.mp4', '.mkv')
            # final pass through the project's filename sanitizer
            from support.base import SupportFile
            return SupportFile.text_for_filename(ret)
        except Exception as e:
            logger.error(f"Exception:{str(e)}")
            logger.error(traceback.format_exc())
def __get_quality_to_res(self, quality):
if quality == 'stream50':
return '1080p'
elif quality == 'stream40':
return '720p'
elif quality == 'stream30':
return '480p'
elif quality == 'stream70':
return '2160p'
elif quality == 'stream25':
return '270p'
return '1080p'
def get_quality_to_tving(self, quality):
if quality == 'FHD':
return 'stream50'
elif quality == 'HD':
return 'stream40'
elif quality == 'SD':
return 'stream30'
elif quality == 'UHD':
return 'stream70'
return 'stream50'
    def __decrypt2(self, mediacode, ts, url):
        """Decrypt tving's obfuscated stream URL.

        Tries the native `sc` helper module first; when it is unavailable or
        fails, falls back to a remote decryption service.  Returns the plain URL.
        """
        try:
            #raise Exception('test')
            import sc
            ret = sc.td1(mediacode, str(ts), url).strip()
            #data = sc.td1(code, ts, url)
            # strip any non-printable garbage left over after decryption
            ret = re.sub('[^ -~]+', '', ret)
            #logger.error(f"[{ret}]")
            return ret
        except Exception as e:
            logger.error(f"Exception:{str(e)}")
            #logger.error(traceback.format_exc())
            # fallback: ask the remote service to perform the decryption
            data = {'url':url, 'code':mediacode, 'ts':ts}
            ret = requests.post('https://sjva.me/sjva/tving.php', data=data).json()
            return ret['url']
if __name__ == '__main__':
    # CLI entry point: fetch streaming info for a content code and, for DRM
    # content, download it with WVDownloader.
    import argparse
    #from support.base import d, get_logger
    from lib_wvtool import WVDownloader
    parser = argparse.ArgumentParser()
    parser.add_argument('--code', required=True, help='컨텐츠 코드')
    parser.add_argument('--quality', required=False, default='stream50', help='화질')
    parser.add_argument('--token', required=True,)
    parser.add_argument('--proxy', default=None)
    parser.add_argument('--deviceid', default=None)
    parser.add_argument('--folder_tmp', default=None)
    parser.add_argument('--folder_output', default=None)
    args = parser.parse_args()
    info = SupportTving(token=args.token, proxy=args.proxy, deviceid=args.deviceid).get_info(args.code, args.quality)
    logger.debug(d(info['play_info']))
    if info['drm']:
        # the license/MPD requests must carry the login cookie on the
        # class-level headers shared with the downloader
        SupportTving.headers['Cookie'] = f"_tving_token={args.token}"
        downloader = WVDownloader({
            'logger' : logger,
            'mpd_url' : info['play_info']['uri'],
            'code' : args.code,
            'output_filename' : info['filename'],
            'license_headers' : info['play_info']['drm_key_request_properties'],
            'license_url' : info['play_info']['drm_license_uri'],
            'clean' : True,
            'folder_output': args.folder_output,
            'folder_tmp': args.folder_tmp,
            'mpd_headers' : SupportTving.headers
        })
        downloader.download()
    else:
        # only the DRM (widevine) download path is implemented here
        logger.error("DRM 영상이 아닙니다.")
    #print(args)

View File

@@ -0,0 +1 @@
from .gsheet_base import GoogleSheetBase

1
lib/support/tool/cs.json Normal file
View File

@@ -0,0 +1 @@
{"installed":{"client_id":"78061934091-l4m6ba5jip749lb4stk00jg8vf2tcsmq.apps.googleusercontent.com","project_id":"sjva-plex-scan-200106","auth_uri":"https://accounts.google.com/o/oauth2/auth","token_uri":"https://oauth2.googleapis.com/token","auth_provider_x509_cert_url":"https://www.googleapis.com/oauth2/v1/certs","client_secret":"qb0NiC8JahlPggbHZJSF7xVJ","redirect_uris":["urn:ietf:wg:oauth:2.0:oob","http://localhost"]}}

View File

@@ -0,0 +1,212 @@
import os, sys, traceback
try:
import oauth2client
except:
os.system('pip install oauth2client')
import oauth2client
from oauth2client.file import Storage
from oauth2client import tools
from oauth2client.client import flow_from_clientsecrets
try:
from apiclient.discovery import build
except:
os.system('pip install google-api-python-client')
from apiclient.discovery import build
try:
import gspread, time
from gspread_formatting import cellFormat, textFormat, color, format_cell_range
except:
os.system('pip3 install gspread')
os.system('pip3 install gspread_formatting')
import gspread, time
from gspread_formatting import cellFormat, textFormat, color, format_cell_range
from support.base import get_logger, d
logger = get_logger()
class GoogleSheetBase:
    """Base helper for mirroring row data into one Google-Sheets worksheet.

    Callers provide the spreadsheet id, the path where the OAuth token is
    stored, the worksheet tab index and the header name that uniquely
    identifies a row (used by find_row_index / write_data).
    """
    # OAuth flow object shared between __make_token_cli and __save_token
    current_flow = None
    # background-colour presets for the set_color* helpers
    color_format = {
        'green' : cellFormat(
            backgroundColor=color(0, 1, 0), # green background
            textFormat=textFormat(foregroundColor=color(0, 0, 0)),
        ),
        'yellow' : cellFormat(
            backgroundColor=color(1, 1, 0), # yellow background
            textFormat=textFormat(foregroundColor=color(0, 0, 0)),
        ),
        'white' : cellFormat(
            backgroundColor=color(1, 1, 1), # white background
            textFormat=textFormat(foregroundColor=color(0, 0, 0)),
        )
    }
    def __init__(self, doc_id, credentials_filepath, tab_index, unique_header):
        """Authorize against Google and open worksheet *tab_index* of *doc_id*.

        credentials_filepath: where the OAuth token is (or will be) stored;
        a missing/invalid token triggers an interactive console auth flow.
        unique_header: header name used to match existing rows in write_data.
        """
        self.credentials_filepath = credentials_filepath
        # may block on the interactive OAuth flow on first run
        self.credentials = self.get_credentials()
        self.doc_id = doc_id
        doc_url = f'https://docs.google.com/spreadsheets/d/{doc_id}'
        gsp = gspread.authorize(self.credentials)
        doc = gsp.open_by_url(doc_url)
        self.tab_index = tab_index
        self.ws = doc.get_worksheet(tab_index)
        # header lookup tables; filled by set_sheet_header on first read
        self.header_info = None
        self.header_info_reverse = None
        self.unique_header = unique_header
def get_credentials(self, project_filepath=None):
if os.path.exists(self.credentials_filepath) == False:
logger.info(f"credentials_filepath : {self.credentials_filepath}")
url = self.__make_token_cli(project_filepath)
logger.debug(f"Auth URL : {url}")
code = input("Input Code : ")
self.__save_token(self.credentials_filepath, code)
store = Storage(self.credentials_filepath)
credentials = store.get()
if not credentials or credentials.invalid:
logger.warning('credentials error')
#flow = client.flow_from_clientsecrets('credentials.json', SCOPES)
#creds = tools.run_flow(flow, store)
os.remove(self.credentials_filepath)
return self.get_credentials(self.credentials_filepath)
return credentials
    def __make_token_cli(self, project_filepath):
        """Start the installed-app OAuth flow and return the user auth URL.

        project_filepath: Google client-secrets JSON; when None, defaults to
        the bundled cs.json next to this module.  The flow object is kept on
        the class so __save_token can finish the exchange.
        """
        try:
            if project_filepath == None:
                project_filepath = os.path.join(os.path.dirname(__file__), 'cs.json')
            self.current_flow = flow_from_clientsecrets(
                project_filepath, # downloaded client-secrets file
                'https://www.googleapis.com/auth/drive', # scope
                redirect_uri='urn:ietf:wg:oauth:2.0:oob')
            return self.current_flow.step1_get_authorize_url()
        except Exception as e:
            logger.error(f"Exception: {e}")
            logger.error(traceback.format_exc())
    def __save_token(self, credentials_filepath, code):
        """Exchange the one-time auth *code* for credentials and store them.

        Uses the flow prepared by __make_token_cli.  Returns True on success,
        False on failure.
        """
        try:
            credentials = self.current_flow.step2_exchange(code)
            storage = Storage(credentials_filepath)
            storage.put(credentials)
            return True
        except Exception as e:
            logger.error(f"Exception: {e}")
            logger.error(traceback.format_exc())
            return False
def get_sheet_data(self):
tmp = self.ws.get_all_values()#[:-1]
self.set_sheet_header(tmp[0])
rows = tmp[1:]
ret = []
for row in rows:
item = {}
for idx, col in enumerate(row):
item[self.header_info_reverse[idx+1]] = col
ret.append(item)
return ret
def set_sheet_header(self, row):
self.header_info = {}
self.header_info_reverse = {}
for idx, col in enumerate(row):
self.header_info[col] = idx + 1
self.header_info_reverse[idx+1] = col
logger.debug(self.header_info)
def find_row_index(self, total_data, data):
find_row_index = -1
#find = False
#data['IDX'] = len(total_data)+1
for idx, item in enumerate(total_data):
if item[self.unique_header] == str(data[self.unique_header]):
#find = True
find_row_index = idx
#data['IDX'] = find_row_index + 1
break
if find_row_index == -1:
data['IDX'] = len(total_data)+1
return find_row_index
    def sleep(self):
        # short pause between writes to stay under the Sheets API rate limit
        time.sleep(0.5)
    def sleep_exception(self):
        # longer back-off after an API (quota) error before retrying a write
        time.sleep(10)
    def after_update_cell(self, sheet_row_index, sheet_col_index, key, value, old_value):
        # hook for subclasses: called after every successful cell write in
        # write_data; old_value is None for newly appended rows
        pass
def set_color(self, sheet_row, sheet_col1, sheet_col2, color):
format_cell_range(self.ws, gspread.utils.rowcol_to_a1(sheet_row,sheet_col1)+':' + gspread.utils.rowcol_to_a1(sheet_row,sheet_col2), color)
def set_color_row(self, sheet_row, color):
format_cell_range(self.ws, gspread.utils.rowcol_to_a1(sheet_row,1)+':' + gspread.utils.rowcol_to_a1(sheet_row,len(self.header_info)), color)
def set_color_cell(self, sheet_row, sheet_col, color):
format_cell_range(self.ws, gspread.utils.rowcol_to_a1(sheet_row,sheet_col)+':' + gspread.utils.rowcol_to_a1(sheet_row,sheet_col), color)
    def write_data(self, total_data, data):
        """Upsert *data* into the sheet, diffing against the cached rows.

        total_data: list of row dicts previously read from the sheet; it is
        updated in place so the cache stays in sync with what was written.
        Returns the number of cells actually written.
        """
        find_row_index = self.find_row_index(total_data, data)
        write_count = 0
        for key, value in data.items():
            # keys starting with '_' are internal and never written
            if key.startswith('_'):
                continue
            if value == None:
                continue
            # skip values that have no matching column in the sheet
            if key not in self.header_info:
                continue
            # retry loop: API quota errors back off and retry the same cell
            # NOTE(review): non-API exceptions also retry indefinitely
            while True:
                try:
                    # row/col are +2 / 1-based because row 1 is the header
                    if find_row_index != -1 and str(total_data[find_row_index][key]) != str(value):
                        # existing row: only write cells whose value changed
                        logger.warning(f"업데이트 : {key} {total_data[find_row_index][key]} ==> {value}")
                        self.ws.update_cell(find_row_index+2, self.header_info[key], value)
                        self.after_update_cell(find_row_index+2, self.header_info[key], key, value, total_data[find_row_index][key])
                        write_count += 1
                        self.sleep()
                    elif find_row_index == -1 and value != '':
                        # new row: write below the last known row
                        logger.warning(f"추가 : {key} {value}")
                        self.ws.update_cell(len(total_data)+2, self.header_info[key], value)
                        self.after_update_cell(len(total_data)+2, self.header_info[key], key, value, None)
                        write_count += 1
                        self.sleep()
                    break
                except gspread.exceptions.APIError:
                    # rate limited: wait longer, then retry this cell
                    self.sleep_exception()
                except Exception as exception:
                    logger.error(f"{key} - {value}")
                    logger.error('Exception:%s', exception)
                    logger.error(traceback.format_exc())
                    logger.error(self.header_info)
                    self.sleep_exception()
        # keep the in-memory cache consistent with the sheet contents
        if find_row_index == -1:
            total_data.append(data)
        else:
            total_data[find_row_index] = data
        return write_count