Add logging to the uploads.
parent 0907ea72bd
commit b48c2eece5
1 changed file with 49 additions and 11 deletions
@@ -7,9 +7,11 @@ an Amazon S3 instance (or other implementations, like DigitalOcean Spaces).
 import argparse
 import json
+import logging
 import os
 import sys
 import threading
+import traceback
 from unicodedata import normalize
 
 from decouple import config
 
@@ -28,6 +30,15 @@ S3_BUCKET = config('S3_BUCKET')
 # Radio name for metadata
 RADIO_NAME = config('RADIO_NAME', default='Save Point Radio')
 
+logging.basicConfig(
+    handlers=[logging.FileHandler('./s3_uploads.log', encoding='utf8')],
+    level=logging.INFO,
+    format=('[%(asctime)s] [%(levelname)s]'
+            ' [%(name)s.%(funcName)s] === %(message)s'),
+    datefmt='%Y-%m-%dT%H:%M:%S'
+)
+LOGGER = logging.getLogger('upload_s3')
+
 
 class Progress(object):
     '''
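With the handler setup added above, every record goes to ./s3_uploads.log rather than the console, so logging does not interfere with the carriage-return progress output the script writes to stdout. Under the configured format and date format, an entry would look roughly like this (timestamp and path are illustrative):

    [2019-03-02T14:07:55] [INFO] [upload_s3.import_playlist] === Begin upload of: /music/example.mp3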
@@ -107,6 +118,8 @@ def import_playlist(playlist_file):
         aws_secret_access_key=S3_SECRET_KEY
     )
 
+    totals = {'success': 0, 'fail': 0}
+
     for song in playlist['songs']:
         old_path = song['store']['path']
 
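For context, the aws_secret_access_key=S3_SECRET_KEY line is the tail of the boto3 client construction, which the hunk only partially shows. A minimal sketch of such a call (the endpoint and access-key variable names are assumptions, not taken from this diff):

    import boto3

    client = boto3.client(
        's3',
        endpoint_url=S3_ENDPOINT,          # assumed config value
        aws_access_key_id=S3_ACCESS_KEY,   # assumed config value
        aws_secret_access_key=S3_SECRET_KEY
    )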
@@ -132,6 +145,9 @@ def import_playlist(playlist_file):
         ext = os.path.splitext(old_path)[1]
         new_path = '{}/{}{}'.format(prefix, file_hash, ext)
 
+        LOGGER.info('Begin upload of: %s', old_path)
+
+        try:
             client.upload_file(
                 old_path,
                 S3_BUCKET,
@@ -142,12 +158,32 @@ def import_playlist(playlist_file):
                 },
                 Callback=Progress(old_path)
             )
+        except Exception:
+            LOGGER.error(
+                'Upload failed for: %s -- %s',
+                old_path,
+                traceback.format_exc()
+            )
+            totals['fail'] += 1
+        else:
             song['store']['path'] = 's3://{}/{}'.format(S3_BUCKET, new_path)
+            LOGGER.info(
+                'Successful upload of: %s to %s',
+                old_path,
+                song['store']['path']
+            )
+            totals['success'] += 1
 
         sys.stdout.write("\r\n")
         sys.stdout.flush()
 
+    result_message = 'Uploads complete -- {} successful, {} failures'.format(
+        totals['success'],
+        totals['fail']
+    )
+    print(result_message)
+    LOGGER.info(result_message)
+
 
     return playlist
 
 
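A note on the error branch: traceback.format_exc() returns the active traceback as a string, which is what the %s placeholder needs. An equivalent and slightly more idiomatic pattern (a sketch, not what this commit does) is Logger.exception, which logs at ERROR level and attaches the traceback automatically:

    try:
        client.upload_file(old_path, S3_BUCKET, new_path)
    except Exception:
        # exc_info is appended for free; no manual traceback formatting needed
        LOGGER.exception('Upload failed for: %s', old_path)
        totals['fail'] += 1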
@@ -182,6 +218,7 @@ def main():
     results = import_playlist(args.filepath[0])
 
     if results:
+        LOGGER.info('Exporting new playlist file to \'playlist_s3.json\'')
         with open('playlist_s3.json', 'w', encoding='utf8') as file:
             json.dump(
                 results,
@@ -190,6 +227,7 @@ def main():
                 sort_keys=True,
                 indent=4
             )
+    LOGGER.info('Program finished. Exiting.')
 
 
 if __name__ == '__main__':
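The Callback=Progress(old_path) argument in the upload hunk refers to a class defined earlier in this file and elided from the diff. For readers without the full source: a boto3 upload callback is any callable that receives the number of bytes transferred so far, and boto3 may invoke it from several worker threads, which is why the script imports threading. A minimal sketch of such a callback, modeled on the common boto3 pattern rather than this script's exact implementation:

    import os
    import sys
    import threading

    class Progress(object):
        '''Prints a carriage-return progress line as boto3 reports bytes sent.'''

        def __init__(self, filename):
            self._filename = filename
            self._size = float(os.path.getsize(filename))
            self._seen_so_far = 0
            self._lock = threading.Lock()  # upload_file may call back concurrently

        def __call__(self, bytes_amount):
            with self._lock:
                self._seen_so_far += bytes_amount
                percentage = (self._seen_so_far / self._size) * 100
                sys.stdout.write('\r%s  %s / %s  (%.2f%%)' % (
                    self._filename, self._seen_so_far, self._size, percentage))
                sys.stdout.flush()

This also explains the sys.stdout.write("\r\n") after each upload: it terminates the progress line before the next file starts.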