34c3 mods

commit 04a66cb90c
parent b0ac0b83f4
3 changed files with 2692 additions and 110 deletions

GPN17-Fahrplan.XML (executable file, 2536 lines)
File diff suppressed because it is too large.
@@ -14,14 +14,14 @@ from xml.sax.saxutils import escape as xmlescape
 # Parse arguments
 parser = argparse.ArgumentParser(
     description='C3VOC Intro-Outro-Generator - Variant to use with apple Motion Files',
     usage="./make.py gpn17/Intro.motn https://url/to/schedule.xml",
     formatter_class=argparse.RawTextHelpFormatter)

 parser.add_argument('motn', action="store", metavar='Motion-File', type=str, help='''
     Path to your Motion-File .motn-File
     ''')
 parser.add_argument('schedule', action="store", metavar='Schedule-URL', type=str, nargs='?', help='''
     URL or Path to your schedule.xml
     ''')

@@ -41,172 +41,217 @@ parser.add_argument('--id', dest='ids', nargs='+', action="store", type=int, hel

 args = parser.parse_args()


 def headline(str):
     print("##################################################")
     print(str)
     print("##################################################")
     print()


 def error(str):
     headline(str)
     parser.print_help()
     sys.exit(1)


 if not args.motn:
     error("The Motion-File is a rquired argument")

 if not args.debug and not args.schedule:
     error("Either specify --debug or supply a schedule")

 if args.debug:
     persons = ['Arnulf Christl', 'Astrid Emde', 'Dominik Helle', 'Till Adams']
     events = [{
         'id': 3773,
         'title': 'Was ist Open Source, wie funktioniert das?',
         'subtitle': 'Die Organisation der Open Geo- und GIS-Welt. Worauf man achten sollte.',
         'persons': persons,
         'personnames': ', '.join(persons),
         'room': 'Großer Saal',
     }]

 else:
     events = list(renderlib.events(args.schedule))


 def describe_event(event):
     return "#{}: {}".format(event['id'], event['title'])


 def event_print(event, message):
     print("{} – {}".format(describe_event(event), message))


-tempdir = tempfile.TemporaryDirectory()
-print('working in '+tempdir.name)
+tempdir = '/Users/pkoerner/VOC/34c3_intro_bix'
+print('working in ' + tempdir)


 def fmt_command(command, **kwargs):
     args = {}
     for key, value in kwargs.items():
         args[key] = shlex.quote(value)

     command = command.format(**args)
     return shlex.split(command)


 def run(command, **kwargs):
     return subprocess.check_call(
         fmt_command(command, **kwargs))


 def run_output(command, **kwargs):
     return subprocess.check_output(
         fmt_command(command, **kwargs),
         encoding='utf-8',
         stderr=subprocess.STDOUT)

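Note: fmt_command quotes every keyword argument with shlex.quote before substituting it into the command template and splitting it into an argv list, so values containing spaces survive as a single argument. A minimal standalone sketch of that behaviour, restated with a dict comprehension and using a made-up file name and a bare {input} placeholder like the Compressor invocation below:

    import shlex

    def fmt_command(command, **kwargs):
        # quote each value so spaces stay inside one argument after shlex.split
        args = {key: shlex.quote(value) for key, value in kwargs.items()}
        return shlex.split(command.format(**args))

    # hypothetical input file, for illustration only
    print(fmt_command('ffmpeg -i {input} out.mov', input='my intro.mov'))
    # -> ['ffmpeg', '-i', 'my intro.mov', 'out.mov']
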
+def enrich_event(event):
+    print(event)
+    result = {}
+    result.update(event)
+    art = {"red":"1", "green":"1", "blue":"0.20000000298023224"}
+    ccc = {"red":"0.60000002384185791", "green":"0.80000001192092896", "blue":"0"}
+    entertainment = ccc
+    ethics = {"red":"1", "green":"0.20000000298023224", "blue":"1"}
+    hardware = {"red":"1", "green":"0.40000000596046448", "blue":"0"}
+    resilience = {"red":"0.64313727617263794", "green":"0.10980392247438431", "blue":"0.19215686619281769"}
+    science = {"red":"1", "green":"0.40000000596046448", "blue":"0"}
+    security = {"red":"0.4117647111415863", "green":"0", "blue":"0.82745099067687988"}
+
+    if "Art" in event['track']:
+        result.update(art)
+    elif "CCC" in event['track']:
+        result.update(ccc)
+    elif "Entertainment" in event['track']:
+        result.update(entertainment)
+    elif "Ethics" in event['track']:
+        result.update(ethics)
+    elif "Hardware" in event['track']:
+        result.update(hardware)
+    elif "Resilience" in event['track']:
+        result.update(resilience)
+    elif "Science" in event['track']:
+        result.update(science)
+    elif "Security" in event['track']:
+        result.update(security)
+    else:
+        print("Found unrecognized track name %s, assuming CCC track colour" % (event['track']))
+        result.update(ccc)
+
+    return result
+
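Note: enrich_event merges a per-track RGB triple into the event dict, so that the generic "$key" substitution in enqueue_job below can also fill colour placeholders in the Motion template; this assumes the .motn file contains placeholders such as $red, $green and $blue alongside $title and $personnames. A minimal sketch of that substitution over a made-up template snippet:

    from xml.sax.saxutils import escape as xmlescape

    # hypothetical event dict and template snippet, for illustration only
    event = {'title': 'Tea & Entropy', 'red': '1', 'green': '1', 'blue': '0.2'}
    xmlstr = '<text>$title</text><color red="$red" green="$green" blue="$blue"/>'
    for key, value in event.items():
        xmlstr = xmlstr.replace("$" + str(key), xmlescape(str(value)))
    print(xmlstr)
    # <text>Tea &amp; Entropy</text><color red="1" green="1" blue="0.2"/>
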
 def enqueue_job(event):
     event_id = str(event['id'])
-    work_doc = os.path.join(tempdir.name, event_id+'.motn')
-    intermediate_clip = os.path.join(tempdir.name, event_id+'.mov')
+    work_doc = os.path.join(tempdir, event_id + '.motn')
+    intermediate_clip = os.path.join(tempdir, event_id + '.mov')

     with open(args.motn, 'r') as fp:
         xmlstr = fp.read()

     for key, value in event.items():
-        xmlstr = xmlstr.replace("$"+str(key), xmlescape(str(value)))
+        xmlstr = xmlstr.replace("$" + str(key), xmlescape(str(value)))

     with open(work_doc, 'w') as fp:
         fp.write(xmlstr)

     compressor_info = run_output(
         '/Applications/Compressor.app/Contents/MacOS/Compressor -batchname {batchname} -jobpath {jobpath} -settingpath apple-prores-4444.cmprstng -locationpath {locationpath}',
         batchname=describe_event(event),
         jobpath=work_doc,
         locationpath=intermediate_clip)

     match = re.search("<jobID ([A-Z0-9\-]+) ?\/>", compressor_info)
     if not match:
-        event_print(event, "unexpected output from compressor: \n"+compressor_info)
+        event_print(event, "unexpected output from compressor: \n" + compressor_info)
         return

     return match.group(1)

 def fetch_job_status():
     compressor_status = run_output('/Applications/Compressor.app/Contents/MacOS/Compressor -monitor')
     job_status_matches = re.finditer("<jobStatus (.*) \/jobStatus>", compressor_status)

     status_dict = {}
     for match in job_status_matches:
         lexer = shlex.shlex(match.group(1), posix=True)
         lexer.wordchars += "="

         job_status = dict(word.split("=", maxsplit=1) for word in lexer)
         job_id = job_status['jobid']
         status_dict[job_id] = job_status

     return status_dict

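Note: fetch_job_status tokenizes the attribute list of each <jobStatus ...> element with shlex, adding "=" to the word characters so that each key=value pair comes out as a single token, then splits every token once on "=" to build a dict keyed by jobid. A small sketch against a made-up attribute string (the real Compressor -monitor output is assumed to quote its values and may differ in detail):

    import shlex

    # hypothetical jobStatus attributes, for illustration only
    attrs = 'jobid="51A2B3C4-0000-0000-0000-DEADBEEF0001" status="Successful"'
    lexer = shlex.shlex(attrs, posix=True)
    lexer.wordchars += "="   # keep key=value together as one token
    job_status = dict(word.split("=", maxsplit=1) for word in lexer)
    print(job_status['jobid'], job_status['status'])
    # 51A2B3C4-0000-0000-0000-DEADBEEF0001 Successful
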
 def filter_finished_jobs(active_jobs):
     job_status = fetch_job_status()

     new_active_jobs = []
     finished_jobs = []
     for job_id, event in active_jobs:
         if job_id not in job_status:
             status = 'Processing'
         else:
             status = job_status[job_id]['status']

         if status == 'Processing':
             new_active_jobs.append((job_id, event))
             continue
         elif status == 'Successful':
             finished_jobs.append((job_id, event))
         else:
-            event_print(event, "failed with staus="+status+" – removing from postprocessing queue")
+            event_print(event, "failed with staus=" + status + " – removing from postprocessing queue")

     return new_active_jobs, finished_jobs


 def finalize_job(job_id, event):
     event_id = str(event['id'])
-    intermediate_clip = os.path.join(tempdir.name, event_id+'.mov')
-    final_clip = os.path.join(os.path.dirname(args.motn), event_id+'.ts')
+    intermediate_clip = os.path.join(tempdir, event_id + '.mov')
+    final_clip = os.path.join(os.path.dirname(args.motn), event_id + '.mov')

-    run('ffmpeg -y -hide_banner -loglevel error -i "{input}" -ar 48000 -ac 1 -f s16le -i /dev/zero -map 0:v -c:v mpeg2video -q:v 0 -aspect 16:9 -map 1:0 -map 1:0 -map 1:0 -map 1:0 -shortest -f mpegts "{output}"',
-        input=intermediate_clip,
-        output=final_clip)
-
-    event_print(event, "finalized intro to "+final_clip)
+    # run('ffmpeg -y -hide_banner -loglevel error -i "{input}" -ar 48000 -ac 1 -f s16le -i /dev/zero -map 0:v -c:v mpeg2video -q:v 0 -aspect 16:9 -map 1:0 -map 1:0 -map 1:0 -map 1:0 -shortest -f mpegts "{output}"',
+    #     input=intermediate_clip,
+    #     output=final_clip)
+
+    run('mv "{input}" "{output}"',
+        input=intermediate_clip,
+        output=final_clip)
+
+    event_print(event, "finalized intro to " + final_clip)


 active_jobs = []

 print("enqueuing {} jobs into compressor".format(len(events)))
 for event in events:
     if args.ids and event['id'] not in args.ids:
         continue

+    event = enrich_event(event)
     job_id = enqueue_job(event)
     if not job_id:
         event_print(event, "job was not enqueued successfully, skipping postprocessing")
         continue

-    event_print(event, "enqueued as "+job_id)
+    event_print(event, "enqueued as " + job_id)
     active_jobs.append((job_id, event))

 print("waiting for rendering to complete")

 while len(active_jobs) > 0:
     time.sleep(60)
     active_jobs, finished_jobs = filter_finished_jobs(active_jobs)

     print("{} jobs in queue, {} ready to finalize".format(len(active_jobs), len(finished_jobs)))
     for job_id, event in finished_jobs:
         event_print(event, "finalizing job")
         finalize_job(job_id, event)

-print('all done, cleaning up '+tempdir.name)
-tempdir.cleanup()
+print('all done, cleaning up ' + tempdir)
+#tempdir.cleanup()
In the events() schedule parser (called above as renderlib.events), a second hunk adds the track list:

@@ -223,6 +223,7 @@ def events(scheduleUrl, titlemap={}):
             'persons': personnames,
             'personnames': ', '.join(personnames),
             'room': room.attrib['name'],
+            'track': event.find('track').text.split(', '),
         }

         try:
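Note: the added 'track' key turns the schedule's <track> text into a list by splitting on ", ", and the track checks in enrich_event then test membership against that list (an exact element match, not a substring match). A tiny illustration with a made-up track string:

    # hypothetical <track> text from schedule.xml
    track = 'Ethics, Society & Politics'.split(', ')
    print(track)               # ['Ethics', 'Society & Politics']
    print('Ethics' in track)   # True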