# dodo.py — pydoit task definitions for the VR Cliff analysis pipeline.
import itertools as it
import os
from glob import glob
from os import path
- # fname_merged = '../data/VRCliff_AllData.h5'
- # def task_merge_sessions():
- # loadpath = '/home/nickdg/theta_storage/data/VR_Experiments_Round_2/processed_data_by_experiment/VRCliffExp'
- # fnames = glob(loadpath + '/*/*.h5', recursive=True)
- # return {
- # 'actions': ['python merge_sessions.py'],
- # 'file_dep': [sorted(fnames)[0]],
- # 'targets': [fname_merged],
- # 'verbosity': 2,
- # }
- # fname_clean = '../data/VRCliff_AllData_cleaned.h5'
- # def task_process_cliffdata():
- # return {
- # 'actions': ['python cleaning.py'],
- # 'file_dep': [fname_merged],
- # 'targets': [fname_clean],
- # 'verbosity': 0,
- # }
# Write the notebook filenames in the order you want them to be run.
# NOTE(review): later notebooks presumably consume files produced by earlier
# ones — confirm before reordering.
notebooks = [
    'Merge Data and Label Phases.ipynb',
    'Calculate and Transform Data.ipynb',
    'Crossings Deflections Analysis By Distance2.ipynb',
    'Movement Direction Analysis2.ipynb',
]
def task_update_and_export_notebooks():
    """Doit task generator: execute each notebook, then export it to HTML and LaTeX.

    For every notebook in ``notebooks`` (in order) this yields:
      1. a ``run_notebook`` task that executes the notebook in place via
         ``jupyter nbconvert --execute``;
      2. one ``convert_to_<fmt>`` task per output format ('html', 'latex')
         that exports the notebook into a sibling directory named after the
         format, declaring the exported file as the doit target.
    """
    for basename in notebooks:
        notebook = '../notebooks/{}'.format(basename)
        yield {
            'actions': ['jupyter nbconvert --config=/home/nickdg/.ipython/profile_default/ipython_nbconvert_config.py --execute "%(dependencies)s" '],
            'file_dep': [notebook],
            'name': 'run_notebook: {}'.format(path.basename(notebook)),
            # 'verbosity': 2,
        }
        for output in ['html', 'latex']:
            pdf_dirname = path.join(path.dirname(notebook), output)
            # Bug fix: original called os.mkdir without importing os
            # (NameError the first time the directory was missing).
            # makedirs(exist_ok=True) also avoids the exists/mkdir race.
            os.makedirs(pdf_dirname, exist_ok=True)
            yield {
                'actions': ['jupyter nbconvert "%(dependencies)s" --to {} --output-dir "{}"'.format(output, path.abspath(pdf_dirname))],
                'file_dep': [path.abspath(notebook)],
                'targets': [path.join(pdf_dirname, path.splitext(path.basename(notebook))[0] + '.' + output)],
                'name': 'convert_to_{}: {}'.format(output, path.basename(notebook)),
                'verbosity': 2
            }
|