# generate-parc-metadata.py
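"""Assemble full per-segmentation metadata files.

For each segmentation under ./output/precomputed/segmentations, merge the
input metadata from ./neuroglancer-scripts-input/metadata/{seg_name}.json,
the volume's transform.json, and the {seg_name}.regions.json region list
into a single ./output/metadata/{seg_name}.full.json.

Run from the repository root; the repository directory name becomes part
of the published volume URL:

    python generate-parc-metadata.py
"""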

import os
import json

# Root URL under which the precomputed volumes are published.
url_root = 'https://neuroglancer.humanbrainproject.eu/precomputed/data-repo-ng-bot'
# Directory holding the per-segmentation input metadata.
path_to_input_dir = './neuroglancer-scripts-input'


def assemble_segmentation(seg_name):
    cwd = os.getcwd()
    # The repository name (the current working directory) is embedded in the
    # published volume URL.
    reponame = os.path.basename(cwd)

    # Input metadata for this segmentation.
    with open(f'{path_to_input_dir}/metadata/{seg_name}.json', 'r') as fp:
        meta = json.load(fp)

    # Transform emitted alongside the precomputed volume.
    with open(f'./output/precomputed/segmentations/{seg_name}/transform.json', 'r') as fp:
        transform = json.load(fp)
    # Freshly generated dataset entry describing the precomputed volume.
    new_dataset = {
        "@type": "fzj/tmp/volume_type/v0.0.1",
        "@id": f"fzj/tmp/volume_type/v0.0.1/{seg_name}",
        "name": "Untitled segment",
        "volume_type": "neuroglancer/precomputed",
        "url": f"{url_root}/{reponame}/precomputed/{seg_name}",
        "detail": {
            "neuroglancer/precomputed": {
                "transform": transform
            }
        },
        "space_id": None,
        "map_type": "labelled"
    }
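    # Note: "Untitled segment" and the null space_id look like placeholders;
    # when the input metadata already carries a dataset entry for this
    # segmentation, its values override these defaults in the merge below
    # (presumably the intended way to supply the real name and space id).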
    # At most one existing dataset entry may carry this segmentation's name.
    meta['datasets'] = meta.get('datasets', [])
    dss = [ds for ds in meta['datasets'] if ds.get('name') == seg_name]
    if len(dss) > 1:
        raise Exception(f'Expected no more than one dataset with name {seg_name}. Instead, found {len(dss)}.')

    # Drop any old entry, then re-append it merged over the generated
    # defaults (keys from the existing entry win).
    meta['datasets'] = [ds for ds in meta['datasets'] if ds.get('name') != seg_name]
    append_ds = {**new_dataset, **dss[0]} if len(dss) == 1 else new_dataset
    meta['datasets'].append(append_ds)
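    # Illustrative merge (hypothetical values): the later unpacking wins, so
    #   {**{"name": "Untitled segment", "space_id": None},
    #    **{"name": "my-seg", "space_id": "my-space"}}
    # yields {"name": "my-seg", "space_id": "my-space"}.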

    # Region list extracted for the volume, copied into the metadata verbatim.
    with open(f'./output/metadata/{seg_name}.regions.json', 'r') as fp:
        regions = json.load(fp)
    meta['regions'] = regions

    # Write the assembled metadata next to the regions file.
    with open(f'./output/metadata/{seg_name}.full.json', 'w') as fp:
        json.dump(meta, fp, indent=2)
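
# Sketch of the emitted {seg_name}.full.json for an otherwise-empty input
# metadata file (illustrative values, not a real dataset):
#
#   {
#     "datasets": [
#       {
#         "@type": "fzj/tmp/volume_type/v0.0.1",
#         "@id": "fzj/tmp/volume_type/v0.0.1/<seg_name>",
#         "name": "Untitled segment",
#         ...
#       }
#     ],
#     "regions": [...]
#   }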


def main():
    path_to_segs = './output/precomputed/segmentations'
    # Each directory under the segmentations root names one segmentation.
    for seg_name in os.listdir(path_to_segs):
        assemble_segmentation(seg_name)


if __name__ == "__main__":
    main()