12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758 |
import os
import json

# Base URL under which the precomputed volumes are publicly served.
url_root = 'https://neuroglancer.humanbrainproject.eu/precomputed/data-repo-ng-bot'
# Directory containing the hand-written per-segmentation metadata inputs.
path_to_input_dir = './neuroglancer-scripts-input'
def assemble_segmentation(seg_name):
    """Assemble the full metadata JSON for one precomputed segmentation.

    Reads the hand-written metadata for *seg_name* from
    ``{path_to_input_dir}/metadata/``, merges in the generated transform
    and regions files from ``./output/``, injects (or refreshes) the
    dataset entry pointing at the precomputed volume, and writes the
    result to ``./output/metadata/{seg_name}.full.json``.

    Args:
        seg_name: Name of the segmentation; also the directory name under
            ``./output/precomputed/segmentations/``.

    Raises:
        ValueError: if the input metadata already contains more than one
            dataset entry named *seg_name*.  (ValueError subclasses
            Exception, so callers catching the previous generic Exception
            still work.)
    """
    # The repository name is the last component of the CWD; it forms part
    # of the public URL of the precomputed volume.
    reponame = os.path.basename(os.getcwd())

    # JSON is UTF-8 by spec; make the decoding explicit instead of relying
    # on the platform default encoding.
    with open(f'{path_to_input_dir}/metadata/{seg_name}.json', 'r', encoding='utf-8') as fp:
        meta = json.load(fp)
    with open(f'./output/precomputed/segmentations/{seg_name}/transform.json', 'r', encoding='utf-8') as fp:
        transform = json.load(fp)

    new_dataset = {
        "@type": "fzj/tmp/volume_type/v0.0.1",
        "@id": f"fzj/tmp/volume_type/v0.0.1/{seg_name}",
        "name": "Untitled segment",
        "volume_type": "neuroglancer/precomputed",
        "url": f"{url_root}/{reponame}/precomputed/{seg_name}",
        "detail": {
            "neuroglancer/precomputed": {
                "transform": transform
            }
        },
        "space_id": None,
        "map_type": "labelled"
    }

    datasets = meta.get('datasets', [])
    existing = [ds for ds in datasets if ds.get('name') == seg_name]
    if len(existing) > 1:
        raise ValueError(
            f'Expected no more than one dataset with name {seg_name}. Instead, found {len(existing)}.'
        )

    # Drop any stale entry with this name, then append the merged one.
    # Fields from a pre-existing entry take precedence over the freshly
    # generated defaults (dict-unpack order: existing overrides new).
    meta['datasets'] = [ds for ds in datasets if ds.get('name') != seg_name]
    meta['datasets'].append({**new_dataset, **existing[0]} if existing else new_dataset)

    with open(f'./output/metadata/{seg_name}.regions.json', 'r', encoding='utf-8') as fp:
        meta['regions'] = json.load(fp)

    with open(f'./output/metadata/{seg_name}.full.json', 'w', encoding='utf-8') as fp:
        json.dump(meta, fp, indent=2)
def main():
    """Assemble full metadata for every segmentation found on disk."""
    segmentations_root = './output/precomputed/segmentations'
    for name in os.listdir(segmentations_root):
        assemble_segmentation(name)
# Script entry point: only run when executed directly, not on import.
if __name__ == "__main__":
    main()
|