simplified_api.py

from .constraints import (
    EnsureDatasetSiblingName,
    EnsureExistingDirectory,
)
# each item is a command that is allowed in the API
# the key is the command name in the Python API.
# the values are dicts with the following keys
# - name: display name of the command ('&' marks the accelerator key)
# - exclude_parameters: set with parameter names to
#   exclude from the API
# - parameter_display_names: mapping of parameter name to display label
# - parameter_order: mapping of parameter name to its position in the UI
# - parameter_constraints: mapping of parameter name to a value constraint
# - parameter_nargs: mapping of parameter name to the number of values it takes
api = dict(
    clone=dict(
        name='&Clone a dataset',
        exclude_parameters=set((
            'git_clone_opts',
            'reckless',
            'description',
        )),
        parameter_display_names=dict(
            source='Clone from',
            path='Clone into',
            dataset='Register in superdataset',
        ),
        parameter_order=dict(
            source=0,
            path=1,
            dataset=2,
        ),
        parameter_constraints=dict(
            path=EnsureExistingDirectory(),
        ),
    ),
    create=dict(
        name='C&reate a dataset',
        exclude_parameters=set((
            'initopts',
            'description',
            'fake_dates',
        )),
        parameter_display_names=dict(
            force='OK if target directory not empty',
            path='Create at',
            dataset='Register in superdataset',
        ),
        parameter_order=dict(
            path=0,
            annex=1,
            dataset=2,
        ),
        parameter_constraints=dict(
            path=EnsureExistingDirectory(),
        ),
    ),
    create_sibling_gitlab=dict(
        name='Create a Git&Lab sibling',
        exclude_parameters=set((
            'dryrun',
        )),
    ),
    create_sibling_gin=dict(
        name='Create a GI&N sibling',
        exclude_parameters=set((
            'dryrun',
            'api',
        )),
        parameter_display_names=dict(
            dataset='Dataset',
            reponame='New repository name on Gin',
            name='Sibling name',
            private='Make Gin repo private',
            existing='If the sibling exists already...',
            recursive='Create siblings for subdatasets',
            credential='Name of credential to be used',
            access_protocol='Access protocol',
            publish_depends='Add publication dependency to',
        ),
        parameter_order=dict(
            dataset=0,
            reponame=1,
            private=2,
            name=3,
            access_protocol=4,
            existing=5,
            recursive=6,
            credential=7,
        ),
    ),
    create_sibling_github=dict(
        name='Create a Git&Hub sibling',
        exclude_parameters=set((
            'dryrun',
            'github_login',
            'github_organization',
            'api',
        )),
        parameter_display_names=dict(
            dataset='Dataset',
            reponame='New repository name on GitHub',
            name='Sibling name',
            private='Make GitHub repo private',
            existing='If the sibling exists already...',
            recursive='Create siblings for subdatasets',
            credential='Name of credential to be used',
            access_protocol='Access protocol',
            publish_depends='Add publication dependency to',
        ),
        parameter_order=dict(
            dataset=0,
            reponame=1,
            private=2,
            name=3,
            access_protocol=4,
            existing=5,
            recursive=6,
            credential=7,
        ),
    ),
    create_sibling_webdav=dict(
        name='Create a &WebDAV sibling',
    ),
    drop=dict(
        name='Dr&op content',
        exclude_parameters=set((
            'check',
            'if_dirty',
            'reckless',
        )),
        parameter_order=dict(
            dataset=0,
            what=1,
            path=2,
            recursive=3,
        ),
    ),
    get=dict(
        name='&Get content',
        exclude_parameters=set((
            'description',
            'reckless',
            'source',
        )),
        parameter_display_names=dict(
            path='Only get',
            # 'all' because we have no recursion_limit enabled
            recursive='Also get all subdatasets',
            get_data='Get file content',
        ),
        parameter_order=dict(
            dataset=0,
            get_data=1,
            path=2,
            recursive=3,
        ),
    ),
    push=dict(
        name='&Push data/updates to a sibling',
        exclude_parameters=set((
            'since',
        )),
        parameter_constraints=dict(
            to=EnsureDatasetSiblingName(),
        ),
    ),
    save=dict(
        name='&Save the state in a dataset',
        exclude_parameters=set((
            'updated',
            'message_file',
        )),
        parameter_display_names=dict(
            dataset='Save changes in dataset at',
            message='Description of change',
            path='Only save',
            recursive='Include changes in subdatasets',
            to_git='Do not put files in annex',
            version_tag='Tag for saved dataset state',
            amend='Amend last saved state',
        ),
        parameter_order=dict(
            dataset=0,
            message=1,
            path=2,
            recursive=3,
            to_git=4,
            version_tag=5,
            amend=6,
        ),
    ),
    update=dict(
        name='&Update from a sibling',
        exclude_parameters=set((
            'merge',
            'fetch_all',
            'how_subds',
            'follow',
            'reobtain_data',
        )),
        parameter_constraints=dict(
            sibling=EnsureDatasetSiblingName(),
        ),
    ),
)
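

# Illustrative sketch, not part of the original module: one way a consumer of
# `api` could combine 'parameter_order' and 'parameter_display_names' into an
# ordered list of (parameter, label) pairs for a command. The helper name
# `_ordered_parameters` is hypothetical and only meant to show how the two
# mappings are expected to interact.
def _ordered_parameters(cmdname):
    spec = api[cmdname]
    order = spec.get('parameter_order', {})
    labels = spec.get('parameter_display_names', {})
    # sort the explicitly ordered parameters by their declared position and
    # fall back to the raw parameter name when no display label is given
    return [
        (p, labels.get(p, p))
        for p in sorted(order, key=order.get)
    ]

# e.g. _ordered_parameters('clone') yields
# [('source', 'Clone from'), ('path', 'Clone into'),
#  ('dataset', 'Register in superdataset')]
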
dataset_api = {
    c: s for c, s in api.items()
    if c in (
        'clone', 'create',
        'create_sibling_gitlab', 'create_sibling_gin',
        'create_sibling_github', 'create_sibling_webdav',
        'drop', 'get', 'push', 'save', 'update'
    )
}
directory_api = {
    c: s for c, s in api.items() if c in ('clone', 'create')
}
directory_in_ds_api = {
    c: s for c, s in api.items()
    if c in ('clone', 'create', 'drop', 'get', 'push', 'save')
}
file_api = None
file_in_ds_api = {
    c: s for c, s in api.items() if c in ('save',)
}
annexed_file_api = {
    c: s for c, s in api.items()
    if c in ('drop', 'get', 'push', 'save')
}
# get of a single annexed file can be simpler
from copy import deepcopy
annexed_file_get = deepcopy(annexed_file_api['get'])
annexed_file_get['exclude_parameters'].update((
    # not getting data for an annexed file makes no sense
    'get_data',
    # recursion underneath a file is not possible
    'recursive',
))
annexed_file_get['parameter_nargs'] = dict(
    path=1,
)
annexed_file_api['get'] = annexed_file_get
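# The resulting 'get' spec for an annexed file hides 'get_data' and
# 'recursive' in addition to the generic exclusions ('description',
# 'reckless', 'source'), and limits 'path' to a single value.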
gooey_suite = dict(
    title='Simplified',
    description='Simplified access to the most essential operations',
    options=dict(
        disable_manual_path_input=True,
    ),
    apis=dict(
        dataset=dataset_api,
        directory=directory_api,
        directory_in_ds=directory_in_ds_api,
        file=file_api,
        file_in_ds=file_in_ds_api,
        annexed_file=annexed_file_api,
    ),
    # simplified API has no groups
    api_group_order={},
    exclude_parameters=set((
        'result_renderer',
        'return_type',
        'result_filter',
        'result_xfm',
        'on_failure',
        'jobs',
        'recursion_limit',
    )),
    parameter_display_names=dict(
        annex='Dataset with file annex',
        cfg_proc='Configuration procedure(s)',
        dataset='Dataset location',
    ),
)
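

# Illustrative sketch, not part of the original module: how a consumer of
# `gooey_suite` might resolve the parameters to hide for a command in a given
# UI context, combining the suite-wide excludes with the command-specific
# ones. The helper name `_effective_excludes` is hypothetical.
def _effective_excludes(context, cmdname):
    ctx_api = gooey_suite['apis'][context]
    if ctx_api is None or cmdname not in ctx_api:
        # the command is not offered in this context
        # (e.g. plain files expose no API at all)
        return None
    excludes = set(gooey_suite['exclude_parameters'])
    excludes.update(ctx_api[cmdname].get('exclude_parameters', set()))
    return excludes

# e.g. _effective_excludes('annexed_file', 'drop') contains both the
# suite-wide 'jobs' and 'recursion_limit' and drop's own
# 'check', 'if_dirty', and 'reckless'.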