Pull request #57: 56 update code to match new api spec
Changes from 4 commits: b6e1448, 8232649, 2b7c2c8, 59dcac9, 276d261
File 1 of 2: CLI commands
```diff
@@ -109,13 +109,9 @@ def list_endpoints():
 @click.command("upload")
 @click.argument("file_path", nargs=-1)
+@click.argument("id")
 @click.option("-n", default=1, help="Number of threads for parallel uploads.")
-@click.option(
-    "--attach_to",
-    default=None,
-    help="Supply a model output id to immediately attach the file to.",
-)
-def file_upload(file_path, n: int = 1, attach_to=None):
+def file_upload(file_path, id, n: int = 1):
     """
     Upload a file to the server.
```
```diff
@@ -130,19 +126,15 @@ def file_upload(file_path, n: int = 1, attach_to=None):
         client.upload_files,
         files=list(file_path),
         n=n,
-        attach_to=attach_to,
+        id=id,
         progress=True,
     )

     for response in responses:

-        # For singular case
-        if n == 1:
-            response = response[0]
-
         files = response.get("data").get("files")
         for f in files:
-            click.echo(f.get("file"))
+            click.echo(f.get("id"))


 @click.command("list")
```
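The upload command now takes the model output ID as a required positional argument in place of the optional --attach_to flag, and echoes the server-side file IDs after upload. A minimal sketch of exercising the new interface with click's test runner; the import path is an assumption, since the real module name is not shown in this diff:

```python
# Sketch only: the CLI module path below is a guess, not part of this PR.
from click.testing import CliRunner

from meorg_client.cli import file_upload  # hypothetical import path

runner = CliRunner()

# --help confirms the new signature: FILE_PATH... ID, plus -n for the thread count;
# the old --attach_to option no longer appears.
result = runner.invoke(file_upload, ["--help"])
print(result.output)
```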
```diff
@@ -174,9 +166,9 @@ def file_attach(file_id: str, output_id: str):
     click.echo("SUCCESS")


-@click.command("detach_all")
+@click.command("delete_all")
 @click.argument("output_id")
-def file_detach_all(output_id: str):
+def file_delete_all(output_id: str):
     """Detach all files from a model output.

     Parameters
```
```diff
@@ -185,7 +177,23 @@ def file_detach_all(output_id: str):
         Model output ID.
     """
     client = _get_client()
-    _ = _call(client.detach_all_files_from_model_output, id=output_id)
+    _ = _call(client.delete_all_files_from_model_output, id=output_id)
     click.echo("SUCCESS")


+@click.command("delete")
+@click.argument("output_id")
+@click.argument("file_id")
+def file_delete(output_id: str, file_id: str):
+    """Detach a file from a model output.
+
+    Parameters
+    ----------
+    output_id : str
+        Model output ID.
+    """
+    client = _get_client()
+    _ = _call(client.delete_file_from_model_output, id=output_id, file_id=file_id)
+    click.echo("SUCCESS")
```
```diff
@@ -286,9 +294,8 @@ def cli_analysis():
 # Add file commands
 cli_file.add_command(file_list)
 cli_file.add_command(file_upload)
-cli_file.add_command(file_attach)
-cli_file.add_command(file_detach_all)
-
+cli_file.add_command(file_delete)
+cli_file.add_command(file_delete_all)

 # Add endpoint commands
 cli_endpoints.add_command(list_endpoints)
```

Review comment (on `cli_file.add_command(file_delete)`): "Added for CLI access."
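With file_attach and file_detach_all dropped from the group and file_delete / file_delete_all registered in their place, a quick check of the registered subcommands could look like this (again assuming a hypothetical import path):

```python
# Sketch only: `cli_file` is assumed to be the click.Group defined in this module.
from meorg_client.cli import cli_file  # hypothetical import path

print(sorted(cli_file.commands))
# Expected to contain "delete" and "delete_all" alongside "list" and "upload",
# with no "attach" or "detach_all" entries.
```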
File 2 of 2: client methods
```diff
@@ -221,8 +221,8 @@ def logout(self):
     def _upload_files_parallel(
         self,
         files: Union[str, Path, list],
+        id: str,
         n: int = 2,
-        attach_to: str = None,
         progress=True,
     ):
         """Upload files in parallel.
```
```diff
@@ -231,10 +231,10 @@ def _upload_files_parallel(
         ----------
         files : Union[str, Path, list]
             A path to a file, or a list of paths.
+        id : str
+            Module output id to attach to, by default None.
         n : int, optional
             Number of threads to use, by default 2.
-        attach_to : str, optional
-            Module output id to attach to, by default None.

         Returns
         -------
```
```diff
@@ -248,16 +248,17 @@ def _upload_files_parallel(
         # Do the parallel upload
         responses = None
         responses = meop.parallelise(
-            self._upload_file, n, files=files, attach_to=attach_to, progress=progress
+            self._upload_file, n, filepath=files, id=id, progress=progress
         )

         # These should already be a list as per the parallelise function.
         return responses

     def upload_files(
         self,
         files: Union[str, Path, list],
+        id: str,
         n: int = 1,
-        attach_to: str = None,
         progress=True,
     ) -> list:
         """Upload files.
```

Review comment (on the updated `meop.parallelise` call): "Passing the id over to the parallelise method."
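The review note says the id is now passed through to the parallelise helper, alongside the keyword renamed from files to filepath. meop.parallelise itself is not part of this diff, so the following is only an illustration of the fan-out pattern the call site implies, under the assumption that it maps the single-file upload across a thread pool:

```python
# Illustration only: a guess at the shape of a parallelise helper, based on the call
# site `meop.parallelise(self._upload_file, n, filepath=files, id=id, progress=True)`.
from concurrent.futures import ThreadPoolExecutor


def parallelise_sketch(func, n, filepath=None, progress=True, **kwargs):
    """Call `func` once per path in `filepath` across up to `n` threads."""
    results = []
    with ThreadPoolExecutor(max_workers=n) as pool:
        # Progress reporting is omitted in this sketch.
        futures = [pool.submit(func, filepath=fp, **kwargs) for fp in filepath]
        for future in futures:
            # _upload_file returns a list, so extend to keep the result flat.
            results.extend(future.result())
    return results
```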
```diff
@@ -266,10 +267,11 @@ def upload_files(
         ----------
         files : Union[str, Path, list]
             A filepath, or a list of filepaths.
+        id : str
+            Model output ID to immediately attach to.
         n : int, optional
             Number of threads to parallelise over, by default 1
-        attach_to : str, optional
-            Model output ID to immediately attach to, by default None
+

         Returns
         -------
```
```diff
@@ -288,40 +290,27 @@ def upload_files(
         responses = list()
         if n == 1:
             for fp in tqdm(files, total=len(files)):
-                response = self._upload_file(fp, attach_to=attach_to)
-                responses.append(response)
+                response = self._upload_file(fp, id=id)
+                responses += response
         else:
-
-            # Disable the auto attach to avoid race condition
-            responses = self._upload_files_parallel(
-                files, n=n, attach_to=None, progress=progress
+            responses += self._upload_files_parallel(
+                files, n=n, id=id, progress=progress
             )

-        if attach_to:
-
-            file_ids = list()
-
-            for response in responses:
-                file_id = response.get("data").get("files")[0].get("file")
-                file_ids.append(file_id)
-
-            self.attach_files_to_model_output(id=attach_to, files=file_ids)
-
-        return mu.ensure_list(responses)
+        # return mu.ensure_list(responses)
+        return responses

     def _upload_file(
-        self,
-        files: Union[str, Path, list],
-        attach_to: str = None,
+        self, filepath: Union[str, Path], id: str
     ) -> Union[dict, requests.Response]:
-        """Upload a file.
+        """Upload a single file.

         Parameters
         ----------
-        files : path-like, list
-            Path to the file, or a list containing paths.
-        attach_to : str, optional
-            Optional model_output_id to attach the files to, by default None
+        filepath : path-like
+            Path to the file
+        id : str
+            model_output_id to attach the files to

         Returns
         -------
```

Review comment (on `def _upload_file`): "TL;DR - making this single-file made everything a lot simpler for the other methods which all call this."
```diff
@@ -331,56 +320,42 @@ def _upload_file(
         Raises
         ------
         TypeError
-            When supplied file(s) are neither path-like nor readable.
+            When supplied file is neither path-like nor readable.
         FileNotFoundError
-            When supplied file(s) cannot be found.
+            When supplied file is cannot be found.
         """

-        # Cast as list for iterative upload
-        files = mu.ensure_list(files)
+        file_obj = None

         # Prepare the files
-        _files = list()
-        for ix, f in enumerate(files):
-            # Path-like
-            if isinstance(f, (str, Path)) and os.path.isfile(f):
-                _files.append(open(f, "rb"))
+        if isinstance(filepath, (str, Path)) and os.path.isfile(filepath):
+            file_obj = open(filepath, "rb")

-            # Bail out
-            else:
-                dtype = type(f)
-                raise TypeError(
-                    f"File at index {ix} is neither path-like nor readable ({dtype})."
-                )
+        # Bail out
+        else:
+            dtype = type(file_obj)
+            raise TypeError(f"File is neither path-like nor readable ({dtype}).")

         # Prepare the payload from the files
         payload = list()

-        for _f in _files:
-            filename = os.path.basename(_f.name)
-            ext = filename.split(".")[-1]
-            mimetype = mt.types_map[f".{ext}"]
-            payload.append(("file", (filename, _f, mimetype)))
+        filename = os.path.basename(file_obj.name)
+        ext = filename.split(".")[-1]
+        mimetype = mt.types_map[f".{ext}"]
+        payload.append(("file", (filename, file_obj, mimetype)))

         # Make the request
         response = self._make_request(
             method=mcc.HTTP_POST,
             endpoint=endpoints.FILE_UPLOAD,
             files=payload,
+            url_params=dict(id=id),
             return_json=True,
         )

         # Close all the file descriptors (requests should do this, but just to be sure)
         for fd in payload:
             fd[1][1].close()

-        # Automatically attach to a model output
-        if attach_to:
-
-            _ = self.attach_files_to_model_output(
-                attach_to, files=mu.get_uploaded_file_ids(response)
-            )
-
         return mu.ensure_list(response)

     def list_files(self, id: str) -> Union[dict, requests.Response]:
```
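As the review note above explains, _upload_file now handles exactly one file, while upload_files owns the iteration, the threading, and the flat response list. A usage sketch of the public method under the new signature; the import path, class name, and IDs are placeholders rather than anything defined in this diff:

```python
# Sketch only: module path, class name and constructor arguments are assumptions.
from meorg_client.client import Client  # hypothetical import path

client = Client()  # assumes an already-authenticated session

responses = client.upload_files(
    files=["file_a.nc", "file_b.nc"],  # hypothetical local paths
    id="<model_output_id>",
    n=2,
)

# Each response carries the uploaded file records, including their server-side IDs.
for response in responses:
    for f in response.get("data").get("files"):
        print(f.get("id"))
```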
```diff
@@ -400,42 +375,29 @@ def list_files(self, id: str) -> Union[dict, requests.Response]:
             method=mcc.HTTP_GET, endpoint=endpoints.FILE_LIST, url_params=dict(id=id)
         )

-    def attach_files_to_model_output(
-        self, id: str, files: list
-    ) -> Union[dict, requests.Response]:
-        """Attach files to a model output.
+    def delete_file_from_model_output(self, id: str, file_id: str):
+        """Delete file from model output

         Parameters
         ----------
         id : str
             Model output ID.
-        files : list
-            List of file IDs.
+        file_id : str
+            File ID.

         Returns
         -------
-        Union[dict, requests.Response]
-            Response from ME.org.
+        Union[dict, requests.Request]
+            Response from ME.org
         """

-        # Get a list of files for the model output
-        current_files = self.list_files(id).get("data").get("files")
-
-        # Attach the new files to this list
-        new_files = current_files + files
-
-        # Update the resource
         return self._make_request(
-            mcc.HTTP_PATCH,
-            endpoint=endpoints.FILE_LIST,
-            url_params=dict(id=id),
-            json=new_files,
+            method=mcc.HTTP_DELETE,
+            endpoint=endpoints.FILE_DELETE,
+            url_params=dict(id=id, fileId=file_id),
        )

-    def detach_all_files_from_model_output(
-        self, id: str
-    ) -> Union[dict, requests.Response]:
-        """Detach all files from a model output.
+    def delete_all_files_from_model_output(self, id: str):
+        """Delete file from model output

         Parameters
         ----------
```

Review comment (on `delete_file_from_model_output`): "New delete method, turns 2 calls into 1."
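The reviewer notes this replaces two calls with one: rather than listing the current files and PATCHing an edited list back, detaching a single file is now a single DELETE against the FILE_DELETE endpoint. Continuing the sketch above, with `client` the same placeholder instance:

```python
# Sketch only: both IDs are placeholders.
response = client.delete_file_from_model_output(
    id="<model_output_id>",
    file_id="<file_id>",
)
```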
```diff
@@ -444,17 +406,22 @@ def detach_all_files_from_model_output(
         Returns
         -------
-        Union[dict, requests.Response]
+        Union[dict, requests.Request]
             Response from ME.org
         """

-        # Update the resource with an empty file list
-        return self._make_request(
-            mcc.HTTP_PATCH,
-            endpoint=endpoints.FILE_LIST,
-            url_params=dict(id=id),
-            json=[],
-        )
+        # Get a list of the files currently on the model output
+        files = self.list_files(id)
+        file_ids = [f.get("id") for f in files.get("data").get("files")]
+
+        responses = list()
+
+        # Do the delete one at a time
+        for file_id in file_ids:
+            response = self.delete_file_from_model_output(id=id, file_id=file_id)
+            responses.append(response)
+
+        return responses

     def start_analysis(self, id: str) -> Union[dict, requests.Response]:
         """Start the analysis chain.
```
Review comment (on `delete_all_files_from_model_output`): "New deleting method."
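The bulk variant now lists the files on the model output and deletes them one at a time, so a call issues one list request plus one DELETE per file and returns the individual responses. Continuing the same sketch:

```python
# Sketch only: the ID is a placeholder; `client` is the placeholder instance from above.
responses = client.delete_all_files_from_model_output(id="<model_output_id>")
print(f"Deleted {len(responses)} file(s) from the model output.")
```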