Skip to content

Commit

Permalink
Merge pull request #2393 from GNS3/feature/keep-compute-ids
Browse files Browse the repository at this point in the history
Option to keep the compute IDs unchanged when exporting a project
  • Loading branch information
grossmj authored Jul 6, 2024
2 parents 29f848d + d54c9db commit b48bd92
Show file tree
Hide file tree
Showing 7 changed files with 36 additions and 24 deletions.
18 changes: 9 additions & 9 deletions gns3server/controller/export_project.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@
CHUNK_SIZE = 1024 * 8 # 8KB


async def export_project(zstream, project, temporary_dir, include_images=False, include_snapshots=False, keep_compute_id=False, allow_all_nodes=False, reset_mac_addresses=False):
async def export_project(zstream, project, temporary_dir, include_images=False, include_snapshots=False, keep_compute_ids=False, allow_all_nodes=False, reset_mac_addresses=False):
"""
Export a project to a zip file.
Expand All @@ -44,9 +44,9 @@ async def export_project(zstream, project, temporary_dir, include_images=False,
:param temporary_dir: A temporary dir where to store intermediate data
:param include_images: save OS images to the zip file
:param include_snapshots: save snapshots to the zip file
:param keep_compute_id: If false replace all compute id by local (standard behavior for .gns3project to make it portable)
:param allow_all_nodes: Allow all nodes type to be include in the zip even if not portable
:param reset_mac_addresses: Reset MAC addresses for every nodes.
:param keep_compute_ids: If false replace all compute IDs by local (standard behavior for .gns3project to make it portable)
:param allow_all_nodes: Allow all nodes type to be included in the zip even if not portable
:param reset_mac_addresses: Reset MAC addresses for each node.
"""

# To avoid issue with data not saved we disallow the export of a running project
Expand All @@ -62,7 +62,7 @@ async def export_project(zstream, project, temporary_dir, include_images=False,
# First we process the .gns3 in order to be sure we don't have an error
for file in os.listdir(project._path):
if file.endswith(".gns3"):
await _patch_project_file(project, os.path.join(project._path, file), zstream, include_images, keep_compute_id, allow_all_nodes, temporary_dir, reset_mac_addresses)
await _patch_project_file(project, os.path.join(project._path, file), zstream, include_images, keep_compute_ids, allow_all_nodes, temporary_dir, reset_mac_addresses)

# Export the local files
for root, dirs, files in os.walk(project._path, topdown=True, followlinks=False):
Expand Down Expand Up @@ -170,7 +170,7 @@ def _is_exportable(path, include_snapshots=False):
return True


async def _patch_project_file(project, path, zstream, include_images, keep_compute_id, allow_all_nodes, temporary_dir, reset_mac_addresses):
async def _patch_project_file(project, path, zstream, include_images, keep_compute_ids, allow_all_nodes, temporary_dir, reset_mac_addresses):
"""
Patch a project file (.gns3) to export a project.
The .gns3 file is renamed to project.gns3
Expand All @@ -197,7 +197,7 @@ async def _patch_project_file(project, path, zstream, include_images, keep_compu
if not allow_all_nodes and node["node_type"] in ["virtualbox", "vmware"]:
raise aiohttp.web.HTTPConflict(text="Projects with a {} node cannot be exported".format(node["node_type"]))

if not keep_compute_id:
if not keep_compute_ids:
node["compute_id"] = "local" # To make project portable all node by default run on local

if "properties" in node and node["node_type"] != "docker":
Expand All @@ -215,7 +215,7 @@ async def _patch_project_file(project, path, zstream, include_images, keep_compu
if value is None or value.strip() == '':
continue

if not keep_compute_id: # If we keep the original compute we can keep the image path
if not keep_compute_ids: # If we keep the original compute we can keep the image path
node["properties"][prop] = os.path.basename(value)

if include_images is True:
Expand All @@ -225,7 +225,7 @@ async def _patch_project_file(project, path, zstream, include_images, keep_compu
'image_type': node['node_type']
})

if not keep_compute_id:
if not keep_compute_ids:
topology["topology"]["computes"] = [] # Strip compute information because could contain secret info like password

local_images = set([i['image'] for i in images if i['compute_id'] == 'local'])
Expand Down
6 changes: 3 additions & 3 deletions gns3server/controller/import_project.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@
"""


async def import_project(controller, project_id, stream, location=None, name=None, keep_compute_id=False,
async def import_project(controller, project_id, stream, location=None, name=None, keep_compute_ids=False,
auto_start=False, auto_open=False, auto_close=True):
"""
    Import a project contained in a zip file
Expand All @@ -50,7 +50,7 @@ async def import_project(controller, project_id, stream, location=None, name=Non
:param stream: A io.BytesIO of the zipfile
:param location: Directory for the project if None put in the default directory
:param name: Wanted project name, generate one from the .gns3 if None
:param keep_compute_id: If true do not touch the compute id
:param keep_compute_ids: keep compute IDs unchanged
:returns: Project
"""
Expand Down Expand Up @@ -124,7 +124,7 @@ async def import_project(controller, project_id, stream, location=None, name=Non
drawing["drawing_id"] = str(uuid.uuid4())

# Modify the compute id of the node depending of compute capacity
if not keep_compute_id:
if not keep_compute_ids:
# For some VM type we move them to the GNS3 VM if possible
# unless it's a linux host without GNS3 VM
if not sys.platform.startswith("linux") or controller.has_compute("vm"):
Expand Down
4 changes: 2 additions & 2 deletions gns3server/controller/project.py
Original file line number Diff line number Diff line change
Expand Up @@ -1066,7 +1066,7 @@ async def duplicate(self, name=None, location=None, reset_mac_addresses=True):
with tempfile.TemporaryDirectory(dir=working_dir) as tmpdir:
# Do not compress the exported project when duplicating
with aiozipstream.ZipFile(compression=zipfile.ZIP_STORED) as zstream:
await export_project(zstream, self, tmpdir, keep_compute_id=True, allow_all_nodes=True, reset_mac_addresses=reset_mac_addresses)
await export_project(zstream, self, tmpdir, keep_compute_ids=True, allow_all_nodes=True, reset_mac_addresses=reset_mac_addresses)

# export the project to a temporary location
project_path = os.path.join(tmpdir, "project.gns3p")
Expand All @@ -1077,7 +1077,7 @@ async def duplicate(self, name=None, location=None, reset_mac_addresses=True):

# import the temporary project
with open(project_path, "rb") as f:
project = await import_project(self._controller, str(uuid.uuid4()), f, location=location, name=name, keep_compute_id=True)
project = await import_project(self._controller, str(uuid.uuid4()), f, location=location, name=name, keep_compute_ids=True)

log.info("Project '{}' duplicated in {:.4f} seconds".format(project.name, time.time() - begin))
except (ValueError, OSError, UnicodeEncodeError) as e:
Expand Down
2 changes: 1 addition & 1 deletion gns3server/controller/snapshot.py
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@ async def create(self):
with tempfile.TemporaryDirectory(dir=snapshot_directory) as tmpdir:
# Do not compress the snapshots
with aiozipstream.ZipFile(compression=zipfile.ZIP_STORED) as zstream:
await export_project(zstream, self._project, tmpdir, keep_compute_id=True, allow_all_nodes=True)
await export_project(zstream, self._project, tmpdir, keep_compute_ids=True, allow_all_nodes=True)
async with aiofiles.open(self.path, 'wb') as f:
async for chunk in zstream:
await f.write(chunk)
Expand Down
20 changes: 16 additions & 4 deletions gns3server/handlers/api/controller/project_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -319,6 +319,10 @@ async def export_project(request, response):
reset_mac_addresses = True
else:
reset_mac_addresses = False
if request.query.get("keep_compute_ids", "no").lower() == "yes":
keep_compute_ids = True
else:
keep_compute_ids = False

compression_query = request.query.get("compression", "zip").lower()
if compression_query == "zip":
Expand All @@ -336,9 +340,17 @@ async def export_project(request, response):
working_dir = os.path.abspath(os.path.join(project.path, os.pardir))
with tempfile.TemporaryDirectory(dir=working_dir) as tmpdir:
with aiozipstream.ZipFile(compression=compression) as zstream:
await export_project(zstream, project, tmpdir, include_snapshots=include_snapshots, include_images=include_images, reset_mac_addresses=reset_mac_addresses)

# We need to do that now because export could failed and raise an HTTP error
await export_project(
zstream,
project,
tmpdir,
include_snapshots=include_snapshots,
include_images=include_images,
reset_mac_addresses=reset_mac_addresses,
keep_compute_ids=keep_compute_ids
)

# We need to do that now because export could fail and raise an HTTP error
            # that's why the response start needs to happen as late as possible
response.content_type = 'application/gns3project'
response.headers['CONTENT-DISPOSITION'] = 'attachment; filename="{}.gns3project"'.format(project.name)
Expand All @@ -350,7 +362,7 @@ async def export_project(request, response):

log.info("Project '{}' exported in {:.4f} seconds".format(project.name, time.time() - begin))

# Will be raise if you have no space left or permission issue on your temporary directory
# Will be raised if you have no space left or permission issue on your temporary directory
# RuntimeError: something was wrong during the zip process
except (ValueError, OSError, RuntimeError) as e:
raise aiohttp.web.HTTPNotFound(text="Cannot export project: {}".format(str(e)))
Expand Down
6 changes: 3 additions & 3 deletions tests/controller/test_export_project.py
Original file line number Diff line number Diff line change
Expand Up @@ -325,7 +325,7 @@ async def test_export_with_images(tmpdir, project):
myzip.getinfo("images/IOS/test.image")


async def test_export_keep_compute_id(tmpdir, project):
async def test_export_keep_compute_ids(tmpdir, project):
"""
If we want to restore the same computes we could ask to keep them
in the file
Expand Down Expand Up @@ -354,7 +354,7 @@ async def test_export_keep_compute_id(tmpdir, project):
json.dump(data, f)

with aiozipstream.ZipFile() as z:
await export_project(z, project, str(tmpdir), keep_compute_id=True)
await export_project(z, project, str(tmpdir), keep_compute_ids=True)
await write_file(str(tmpdir / 'zipfile.zip'), z)

with zipfile.ZipFile(str(tmpdir / 'zipfile.zip')) as myzip:
Expand Down Expand Up @@ -458,7 +458,7 @@ async def test_export_with_ignoring_snapshots(tmpdir, project):
Path(os.path.join(snapshots_dir, 'snap.gns3project')).touch()

with aiozipstream.ZipFile() as z:
await export_project(z, project, str(tmpdir), keep_compute_id=True)
await export_project(z, project, str(tmpdir), keep_compute_ids=True)
await write_file(str(tmpdir / 'zipfile.zip'), z)

with zipfile.ZipFile(str(tmpdir / 'zipfile.zip')) as myzip:
Expand Down
4 changes: 2 additions & 2 deletions tests/controller/test_import_project.py
Original file line number Diff line number Diff line change
Expand Up @@ -449,7 +449,7 @@ async def test_import_node_id(linux_platform, tmpdir, controller):
assert os.path.exists(os.path.join(project.path, "project-files", "iou", topo["topology"]["nodes"][0]["node_id"], "startup.cfg"))


async def test_import_keep_compute_id(windows_platform, tmpdir, controller):
async def test_import_keep_compute_ids(windows_platform, tmpdir, controller):
"""
On linux host IOU should be moved to the GNS3 VM
"""
Expand Down Expand Up @@ -487,7 +487,7 @@ async def test_import_keep_compute_id(windows_platform, tmpdir, controller):
myzip.write(str(tmpdir / "project.gns3"), "project.gns3")

with open(zip_path, "rb") as f:
project = await import_project(controller, project_id, f, keep_compute_id=True)
project = await import_project(controller, project_id, f, keep_compute_ids=True)

with open(os.path.join(project.path, "test.gns3")) as f:
topo = json.load(f)
Expand Down

0 comments on commit b48bd92

Please sign in to comment.