Skip to content

Commit

Permalink
feat: script to fetch polkadotjs endpoints to preConfigured (#2972)
Browse files Browse the repository at this point in the history
* initial implementation

* refactoring

* extract methods, refactoring

* add method adding info, fixes

* add print to console for controlling

* add check for existing networks in file and chain details

* fixes

* fix invalid chainId

* process unavailable connections

* add check for existing wss nodes

* * fix chain naming to distinguish networks
* fix fetching several array constants from pjs ts file

* changed token icon to default, added regexp for paseo endpoints

* move sys path line up

Co-authored-by: Stepan Lavrentev <[email protected]>

* delete unused code

Co-authored-by: Stepan Lavrentev <[email protected]>

* expand error handling for downloading ts file

Co-authored-by: Stepan Lavrentev <[email protected]>

* remove redundant else condition

Co-authored-by: Stepan Lavrentev <[email protected]>

* remove redundant nested level

Co-authored-by: Stepan Lavrentev <[email protected]>

* fixed pr comments

* return to polkadot

* add regexp for Paseo, combine search for all endpoints, add exclusion

---------

Co-authored-by: Stepan Lavrentev <[email protected]>
  • Loading branch information
leohar and stepanLav authored Aug 22, 2024
1 parent 702a647 commit aeaa1ab
Showing 1 changed file with 245 additions and 0 deletions.
245 changes: 245 additions & 0 deletions scripts/polkadotjs_endpoints_to_preconfigured.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,245 @@
import json
import re
import os
import sys
from pathlib import Path
import requests

sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from scripts.utils.metadata_interaction import get_properties
from scripts.utils.substrate_interface import create_connection_by_url
from enum import Enum


class Endpoints(Enum):
    """Raw-GitHub URLs of the polkadot-js apps-config TypeScript files that
    declare known endpoints: one member per relay-chain file, plus the
    stand-alone production chains and testnets files."""
    polkadot = "https://raw.githubusercontent.com/polkadot-js/apps/master/packages/apps-config/src/endpoints/productionRelayPolkadot.ts"
    kusama = "https://raw.githubusercontent.com/polkadot-js/apps/master/packages/apps-config/src/endpoints/productionRelayKusama.ts"
    westend = "https://raw.githubusercontent.com/polkadot-js/apps/master/packages/apps-config/src/endpoints/testingRelayWestend.ts"
    rococo = "https://raw.githubusercontent.com/polkadot-js/apps/master/packages/apps-config/src/endpoints/testingRelayRococo.ts"
    paseo = "https://raw.githubusercontent.com/polkadot-js/apps/master/packages/apps-config/src/endpoints/testingRelayPaseo.ts"
    singlechains = "https://raw.githubusercontent.com/polkadot-js/apps/master/packages/apps-config/src/endpoints/production.ts"
    testnets = "https://raw.githubusercontent.com/polkadot-js/apps/master/packages/apps-config/src/endpoints/testing.ts"


# Target chains JSON files; paths can be overridden via environment variables.
CHAINS_FILE_PATH_DEV = Path(os.getenv("DEV_CHAINS_JSON_PATH", 'chains/v20/chains_dev.json'))
CHAINS_FILE_PATH_PROD = Path(os.getenv("PROD_CHAINS_JSON_PATH", 'chains/v20/chains.json'))


def load_json_file(file_path):
    """Parse *file_path* (a Path) as JSON; return [] when the file is absent."""
    if not file_path.exists():
        return []
    with open(file_path, 'r') as handle:
        return json.load(handle)


def save_json_file(file_path, data):
    """Serialize *data* to *file_path* as 4-space-indented JSON plus a trailing newline."""
    serialized = json.dumps(data, indent=4)
    with open(file_path, 'w') as handle:
        handle.write(serialized)
        handle.write('\n')


def find_objects(array_content):
    """Split *array_content* into its top-level ``{...}`` object literals.

    Walks the string tracking brace depth; a substring is collected for
    every brace-balanced object that opens at depth zero, so objects
    nested inside other objects are not emitted separately. Unmatched
    closing braces at depth zero are ignored.
    """
    objects = []
    depth = 0
    obj_start = -1

    for idx, ch in enumerate(array_content):
        if ch == '{':
            if depth == 0:
                obj_start = idx
            depth += 1
        elif ch == '}' and depth > 0:
            depth -= 1
            if depth == 0:
                objects.append(array_content[obj_start:idx + 1])

    return objects


def request_data_from_pjs(file):
    """Fetch *file* (a URL) and return its decoded JSON payload.

    Improvements over the previous version: a non-2xx response now raises
    ``requests.HTTPError`` instead of handing an HTML error page to
    ``.json()``, and a 30-second timeout prevents the call from hanging
    forever on an unresponsive server.

    Raises:
        requests.RequestException: on HTTP error status, timeout, or
            connection failure.
    """
    response = requests.get(file, timeout=30)
    response.raise_for_status()
    return response.json()


def get_ts_file(url, output_path, timeout=30):
    """Download *url* to *output_path*, streaming in 1 KiB chunks.

    Best-effort by design: any failure is printed as a warning and the
    function returns normally (the caller treats a missed download as
    non-fatal), leaving any previous *output_path* content untouched.

    Args:
        url: HTTP(S) location of the TypeScript endpoints file.
        output_path: local file path the response body is written to.
        timeout: seconds to wait for the server (new, defaulted, so
            existing callers are unaffected; previously no timeout was
            set and a stalled server could hang the script forever).
    """
    try:
        response = requests.get(url, stream=True, timeout=timeout)
        response.raise_for_status()
        with open(output_path, "wb") as file:
            for chunk in response.iter_content(chunk_size=1024):
                if chunk:  # skip keep-alive chunks
                    file.write(chunk)
        print(f"Downloaded file saved as {output_path}")
    except Exception as e:
        # Deliberately broad: report and continue with the next endpoint.
        print(f"Failed to download file from {url}. Error: {e}")


def ts_constant_to_json(input_file_path):
    """Extract the endpoint array constants from a polkadot-js TypeScript
    file and convert each object literal into a Python dict.

    The TS source is not parsed properly: array bodies are located with
    regexes and each object literal is massaged into JSON through a chain
    of textual substitutions, so the ORDER of the re.sub calls below
    matters. Objects that still fail json.loads are reported and dropped.
    The combined result is also written to ``output.json`` for inspection.

    Returns:
        list[dict]: every object literal that survived conversion.

    Raises:
        ValueError: if no array constant is found in the file.
    """
    json_file_path = "output.json"

    with open(input_file_path, 'r') as file:
        content = file.read()

    # Extract the array content.
    # Two patterns: `= [ ... ]` constant bodies plus bare `[ { ... } ]`
    # literals, so files declaring several array constants are all caught.
    array_matches = re.findall(r'=\s*\[(.*?)\]', content, re.DOTALL)
    array_matches += re.findall(r'\[\s*(\{(?:.|\n)*?})\s*]', content, re.DOTALL)
    if not array_matches:
        raise ValueError("No array found in the input file")
    json_objects = []
    for i, array_content in enumerate(array_matches):
        # Split the array content into individual objects
        objects = find_objects(array_content)

        for obj in objects:
            # Remove comments (only whole lines starting with //)
            obj_lines = [line for line in obj.split('\n') if not line.strip().startswith('//')]
            cleaned_obj = '\n'.join(obj_lines)

            # Convert to valid JSON — order-sensitive substitutions:
            cleaned_obj = re.sub(r"'", '"', cleaned_obj)  # Replace single quotes with double quotes
            cleaned_obj = re.sub(r'(?<!")(\b\w+\b)(?=\s*:)', r'"\1"', cleaned_obj)  # Add quotes to keys
            cleaned_obj = re.sub(r",\s*}", "}", cleaned_obj)  # Remove trailing commas
            cleaned_obj = re.sub(r":\s*([a-zA-Z]\w*)", r': "\1"', cleaned_obj)  # Quote unquoted string values (turns bare true/false into "true"/"false")
            cleaned_obj = re.sub(r"\n", '', cleaned_obj)  # Collapse to one line

            # Parse the cleaned object
            try:
                json_obj = json.loads(cleaned_obj)
                json_objects.append(json_obj)
            except json.JSONDecodeError as e:
                # Best-effort: report and skip objects the substitutions
                # above could not turn into valid JSON.
                print(f"Error parsing object: {e}")
                print(f"Problematic object: {cleaned_obj}")

    save_json_file(json_file_path, json_objects)
    print(f"Conversion completed. Output written to {json_file_path}")

    return json_objects


def create_chain_data(chain_object):
    """Build a chain config dict from one polkadot-js endpoint entry.

    Connects to the first provider's wss endpoint, reads the on-chain
    properties, and assembles the chain description used by the
    preConfigured files. Returns None when the entry has no providers or
    when the connection / property lookup fails.
    """
    providers = chain_object.get("providers", {})
    if not providers:
        return None

    # Probe only the first provider url.
    # TODO: Iterate through all nodes available until connection is established
    first_url = next(iter(providers.values()), None)
    endpoint = first_url.strip("'")
    try:
        connection = create_connection_by_url(endpoint)
        props = get_properties(connection)

        return {
            "chainId": props.chainId[2:],  # drop the leading "0x"
            "name": props.name,
            "assets": [{
                "assetId": 0,
                "symbol": props.symbol,
                "precision": props.precision,
                "icon": "https://raw.githubusercontent.com/novasamatech/nova-utils/master/icons/tokens/white/Default.svg"
            }],
            "nodes": [{"url": url, "name": name} for name, url in providers.items()],
            "addressPrefix": props.ss58Format
        }
    except Exception as err:
        # Warn and skip rather than aborting the whole import run.
        print(f"⚠️ Can't connect by {endpoint}, check if it is available? \n {err}")
        return None


def check_chain_id(chains, chain_id_to_check):
    """Return True if any chain dict in *chains* carries *chain_id_to_check*."""
    return any(entry.get("chainId") == chain_id_to_check for entry in chains)


def check_node_is_present(chains_data, nodes_to_check):
    """Return True only when EVERY url in *nodes_to_check* already appears
    in some chain's node list; False as soon as one url is unknown.

    For each url that is found, a warning naming the owning chain is
    printed. An empty *nodes_to_check* yields True.
    """
    for candidate in nodes_to_check:
        url = candidate['url']
        # First chain whose node list contains this url, if any.
        owner = next(
            (chain for chain in chains_data
             if any(node['url'] == url for node in chain['nodes'])),
            None,
        )
        if owner is None:
            return False
        print(f"⚠️Node URL '{url}' is found in chain '{owner['name']}'.")
    return True


def create_json_files(pjs_networks, chains_path):
    """Append newly discovered polkadot-js networks to the preConfigured files.

    For each endpoint entry a live connection is attempted; entries that
    are disabled, have no providers, are already present (by chainId or by
    node url), or match the exclusion keyword are skipped.

    Args:
        pjs_networks: list of dicts parsed from the polkadot-js TS files.
        chains_path: Path to the existing chains JSON used for duplicate checks.
    """
    existing_data_in_chains = load_json_file(chains_path)
    exclusion = "sora"

    for pjs_network in pjs_networks:
        pjs_chain_name = pjs_network.get("text")
        # Skip disabled networks and networks with commented-out providers.
        # BUG FIX: the old check ('"isDisabled": "true"' in pjs_network)
        # tested string membership against the dict's KEYS and was always
        # False, so disabled networks were never skipped. Compare the value
        # instead; the TS->JSON conversion quotes bare `true` as "true".
        if pjs_network.get("isDisabled") in ("true", True) or pjs_network.get("providers") == {}:
            continue
        chain = create_chain_data(pjs_network)
        if not chain:
            print(f"Skipped connection for chain {pjs_chain_name}")
            continue
        chain_name = chain.get("name")
        print(f"Connection established for {chain_name}")
        chain_id = chain.get("chainId")
        # Skip chains already added to config.
        is_chain_present = check_chain_id(existing_data_in_chains, chain_id)
        # Skip chains whose wss nodes are already in config, in case they changed chain_id.
        is_node_present = check_node_is_present(existing_data_in_chains, chain.get("nodes"))
        if is_chain_present or is_node_present or exclusion.casefold() in chain_name.casefold():
            continue
        add_chains_details_file(chain, chains_path)
        add_chain_to_chains_file(chain, chains_path)


def add_chains_details_file(chain, chains_path):
    """Write *chain* to preConfigured/detailsDev/<chainId>.json next to
    *chains_path*, unless that file already exists."""
    details_dir = chains_path.parent / 'preConfigured' / 'detailsDev'
    details_name = chain.get("chainId") + '.json'
    details_file = details_dir / details_name

    if details_file.exists():
        print(f"File found in config, skipping file: {details_name}")
        return
    save_json_file(details_file, chain)
    print(f"Added details file for chain: {chain.get('name')}")


def add_chain_to_chains_file(chain, chains_path):
    """Append a {chainId, name} entry for *chain* to
    preConfigured/chains_dev.json next to *chains_path*; duplicates (by
    chainId) are reported and not re-added. The file is rewritten either way."""
    chains_file = chains_path.parent / 'preConfigured' / 'chains_dev.json'
    entry = {
        "chainId": chain.get("chainId"),
        "name": chain.get("name")
    }

    existing = load_json_file(chains_file)
    already_listed = any(row.get("chainId") == entry["chainId"] for row in existing)

    if already_listed:
        print(f"Chain ID {entry['chainId']} already exists in the file, skip adding")
    else:
        existing.append(entry)
        print(f"Added chain data to chains: {entry}")
    save_json_file(chains_file, existing)


def main():
    """Download each polkadot-js endpoints file, convert it to dicts, and
    merge any new chains into the dev preConfigured files."""
    ts_file_path = "downloaded_file.ts"

    for endpoint in Endpoints:
        get_ts_file(endpoint.value, ts_file_path)
        converted_networks = ts_constant_to_json(ts_file_path)
        create_json_files(converted_networks, CHAINS_FILE_PATH_DEV)


if __name__ == "__main__":
main()

0 comments on commit aeaa1ab

Please sign in to comment.