Skip to content

Commit

Permalink
Feature/Refactoring - automatic build docker image + Major refactoring + Testing (#83)
Browse files Browse the repository at this point in the history

* Feature -  Unconfirmed TX info (#61)

* Add explorer for unconfirmed TX
* Add front end UI navigation menu
* Add graphs visualisation for historic API calls

* add cloud build

* test

* test

* test

* test

* change project

* add check

* test

* test

* test

* test

* test

* test

* add snapshot testing

* add travis CI

* add test

* update lock file

* add test to build

* fix

* fix build

* test travis

* fix

* fix

* fix

* fix

* fix

* fix test

* test fail

* fix test

* add coverall

* asdf

* add coveralls

* fix coveralls

* fix coveralls

* fix coveralls

* fix coveralls

* ignore bootstrap

* fix travis

* add tests

* add testing

* add async insertData

* add sql insert methods

* test on gcp

* add timeout

* add timeout

* done

* done

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test all

* test all

* test testport

* test testport

* test

* print result

* print result

* print result

* print result

* print result

* print result

* print result

* print result

* print result

* print result

* print result

* print result

* add test

* add test

* add test

* add test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* finish most testing

* finish most testing

* finish most testing

* finish most testing

* finish most testing

* finish most testing

* finish most testing

* finish most testing

* finish most testing

* finish most testing

* finish most testing

* finish most testing

* finish most testing

* finish most testing

* finish most testing

* finish most testing

* finish most testing

* finish most testing

* finish most testing

* finish most testing

* finish most testing

* finish most testing

* finish most testing

* finish most testing

* finish most testing

* finish most testing

* finish most testing

* finish most testing

* finish most testing

* finish most testing

* update insertData

* update insertData

* done

* update dockerfile

* update dockerfile

* fine

* fine

* fix

* add discover test

* update dockerfile

* finish test

* add dockerfile

* update

* done

* done

* fix

* add time logging

* test without tx

* add new dockerfile

* fix

* fix
  • Loading branch information
i25959341 authored and f27d committed Aug 24, 2018
1 parent 2d1f9b0 commit 7c04a1d
Show file tree
Hide file tree
Showing 61 changed files with 8,172 additions and 5,515 deletions.
10 changes: 10 additions & 0 deletions .travis.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
language: node_js
node_js:
- "10.4.1"
install:
- cd neo-interface && npm install
- npm install coveralls --save
script:
- echo REACT_APP_API_URL="https://api.happynodes.f27.ventures/redis" > .env
- npm test -- --coverage --collectCoverageFrom=src/**/*js --collectCoverageFrom=!src/bootstrap/js/*js --coverageReporters=text-lcov | coveralls

38 changes: 38 additions & 0 deletions cloudbuild.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
steps:
- name: 'gcr.io/cloud-builders/docker'
args: [ 'build', '-t', 'gcr.io/$PROJECT_ID/f27/integration/neofront:v65', './neo-interface']
- name: 'gcr.io/cloud-builders/docker'
args: [ 'build', '-t', 'gcr.io/$PROJECT_ID/f27/integration/neoback:v65', './neo-back']
- name: 'gcr.io/cloud-builders/docker'
args: [ 'build', '-t', 'gcr.io/$PROJECT_ID/f27/integration/neonode-p2p-tcp:v65', './neo-node/p2p_tcp']
- name: 'gcr.io/cloud-builders/docker'
args: [ 'build', '-t', 'gcr.io/$PROJECT_ID/f27/integration/neonode-p2p-ws:v65', './neo-node/p2p_ws']
- name: 'gcr.io/cloud-builders/docker'
args: [ 'build', '-t', 'gcr.io/$PROJECT_ID/f27/integration/neodiscover:v65', './neo-collector/discover']
- name: 'gcr.io/cloud-builders/docker'
args: [ 'build', '-t', 'gcr.io/$PROJECT_ID/f27/integration/neocreate:v65', './neo-collector/create']
- name: 'gcr.io/cloud-builders/docker'
args: [ 'build', '-t', 'gcr.io/$PROJECT_ID/f27/integration/neoinsert:v65', './neo-collector/insert']
- name: 'gcr.io/cloud-builders/docker'
args: [ 'build', '-t', 'gcr.io/$PROJECT_ID/f27/integration/neodelete:v65', './neo-collector/delete']
- name: 'gcr.io/cloud-builders/docker'
args: [ 'build', '-t', 'gcr.io/$PROJECT_ID/f27/integration/redis-network-summary:v65', './neo-redis/networksummary' ]
- name: 'gcr.io/cloud-builders/docker'
args: [ 'build', '-t', 'gcr.io/$PROJECT_ID/f27/integration/redis-historic:v65', './neo-redis/historic' ]
- name: 'gcr.io/cloud-builders/docker'
args: [ 'build', '-t', 'gcr.io/$PROJECT_ID/f27/integration/redis-nodes-info:v65', './neo-redis/nodeinfo' ]
- name: 'gcr.io/cloud-builders/docker'
args: [ 'build', '-t', 'gcr.io/$PROJECT_ID/f27/integration/redis-peers-info:v65', './neo-redis/peerinfo' ]
- name: 'gcr.io/cloud-builders/docker'
args: [ 'build', '-t', 'gcr.io/$PROJECT_ID/f27/integration/redis-unconfirmedtx-info:v65', './neo-redis/unconfirmedTx' ]
images: ['gcr.io/$PROJECT_ID/f27/integration/neonode-p2p-tcp:v65',
'gcr.io/$PROJECT_ID/f27/integration/neonode-p2p-ws:v65',
'gcr.io/$PROJECT_ID/f27/integration/neodiscover:v65',
'gcr.io/$PROJECT_ID/f27/integration/neocreate:v65',
'gcr.io/$PROJECT_ID/f27/integration/neoinsert:v65',
'gcr.io/$PROJECT_ID/f27/integration/neodelete:v65',
'gcr.io/$PROJECT_ID/f27/integration/redis-network-summary:v65',
'gcr.io/$PROJECT_ID/f27/integration/redis-historic:v65',
'gcr.io/$PROJECT_ID/f27/integration/redis-nodes-info:v65',
'gcr.io/$PROJECT_ID/f27/integration/redis-peers-info:v65',
'gcr.io/$PROJECT_ID/f27/integration/redis-historic:v65']
4 changes: 2 additions & 2 deletions neo-collector/create/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ RUN apk update && \
RUN mkdir /usr/src/app
WORKDIR /usr/src/app

COPY createOrUpdatePrimaryTables.py .
COPY createUpdatePrimarySQL.py .

# start app
CMD ["python3", "createOrUpdatePrimaryTables.py"]
CMD ["python3", "createUpdatePrimarySQL.py"]
154 changes: 0 additions & 154 deletions neo-collector/create/createOrUpdatePrimaryTables.py

This file was deleted.

144 changes: 144 additions & 0 deletions neo-collector/create/createUpdatePrimarySQL.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,144 @@
import psycopg2
import requests
import json
import time
import socket
import dns.resolver
import socket
import os
import sys
import requests
import time
import logging

PGHOST = str(os.environ['PGHOST'])
PGDATABASE = str(os.environ['PGDATABASE'])
PGUSER = str(os.environ['PGUSER'])
PGPASSWORD = str(os.environ['PGPASSWORD'])

CONNECTION_STR = "dbname='{}' user='{}' host='{}' password='{}'".format(PGDATABASE, PGUSER, PGHOST, PGPASSWORD)

SLEEP_TIME = 60*60*24

logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)

# create console handler and set level to debug
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)

# create formatter
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')

# add formatter to ch
ch.setFormatter(formatter)

# add ch to logger
logger.addHandler(ch)


class CreateUpdatePrimarySQL:
def __init__(self, connect_str, sleeptime):
self.connect_str=connect_str
self.sleeptime = sleeptime

def get_coz_mainnet_json(self):
r = requests.get(
'https://raw.githubusercontent.com/CityOfZion/neo-mon/master/docs/assets/mainnet.json')
return json.loads(r.text)

def get_existing_nodes(self, cursor):
# For mainnet json, we use hostname to check whether a node
# exists in our database, since there is a chance that
# that a node has multiple ips for load-balancing
nodes_dict = {}
cursor.execute("select id, hostname, ip from nodes")
results = cursor.fetchall()

for id, hostname, ip in results:
nodes_dict[hostname] = (id, hostname, ip)

return nodes_dict

def create_or_update_nodes_rows(self, data):
conn = psycopg2.connect(self.connect_str)
cursor = conn.cursor()

for endpoint in data["sites"]:
if endpoint["type"] == "RPC":
hostname = endpoint["url"].split("//")[-1].split(":")[0]
ip = socket.gethostbyname(endpoint["url"])

nodes_dict = self.get_existing_nodes(cursor)

if hostname not in nodes_dict:
# add new rows in nodes
logger.info("insert new rows into nodes table. hostname: {} ip: {}".format(hostname, ip))
cursor.execute(
"INSERT INTO nodes (hostname, ip) VALUES (%s, %s)", [hostname, ip])
else:
(id, hostnameFromDatabase, ipFromDatabase) = nodes_dict[hostname]

if ipFromDatabase!=ip:
# IP has changes, some nodes uses loadbalancing and changes their ip all the time
logger.info("update node's ip. hostname: {} id: {} ipFromDatabase: {} ip:{}".format(hostname, id, ipFromDatabase, ip))
cursor.execute("UPDATE nodes SET ip=%s WHERE id=%s;", [ip, id])

conn.commit()
cursor.close()
conn.close()

def create_connectionendpoints_rows(self, data):
conn = psycopg2.connect(self.connect_str)
cursor = conn.cursor()
for endpoint in data["sites"]:
if endpoint["type"] == "RPC":
hostname = endpoint["url"].split("//")[-1].split(":")[0]
ip = socket.gethostbyname(endpoint["url"])

nodes_dict = self.get_existing_nodes(cursor)
(node_id, hostnameFromDatabase, ipFromDatabase) = nodes_dict[hostname]

protocol = endpoint["protocol"]
port = endpoint["port"] if "port" in endpoint else 10332

cursor.execute("SELECT id, node_id, protocol, port FROM public.connection_endpoints where node_id=%s and protocol=%s and port=%s", [int(node_id),str(protocol),int(port)])

results = cursor.fetchall()

if len(results)==0:
# this connection endpoints does not exist in the database
logger.info("insert into connection endpoints, hostname:{} node_id: {} protocol: {} port: {}".format(hostname, int(node_id), str(protocol), int(port)))
cursor.execute("INSERT INTO public.connection_endpoints (node_id, protocol, port) VALUES (%s, %s, %s) RETURNING id", [int(node_id), str(protocol), int(port)])

lastid = cursor.fetchone()[0]

cursor.execute("INSERT INTO locale (connection_id, locale) VALUES (%s, %s)"
, [lastid, endpoint["locale"]])
cursor.execute("INSERT INTO location (connection_id, location) VALUES (%s, %s)"
, [lastid, endpoint["location"]])

response = requests.get("https://geoip.nekudo.com/api/"+ip)
json_data = json.loads(response.text)

lat = json_data["location"]['latitude']
long = json_data["location"]['longitude']

cursor.execute("INSERT INTO coordinates (connection_id, lat, long) VALUES (%s, %s, %s)", [lastid, lat, long])
conn.commit()
cursor.close()
conn.close()

def run(self):
while True:
coz_main_net_info = self.get_coz_mainnet_json()
self.create_or_update_nodes_rows(coz_main_net_info)
self.create_connectionendpoints_rows(coz_main_net_info)

logger.info("Sleeping")
# Run hourly
time.sleep(self.sleeptime)

if __name__ == "__main__":
createUpdatePrimarySQL = CreateUpdatePrimarySQL(CONNECTION_STR, SLEEP_TIME)
createUpdatePrimarySQL.run()
Loading

0 comments on commit 7c04a1d

Please sign in to comment.