-
Notifications
You must be signed in to change notification settings - Fork 20
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
95 changed files
with
10,621 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,31 @@ | ||
# Operating System Files

*.DS_Store
Thumbs.db
*.sw?
.#*
*#
*~
*.sublime-*

# Build Artifacts

.gradle/
build/
out/
target/
bin/
dependency-reduced-pom.xml
logs/

# Eclipse Project Files

.rocksdb
.classpath
.project
.settings/

# IntelliJ IDEA Files

*.iml
*.ipr
*.iws
*.idea
# Explicit directory pattern: ignores the IDEA project folder itself,
# not just names ending in ".idea".
.idea/
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,49 @@ | ||
# README | ||
## 打包 | ||
sh build.sh | ||
|
||
## usage | ||
### 从sls采集drds或rds日志 | ||
java -jar slssniffer-1.0.jar --endpoint=cn-hangzhou-intranet.log.aliyuncs.com --project=xxx --store=xxx --accesskey=xxxx --accesskeyid=xxx --from="2023-05-05 12:00:00" --sort-by-date --log-type=drds --out=/root/rds.json --threads=4 | ||
|
||
#### 过滤处理5月5号的日志 | ||
egrep '^2023-05-05' /root/rds.json | sort -S 10240M -T /data --parallel=8 > /root/out.json | ||
|
||
### 从自建mysql采集general 日志 | ||
java -jar mysqlsniffer.jar --capture-method=general_log --replay-to=file --port=3306 --username=root --password=xxx --concurrency=32 --time=60 | ||
文件默认输出到 logs/out.json | ||
|
||
### replay to mysql | ||
java -Xms=2G -Xmx=4G -jar frodo.jar --file=/root/out.json --source-db=mysql --replay-to=mysql --port=3306 --host=172.25.132.163 --username=root --password=123456 --concurrency=64 --time=1000 --task=task1 --schema-map=test:test1,test2 --log-level=info --rate-factor=1 --database=test | ||
|
||
### replay to polarx | ||
java -Xms=2G -Xmx=4G -jar frodo.jar --file=/root/out.json --source-db=mysql --replay-to=polarx --port=3306 --host=172.25.132.163 --username=root --password=123456 --concurrency=64 --time=1000 --task=task1 --schema-map=test:test1,test2 --log-level=info --rate-factor=1 --database=test | ||
|
||
### replay to polardb_o | ||
java -Xms=2G -Xmx=4G -jar frodo.jar --file=/root/out.json --source-db=mysql --replay-to=polardb_o --port=3306 --host=172.25.132.163 --username=root --password=123456 --concurrency=64 --time=1000 --task=task1 --schema-map=test:test1,test2 --log-level=info --rate-factor=1 --database=polardb | ||
|
||
### 参数描述 | ||
``` | ||
--file 采集的sql文件 | ||
--source-db 源库类型 oracle、mysql、db2 | ||
--replay-to 回放到哪种数据库 mysql、polardb_o | ||
--host 目标库ip | ||
--port 目标库端口 | ||
--username=root 目标库用户名 | ||
--password 目标库密码 | ||
--database 目标库库名 | ||
--concurrency 并发数 | ||
--time=1000 执行时间 | ||
--task 任务名 | ||
--schema-map schema映射和过滤,例如schema1:schema2,schema2,schema3:schema2 只重放schema1、schema2、schema3 3个schema的sql,且schema1和schema3映射到schema2进行重放 | ||
--rate-factor 速度控制,1表示原速,0.5表示2倍速度,0.1表示10倍速度 | ||
--circle 是否循环回放,如果开启循环回放,那么会忽略rate-factor参数,rate-factor置为0,以最大压力回放 | ||
--sql-timeout 设置sql超时时间,默认60,单位秒,建议设置合理sql-timeout,避免慢sql影响重放进度 | ||
--interval 运行时监控打印时间间隔,默认5,单位秒 | ||
--commit 是否commit,默认frodo会手动开启事务进行回放,执行完一条SQL后会rollback,如果需要commit,可以打开该参数。 | ||
--filter 多选:ALL、DQL、DML; ALL:所有sql;DQL:select;DML:update、insert、delete、replace、merge。参数默认值是ALL。只回放指定的SQL类型 | ||
--skip-dupli-error-sql 是否跳过已经报错过的相同SQL指纹的sql,默认关闭,如果打开,一定程度上能够提高重放速度 | ||
--disable-insert-to-replace 默认对polarx回放,会把insert转成replace,减少主键冲突的报错,但是replace可能会导致产生死锁。可以设置该参数关闭该特性。 | ||
--disable-transaction 是否关闭手动事务,如果关闭,那么SQL使用自动提交模式,--commit参数自动失效 | ||
``` |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,21 @@ | ||
#!/bin/sh
# Build every sub-module (mysqlsniffer, slssniffer, frodo) with a shared
# version number, then collect the final frodo tarball into ./target.
#
# Abort on the first failing command: previously a failed `cd` (e.g. a
# missing sub-directory) silently ran the next build in the wrong place.
set -e

version="1.1.29"

for module in mysqlsniffer slssniffer frodo; do
    cd "$module"
    sh build.sh "$version"
    cd ..
done

rm -rf target
mkdir target
cp "frodo/target/frodo-${version}.tar.gz" ./target
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,10 @@ | ||
FROM repository.dayu.work/frodoplus/java-python:v1

# The Dayu workflow automatically builds the jar from the pom via
# `mvn clean install -DskipTests`.
# NOTE(review): the jar version 1.1.17 is hard-coded here while build.sh
# builds 1.1.29 — confirm these stay in sync when releasing.
ADD ./target/frodo-1.1.17.jar /app/frodo.jar
ADD ./*.py /app/
ADD ./*.sh /app/
ADD ./license.key /app/
ADD ./index /app/index/

CMD ["python", "/app/index/app.py"]
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,25 @@ | ||
# README | ||
## usage | ||
### replay to mysql | ||
java -Xms=2G -Xmx=4G -jar frodo.jar --file=/root/out.json --source-db=mysql --replay-to=mysql --port=3306 --host=172.25.132.163 --username=root --password=123456 --concurrency=64 --time=1000 --task=task1 --schema-map=test:test1,test2 --log-level=info --rate-factor=1 --database=test | ||
|
||
### replay to polardb_o | ||
java -Xms=2G -Xmx=4G -jar frodo.jar --file=/root/out.json --source-db=mysql --replay-to=polardb_o --port=3306 --host=172.25.132.163 --username=root --password=123456 --concurrency=64 --time=1000 --task=task1 --schema-map=test:test1,test2 --log-level=info --rate-factor=1 --database=polardb | ||
|
||
### 参数描述 | ||
``` | ||
--file 采集的sql文件 | ||
--source-db 源库类型 oracle、mysql、db2 | ||
--replay-to 回放到哪种数据库 mysql、polardb_o | ||
--host 目标库ip | ||
--port 目标库端口 | ||
--username=root 目标库用户名 | ||
--password 目标库密码 | ||
--database 目标库库名 | ||
--concurrency 并发数 | ||
--time=1000 执行时间 | ||
--task 任务名 | ||
--schema-map schema映射和过滤,例如schema1:schema2,schema2,schema3:schema2 只重放schema1、schema2、schema3 3个schema的sql,且schema1和schema3映射到schema2进行重放 | ||
--rate-factor 速度控制,1表示原速,0.5表示2倍速度,0.1表示10倍速度 | ||
--circle 是否循环回放,如果开启循环回放,那么会忽略rate-factor参数,rate-factor置为0,以最大压力回放 | ||
``` |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,116 @@ | ||
#!/usr/bin/env python | ||
# -*- coding: utf-8 -*- | ||
# 建议 pip install ujson tqdm -i https://mirrors.aliyun.com/pypi/simple 安装这两个包, 展示效果和解析性能比较好 | ||
# 想要看解析进度需要安装tqdm | ||
# import tqdm | ||
# import ujson as json | ||
|
||
import json | ||
import traceback | ||
import sys | ||
import math | ||
import random | ||
import time | ||
from datetime import datetime | ||
import string | ||
import os | ||
import re | ||
|
||
""" | ||
-- 解析OMA的JSON文件,生成cloudbench需要的格式, 输入JSON格式 | ||
[2023-02-10 00:03:15,398] INFO [pool-38-thread-255] c.a.c.a.f.l.AccessLog.info - Client=100.81.136.152 Total_time=0 Exec_time=0 Queue_time=0 - [2023-02-10 00:03:15 398] 1000 SYNC TABLE_ACCESS_TRAFFIC\;process=2023021000031510008113609309999067304\;CLUSTER=dailybuild | ||
""" | ||
|
||
# Ensure a UTF-8 default encoding under Python 2; on Python 3 the default
# is already 'utf-8', so the branch below never runs there.
default_encoding = 'utf-8'
if sys.getdefaultencoding() != default_encoding:
    reload(sys)  # Python 2 only: re-expose sys.setdefaultencoding
    sys.setdefaultencoding(default_encoding)

DELIMITER = ","  # NOTE(review): appears unused in this file — confirm before removing
# Start of a log record, e.g. "[2023-02-10 00:03:15,398] ..."
start_pattern=r'^\[20\d{2}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d{3}\] .*'
def parse_json_to_frodo_file(file_name, out_file, io_buffer=10000):
    """Parse a multi-line access log into frodo replay JSON lines.

    A record starts with a "[YYYY-MM-DD HH:MM:SS,mmm]" header line and may
    span several physical lines; it is complete when the next header (or
    end of file) is reached.

    :param file_name: input log file path
    :param out_file: output path; one JSON object per line, appended
    :param io_buffer: flush the output every `io_buffer` written records
    """
    n = 0
    print("begin analyse %s" % file_name)
    log_str = ""
    fetch_start = False
    with open(file_name, "r") as f, open(out_file, "a") as tof:
        for line in f:
            try:
                line = str(line).strip()
                if fetch_start:
                    if re.match(start_pattern, line):
                        # Buffered record is complete — emit it, then start
                        # buffering the new one.
                        n = _emit_record(log_str, tof, n, io_buffer)
                        log_str = line
                    else:
                        # Continuation line of a multi-line SQL statement.
                        log_str += "\n" + line
                else:
                    if re.match(start_pattern, line):
                        fetch_start = True
                        log_str = line
            except Exception:
                print(line)
                print(traceback.format_exc())
                sys.exit(1)
        # BUGFIX: the final buffered record used to be silently dropped
        # because no following header line triggered its emission.
        if fetch_start and log_str:
            try:
                _emit_record(log_str, tof, n, io_buffer)
            except Exception:
                print(log_str)
                print(traceback.format_exc())
                sys.exit(1)
    print("--- end ---")


def _emit_record(log_str, tof, n, io_buffer):
    """Convert one buffered log record to a JSON line and write it.

    Returns the updated count of written records. Truncated statements
    (ending in " more") and SYNC commands are skipped.
    """
    dst = {}
    x = re.split(r'\s+', log_str)
    # x[0]='[YYYY-MM-DD', x[1]='HH:MM:SS,mmm]' — strip the brackets.
    date_str = x[0][1:] + " " + x[1][:-1]
    # Session id: "Client=<ip>" token plus "[pool-...-thread-N]" token.
    client_str = x[6] + x[3]
    dst["startTime"] = parse_time(date_str)
    dst["user"] = ''
    dst["session"] = client_str
    dst["schema"] = 'cgljfl'  # NOTE(review): hard-coded schema — confirm
    # The SQL text follows the embedded "[timestamp] nnnn " prefix and is
    # terminated by a literal "\;" sequence.
    sql_text_arr = re.split(r'\[20\d{2}-\d{2}-\d{2} \d{2}:\d{2}:\d{2} \d{3}\] \d* |\\;', log_str)
    sql = sql_text_arr[1].strip()
    # x[7] is "Total_time=<value>"; the *1000 scales it for frodo's
    # execTime field — presumably ms -> µs, TODO confirm units.
    rt_str_arr = re.split(r',|=', x[7])
    dst["execTime"] = int(rt_str_arr[1]) * 1000
    if not sql.endswith(" more") and not sql.startswith('SYNC'):
        n = n + 1
        dst["convertSqlText"] = sql
        tof.write(json.dumps(dst, sort_keys=True) + "\n")
        if n % io_buffer == 0:
            tof.flush()
    else:
        print("ignore truncated sql")
    return n
|
||
|
||
def parse_time(date_str):
    """Convert 'YYYY-MM-DD HH:MM:SS,mmm' to microseconds since the epoch.

    The timestamp is interpreted in the local timezone (time.mktime).
    BUGFIX/idiom: the parameter was named `str`, shadowing the builtin;
    both call sites pass it positionally, so the rename is safe.
    """
    dt = datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S,%f")
    return int(time.mktime(dt.timetuple()) * 1000000 + dt.microsecond)
|
||
def optparser():
    """Parse command-line options.

    :return: options object with `src` (input file) and `dest` (output
             file); both default to None so __main__ can detect missing
             arguments.
    """
    from optparse import OptionParser
    usage = "usage: %prog [options] arg"
    parser = OptionParser(usage, version="%prog 1.0")
    # BUGFIX: `src` previously defaulted to the int 1, which is truthy and
    # let the script run without -s, then fail trying to open the file "1".
    parser.add_option("-s", "--src", action="store", dest="src", type=str,
                      default=None, help="input filename")
    parser.add_option("-d", "--dest", action="store", dest="dest", type=str,
                      help="output filename")
    (options, args) = parser.parse_args()
    return options
|
||
|
||
if __name__ == "__main__":
    opts = optparser()
    if not (opts.src and opts.dest):
        print("ERROR: need -s and -d")
    else:
        # Truncate any previous output, then append parsed records to it.
        open(opts.dest, "w").close()
        parse_json_to_frodo_file(opts.src, opts.dest)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,30 @@ | ||
#!/bin/sh
# Package the frodo release tarball: frodo-<version>.tar.gz containing the
# jar, the transfer scripts, the license key and the collector tarballs.
# Usage: sh build.sh <version>
set -e

version=$1
# BUGFIX: without this guard an empty $1 produced "frodo-" directories and
# a "frodo-.tar.gz" archive.
if [ -z "$version" ]; then
    echo "usage: sh build.sh <version>" >&2
    exit 1
fi

mvn clean
mvn assembly:assembly

cd target
rm -rf "frodo-${version}"
mkdir "frodo-${version}"
mkdir -p "frodo-${version}/logs"

cp ../README.md "frodo-${version}/"
cp ../rds_audit_transfer.py "frodo-${version}/"
cp ../polarx_audit_transfer.py "frodo-${version}/"
cp ../polarx_cn_transfer.py "frodo-${version}/"
cp ../polarx_cn_transfer_parallel.sh "frodo-${version}/"
cp ../adb_mysql2_transfer.py "frodo-${version}/"
cp ../license.key "frodo-${version}/"
cp frodo-*.jar "frodo-${version}/frodo.jar"

mkdir -p "frodo-${version}/collector"
cp -r ../../slssniffer/target/slssniffer*.tar.gz "frodo-${version}/collector"
cp -r ../../mysqlsniffer/target/mysqlsniffer*.tar.gz "frodo-${version}/collector"

rm -f "frodo-${version}.tar.gz"
tar zcvf "frodo-${version}.tar.gz" "frodo-${version}"
cd ..
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1 @@ | ||
e1aYl1k9BlD/7zIYc+lqYCkL0nr6LqXAS8YqtmPf4FNO0DtCykfxmj7Va2Lqfrz4oOaUEVlU6xHwCK0Q5O7fww== |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,101 @@ | ||
#!/usr/bin/env python | ||
# -*- coding: utf-8 -*- | ||
# 建议 pip install ujson tqdm -i https://mirrors.aliyun.com/pypi/simple 安装这两个包, 展示效果和解析性能比较好 | ||
# 想要看解析进度需要安装tqdm | ||
# import tqdm | ||
# import ujson as json | ||
|
||
import json | ||
import traceback | ||
import sys | ||
import math | ||
import random | ||
import time | ||
from datetime import datetime | ||
import string | ||
import os | ||
import operator | ||
|
||
""" | ||
-- 解析OMA的JSON文件,生成cloudbench需要的格式, 输入JSON格式 | ||
{"__source__":"11.115.109.200","__time__":"1659526965","__topic__":"polardbx_sqlaudit","affect_rows":"0","autocommit":"1","ccl_hit_cache":"","ccl_status":"","ccl_wait_time":"","client_ip":"100.121.6.153","client_port":"42490","db_name":"information_schema","fail":"0","fetched_rows":"","instance_id":"pxc-hzrcjtfe9db8y6","is_prepare_stmt":"0","matched_ccl_rule":"","parameters":"","prepare_stmt_id":"0","response_time":"0.029","sql":"select 1","sql_code":"","sql_time":"2022-08-03 19:42:45.307","sql_type":"Select","trace_id":"14b351c419001000","transaction_id":"14b351c419001000","transaction_policy":"","user":"zkk_test","workload_type":"TP"} | ||
""" | ||
|
||
# Ensure a UTF-8 default encoding under Python 2; on Python 3 the default
# is already 'utf-8', so the branch below never runs there.
default_encoding = 'utf-8'
if sys.getdefaultencoding() != default_encoding:
    reload(sys)  # Python 2 only: re-expose sys.setdefaultencoding
    sys.setdefaultencoding(default_encoding)

DELIMITER = ","  # NOTE(review): appears unused in this file — confirm before removing
|
||
|
||
def parse_json_to_frodo_file(file_name, out_file, io_buffer=10000):
    """Convert a PolarDB-X sqlaudit JSON-lines file into frodo replay format.

    Each input line is one JSON object (see the sample in the module
    docstring); each accepted record is written as one JSON line.

    :param file_name: input file path, one JSON object per line
    :param out_file: output path; records are appended
    :param io_buffer: flush the output every `io_buffer` input lines
    """
    n = 0
    print("begin analyse %s" % file_name)
    with open(file_name, "r") as f, open(out_file, "a") as tof:
        for line in f:
            try:
                line = str(line).strip()
                if not line:
                    continue
                n += 1
                data_dict = json.loads(line)
                if not data_dict:
                    continue
                dst = {}
                dst["convertSqlText"] = data_dict["sql"]
                dst["parameter"] = data_dict["parameters"]
                # Skip truncated statements and prepared statements whose
                # '?' placeholders have no captured parameter values.
                if dst["convertSqlText"].endswith(" more") or \
                        ('?' in dst["convertSqlText"] and not dst["parameter"]):
                    print("ignore truncated sql")
                    continue
                dst["startTime"] = parse_time(data_dict["sql_time"])
                dst["session"] = data_dict["client_ip"] + ":" + data_dict["client_port"]
                # BUGFIX: string.atof() was removed in Python 3 — the
                # builtin float() is equivalent on both 2 and 3.
                dst["execTime"] = int(float(data_dict["response_time"]) * 1000)
                dst["schema"] = data_dict["db_name"]
                # dst["xid"] = data_dict["transaction_id"]
                tof.write(json.dumps(dst) + "\n")
                if n % io_buffer == 0:
                    tof.flush()
            except Exception:
                print(line)
                print(traceback.format_exc())
                sys.exit(1)
    print("--- end ---")
|
||
|
||
def parse_time(date_str):
    """Convert 'YYYY-MM-DD HH:MM:SS.ffffff' to microseconds since the epoch.

    The timestamp is interpreted in the local timezone (time.mktime).
    BUGFIX/idiom: the parameter was named `str`, shadowing the builtin;
    the only call site passes it positionally, so the rename is safe.
    """
    dt = datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S.%f")
    return int(time.mktime(dt.timetuple()) * 1000000 + dt.microsecond)
|
||
def optparser():
    """Parse command-line options.

    :return: options object with `src` (input file) and `dest` (output
             file); both default to None so __main__ can detect missing
             arguments.
    """
    from optparse import OptionParser
    usage = "usage: %prog [options] arg"
    parser = OptionParser(usage, version="%prog 1.0")
    # BUGFIX: `src` previously defaulted to the int 1, which is truthy and
    # let the script run without -s, then fail trying to open the file "1".
    parser.add_option("-s", "--src", action="store", dest="src", type=str,
                      default=None, help="input filename")
    parser.add_option("-d", "--dest", action="store", dest="dest", type=str,
                      help="output filename")
    (options, args) = parser.parse_args()
    return options
|
||
|
||
if __name__ == "__main__":
    opts = optparser()
    if not (opts.src and opts.dest):
        print("ERROR: need -s and -d")
    else:
        # Truncate any previous output, then append parsed records to it.
        open(opts.dest, "w").close()
        parse_json_to_frodo_file(opts.src, opts.dest)
Oops, something went wrong.