Commit a6e5d31

dtmbench deploy on several servers
1 parent 7b4257c commit a6e5d31

6 files changed: +103 additions, -49 deletions

tests/deploy/cluster.yml

Lines changed: 13 additions & 0 deletions
@@ -43,3 +43,16 @@
 
   - name: restart postgrespro
     command: "{{pg_dst}}/bin/pg_ctl restart -w -D {{pg_datadir}} -l {{pg_datadir}}/pg.log"
+    environment:
+      LD_LIBRARY_PATH: "{{pg_dst}}/lib/"
+
+
+- hosts: clients
+  tasks:
+  - name: copy pg source
+    copy: src=./{{item}} dest=~/{{item}} mode=0755
+    with_items:
+      - "pg_cluster_install.tgz"
+
+  - name: extract postgres
+    command: "tar xzf pg_cluster_install.tgz"

tests/deploy/hosts

Lines changed: 6 additions & 6 deletions
@@ -1,12 +1,12 @@
 [clients]
-158.250.29.4 ansible_ssh_user=cluster offset=6001
-158.250.29.7 ansible_ssh_user=cluster ansible_ssh_port=2299 offset=3001
+158.250.29.4 ansible_ssh_user=cluster offset=600001
+158.250.29.7 ansible_ssh_user=cluster ansible_ssh_port=2299 offset=300001
 
 [nodes]
-158.250.29.5 ansible_ssh_user=cluster offset=5001
-158.250.29.6 ansible_ssh_user=cluster offset=4001
-158.250.29.8 ansible_ssh_user=cluster offset=2001
-158.250.29.9 ansible_ssh_user=cluster offset=1001
+158.250.29.5 ansible_ssh_user=cluster offset=500001
+158.250.29.6 ansible_ssh_user=cluster offset=400001
+158.250.29.8 ansible_ssh_user=cluster offset=200001
+158.250.29.9 ansible_ssh_user=cluster offset=100001
 158.250.29.10 ansible_ssh_user=cluster offset=1
 
 [master]
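
Note on the new offsets: they line up with the -s/-d options added to dtmbench in this commit. Each client host gets a disjoint 100000-wide account window inside the 1,000,000 rows created at init time (assuming the -a 1000000 / -d 100000 values used in perf.yml below), e.g.:

    158.250.29.7  offset=300001  ->  dtmbench -s 300001 -d 100000 updates ids 300001..400000
    158.250.29.4  offset=600001  ->  dtmbench -s 600001 -d 100000 updates ids 600001..700000

so concurrent benchmark clients never contend on the same rows.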

tests/deploy/roles/postgres/tasks/main.yml

Lines changed: 7 additions & 3 deletions
@@ -75,6 +75,8 @@
 
 - name: stop postgres if it was running
   command: "{{pg_dst}}/bin/pg_ctl stop -w -D {{pg_datadir}}"
+  environment:
+    LD_LIBRARY_PATH: "{{pg_dst}}/lib"
   when: pg_pidfile.stat.exists
 
 - name: remove datadirs on datanodes
@@ -83,6 +85,8 @@
 
 - name: create datadirs on datanodes
   command: "{{pg_dst}}/bin/initdb {{pg_datadir}}"
+  environment:
+    LD_LIBRARY_PATH: "{{pg_dst}}/lib/"
   args:
     creates: "{{pg_datadir}}"
 
@@ -106,7 +110,7 @@
     line: "host all all 0.0.0.0/0 trust"
 
 - name: start postgrespro
-  command: "{{pg_dst}}/bin/pg_ctl start -w -D {{pg_datadir}} -l {{pg_datadir}}/pg.log"
-
-
+  shell: "{{pg_dst}}/bin/pg_ctl start -w -D {{pg_datadir}} -l {{pg_datadir}}/pg.log"
+  environment:
+    LD_LIBRARY_PATH: "{{pg_dst}}/lib/"
 
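
With the role defaults in vars/main.yml below (pg_prefix under $HOME, pg_port 5432), the patched start task boils down to roughly this shell command on each node; the added environment block is presumably there so pg_ctl and the server binaries pick up the libraries shipped in the self-built install tree:

    # rough manual equivalent of the "start postgrespro" task
    LD_LIBRARY_PATH=$HOME/pg_cluster/install/lib \
      $HOME/pg_cluster/install/bin/pg_ctl start -w -D $HOME/pg_cluster/data_5432 -l $HOME/pg_cluster/data_5432/pg.log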

tests/deploy/roles/postgres/vars/main.yml

Lines changed: 0 additions & 3 deletions
@@ -8,8 +8,6 @@ pg_version_tag: master
 pg_destroy_and_init: false
 
 pg_port: 5432
-pg_dtm_enable: false
-pg_dtm_host: "127.0.0.1"
 pg_config:
   - line: "shared_buffers = 3GB"
   - line: "wal_keep_segments = 128"
@@ -26,4 +24,3 @@ pg_prefix: "{{ansible_env.HOME}}/pg_cluster"
 pg_src: "{{pg_prefix}}/src"
 pg_dst: "{{pg_prefix}}/install"
 pg_datadir: "{{pg_prefix}}/data_{{pg_port}}"
-

tests/dtmbench.cpp

Lines changed: 55 additions & 25 deletions
@@ -59,13 +59,17 @@ struct config
     int nWriters;
     int nIterations;
     int nAccounts;
+    int startId;
+    int diapason;
     vector<string> connections;
 
     config() {
         nReaders = 1;
         nWriters = 10;
         nIterations = 1000;
-        nAccounts = 1000;
+        nAccounts = 100000;
+        startId = 1;
+        diapason = 100000;
     }
 };
 
@@ -141,36 +145,41 @@ void* reader(void* arg)
 void* writer(void* arg)
 {
     thread& t = *(thread*)arg;
-    vector< unique_ptr<connection> > conns(cfg.connections.size());
-    for (size_t i = 0; i < conns.size(); i++) {
-        conns[i] = new connection(cfg.connections[i]);
-    }
+    connection *srcCon, *dstCon;
+
+    srcCon = new connection(cfg.connections[t.id % cfg.connections.size()]);
+    dstCon = new connection(cfg.connections[(t.id + 1) % cfg.connections.size()]);
+
     for (int i = 0; i < cfg.nIterations; i++)
     {
         char gtid[32];
-        int srcCon, dstCon;
-        int srcAcc = (random() % ((cfg.nAccounts-cfg.nWriters)/cfg.nWriters))*cfg.nWriters + t.id;
-        int dstAcc = (random() % ((cfg.nAccounts-cfg.nWriters)/cfg.nWriters))*cfg.nWriters + t.id;
 
-        sprintf(gtid, "%d.%d", t.id, i);
+        // int srcAcc = (random() % ((cfg.nAccounts-cfg.nWriters)/cfg.nWriters))*cfg.nWriters + t.id;
+        // int dstAcc = (random() % ((cfg.nAccounts-cfg.nWriters)/cfg.nWriters))*cfg.nWriters + t.id;
 
-        do {
-            srcCon = random() % cfg.connections.size();
-            dstCon = random() % cfg.connections.size();
-        } while (srcCon == dstCon);
-
-        nontransaction srcTx(*conns[srcCon]);
-        nontransaction dstTx(*conns[dstCon]);
+        int srcAcc = cfg.startId + random() % cfg.diapason;
+        int dstAcc = cfg.startId + random() % cfg.diapason;
+
+        if (srcAcc > dstAcc) {
+            int tmpAcc = dstAcc;
+            dstAcc = srcAcc;
+            srcAcc = tmpAcc;
+        }
+
+        sprintf(gtid, "%d.%d.%d", cfg.startId, t.id, i);
+
+        nontransaction srcTx(*srcCon);
+        nontransaction dstTx(*dstCon);
 
         exec(srcTx, "begin transaction");
         exec(dstTx, "begin transaction");
 
         csn_t snapshot = execQuery(srcTx, "select dtm_extend('%s')", gtid);
         snapshot = execQuery(dstTx, "select dtm_access(%ld, '%s')", snapshot, gtid);
-
+
         exec(srcTx, "update t set v = v - 1 where u=%d", srcAcc);
         exec(dstTx, "update t set v = v + 1 where u=%d", dstAcc);
-
+
         exec(srcTx, "prepare transaction '%s'", gtid);
         exec(dstTx, "prepare transaction '%s'", gtid);
         exec(srcTx, "select dtm_begin_prepare('%s')", gtid);
@@ -196,17 +205,20 @@ void initializeDatabase()
         exec(txn, "create extension pg_dtm");
         exec(txn, "drop table if exists t");
         exec(txn, "create table t(u int primary key, v int)");
-        exec(txn, "insert into t (select generate_series(0,%d), %d)", cfg.nAccounts-1, 0);
+        exec(txn, "insert into t (select generate_series(0,%d), %d)", cfg.nAccounts, 0);
         txn.commit();
-
-        // nontransaction vacTx(conn);
-        // exec(vacTx, "vacuum full");
     }
 }
 
 int main (int argc, char* argv[])
 {
     bool initialize = false;
+
+    if (argc == 1){
+        printf("Use -h to show usage options\n");
+        return 1;
+    }
+
     for (int i = 1; i < argc; i++) {
         if (argv[i][0] == '-') {
             switch (argv[i][1]) {
@@ -222,6 +234,12 @@ int main (int argc, char* argv[])
             case 'n':
                 cfg.nIterations = atoi(argv[++i]);
                 continue;
+            case 's':
+                cfg.startId = atoi(argv[++i]);
+                continue;
+            case 'd':
+                cfg.diapason = atoi(argv[++i]);
+                continue;
             case 'C':
                 cfg.connections.push_back(string(argv[++i]));
                 continue;
@@ -233,14 +251,24 @@ int main (int argc, char* argv[])
             printf("Options:\n"
                    "\t-r N\tnumber of readers (1)\n"
                    "\t-w N\tnumber of writers (10)\n"
-                   "\t-a N\tnumber of accounts (1000)\n"
+                   "\t-a N\tnumber of accounts (100000)\n"
+                   "\t-s N\tperform updates starting from this id (1)\n"
+                   "\t-d N\tperform updates in this diapason (100000)\n"
                    "\t-n N\tnumber of iterations (1000)\n"
-                   "\t-c STR\tdatabase connection string\n"
+                   "\t-C STR\tdatabase connection string\n"
                    "\t-i\tinitialize datanase\n");
             return 1;
     }
+
+    if (cfg.startId + cfg.diapason - 1 > cfg.nAccounts) {
+        printf("startId + diapason should be less that nAccounts. Exiting.\n");
+        return 1;
+    }
+
     if (initialize) {
         initializeDatabase();
+        printf("%d account inserted\n", cfg.nAccounts);
+        return 0;
     }
 
     time_t start = getCurrentTime();
@@ -275,7 +303,9 @@
 
 
     printf(
-        "{\"update_tps\":%f, \"read_tps\":%f, \"readers\":%d, \"writers\":%d, \"accounts\":%d, \"iterations\":%d, \"hosts\":%d}\n",
+        "{\"update_tps\":%f, \"read_tps\":%f,"
+        " \"readers\":%d, \"writers\":%d,"
+        " \"accounts\":%d, \"iterations\":%d, \"hosts\":%d}\n",
         (double)(nWrites*USEC)/elapsed,
         (double)(nReads*USEC)/elapsed,
         cfg.nReaders,
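
Together with perf.yml below, the new options give a two-step workflow. A sketch of the equivalent manual invocations (hostnames are placeholders; the binary path matches the one used in the playbook):

    # once, from a single client: create table t and fill it with 1000000 accounts
    ~/pg_cluster/install/bin/dtmbench \
        -C "host=node1 port=15432 dbname=postgres" \
        -C "host=node2 port=15432 dbname=postgres" \
        -a 1000000 -i

    # on every client: 100 writers, 1 reader, confined to this client's own 100000-id window
    ~/pg_cluster/install/bin/dtmbench \
        -C "host=node1 port=15432 dbname=postgres" \
        -C "host=node2 port=15432 dbname=postgres" \
        -a 1000000 -s 600001 -d 100000 -w 100 -r 1 -n 1000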

tests/perf.yml

Lines changed: 22 additions & 12 deletions
@@ -6,7 +6,7 @@
 
   - name: generate connstrings
     set_fact:
-      connstr: "-C 'host={{item}} user=cluster port=15432 dbname=postgres' "
+      connstr: "-C 'host={{item}} user={{ansible_ssh_user}} port=15432 dbname=postgres' "
     with_items:
       groups['nodes'] | reverse | batch(nnodes | d(2) | int) | first
     register: connstrs
@@ -15,25 +15,35 @@
     set_fact:
      connections: "{{ connstrs.results | map(attribute='ansible_facts.connstr') | join }}"
 
-  # - name: copy transfers binary
+  # - name: copy transfers source
   #   copy: src=./{{item}} dest=~/{{item}} mode=0755
   #   with_items:
-  #     - "dtmbench"
-  #     - "libpqxx-4.0.so"
+  #     - "dtmbench.cpp"
 
-  - name: copy transfers source
-    copy: src=./{{item}} dest=~/{{item}} mode=0755
-    with_items:
-      - "dtmbench.cpp"
+  # - name: compile dtmbench
+  #   shell: "g++ -g -Wall -O2 -o dtmbench dtmbench.cpp -lpqxx -lpq -pthread"
+
+  # - name: compile dtmbench
+  #   shell: "mv dtmbench ~/pg_cluster/install/bin/dtmbench"
 
-  - name: compile dtmbench
-    shell: "g++ -g -Wall -O2 -o dtmbench dtmbench.cpp -lpqxx -pthread"
+- hosts: clients[0]
+  gather_facts: no
+  tasks:
+  - name: init database
+    shell: "~/pg_cluster/install/bin/dtmbench {{connections}} -a 1000000 -i"
+    register: init_result
+  - debug: var=init_result
 
+- hosts: clients
+  gather_facts: no
+  tasks:
   - name: run transfers
-    shell: "./dtmbench {{connections}} -w {{item}} -r 1 -n 100 -a 100000 -i "
+    shell: "~/pg_cluster/install/bin/dtmbench {{connections}} -w {{item}} -s {{offset}} -d 100000 -r 1 -n 1000 -a 1000000"
     register: transfers_result
+    environment:
+      LD_LIBRARY_PATH: "/home/{{ansible_ssh_user}}/pg_cluster/install/lib"
    with_sequence: start=100 end=200 stride=100
 
-  - local_action: shell echo "{{transfers_result.results | map(attribute='stdout') | join('\n')}}" >> x.out
+  - local_action: shell echo "{{transfers_result.results | map(attribute='stdout') | join('\n')}}" >> perf.results
 
   - debug: msg="{{ transfers_result.results | map(attribute='stdout') | join('\n') }}"
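
For reference, with the hosts inventory above and the default nnodes of 2, the generated {{connections}} fact should expand to something like (nodes list reversed, first batch of two):

    -C 'host=158.250.29.10 user=cluster port=15432 dbname=postgres' -C 'host=158.250.29.9 user=cluster port=15432 dbname=postgres'

so every dtmbench run talks to the same pair of data nodes, while -s {{offset}} keeps each client's update range disjoint.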
