
Commit b616354

dtmbench and perf.yml changes
1 parent d980ac1 commit b616354

3 files changed: +96, -31 lines

tests/deploy_layouts/cluster.yml

Lines changed: 4 additions & 2 deletions
@@ -30,9 +30,11 @@
   - name: enable dtm extension on datanodes
     lineinfile:
       dest: "{{pg_datadir}}/postgresql.conf"
-      regexp: "^shared_preload_libraries"
-      line: "shared_preload_libraries = 'pg_dtm'"
+      line: "{{item}}"
       state: present
+    with_items:
+      - "shared_preload_libraries = 'pg_dtm'"
+      - "dtm.host = '{{ groups['nodes'][0] }}'"
 
   - name: restart postgrespro
     command: "{{pg_dst}}/bin/pg_ctl restart -w -D {{pg_datadir}} -l {{pg_datadir}}/pg.log"
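For reference, once this task has run on a datanode, postgresql.conf should contain two extra lines along these lines (dtm.host takes whatever host is first in the nodes group; node0 below is a hypothetical name):

    shared_preload_libraries = 'pg_dtm'
    dtm.host = 'node0'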

tests/dtmbench.cpp

Lines changed: 62 additions & 21 deletions
@@ -62,15 +62,19 @@ struct config
     int nWriters;
     int nIterations;
     int nAccounts;
+    int startId;
+    int diapason;
     char const* isolationLevel;
     vector<string> connections;
 
     config() {
         nReaders = 1;
         nWriters = 10;
         nIterations = 1000;
-        nAccounts = 1000;
+        nAccounts = 100000;
         isolationLevel = "read committed";
+        startId = 1;
+        diapason = 100000;
     }
 };
 
@@ -154,23 +158,24 @@ void* reader(void* arg)
 void* writer(void* arg)
 {
     thread& t = *(thread*)arg;
-    vector< unique_ptr<connection> > conns(cfg.connections.size());
-    for (size_t i = 0; i < conns.size(); i++) {
-        conns[i] = new connection(cfg.connections[i]);
-    }
+    connection *srcCon, *dstCon;
+
+    srcCon = new connection(cfg.connections[t.id % cfg.connections.size()]);
+    dstCon = new connection(cfg.connections[(t.id + 1) % cfg.connections.size()]);
+
     for (int i = 0; i < cfg.nIterations; i++)
     {
-        int srcCon, dstCon;
-        int srcAcc = (random() % ((cfg.nAccounts-cfg.nWriters)/cfg.nWriters))*cfg.nWriters + t.id;
-        int dstAcc = (random() % ((cfg.nAccounts-cfg.nWriters)/cfg.nWriters))*cfg.nWriters + t.id;
-
-        do {
-            srcCon = random() % cfg.connections.size();
-            dstCon = random() % cfg.connections.size();
-        } while (srcCon == dstCon);
-
-        nontransaction srcTx(*conns[srcCon]);
-        nontransaction dstTx(*conns[dstCon]);
+        int srcAcc = cfg.startId + random() % cfg.diapason;
+        int dstAcc = cfg.startId + random() % cfg.diapason;
+
+        if (srcAcc > dstAcc) {
+            int tmpAcc = dstAcc;
+            dstAcc = srcAcc;
+            srcAcc = tmpAcc;
+        }
+
+        nontransaction srcTx(*srcCon);
+        nontransaction dstTx(*dstCon);
 
         xid_t xid = execQuery(srcTx, "select dtm_begin_transaction()");
         exec(dstTx, "select dtm_join_transaction(%u)", xid);
@@ -188,13 +193,14 @@ void* writer(void* arg)
             i -= 1;
             continue;
         }
+
         pipeline srcPipe(srcTx);
         pipeline dstPipe(dstTx);
         srcPipe.insert("commit transaction");
         dstPipe.insert("commit transaction");
         srcPipe.complete();
         dstPipe.complete();
-
+
         t.proceeded += 1;
     }
     return NULL;
@@ -217,6 +223,12 @@ void initializeDatabase()
 int main (int argc, char* argv[])
 {
     bool initialize = false;
+
+    if (argc == 1){
+        printf("Use -h to show usage options\n");
+        return 1;
+    }
+
     for (int i = 1; i < argc; i++) {
         if (argv[i][0] == '-') {
             switch (argv[i][1]) {
@@ -232,11 +244,16 @@ int main (int argc, char* argv[])
             case 'n':
                 cfg.nIterations = atoi(argv[++i]);
                 continue;
-            case 'c':
-                cfg.connections.push_back(string(argv[++i]));
+            case 's':
+                cfg.startId = atoi(argv[++i]);
+                continue;
+            case 'd':
+                cfg.diapason = atoi(argv[++i]);
                 continue;
             case 'l':
                 cfg.isolationLevel = argv[++i];
+            case 'C':
+                cfg.connections.push_back(string(argv[++i]));
                 continue;
             case 'i':
                 initialize = true;
@@ -246,15 +263,26 @@ int main (int argc, char* argv[])
                 printf("Options:\n"
                     "\t-r N\tnumber of readers (1)\n"
                     "\t-w N\tnumber of writers (10)\n"
-                    "\t-a N\tnumber of accounts (1000)\n"
+                    "\t-a N\tnumber of accounts (100000)\n"
+                    "\t-s N\tperform updates starting from this id (1)\n"
+                    "\t-d N\tperform updates in this diapason (100000)\n"
                     "\t-n N\tnumber of iterations (1000)\n"
                     "\t-l STR\tisolation level (read committed)\n"
                     "\t-c STR\tdatabase connection string\n"
+                    "\t-C STR\tdatabase connection string\n"
                     "\t-i\tinitialize datanase\n");
                 return 1;
             }
+
+    if (cfg.startId + cfg.diapason - 1 > cfg.nAccounts) {
+        printf("startId + diapason should be less that nAccounts. Exiting.\n");
+        return 1;
+    }
+
     if (initialize) {
         initializeDatabase();
+        printf("%d account inserted\n", cfg.nAccounts);
+        return 0;
     }
 
     time_t start = getCurrentTime();
@@ -287,6 +315,19 @@ int main (int argc, char* argv[])
     }
 
     time_t elapsed = getCurrentTime() - start;
-    printf("TPS(updates)=%f, TPS(selects)=%f, aborts=%ld\n", (double)(nWrites*USEC)/elapsed, (double)(nReads*USEC)/elapsed, nAborts);
+
+    printf(
+        "{\"update_tps\":%f, \"read_tps\":%f,"
+        " \"readers\":%d, \"writers\":%d,"
+        " \"accounts\":%d, \"iterations\":%d, \"hosts\":%d}\n",
+        (double)(nWrites*USEC)/elapsed,
+        (double)(nReads*USEC)/elapsed,
+        cfg.nReaders,
+        cfg.nWriters,
+        cfg.nAccounts,
+        cfg.nIterations,
+        cfg.connections.size()
+    );
+
     return 0;
 }
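Putting the new options together, a typical invocation might look like the sketch below (the host names are placeholders, not taken from this repository):

    # initialize 100000 accounts on a two-node cluster
    ./dtmbench -C 'host=node0 port=15432 dbname=postgres' -C 'host=node1 port=15432 dbname=postgres' -a 100000 -i
    # run 10 writers and 1 reader over account ids 1..100000
    ./dtmbench -C 'host=node0 port=15432 dbname=postgres' -C 'host=node1 port=15432 dbname=postgres' -w 10 -r 1 -s 1 -d 100000 -n 1000

With the new printf, the second run ends with a single JSON summary line of the form {"update_tps":..., "read_tps":..., "readers":1, "writers":10, "accounts":100000, "iterations":1000, "hosts":2}, where the two TPS fields are measured at run time.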

tests/perf.yml

Lines changed: 30 additions & 8 deletions
@@ -6,7 +6,7 @@
 
   - name: generate connstrings
     set_fact:
-      connstr: "-d 'host={{item}} user=cluster port=15432 dbname=postgres' "
+      connstr: "-C 'host={{item}} user={{ansible_ssh_user}} port=15432 dbname=postgres' "
     with_items:
       groups['nodes'] | reverse | batch(nnodes | d(2) | int) | first
    register: connstrs
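As an illustration, for a node reachable as 10.0.0.1 and an ssh user named postgres (both hypothetical values), the fact registered above would expand to:

    connstr: "-C 'host=10.0.0.1 user=postgres port=15432 dbname=postgres' "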
@@ -15,22 +15,44 @@
     set_fact:
       connections: "{{ connstrs.results | map(attribute='ansible_facts.connstr') | join }}"
 
-  - name: copy transfers binary
-    copy: src=transfers.linux dest=~/transfers mode=a+x
+  - name: copy transfers source
+    copy: src=./{{item}} dest=~/{{item}} mode=0755
+    with_items:
+      - "dtmbench.cpp"
+
+  - name: compile dtmbench
+    shell: "g++ -g -Wall -O2 -o dtmbench dtmbench.cpp -lpqxx -lpq -pthread"
+
+  - name: compile dtmbench
+    shell: "mv dtmbench ~/pg_cluster/install/bin/dtmbench"
 
 - hosts: clients[0]
   gather_facts: no
   tasks:
-  - name: fill the databases
-    shell: "./transfers {{connections}} -f -g"
-    register: transfers_result
-  - debug: "var=transfers_result"
+  - name: init database
+    shell: "~/pg_cluster/install/bin/dtmbench {{connections}} -a 2000000 -i"
+    register: init_result
+  - debug: var=init_result
 
 - hosts: clients
   gather_facts: no
   tasks:
+
+  - local_action: shell echo "Bench started at `date`" >> perf.results
+
   - name: run transfers
-    shell: "./transfers {{connections}} -s {{runkeys | d('-g -w 60 -m -o -u 10000 -r 0')}} -k {{hostvars[inventory_hostname]['offset']}} "
+    shell: >
+      ~/pg_cluster/install/bin/dtmbench {{connections}}
+      -w {{ (nconns | d(100)| int)*(nnodes | d(2) | int)/(2*( groups['clients'] | count))}}
+      -s {{offset}} -d 100000 -r 1 -n 20000 -a 500000 |
+      tee -a perf.results |
+      sed "s/^/`hostname`:/"
     register: transfers_result
+    environment:
+      LD_LIBRARY_PATH: "/home/{{ansible_ssh_user}}/pg_cluster/install/lib"
+
   - debug: var=transfers_result
 
+  - local_action: 'shell echo "{{transfers_result.stdout }}" >> perf.results'
+
+  - local_action: shell echo "Bench finished at `date`" >> perf.results
