Commit 2a1ebb6

mandesero authored and oleg-jukovec committed
test: add tests for command tt status
Add tests to verify the config, box, and upstream statuses and alerts returned by the new `tt status [--details]` command.
1 parent ec6bef8 commit 2a1ebb6
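For orientation, this is roughly how the tests below consume the command: they run `tt status` (optionally with `--details`), slice the output at the INSTANCE header, and read the columns through the extract_status helper from test/utils.py. A condensed sketch of that pattern follows; tt_cmd, tmpdir and app_name stand for the pytest fixtures and app names used in the tests, and the exact column values depend on the instance state:

# Condensed from the tests in this commit; not verbatim test code.
status_cmd = [tt_cmd, "status", app_name, "--details"]
status_rc, status_out = run_command_and_get_output(status_cmd, cwd=tmpdir)
assert status_rc == 0

# Parse the table starting at the INSTANCE header into a dict per instance.
status_info = extract_status(status_out[status_out.find("INSTANCE"):])
row = status_info[f"{app_name}:storage-master"]
assert row["STATUS"] == "RUNNING"
assert row["MODE"] in ("RW", "RO")
assert row["CONFIG"] in ("ready", "check_warnings", "check_errors", "uninitialized")
assert row["BOX"] == "running"
assert row["UPSTREAM"] in ("--", "loading")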

4 files changed, +351 -2 lines changed

@@ -0,0 +1,11 @@
local log = require('log')

box.cfg{listen=3303}
box.schema.user.grant('guest', 'super')

box.session.on_connect(function()
    log.error("Connected")
end)
box.session.on_disconnect(function()
    log.error("Disconnected")
end)
@@ -0,0 +1,11 @@
local fiber = require('fiber')
local fio = require('fio')

box.cfg({})

fh = fio.open('ready', {'O_WRONLY', 'O_CREAT'}, tonumber('644',8))
fh:close()

while true do
    fiber.sleep(5)
end
+325
@@ -0,0 +1,325 @@
import os
import re
import shutil
import subprocess
import tempfile
import time

import pytest
import tarantool

from utils import (control_socket, extract_status, get_tarantool_version,
                   pid_file, run_command_and_get_output, run_path, wait_file)

tarantool_major_version, tarantool_minor_version = get_tarantool_version()


def start_application(cmd, workdir, app_name, instances):
    instance_process = subprocess.Popen(
        cmd,
        cwd=workdir,
        stderr=subprocess.STDOUT,
        stdout=subprocess.PIPE,
        text=True
    )
    start_output = instance_process.stdout.read()
    for inst in instances:
        assert f"Starting an instance [{app_name}:{inst}]" in start_output


def stop_application(tt_cmd, app_name, workdir):
    stop_cmd = [tt_cmd, "stop", app_name, "-y"]
    stop_rc, stop_out = run_command_and_get_output(stop_cmd, cwd=workdir)
    assert stop_rc == 0


def break_config(path):
    with open(path, "a") as config:
        config.write("invalid_field: invalid_value\n")


def run_command_on_instance(tt_cmd, tmpdir, full_inst_name, cmd):
    con_cmd = [tt_cmd, "connect", full_inst_name, "-f", "-"]
    instance_process = subprocess.Popen(
        con_cmd,
        cwd=tmpdir,
        stderr=subprocess.STDOUT,
        stdout=subprocess.PIPE,
        stdin=subprocess.PIPE,
        text=True
    )
    instance_process.stdin.writelines([cmd])
    instance_process.stdin.close()
    output = instance_process.stdout.read()
    return output


def wait_instance_status(tt_cmd, tmpdir, full_inst_name, status, port=None, timeout=10):
    if status == "config":
        cmd = "return require('config'):info().status"
        expected_statuses = ["ready", "check_warnings"]
    elif status == "box":
        cmd = "return box.info.status"
        expected_statuses = ["running"]
    else:
        raise RuntimeError(f"Not supported status to check: {status}")

    conn = None
    end_time = time.time() + timeout
    while True:
        try:
            if port:
                # If the socket file doesn't exist, use a connection by ip:port.
                if not conn:
                    conn = tarantool.Connection(host="localhost", port=port)
                res = conn.eval(cmd)[0]
            else:
                res = run_command_on_instance(
                    tt_cmd,
                    tmpdir,
                    full_inst_name,
                    cmd
                )

            if any(expected_status in res for expected_status in expected_statuses):
                if conn:
                    conn.close()
                return True
        except tarantool.error.Error:
            pass
        if time.time() > end_time:
            print(f"[{full_inst_name}]: {status} wait timed out after {timeout} seconds.")
            return False
        time.sleep(1)


@pytest.mark.skipif(tarantool_major_version < 3,
                    reason="skip cluster instances test for Tarantool < 3")
def test_t3_instance_names_with_config(tt_cmd, tmpdir_with_cfg):
    tmpdir = tmpdir_with_cfg
    app_name = "small_cluster_app"
    app_path = os.path.join(tmpdir, app_name)
    shutil.copytree(os.path.join(os.path.dirname(__file__), f"../running/{app_name}"), app_path)

    run_dir = os.path.join(tmpdir, app_name, run_path)
    instances = ['storage-master', 'storage-replica']
    try:
        # Start the application.
        start_cmd = [tt_cmd, "start", app_name]
        start_application(start_cmd, tmpdir, app_name, instances)

        # Check status.
        for inst in instances:
            file = wait_file(os.path.join(run_dir, inst), 'tarantool.pid', [])
            assert file != ""
            file = wait_file(os.path.join(run_dir, inst), control_socket, [])
            assert file != ""

        status_cmd = [tt_cmd, "status", app_name]
        status_rc, status_out = run_command_and_get_output(status_cmd, cwd=tmpdir)
        assert status_rc == 0
        status_info = extract_status(status_out)
        for inst in instances:
            assert status_info[app_name+":"+inst]["STATUS"] == "RUNNING"
            assert os.path.exists(os.path.join(tmpdir, app_name, "var", "lib", inst))
            assert os.path.exists(os.path.join(tmpdir, app_name, "var", "log", inst, "tt.log"))
            assert not os.path.exists(os.path.join(tmpdir, app_name, "var", "log", inst,
                                                   "tarantool.log"))

        full_master_inst_name = f"{app_name}:{instances[0]}"

        # Wait for the configuration setup to complete.
        assert wait_instance_status(tt_cmd, tmpdir, full_master_inst_name, "config")

        # Break the configuration by modifying the config.yaml file.
        config_path = os.path.join(app_path, "config.yaml")
        break_config(config_path)

        # Reload the configuration on the instance.
        reload_cmd = "require('config'):reload()"
        res = run_command_on_instance(tt_cmd, tmpdir, full_master_inst_name, reload_cmd)

        # Check that the expected error message is present in the response.
        error_message = "[cluster_config] Unexpected field \"invalid_field\""
        assert error_message in res

        status_cmd = [tt_cmd, "status", full_master_inst_name, "--details"]
        status_rc, status_out = run_command_and_get_output(status_cmd, cwd=tmpdir)
        assert status_rc == 0

        status_table = status_out[status_out.find("INSTANCE"):]
        status_info = extract_status(status_table)

        assert status_info[full_master_inst_name]["STATUS"] == "RUNNING"
        assert status_info[full_master_inst_name]["MODE"] == "RW"
        assert status_info[full_master_inst_name]["CONFIG"] == "check_errors"
        assert status_info[full_master_inst_name]["BOX"] == "running"

        # We cannot be certain that the instance bootstrap has completed.
        assert status_info[full_master_inst_name]["UPSTREAM"] in ["--", "loading"]
        assert f"[config][error]: {error_message}" in status_out
    finally:
        stop_application(tt_cmd, app_name, tmpdir)


@pytest.mark.skipif(tarantool_major_version < 3,
                    reason="skip cluster instances test for Tarantool < 3")
def test_t3_instance_names_no_config(tt_cmd):
    test_app_path_src = os.path.join(os.path.dirname(__file__), "../running/multi_inst_app")
    instances = ["router", "master", "replica", "stateboard"]

    # Default temporary directory may have very long path. This can cause socket path buffer
    # overflow. Create our own temporary directory.
    with tempfile.TemporaryDirectory() as tmpdir:
        test_app_path = os.path.join(tmpdir, "app")
        shutil.copytree(test_app_path_src, test_app_path)

        for subdir in ["", "app"]:
            if subdir != "":
                os.mkdir(os.path.join(test_app_path, "app"))
            try:
                # Start the application.
                start_cmd = [tt_cmd, "start", "app"]
                instance_process = subprocess.Popen(
                    start_cmd,
                    cwd=test_app_path,
                    stderr=subprocess.STDOUT,
                    stdout=subprocess.PIPE,
                    text=True
                )
                start_output = instance_process.stdout.readline()
                assert re.search(
                    r"Starting an instance \[app:(router|master|replica|stateboard)\]",
                    start_output
                )

                # Check status.
                for instName in instances:
                    print(os.path.join(test_app_path, "run", "app", instName))
                    file = wait_file(os.path.join(test_app_path, run_path, instName), pid_file, [])
                    assert file != ""

                status_cmd = [tt_cmd, "status", "app", "--details"]
                status_rc, status_out = run_command_and_get_output(status_cmd, cwd=test_app_path)
                assert status_rc == 0
                status_table = status_out[status_out.find("INSTANCE"):]
                status_table = extract_status(status_table)

                for instName in instances:
                    assert status_table[f"app:{instName}"]["STATUS"] == "RUNNING"

                pattern = (
                    r"Alerts for app:(router|master|replica|stateboard):\s+"
                    r"• Error while connecting to instance app:\1 via socket "
                    r".+tarantool\.control: failed to dial: dial unix "
                    r".+tarantool\.control: connect: no such file or directory"
                )
                matches = re.findall(pattern, status_out)
                assert len(matches) == 4

                # Since we cannot connect to instances because the socket file doesn't exist,
                # we will verify that the status strings follow this structure:
                pattern = r"app:(master|replica|router|stateboard)\s+RUNNING\s+\d+\s+--\s+--\s+--"
                matches = re.findall(pattern, status_out)
                assert len(matches) == 4

            finally:
                stop_application(tt_cmd, "app", test_app_path)


@pytest.mark.skipif(tarantool_major_version < 3,
                    reason="skip cluster instances test for Tarantool < 3")
def test_t3_no_instance_names_no_config(tt_cmd, tmpdir_with_cfg):
    tmpdir = tmpdir_with_cfg
    app_name = "single_app"
    app_path = os.path.join(tmpdir, app_name)
    shutil.copytree(os.path.join(os.path.dirname(__file__), app_name), app_path)

    try:
        start_cmd = [tt_cmd, "start", app_name]
        instance_process = subprocess.Popen(
            start_cmd,
            cwd=app_path,
            stderr=subprocess.STDOUT,
            stdout=subprocess.PIPE,
            text=True
        )
        start_output = instance_process.stdout.read()
        assert re.search(
            r"Starting an instance \[single_app\]",
            start_output
        )
        assert wait_instance_status(tt_cmd, app_path, app_name, "box", port=3303)
        status_cmd = [tt_cmd, "status", app_name]
        status_rc, status_out = run_command_and_get_output(status_cmd, cwd=app_path)
        assert status_rc == 0
        status_out = extract_status(status_out)

        assert status_out[app_name]["STATUS"] == "RUNNING"
        assert status_out[app_name]["MODE"] == "RW"
        assert status_out[app_name]["CONFIG"] == "uninitialized"
        assert status_out[app_name]["BOX"] == "running"
        assert status_out[app_name]["UPSTREAM"] == "--"
    finally:
        stop_application(tt_cmd, app_name, app_path)


@pytest.mark.skipif(tarantool_major_version > 2,
                    reason="skip custom test for Tarantool > 2")
def test_status_custom_app(tt_cmd, tmpdir_with_cfg):
    tmpdir = tmpdir_with_cfg
    app_name = "test_custom_app"
    app_path = os.path.join(tmpdir, app_name)
    shutil.copytree(os.path.join(os.path.dirname(__file__), app_name), app_path)
    try:
        # Start a cluster.
        start_cmd = [tt_cmd, "start", app_name]
        rc, out = run_command_and_get_output(start_cmd, cwd=tmpdir)
        assert rc == 0

        # Check for start.
        file = wait_file(os.path.join(tmpdir, app_name), 'ready', [])
        assert file != ""

        status_cmd = [tt_cmd, "status"]
        status_cmd.append("test_custom_app")

        rc, out = run_command_and_get_output(status_cmd, cwd=tmpdir)
        assert rc == 0
        status_out = extract_status(out)
        assert status_out[app_name]["STATUS"] == "RUNNING"
        assert status_out[app_name]["MODE"] == "RW"
        assert status_out[app_name]["CONFIG"] == "uninitialized"
        assert status_out[app_name]["BOX"] == "running"
        assert status_out[app_name]["UPSTREAM"] == "--"
    finally:
        stop_application(tt_cmd, app_name, tmpdir)


@pytest.mark.skipif(tarantool_major_version > 2,
                    reason="skip cartridge test for Tarantool > 2")
def test_status_cartridge(tt_cmd, cartridge_app):
    rs_cmd = [tt_cmd, "status"]

    time.sleep(20)
    rc, out = run_command_and_get_output(rs_cmd, cwd=cartridge_app.workdir)
    assert rc == 0
    status_out = extract_status(out)

    instances = {
        "cartridge_app:router": "RW",
        "cartridge_app:s1-master": "RW",
        "cartridge_app:s2-master": "RW",
        "cartridge_app:s3-master": "RW",
        "cartridge_app:stateboard": "RW",
        "cartridge_app:s1-replica": "RO",
        "cartridge_app:s2-replica-1": "RO",
        "cartridge_app:s2-replica-2": "RO",
    }

    for app_name, mode in instances.items():
        assert status_out[app_name]["STATUS"] == "RUNNING"
        assert status_out[app_name]["MODE"] == mode
        assert status_out[app_name]["CONFIG"] == "uninitialized"
        assert status_out[app_name]["BOX"] == "running"
        assert status_out[app_name]["UPSTREAM"] == "--"

test/utils.py

+4 -2
@@ -440,8 +440,10 @@ def extract_status(status_output):
         if fields[1] == "RUNNING":
             info["STATUS"] = fields[1]
             info["PID"] = int(fields[2])
-            if len(fields) >= 4:
-                info["MODE"] = fields[3]
+            keys = ["MODE", "CONFIG", "BOX", "UPSTREAM"]
+            for i, key in enumerate(keys, start=3):
+                if len(fields) > i:
+                    info[key] = fields[i]
         else:
             info["STATUS"] = " ".join(fields[1:])
         result[instance] = info
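For reference, the loop added above fills the new keys from whatever trailing columns are present in a RUNNING row, so rows with fewer columns still parse. A minimal standalone sketch of that mapping, with an illustrative fields list rather than real tt output:

# Illustrative example of one whitespace-split "RUNNING" row (not real tt output).
fields = ["app:storage-master", "RUNNING", "1234", "RW", "ready", "running", "--"]
info = {"STATUS": fields[1], "PID": int(fields[2])}
keys = ["MODE", "CONFIG", "BOX", "UPSTREAM"]
for i, key in enumerate(keys, start=3):
    if len(fields) > i:
        info[key] = fields[i]
# info == {"STATUS": "RUNNING", "PID": 1234, "MODE": "RW",
#          "CONFIG": "ready", "BOX": "running", "UPSTREAM": "--"}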
