# test_dashboard.py

import os
import sys
import copy
import json
import time
import logging
import asyncio
import ipaddress
import subprocess
import collections

import numpy as np
import psutil
import pytest
import redis
import requests

import ray
from ray import ray_constants
from ray._private.test_utils import (
    format_web_url,
    wait_for_condition,
    wait_until_server_available,
    run_string_as_driver,
    wait_until_succeeded_without_exception,
)
from ray._private.gcs_pubsub import gcs_pubsub_enabled
from ray.ray_constants import DEBUG_AUTOSCALING_STATUS_LEGACY, DEBUG_AUTOSCALING_ERROR
from ray.dashboard import dashboard
import ray.dashboard.consts as dashboard_consts
import ray.dashboard.utils as dashboard_utils
import ray.dashboard.modules
from ray._private.gcs_utils import use_gcs_for_bootstrap

try:
    import aiohttp.web

    import ray.dashboard.optional_utils as dashboard_optional_utils

    routes = dashboard_optional_utils.ClassMethodRouteTable
except Exception:
    pass

logger = logging.getLogger(__name__)


def make_gcs_client(address_info):
    if not use_gcs_for_bootstrap():
        address = address_info["redis_address"]
        address = address.split(":")
        assert len(address) == 2
        client = redis.StrictRedis(
            host=address[0],
            port=int(address[1]),
            password=ray_constants.REDIS_DEFAULT_PASSWORD,
        )
        gcs_client = ray._private.gcs_utils.GcsClient.create_from_redis(client)
    else:
        address = address_info["gcs_address"]
        gcs_client = ray._private.gcs_utils.GcsClient(address=address)
    return gcs_client
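
# Most tests below obtain a GCS client this way and then initialize the
# internal kv API with it, e.g.:
#     gcs_client = make_gcs_client(address_info)
#     ray.experimental.internal_kv._initialize_internal_kv(gcs_client)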


def cleanup_test_files():
    module_path = ray.dashboard.modules.__path__[0]
    filename = os.path.join(module_path, "test_for_bad_import.py")
    logger.info("Remove test file: %s", filename)
    try:
        os.remove(filename)
    except Exception:
        pass


def prepare_test_files():
    module_path = ray.dashboard.modules.__path__[0]
    filename = os.path.join(module_path, "test_for_bad_import.py")
    logger.info("Prepare test file: %s", filename)
    with open(filename, "w") as f:
        f.write(">>>")


# Remove any bad-import file left over from a previous run at import time.
cleanup_test_files()


@pytest.mark.parametrize(
    "ray_start_with_dashboard",
    [{"_system_config": {"agent_register_timeout_ms": 5000}}],
    indirect=True,
)
def test_basic(ray_start_with_dashboard):
    """Dashboard test that starts a Ray cluster with a dashboard server running,
    then hits the dashboard API and asserts that it receives sensible data."""
    address_info = ray_start_with_dashboard
    node_id = address_info["node_id"]
    gcs_client = make_gcs_client(address_info)
    ray.experimental.internal_kv._initialize_internal_kv(gcs_client)

    all_processes = ray.worker._global_node.all_processes
    assert ray_constants.PROCESS_TYPE_DASHBOARD in all_processes
    assert ray_constants.PROCESS_TYPE_REPORTER not in all_processes
    dashboard_proc_info = all_processes[ray_constants.PROCESS_TYPE_DASHBOARD][0]
    dashboard_proc = psutil.Process(dashboard_proc_info.process.pid)
    assert dashboard_proc.status() in [
        psutil.STATUS_RUNNING,
        psutil.STATUS_SLEEPING,
        psutil.STATUS_DISK_SLEEP,
    ]
    raylet_proc_info = all_processes[ray_constants.PROCESS_TYPE_RAYLET][0]
    raylet_proc = psutil.Process(raylet_proc_info.process.pid)

    def _search_agent(processes):
        for p in processes:
            try:
                for c in p.cmdline():
                    if os.path.join("dashboard", "agent.py") in c:
                        return p
            except Exception:
                pass

    # Test for bad imports: the agent should be restarted.
    logger.info("Test for bad imports.")
    agent_proc = _search_agent(raylet_proc.children())
    prepare_test_files()
    agent_pids = set()
    try:
        assert agent_proc is not None
        agent_proc.kill()
        agent_proc.wait()
        # The agent will be restarted because of the import failure.
        for _ in range(300):
            agent_proc = _search_agent(raylet_proc.children())
            if agent_proc:
                agent_pids.add(agent_proc.pid)
            # The agent should be restarted, so we can break once more than
            # one distinct agent pid has been observed.
            if len(agent_pids) > 1:
                break
            time.sleep(0.1)
    finally:
        cleanup_test_files()
    assert len(agent_pids) > 1, agent_pids

    agent_proc = _search_agent(raylet_proc.children())
    if agent_proc:
        agent_proc.kill()
        agent_proc.wait()

    logger.info("Test agent register is OK.")
    wait_for_condition(lambda: _search_agent(raylet_proc.children()))
    assert dashboard_proc.status() in [psutil.STATUS_RUNNING, psutil.STATUS_SLEEPING]
    agent_proc = _search_agent(raylet_proc.children())
    agent_pid = agent_proc.pid

    # Check that agent registration is stable (the pid does not change).
    for _ in range(5):
        logger.info("Check agent is alive.")
        agent_proc = _search_agent(raylet_proc.children())
        assert agent_proc.pid == agent_pid
        time.sleep(1)

    # The agent should die if the raylet exits.
    raylet_proc.kill()
    raylet_proc.wait()
    agent_proc.wait(5)

    # Check that the kv keys are set.
    logger.info("Check kv keys are set.")
    dashboard_address = ray.experimental.internal_kv._internal_kv_get(
        ray_constants.DASHBOARD_ADDRESS, namespace=ray_constants.KV_NAMESPACE_DASHBOARD
    )
    assert dashboard_address is not None
    dashboard_rpc_address = ray.experimental.internal_kv._internal_kv_get(
        dashboard_consts.DASHBOARD_RPC_ADDRESS,
        namespace=ray_constants.KV_NAMESPACE_DASHBOARD,
    )
    assert dashboard_rpc_address is not None
    key = f"{dashboard_consts.DASHBOARD_AGENT_PORT_PREFIX}{node_id}"
    agent_ports = ray.experimental.internal_kv._internal_kv_get(
        key, namespace=ray_constants.KV_NAMESPACE_DASHBOARD
    )
    assert agent_ports is not None
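
# The agent ports entry checked above is presumably the same
# (http_port, grpc_port) pair that test_http_get below reads back from the
# dump endpoint's "agents" data.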


@pytest.mark.parametrize(
    "ray_start_with_dashboard",
    [
        {"dashboard_host": "127.0.0.1"},
        {"dashboard_host": "0.0.0.0"},
        {"dashboard_host": "::"},
    ],
    indirect=True,
)
def test_dashboard_address(ray_start_with_dashboard):
    webui_url = ray_start_with_dashboard["webui_url"]
    if os.environ.get("RAY_MINIMAL") == "1":
        # In the minimal installation, the webui url shouldn't be configured.
        assert webui_url == ""
    else:
        webui_ip = webui_url.split(":")[0]
        print(ipaddress.ip_address(webui_ip))
        print(webui_ip)
        assert not ipaddress.ip_address(webui_ip).is_unspecified
        assert webui_ip in ["127.0.0.1", ray_start_with_dashboard["node_ip_address"]]
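
# Even when dashboard_host is an unspecified address ("0.0.0.0" or "::"),
# the reported webui_url should contain a concrete, routable IP.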


@pytest.mark.skipif(
    os.environ.get("RAY_MINIMAL") == "1",
    reason="This test is not supposed to work for minimal installation.",
)
def test_http_get(enable_test_module, ray_start_with_dashboard):
    assert wait_until_server_available(ray_start_with_dashboard["webui_url"]) is True
    webui_url = ray_start_with_dashboard["webui_url"]
    webui_url = format_web_url(webui_url)

    target_url = webui_url + "/test/dump"

    timeout_seconds = 30
    start_time = time.time()
    while True:
        time.sleep(3)
        try:
            response = requests.get(webui_url + "/test/http_get?url=" + target_url)
            response.raise_for_status()
            try:
                dump_info = response.json()
            except Exception as ex:
                logger.info("failed response: %s", response.text)
                raise ex
            assert dump_info["result"] is True
            dump_data = dump_info["data"]
            assert len(dump_data["agents"]) == 1
            node_id, ports = next(iter(dump_data["agents"].items()))
            ip = ray_start_with_dashboard["node_ip_address"]
            http_port, grpc_port = ports

            response = requests.get(
                f"http://{ip}:{http_port}/test/http_get_from_agent?url={target_url}"
            )
            response.raise_for_status()
            try:
                dump_info = response.json()
            except Exception as ex:
                logger.info("failed response: %s", response.text)
                raise ex
            assert dump_info["result"] is True
            break
        except (AssertionError, requests.exceptions.ConnectionError) as e:
            logger.info("Retry because of %s", e)
        finally:
            if time.time() > start_time + timeout_seconds:
                raise Exception("Timed out while testing.")
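
# The poll-until-deadline pattern above (retry on AssertionError or
# ConnectionError, abort from the finally clause once timeout_seconds have
# elapsed) is reused by several of the tests below.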


@pytest.mark.skipif(
    os.environ.get("RAY_MINIMAL") == "1",
    reason="This test is not supposed to work for minimal installation.",
)
def test_class_method_route_table(enable_test_module):
    head_cls_list = dashboard_utils.get_all_modules(dashboard_utils.DashboardHeadModule)
    agent_cls_list = dashboard_utils.get_all_modules(
        dashboard_utils.DashboardAgentModule
    )
    test_head_cls = None
    for cls in head_cls_list:
        if cls.__name__ == "TestHead":
            test_head_cls = cls
            break
    assert test_head_cls is not None
    test_agent_cls = None
    for cls in agent_cls_list:
        if cls.__name__ == "TestAgent":
            test_agent_cls = cls
            break
    assert test_agent_cls is not None

    def _has_route(route, method, path):
        if isinstance(route, aiohttp.web.RouteDef):
            if route.method == method and route.path == path:
                return True
        return False

    def _has_static(route, path, prefix):
        if isinstance(route, aiohttp.web.StaticDef):
            if route.path == path and route.prefix == prefix:
                return True
        return False

    all_routes = dashboard_optional_utils.ClassMethodRouteTable.routes()
    assert any(_has_route(r, "HEAD", "/test/route_head") for r in all_routes)
    assert any(_has_route(r, "GET", "/test/route_get") for r in all_routes)
    assert any(_has_route(r, "POST", "/test/route_post") for r in all_routes)
    assert any(_has_route(r, "PUT", "/test/route_put") for r in all_routes)
    assert any(_has_route(r, "PATCH", "/test/route_patch") for r in all_routes)
    assert any(_has_route(r, "DELETE", "/test/route_delete") for r in all_routes)
    assert any(_has_route(r, "*", "/test/route_view") for r in all_routes)

    # Test bind(): only routes of the bound module become bound.
    bound_routes = dashboard_optional_utils.ClassMethodRouteTable.bound_routes()
    assert len(bound_routes) == 0
    dashboard_optional_utils.ClassMethodRouteTable.bind(
        test_agent_cls.__new__(test_agent_cls)
    )
    bound_routes = dashboard_optional_utils.ClassMethodRouteTable.bound_routes()
    assert any(_has_route(r, "POST", "/test/route_post") for r in bound_routes)
    assert all(not _has_route(r, "PUT", "/test/route_put") for r in bound_routes)

    # A static def should show up in the bound routes.
    routes.static("/test/route_static", "/path")
    bound_routes = dashboard_optional_utils.ClassMethodRouteTable.bound_routes()
    assert any(_has_static(r, "/path", "/test/route_static") for r in bound_routes)

    # Registering a duplicated route should raise an exception.
    try:

        @routes.get("/test/route_get")
        def _duplicated_route(req):
            pass

        raise Exception("Duplicated routes should raise an exception.")
    except Exception as ex:
        message = str(ex)
        assert "/test/route_get" in message
        assert "test_head.py" in message

    # Test that an exception raised in a handler is serialized into the response.
    post_handler = None
    for r in bound_routes:
        if _has_route(r, "POST", "/test/route_post"):
            post_handler = r.handler
            break
    assert post_handler is not None

    loop = asyncio.get_event_loop()
    r = loop.run_until_complete(post_handler())
    assert r.status == 200
    resp = json.loads(r.body)
    assert resp["result"] is False
    assert "Traceback" in resp["msg"]
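
# For context, ClassMethodRouteTable routes are declared with decorators on
# dashboard module methods, along these lines (a sketch of the assumed shape
# of the TestAgent handler, not a quote from its source):
#
#     @routes.post("/test/route_post")
#     async def route_post(self, req):
#         ...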


@pytest.mark.skipif(
    os.environ.get("RAY_MINIMAL") == "1",
    reason="This test is not supposed to work for minimal installation.",
)
def test_async_loop_forever():
    counter = [0]

    @dashboard_utils.async_loop_forever(interval_seconds=0.1)
    async def foo():
        counter[0] += 1
        raise Exception("Test exception")

    loop = asyncio.get_event_loop()
    loop.create_task(foo())
    loop.call_later(1, loop.stop)
    loop.run_forever()
    assert counter[0] > 2

    counter2 = [0]
    task = None

    @dashboard_utils.async_loop_forever(interval_seconds=0.1, cancellable=True)
    async def bar():
        nonlocal task
        counter2[0] += 1
        if counter2[0] > 2:
            task.cancel()

    loop = asyncio.new_event_loop()
    task = loop.create_task(bar())
    with pytest.raises(asyncio.CancelledError):
        loop.run_until_complete(task)
    assert counter2[0] == 3
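
# The two cases above pin down async_loop_forever's contract: the wrapped
# coroutine is re-run on a fixed interval with exceptions swallowed (foo
# keeps counting past its own raise), and with cancellable=True a
# task.cancel() escapes as asyncio.CancelledError instead of being retried.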


@pytest.mark.skipif(
    os.environ.get("RAY_MINIMAL") == "1",
    reason="This test is not supposed to work for minimal installation.",
)
def test_dashboard_module_decorator(enable_test_module):
    head_cls_list = dashboard_utils.get_all_modules(dashboard_utils.DashboardHeadModule)
    agent_cls_list = dashboard_utils.get_all_modules(
        dashboard_utils.DashboardAgentModule
    )

    assert any(cls.__name__ == "TestHead" for cls in head_cls_list)
    assert any(cls.__name__ == "TestAgent" for cls in agent_cls_list)

    test_code = """
import os
import ray.dashboard.utils as dashboard_utils

os.environ.pop("RAY_DASHBOARD_MODULE_TEST")
head_cls_list = dashboard_utils.get_all_modules(
    dashboard_utils.DashboardHeadModule)
agent_cls_list = dashboard_utils.get_all_modules(
    dashboard_utils.DashboardAgentModule)
print(head_cls_list)
print(agent_cls_list)
assert all(cls.__name__ != "TestHead" for cls in head_cls_list)
assert all(cls.__name__ != "TestAgent" for cls in agent_cls_list)
print("success")
"""
    run_string_as_driver(test_code)
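
# Running the snippet in a fresh driver shows that the test modules are only
# discovered while RAY_DASHBOARD_MODULE_TEST is set; the enable_test_module
# fixture is assumed to set that environment variable.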


@pytest.mark.skipif(
    os.environ.get("RAY_MINIMAL") == "1",
    reason="This test is not supposed to work for minimal installation.",
)
def test_aiohttp_cache(enable_test_module, ray_start_with_dashboard):
    assert wait_until_server_available(ray_start_with_dashboard["webui_url"]) is True
    webui_url = ray_start_with_dashboard["webui_url"]
    webui_url = format_web_url(webui_url)

    timeout_seconds = 5
    start_time = time.time()
    value1_timestamps = []
    while True:
        time.sleep(1)
        try:
            for _ in range(10):
                response = requests.get(webui_url + "/test/aiohttp_cache/t1?value=1")
                response.raise_for_status()
                timestamp = response.json()["data"]["timestamp"]
                value1_timestamps.append(timestamp)
            assert len(collections.Counter(value1_timestamps)) > 1
            break
        except (AssertionError, requests.exceptions.ConnectionError) as e:
            logger.info("Retry because of %s", e)
        finally:
            if time.time() > start_time + timeout_seconds:
                raise Exception("Timed out while testing.")

    sub_path_timestamps = []
    for x in range(10):
        response = requests.get(webui_url + f"/test/aiohttp_cache/tt{x}?value=1")
        response.raise_for_status()
        timestamp = response.json()["data"]["timestamp"]
        sub_path_timestamps.append(timestamp)
    assert len(collections.Counter(sub_path_timestamps)) == 10

    volatile_value_timestamps = []
    for x in range(10):
        response = requests.get(webui_url + f"/test/aiohttp_cache/tt?value={x}")
        response.raise_for_status()
        timestamp = response.json()["data"]["timestamp"]
        volatile_value_timestamps.append(timestamp)
    assert len(collections.Counter(volatile_value_timestamps)) == 10

    response = requests.get(webui_url + "/test/aiohttp_cache/raise_exception")
    response.raise_for_status()
    result = response.json()
    assert result["result"] is False
    assert "KeyError" in result["msg"]

    volatile_value_timestamps = []
    for x in range(10):
        response = requests.get(webui_url + f"/test/aiohttp_cache_lru/tt{x % 4}")
        response.raise_for_status()
        timestamp = response.json()["data"]["timestamp"]
        volatile_value_timestamps.append(timestamp)
    assert len(collections.Counter(volatile_value_timestamps)) == 4

    volatile_value_timestamps = []
    data = collections.defaultdict(set)
    for x in [0, 1, 2, 3, 4, 5, 2, 1, 0, 3]:
        response = requests.get(webui_url + f"/test/aiohttp_cache_lru/t1?value={x}")
        response.raise_for_status()
        timestamp = response.json()["data"]["timestamp"]
        data[x].add(timestamp)
        volatile_value_timestamps.append(timestamp)
    assert len(collections.Counter(volatile_value_timestamps)) == 8
    assert len(data[3]) == 2
    assert len(data[0]) == 2
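
# The LRU arithmetic above: 10 requests over keys [0, 1, 2, 3, 4, 5, 2, 1,
# 0, 3] produce 8 distinct timestamps, i.e. two cache hits, and keys 0 and 3
# are each computed twice. That is consistent with an LRU cache holding five
# entries, though the actual capacity is defined by the test module handler.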


@pytest.mark.skipif(
    os.environ.get("RAY_MINIMAL") == "1",
    reason="This test is not supposed to work for minimal installation.",
)
def test_get_cluster_status(ray_start_with_dashboard):
    assert wait_until_server_available(ray_start_with_dashboard["webui_url"]) is True
    address_info = ray_start_with_dashboard
    webui_url = address_info["webui_url"]
    webui_url = format_web_url(webui_url)

    # Check that the cluster_status endpoint works without the underlying data
    # from the GCS, but returns nothing.
    def get_cluster_status():
        response = requests.get(f"{webui_url}/api/cluster_status")
        response.raise_for_status()
        print(response.json())
        assert response.json()["result"]
        assert "autoscalingStatus" in response.json()["data"]
        assert "autoscalingError" in response.json()["data"]
        assert response.json()["data"]["autoscalingError"] is None
        assert "clusterStatus" in response.json()["data"]
        assert "loadMetricsReport" in response.json()["data"]["clusterStatus"]

    assert wait_until_succeeded_without_exception(
        get_cluster_status, (requests.RequestException,)
    )

    gcs_client = make_gcs_client(address_info)
    ray.experimental.internal_kv._initialize_internal_kv(gcs_client)
    ray.experimental.internal_kv._internal_kv_put(
        DEBUG_AUTOSCALING_STATUS_LEGACY, "hello"
    )
    ray.experimental.internal_kv._internal_kv_put(DEBUG_AUTOSCALING_ERROR, "world")

    response = requests.get(f"{webui_url}/api/cluster_status")
    response.raise_for_status()
    assert response.json()["result"]
    assert "autoscalingStatus" in response.json()["data"]
    assert response.json()["data"]["autoscalingStatus"] == "hello"
    assert "autoscalingError" in response.json()["data"]
    assert response.json()["data"]["autoscalingError"] == "world"
    assert "clusterStatus" in response.json()["data"]
    assert "loadMetricsReport" in response.json()["data"]["clusterStatus"]
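
# Judging from the assertions, a populated /api/cluster_status payload looks
# roughly like (a sketch, not an exact response):
#     {"result": true, "data": {"autoscalingStatus": "hello",
#      "autoscalingError": "world",
#      "clusterStatus": {"loadMetricsReport": ...}}}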


@pytest.mark.skipif(
    os.environ.get("RAY_MINIMAL") == "1",
    reason="This test is not supposed to work for minimal installation.",
)
def test_immutable_types():
    d = {str(i): i for i in range(1000)}
    d["list"] = list(range(1000))
    d["list"][0] = {str(i): i for i in range(1000)}
    d["dict"] = {str(i): i for i in range(1000)}
    immutable_dict = dashboard_utils.make_immutable(d)
    assert type(immutable_dict) == dashboard_utils.ImmutableDict
    assert immutable_dict == dashboard_utils.ImmutableDict(d)
    assert immutable_dict == d
    assert dashboard_utils.ImmutableDict(immutable_dict) == immutable_dict
    assert (
        dashboard_utils.ImmutableList(immutable_dict["list"]) == immutable_dict["list"]
    )
    assert "512" in d
    assert "512" in d["list"][0]
    assert "512" in d["dict"]

    # Test type conversion
    assert type(dict(immutable_dict)["list"]) == dashboard_utils.ImmutableList
    assert type(list(immutable_dict["list"])[0]) == dashboard_utils.ImmutableDict

    # Test json dumps / loads
    json_str = json.dumps(immutable_dict, cls=dashboard_optional_utils.CustomEncoder)
    deserialized_immutable_dict = json.loads(json_str)
    assert type(deserialized_immutable_dict) == dict
    assert type(deserialized_immutable_dict["list"]) == list
    assert immutable_dict.mutable() == deserialized_immutable_dict
    dashboard_optional_utils.rest_response(True, "OK", data=immutable_dict)
    dashboard_optional_utils.rest_response(True, "OK", **immutable_dict)

    # Test copy
    copy_of_immutable = copy.copy(immutable_dict)
    assert copy_of_immutable == immutable_dict
    deepcopy_of_immutable = copy.deepcopy(immutable_dict)
    assert deepcopy_of_immutable == immutable_dict

    # Test get default immutable
    immutable_default_value = immutable_dict.get("not exist list", [1, 2])
    assert type(immutable_default_value) == dashboard_utils.ImmutableList

    # Test recursive immutable
    assert type(immutable_dict["list"]) == dashboard_utils.ImmutableList
    assert type(immutable_dict["dict"]) == dashboard_utils.ImmutableDict
    assert type(immutable_dict["list"][0]) == dashboard_utils.ImmutableDict

    # Test exception
    with pytest.raises(TypeError):
        dashboard_utils.ImmutableList((1, 2))

    with pytest.raises(TypeError):
        dashboard_utils.ImmutableDict([1, 2])

    with pytest.raises(TypeError):
        immutable_dict["list"] = []

    with pytest.raises(AttributeError):
        immutable_dict.update({1: 3})

    with pytest.raises(TypeError):
        immutable_dict["list"][0] = 0

    with pytest.raises(AttributeError):
        immutable_dict["list"].extend([1, 2])

    with pytest.raises(AttributeError):
        immutable_dict["list"].insert(1, 2)

    d2 = dashboard_utils.ImmutableDict({1: np.zeros([3, 5])})
    with pytest.raises(TypeError):
        print(d2[1])

    d3 = dashboard_utils.ImmutableList([1, np.zeros([3, 5])])
    with pytest.raises(TypeError):
        print(d3[1])
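
# Summarizing the contract exercised above: make_immutable() wraps dicts and
# lists recursively, reads return immutable views, mutation raises TypeError
# or AttributeError, and values with no immutable equivalent (e.g. numpy
# arrays) raise TypeError on access.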


@pytest.mark.skipif(
    os.environ.get("RAY_MINIMAL") == "1",
    reason="This test is not supposed to work for minimal installation.",
)
def test_http_proxy(enable_test_module, set_http_proxy, shutdown_only):
    address_info = ray.init(num_cpus=1, include_dashboard=True)
    assert wait_until_server_available(address_info["webui_url"]) is True
    webui_url = address_info["webui_url"]
    webui_url = format_web_url(webui_url)

    timeout_seconds = 10
    start_time = time.time()
    while True:
        time.sleep(1)
        try:
            response = requests.get(
                webui_url + "/test/dump", proxies={"http": None, "https": None}
            )
            response.raise_for_status()
            try:
                response.json()
                assert response.ok
            except Exception as ex:
                logger.info("failed response: %s", response.text)
                raise ex
            break
        except (AssertionError, requests.exceptions.ConnectionError) as e:
            logger.info("Retry because of %s", e)
        finally:
            if time.time() > start_time + timeout_seconds:
                raise Exception("Timed out while testing.")
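
# proxies={"http": None, "https": None} bypasses any proxy for this request;
# the set_http_proxy fixture is assumed to export proxy environment
# variables, so the test checks the dashboard is still reachable directly.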


@pytest.mark.skipif(
    os.environ.get("RAY_MINIMAL") == "1",
    reason="This test is not supposed to work for minimal installation.",
)
def test_dashboard_port_conflict(ray_start_with_dashboard):
    assert wait_until_server_available(ray_start_with_dashboard["webui_url"]) is True
    address_info = ray_start_with_dashboard
    gcs_client = make_gcs_client(address_info)
    ray.experimental.internal_kv._initialize_internal_kv(gcs_client)
    host, port = address_info["webui_url"].split(":")
    temp_dir = "/tmp/ray"
    log_dir = "/tmp/ray/session_latest/logs"
    dashboard_cmd = [
        sys.executable,
        dashboard.__file__,
        f"--host={host}",
        f"--port={port}",
        f"--temp-dir={temp_dir}",
        f"--log-dir={log_dir}",
        f"--redis-address={address_info['redis_address']}",
        f"--redis-password={ray_constants.REDIS_DEFAULT_PASSWORD}",
        f"--gcs-address={address_info['gcs_address']}",
    ]
    logger.info("The dashboard should exit: %s", dashboard_cmd)
    p = subprocess.Popen(dashboard_cmd)
    p.wait(5)

    dashboard_cmd.append("--port-retries=10")
    subprocess.Popen(dashboard_cmd)

    timeout_seconds = 10
    start_time = time.time()
    while True:
        time.sleep(1)
        try:
            dashboard_url = ray.experimental.internal_kv._internal_kv_get(
                ray_constants.DASHBOARD_ADDRESS,
                namespace=ray_constants.KV_NAMESPACE_DASHBOARD,
            )
            if dashboard_url:
                new_port = int(dashboard_url.split(b":")[-1])
                assert new_port > int(port)
                break
        except AssertionError as e:
            logger.info("Retry because of %s", e)
        finally:
            if time.time() > start_time + timeout_seconds:
                raise Exception("Timed out while testing.")
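
# Two launches: without --port-retries, the second dashboard on an occupied
# port should exit within the p.wait(5) window; with --port-retries=10 it
# should bind a higher port and advertise it under DASHBOARD_ADDRESS.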


@pytest.mark.skipif(
    os.environ.get("RAY_MINIMAL") == "1",
    reason="This test is not supposed to work for minimal installation.",
)
def test_gcs_check_alive(fast_gcs_failure_detection, ray_start_with_dashboard):
    assert wait_until_server_available(ray_start_with_dashboard["webui_url"]) is True

    all_processes = ray.worker._global_node.all_processes
    dashboard_info = all_processes[ray_constants.PROCESS_TYPE_DASHBOARD][0]
    dashboard_proc = psutil.Process(dashboard_info.process.pid)
    gcs_server_info = all_processes[ray_constants.PROCESS_TYPE_GCS_SERVER][0]
    gcs_server_proc = psutil.Process(gcs_server_info.process.pid)

    assert dashboard_proc.status() in [
        psutil.STATUS_RUNNING,
        psutil.STATUS_SLEEPING,
        psutil.STATUS_DISK_SLEEP,
    ]

    gcs_server_proc.kill()
    gcs_server_proc.wait()

    if gcs_pubsub_enabled():
        # When pubsub is enabled, the exit is triggered by a pubsub error.
        # TODO: Fix this exit logic for pubsub.
        assert dashboard_proc.wait(10) != 0
    else:
        # The dashboard exits via os._exit(-1), which the OS reports as 255.
        assert dashboard_proc.wait(10) == 255


if __name__ == "__main__":
    sys.exit(pytest.main(["-v", __file__]))