Compare commits

..

55 Commits

Author SHA1 Message Date
Josiah Baldwin
59fb1f104e Bumped version 2025-04-01 12:17:38 -07:00
Josiah Baldwin
9bd3e10ed7 Merge pull request #45 from HuFlungDu/fix/device-open-url
Fixed weird issues in device_open_url
Fix #40
2025-04-01 12:08:43 -07:00
Josiah Baldwin
28e1d94ab9 Fixed weird issues in device_open_url 2025-04-01 12:06:41 -07:00
Josiah Baldwin
51325a89d3 Merge pull request #44 from DaanSelen/simonfix2
Fix #41
2025-04-01 11:53:18 -07:00
Josiah Baldwin
97dff80222 Merge pull request #42 from DaanSelen/lastconlastaddr
Display lastaddr and lastconnected in device object.
2025-04-01 11:51:43 -07:00
Daan
8da445348b Fix a bug discovered by @si458 and also suggested to be fixed 2025-03-21 00:06:24 +01:00
Daan
ab1fba5cc1 Display lastaddr and lastconnected in device object. 2025-03-20 23:55:13 +01:00
Josiah Baldwin
34a80cdda7 Merge pull request #39 from HuFlungDu/feat/websockets15
Feat/websockets15

Implement #38
2025-02-17 12:56:29 -08:00
Josiah Baldwin
fcf523dd62 Updated to work with websockets 15 proxy handling 2025-02-17 12:54:50 -08:00
Josiah Baldwin
9a1311167d Added traceback printing for autoreconnect test in case the error type changes 2025-02-17 12:54:01 -08:00
Josiah Baldwin
c2319fcf29 Updated requirements to websockets 15. This breaks things. 2025-02-17 12:22:31 -08:00
Josiah Baldwin
4d1c25a35c Merge pull request #37 from HuFlungDu/hotfix/1.1.2
Hotfix/1.1.2

Fixes #35 

Update Cryptography version to fix SSL vulnerability.
2025-02-17 12:12:13 -08:00
Josiah Baldwin
e226fff8dd Merge pull request #36 from HuFlungDu/hotfix/1.1.2
Hotfix/1.1.2

Fixes #35 

Update Cryptography version to fix SSL vulnerability.
2025-02-17 12:11:08 -08:00
Josiah Baldwin
a07b0f129a Updated changelog 2025-02-17 12:07:19 -08:00
Josiah Baldwin
64dc5eccdf Updated cryptography requirement 2025-02-17 12:06:10 -08:00
Josiah Baldwin
1a7714663a Updated changelog 2025-02-17 11:51:13 -08:00
Josiah Baldwin
0a59edd19a Fixed semver for requirements 2025-02-17 11:50:50 -08:00
Josiah Baldwin
f8600b09fe Merge pull request #33 from HuFlungDu/hotfix/1.1.1
Hotfix/1.1.1

Fix #29
2025-02-05 12:37:35 -08:00
Josiah Baldwin
351f425ce5 Merge pull request #32 from HuFlungDu/hotfix/1.1.1
Hotfix/1.1.1

Fix #29
2025-02-05 12:37:00 -08:00
Josiah Baldwin
77e76aeb7c Updated changelog 2025-02-05 12:35:23 -08:00
Josiah Baldwin
5393321f7b Fixed issue with getting device_info when multiple meshes are available 2025-02-05 12:30:13 -08:00
Josiah Baldwin
79554ebad6 Added differentiation between admin device info and unprivileged device info for tests, as unprivileged doesn't have any mesh info at the time of testing 2025-02-05 12:28:06 -08:00
Josiah Baldwin
1dbcd012ec Added test to check for issue with multiple meshes 2025-02-05 12:16:01 -08:00
Josiah Baldwin
ace6884991 Merge pull request #27 from HuFlungDu/release/1.1.0
Release/1.1.0 > main
2025-01-08 14:26:38 -08:00
Josiah Baldwin
61eebf1532 Merge pull request #26 from HuFlungDu/release/1.1.0
1.1.0 Release > dev
2025-01-08 14:25:18 -08:00
Josiah Baldwin
fcfeac21a8 Updated changelog 2025-01-08 14:24:19 -08:00
Josiah Baldwin
19d10ee050 Merge pull request #25 from HuFlungDu/fix/multiple-run-commands
fix/multiple-run-commands
2025-01-08 14:10:21 -08:00
Josiah Baldwin
0c9ebf0ff2 Merge pull request #23 from HuFlungDu/feat/test-meshcetral-overrides
Feat/test-meshcentral-overrides
2025-01-08 14:10:13 -08:00
Josiah Baldwin
2556e72a73 Merge pull request #22 from HuFlungDu/fix/bad-auth
Fix/bad-auth
2025-01-08 14:10:03 -08:00
Josiah Baldwin
cda5f610a1 Merge pull request #21 from HuFlungDu/fix/raw-event-off
Fixed listening to raw not removing its listener correctly
2025-01-08 14:09:48 -08:00
Josiah Baldwin
564d466ff9 Fixed listening to raw not removing its listener correctly 2025-01-08 13:57:28 -08:00
Josiah Baldwin
125e6ac6ac Added override directory which will be copied over meshcentral code for testing purposes 2025-01-08 13:54:21 -08:00
Josiah Baldwin
1b849473bb Removed silencing of docker process. Useful for debugging server side things with overrides. 2025-01-08 13:48:41 -08:00
Josiah Baldwin
df25652ba6 Fixed run_commands parsing return from multiple devices incorrectly 2025-01-08 13:42:39 -08:00
Josiah Baldwin
9668e4d507 Added test for using run_command on multiple nodes 2025-01-08 13:38:49 -08:00
Josiah Baldwin
fe4c2fe874 Fixed connection errors not raising immediately 2025-01-08 13:26:04 -08:00
Josiah Baldwin
bb7cf17cd3 Added test for invalid auth 2025-01-08 13:23:21 -08:00
Josiah Baldwin
6919da4a42 Merge pull request #19 from DaanSelen/add-users
Fix #15
2025-01-08 11:59:04 -08:00
Josiah Baldwin
ff120490fa Merge branch 'main' into add-users 2025-01-08 11:56:47 -08:00
Josiah Baldwin
d9991156f6 Merge pull request #18 from DaanSelen/conversion
Fix #9
2025-01-08 11:46:15 -08:00
Daan
4fea858fbc Fix: https://github.com/HuFlungDu/pylibmeshctrl/issues/15 2025-01-04 17:16:30 +01:00
Daan
3b4a18b379 Also added the ValueError raise condition for lastconnect datetime. 2025-01-04 17:09:33 +01:00
Daan
c072d6012a fix https://github.com/HuFlungDu/pylibmeshctrl/issues/9 2025-01-04 17:05:57 +01:00
Josiah Baldwin
0ee2e2dc94 Merge pull request #12 from DaanSelen/main
Backport for python 3.11 and alike
2024-12-19 12:49:12 -08:00
dselen
f2d9fcd295 Update tunnel.py
Fix syntax error
2024-12-19 10:14:09 +01:00
Josiah Baldwin
7456743709 Updated installing docs 2024-12-13 17:10:34 -08:00
Josiah Baldwin
07b828a150 Fixed some docs 2024-12-13 17:04:11 -08:00
Josiah Baldwin
cd7a356eb5 Merge pull request #7 from HuFlungDu/release/1.0.0
1.0.0 release
2024-12-13 16:43:12 -08:00
Josiah Baldwin
5ee2c8edf3 1.0.0 release 2024-12-13 16:39:59 -08:00
Josiah Baldwin
d3d5b87287 Fixed various issues with (down/up)load file functions not passing through arguments 2024-12-13 08:51:49 -08:00
Josiah Baldwin
18eb2de5b6 Added no_proxy os variable bypass so urllib.requests acts as expected 2024-12-13 08:28:54 -08:00
Josiah Baldwin
ec23ba458d Changed how tunnels handle their ssl contexts; fixed long-standing file tunnels not being cleaned up on session close; changed file sizes in tests 2024-12-12 18:06:50 -08:00
Josiah Baldwin
a3c721318d Added ability to download files over http(s)
Also fixed some tests and a couple other bugs
2024-12-12 16:06:18 -08:00
Josiah Baldwin
4eda4e6c08 Increased the odds of getting useful node information from device_info 2024-12-10 18:17:25 -08:00
Josiah Baldwin
ab2a4c40bc Fixed auto-reconnect for proxy and created tests for auto-reconnect 2024-12-10 13:05:22 -08:00
23 changed files with 423 additions and 164 deletions

View File

@@ -19,7 +19,7 @@ formats:
build:
os: ubuntu-22.04
tools:
python: "3.11"
python: "3.13"
python:
install:

View File

@@ -2,7 +2,45 @@
Changelog
=========
Version 0.1
===========
Version 1.2.0
=============
Bugs:
* Fixed agent sometimes being None causing an exception
* Fixed bad code in device_open_url
Features:
* Changed websockets version to 15. This now uses the proxy implementation from that library instead of the previous hack.
* Added lastaddr and lastconnect to list_devices API
Version 1.1.2
=============
Bugs:
* Fixed semver for requirements. New version of websockets broke this library.
Security:
* Updated cryptography to ~44.0.1 to fix an SSL vulnerability.
Version 1.1.1
=============
Bugs:
* Fixed a bug in device_info when the user has access to multiple meshes
Version 1.1.0
=============
Features:
* Added overrides for meshcentral files for testing purposes
* Added `users` field to `device` object
Bugs:
* Fixed connection errors not raising immediately
* Fixed run_commands parsing return from multiple devices incorrectly
* Fixed listening to raw not removing its listener correctly
* Fixed JavaScript timecodes not being handled in GNU environments
* Changed some f-string formatting that locked the library into Python >3.13
Version 1.0.0
=============
First release
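
A minimal usage sketch (not part of this changeset) of the 1.2.0 additions above; the host and credentials are placeholders, and lastaddr/lastconnect are the fields introduced in this diff::

    import asyncio
    import meshctrl

    async def main():
        # Placeholder host and credentials; ignore_ssl mirrors the test setup in this PR.
        async with meshctrl.Session("wss://mesh.example.com", user="admin",
                                    password="password", ignore_ssl=True) as session:
            for dev in await session.list_devices(timeout=10):
                # lastconnect is parsed into a datetime; lastaddr is the last known address.
                print(dev.name, dev.lastaddr, dev.lastconnect)

    asyncio.run(main())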

View File

@@ -38,7 +38,7 @@ Library for remotely interacting with a
Installation
------------
pip install meshctrl
pip install libmeshctrl
Usage
-----
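
The usage body is cut off by the diff context above; for orientation, a hedged minimal example consistent with the tests in this changeset (host, credentials, and node id are placeholders)::

    import asyncio
    import meshctrl

    async def main():
        async with meshctrl.Session("wss://mesh.example.com", user="admin",
                                    password="password", ignore_ssl=True) as session:
            # run_command returns a dict keyed by node id, each with a "result" string.
            out = await session.run_command("<nodeid>", "ls", timeout=10)
            print(out)

    asyncio.run(main())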

View File

@@ -5,8 +5,8 @@ sphinx>=3.2.1
sphinx-jinja2-compat>=0.1.1
sphinx-toolbox>=2.16.0
# sphinx_rtd_theme
cffi==1.17.1
cryptography==43.0.3
pycparser==2.22
websockets==13.1
cffi~=1.17.1
cryptography~=44.0.1
pycparser~=2.22
websockets~=15.0.0
enum_tools

Binary file not shown.

View File

@@ -24,7 +24,7 @@ platforms = any
# Add here all kinds of additional classifiers as defined under
# https://pypi.org/classifiers/
classifiers =
Development Status :: 4 - Beta
Development Status :: 5 - Production/Stable
Programming Language :: Python
@@ -44,9 +44,9 @@ python_requires = >=3.8
# For more information, check out https://semver.org/.
install_requires =
importlib-metadata
cryptography>=43.0.3
websockets>=13.1
python-socks[asyncio]
cryptography~=44.0.1
websockets~=15.0.0
python-socks[asyncio]~=2.5.3
[options.packages.find]

View File

@@ -12,6 +12,7 @@ class Device(object):
name (str|None): Device name as it is shown on the meshcentral server
description (str|None): Device description as it is shown on the meshcentral server. Also accepted as desc.
tags (list[str]|None): tags associated with device.
users (list[str]|None): latest known usernames which have logged in.
created_at (datetime.Datetime|int|None): Time at which device was created. Also accepted as agct.
computer_name (str|None): Device name as reported from the agent. This may be different from name. Also accepted as rname.
icon (~meshctrl.constants.Icon): Icon displayed on the website
@@ -38,6 +39,7 @@ class Device(object):
name (str|None): Device name as it is shown on the meshcentral server
description (str|None): Device description as it is shown on the meshcentral server.
tags (list[str]): tags associated with device.
users (list[str]): latest known usernames which have logged in.
computer_name (str|None): Device name as reported from the agent. This may be different from name. Also accepted as rname.
icon (~meshctrl.constants.Icon): Icon displayed on the website
mesh (~meshctrl.mesh.Mesh|None): Mesh object under which this device exists. Is None for individual device access.
@@ -56,7 +58,7 @@ class Device(object):
'''
def __init__(self, nodeid, session, agent=None,
name=None, desc=None, description=None,
tags=None,
tags=None, users=None,
agct=None, created_at=None,
rname=None, computer_name=None, icon=constants.Icon.desktop,
mesh=None, mtype=None, meshtype=None, groupname=None, meshname=None,
@@ -69,7 +71,7 @@ class Device(object):
if links is None:
links = {}
self.links = links
if ("ver" in agent):
if agent and "ver" in agent:
agent = {
"version": agent["ver"],
"id": agent["id"],
@@ -90,13 +92,14 @@ class Device(object):
self.description = description if description is not None else desc
self.os_description = os_description if os_description is not None else osdesc
self.tags = tags if tags is not None else []
self.users = users if users is not None else []
self.details = details if details is not None else {}
created_at = created_at if created_at is not None else agct
if not isinstance(created_at, datetime.datetime) and created_at is not None:
try:
created_at = datetime.datetime.fromtimestamp(created_at)
except OSError:
except (OSError, ValueError):
# Meshcentral returns in milliseconds, while fromtimestamp, and most of Python, expects the argument in seconds. Try seconds first, then translate from ms if it fails.
# This doesn't work for really early timestamps, but I don't expect that to be a problem here.
created_at = datetime.datetime.fromtimestamp(created_at/1000.0)
@@ -106,7 +109,7 @@ class Device(object):
if not isinstance(lastconnect, datetime.datetime) and lastconnect is not None:
try:
lastconnect = datetime.datetime.fromtimestamp(lastconnect)
except OSError:
except (OSError, ValueError):
# Meshcentral returns in milliseconds, while fromtimestamp, and most of Python, expects the argument in seconds. Try seconds first, then translate from ms if it fails.
# This doesn't work for really early timestamps, but I don't expect that to be a problem here.
lastconnect = datetime.datetime.fromtimestamp(lastconnect/1000.0)
@@ -340,10 +343,10 @@ class Device(object):
def __str__(self):
return f"<Device: nodeid={self.nodeid} name={self.name} description={self.description} computer_name={self.computer_name} icon={self.icon} "\
f"mesh={self.mesh} meshtype={self.meshtype} meshname={self.meshname} domain={self.domain} host={self.host} ip={self.ip} "\
f"tags={self.tags} details={self.details} created_at={self.created_at} lastaddr={self.lastaddr} lastconnect={self.lastconnect} "\
f"tags={self.tags} users={self.users} details={self.details} created_at={self.created_at} lastaddr={self.lastaddr} lastconnect={self.lastconnect} "\
f"connected={self.connected} powered_on={self.powered_on} os_description={self.os_description} links={self.links} _extra_props={self._extra_props}>"
def __repr__(self):
return f"Device(nodeid={repr(self.nodeid)}, session={repr(self._session)}, name={repr(self.name)}, description={repr(self.description)}, computer_name={repr(self.computer_name)}, icon={repr(self.icon)}, "\
f"mesh={repr(self.mesh)}, meshtype={repr(self.meshtype)}, meshname={repr(self.meshname)}, domain={repr(self.domain)}, host={repr(self.host)}, ip={repr(self.ip)}, "\
f"tags={repr(self.tags)}, details={repr(self.details)} created_at={repr(self.created_at)} lastaddr={repr(self.lastaddr)} lastconnect={repr(self.lastconnect)} "\
f"tags={repr(self.tags)}, users={repr(self.users)}, details={repr(self.details)} created_at={repr(self.created_at)} lastaddr={repr(self.lastaddr)} lastconnect={repr(self.lastconnect)} "\
f"connected={repr(self.connected)}, powered_on={repr(self.powered_on)}, os_description={repr(self.os_description)}, links={repr(self.links)}, **{repr(self._extra_props)})"

View File

@@ -2,7 +2,9 @@ class MeshCtrlError(Exception):
"""
Base class for Meshctrl errors
"""
pass
def __init__(self, message, *args, **kwargs):
self.message = message
super().__init__(message, *args, **kwargs)
class ServerError(MeshCtrlError):
"""
@@ -25,6 +27,7 @@ class FileTransferError(MeshCtrlError):
"""
def __init__(self, message, stats):
self.stats = stats
super().__init__(message)
class FileTransferCancelled(FileTransferError):
"""

View File

@@ -4,11 +4,32 @@ from . import exceptions
from . import util
import asyncio
import json
import importlib
import importlib.util
import shutil
# import urllib
# import urllib.request
import urllib.parse
old_parse = urllib.parse
# The default proxy handler uses the OS-defined no_proxy in order to be helpful. This is unhelpful for our use case. Monkey patch out the proxy-getting functions, but don't affect the user's urllib instance.
spec = importlib.util.find_spec('urllib')
urllib = importlib.util.module_from_spec(spec)
spec.loader.exec_module(urllib)
spec = importlib.util.find_spec('urllib.request')
urllib.request = importlib.util.module_from_spec(spec)
spec.loader.exec_module(urllib.request)
urllib.parse = old_parse
urllib.request.getproxies_environment = lambda: {}
urllib.request.getproxies_registry = lambda: {}
urllib.request.getproxies_macosx_sysconf = lambda: {}
urllib.request.getproxies = lambda: {}
class Files(tunnel.Tunnel):
def __init__(self, session, nodeid):
super().__init__(session, nodeid, constants.Protocol.FILES)
def __init__(self, session, node):
super().__init__(session, node.nodeid, constants.Protocol.FILES)
self.recorded = None
self._node = node
self._request_id = 0
self._request_queue = asyncio.Queue()
self._download_finished = asyncio.Event()
@@ -16,6 +37,17 @@ class Files(tunnel.Tunnel):
self._current_request = None
self._handle_requests_task = asyncio.create_task(self._handle_requests())
self._chunk_size = 65564
proxies = {}
if self._session._proxy is not None:
# We don't know which protocol the user is going to use, but we only need to support one at a time, so just assume both
proxies = {
"http": self._session._proxy,
"https": self._session._proxy,
"no": ""
}
self._proxy_handler = urllib.request.ProxyHandler(proxies=proxies)
self._http_opener = urllib.request.build_opener(self._proxy_handler, urllib.request.HTTPSHandler(context=self._session._ssl_context))
def _get_request_id(self):
self._request_id = (self._request_id+1)%(2**32-1)
@@ -68,6 +100,7 @@ class Files(tunnel.Tunnel):
Args:
directory (str): Path to the directory you wish to list
timeout (int): duration in seconds to wait for a response before throwing an error
Returns:
list[~meshctrl.types.FilesLSItem]: The directory listing
@@ -75,6 +108,7 @@ class Files(tunnel.Tunnel):
Raises:
:py:class:`~meshctrl.exceptions.ServerError`: Error from server
:py:class:`~meshctrl.exceptions.SocketError`: Info about socket closure
asyncio.TimeoutError: Command timed out
"""
data = await self._send_command({"action": "ls", "path": directory}, "ls", timeout=timeout)
return data["dir"]
@@ -101,10 +135,12 @@ class Files(tunnel.Tunnel):
Args:
directory (str): Path of directory to create
timeout (int): duration in seconds to wait for a response before throwing an error
Raises:
:py:class:`~meshctrl.exceptions.ServerError`: Error from server
:py:class:`~meshctrl.exceptions.SocketError`: Info about socket closure
asyncio.TimeoutError: Command timed out
Returns:
bool: True if directory was created
@@ -127,10 +163,12 @@ class Files(tunnel.Tunnel):
path (str): Directory from which to delete files
files (str|list[str]): File or files to remove from the directory
recursive (bool): Whether to delete the files recursively
timeout (int): duration in seconds to wait for a response before throwing an error
Raises:
:py:class:`~meshctrl.exceptions.ServerError`: Error from server
:py:class:`~meshctrl.exceptions.SocketError`: Info about socket closure
asyncio.TimeoutError: Command timed out
Returns:
str: Info about the files removed. Something along the lines of Delete: "/path/to/file", or 'Delete recursive: "/path/to/dir", n element(s) removed'.
@@ -155,10 +193,12 @@ class Files(tunnel.Tunnel):
path (str): Directory from which to rename the file
name (str): File to rename
new_name (str): New name to give the file
timeout (int): duration in seconds to wait for a response before throwing an error
Raises:
:py:class:`~meshctrl.exceptions.ServerError`: Error from server
:py:class:`~meshctrl.exceptions.SocketError`: Info about socket closure
asyncio.TimeoutError: Command timed out
Returns:
str: Info about file renamed. Something along the lines of 'Rename: "/path/to/file" to "newfile"'.
@@ -173,6 +213,7 @@ class Files(tunnel.Tunnel):
return tasks[2].result()
@util._check_socket
async def upload(self, source, target, name=None, timeout=None):
'''
Upload a stream to a device.
@@ -181,10 +222,12 @@ class Files(tunnel.Tunnel):
source (io.IOBase): An IO instance from which to read the data. Must be open for reading.
target (str): Path which to upload stream to on remote device
name (str): Pass if target points at a directory instead of the file path. In that case, this will be the name of the file.
timeout (int): duration in seconds to wait for a response before throwing an error
Raises:
:py:class:`~meshctrl.exceptions.FileTransferError`: File transfer failed. Info available on the `stats` property
:py:class:`~meshctrl.exceptions.FileTransferCancelled`: File transfer cancelled. Info available on the `stats` property
asyncio.TimeoutError: Command timed out
Returns:
dict: {result: bool whether upload succeeded, size: number of bytes uploaded}
@@ -198,17 +241,26 @@ class Files(tunnel.Tunnel):
raise request["error"]
return request["return"]
async def download(self, source, target, timeout=None):
def _http_download(self, url, target, timeout):
response = self._http_opener.open(url, timeout=timeout)
shutil.copyfileobj(response, target)
@util._check_socket
async def download(self, source, target, skip_http_attempt=False, skip_ws_attempt=False, timeout=None):
'''
Download a file from a device into a writable stream.
Args:
source (str): Path from which to download from device
target (io.IOBase): Stream to which to write data. If None, create new BytesIO which is both readable and writable.
skip_http_attempt (bool): Meshcentral has a way to download files through http(s) instead of through the websocket. This method tends to be much faster than using the websocket, so we try it first. Setting this to True will skip that attempt and just use the established websocket connection.
skip_ws_attempt (bool): Like skip_http_attempt, except just throw an error if the http attempt fails instead of trying with the websocket
timeout (int): duration in seconds to wait for a response before throwing an error
Raises:
:py:class:`~meshctrl.exceptions.FileTransferError`: File transfer failed. Info available on the `stats` property
:py:class:`~meshctrl.exceptions.FileTransferCancelled`: File transfer cancelled. Info available on the `stats` property
asyncio.TimeoutError: Command timed out
Returns:
dict: {result: bool whether download succeeded, size: number of bytes downloaded}
@@ -216,6 +268,29 @@ class Files(tunnel.Tunnel):
request_id = f"download_{self._get_request_id()}"
data = { "action": 'download', "sub": 'start', "id": request_id, "path": source }
request = {"id": request_id, "data": data, "type": "download", "source": source, "target": target, "size": 0, "finished": asyncio.Event(), "errored": asyncio.Event(), "error": None}
if not skip_http_attempt:
start_pos = target.tell()
try:
params = urllib.parse.urlencode({
"c": self._authcookie["cookie"],
"m": self._node.mesh.meshid.split("/")[-1],
"n": self._node.nodeid.split("/")[-1],
"f": source
})
url = self._session.url.replace('/control.ashx', f"/devicefile.ashx?{params}")
url = url.replace("wss://", "https://").replace("ws://", "http://")
loop = asyncio.get_event_loop()
await loop.run_in_executor(None, self._http_download, url, target, timeout)
size = target.tell() - start_pos
return {"result": True, "size": size}
except* Exception as eg:
if skip_ws_attempt:
size = target.tell() - start_pos
excs = eg.exceptions + (exceptions.FileTransferError("Errored", {"result": False, "size": size}),)
raise ExceptionGroup("File download failed", excs)
target.seek(start_pos)
await self._request_queue.put(request)
await asyncio.wait_for(request["finished"].wait(), timeout)
if request["error"] is not None:
@@ -230,7 +305,7 @@ class Files(tunnel.Tunnel):
return
if cmd["reqid"] == self._current_request["id"]:
if cmd["action"] == "uploaddone":
self._current_request["return"] = {"result": "success", "size": self._current_request["size"]}
self._current_request["return"] = {"result": True, "size": self._current_request["size"]}
self._current_request["finished"].set()
elif cmd["action"] == "uploadstart":
while True:
@@ -252,7 +327,7 @@ class Files(tunnel.Tunnel):
if self._current_request["inflight"] == 0 and self._current_request["complete"]:
await self._message_queue.put(json.dumps({ "action": 'uploaddone', "reqid": self._current_request["id"]}))
elif cmd["action"] == "uploaderror":
self._current_request["return"] = {"result": "canceled", "size": self._current_request["size"]}
self._current_request["return"] = {"result": False, "size": self._current_request["size"]}
self._current_request["error"] = exceptions.FileTransferError("Errored", self._current_request["return"])
self._current_request["errored"].set()
self._current_request["finished"].set()
@@ -268,7 +343,7 @@ class Files(tunnel.Tunnel):
self._current_request["target"].write(data[4:])
self._current_request["size"] += len(data)-4
if (data[3] & 1) != 0:
self._current_request["return"] = {"result": "success", "size": self._current_request["size"]}
self._current_request["return"] = {"result": True, "size": self._current_request["size"]}
self._current_request["finished"].set()
else:
await self._message_queue.put(json.dumps({ "action": 'download', "sub": 'ack', "id": self._current_request["id"] }))
@@ -279,7 +354,7 @@ class Files(tunnel.Tunnel):
if cmd["sub"] == "start":
await self._message_queue.put(json.dumps({ "action": 'download', "sub": 'startack', "id": self._current_request["id"] }))
elif cmd["sub"] == "cancel":
self._current_request["return"] = {"result": "canceled", "size": self._current_request["size"]}
self._current_request["return"] = {"result": False, "size": self._current_request["size"]}
self._current_request["error"] = exceptions.FileTransferCancelled("Cancelled", self._current_request["return"])
self._current_request["errored"].set()
self._current_request["finished"].set()

View File

@@ -46,7 +46,7 @@ class Mesh(object):
if not isinstance(created_at, datetime.datetime) and created_at is not None:
try:
created_at = datetime.datetime.fromtimestamp(created_at)
except OSError:
except (OSError, ValueError):
# Meshcentral returns in milliseconds, while fromtimestamp, and most of Python, expects the argument in seconds. Try seconds first, then translate from ms if it fails.
# This doesn't work for really early timestamps, but I don't expect that to be a problem here.
created_at = datetime.datetime.fromtimestamp(created_at/1000.0)

View File

@@ -124,15 +124,17 @@ class Session(object):
self._message_queue = asyncio.Queue()
self._send_task = None
self._listen_task = None
self._ssl_context = None
if self._ignore_ssl:
self._ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
self._ssl_context.check_hostname = False
self._ssl_context.verify_mode = ssl.CERT_NONE
async def _main_loop(self):
try:
options = {}
if self._ignore_ssl:
ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ssl_context.check_hostname = False
ssl_context.verify_mode = ssl.CERT_NONE
options = { "ssl": ssl_context }
if self._ssl_context is not None:
options["ssl"] = self._ssl_context
headers = websockets.datastructures.Headers()
@@ -140,13 +142,9 @@ class Session(object):
token = self._token if self._token else b""
headers['x-meshauth'] = (base64.b64encode(self._user.encode()) + b',' + base64.b64encode(self._password.encode()) + token).decode()
if self._proxy:
proxy = Proxy.from_url(self._proxy)
parsed = urllib.parse.urlparse(self.url)
options["sock"] = await proxy.connect(dest_host=parsed.hostname, dest_port=parsed.port)
options["additional_headers"] = headers
async for websocket in websockets.asyncio.client.connect(self.url, process_exception=util._process_websocket_exception, **options):
async for websocket in websockets.asyncio.client.connect(self.url, proxy=self._proxy, process_exception=util._process_websocket_exception, **options):
self.alive = True
self._socket_open.set()
try:
@@ -154,10 +152,10 @@ class Session(object):
tg.create_task(self._listen_data_task(websocket))
tg.create_task(self._send_data_task(websocket))
except* websockets.ConnectionClosed as e:
self._socket_open.clear()
if not self.auto_reconnect:
raise
except* Exception as eg:
self.alive = False
self._socket_open.clear()
@@ -219,12 +217,14 @@ class Session(object):
return self._command_id
async def close(self):
# Dunno yet
self._main_loop_task.cancel()
try:
await self._main_loop_task
except asyncio.CancelledError:
pass
await asyncio.gather(*[tunnel.close() for name, tunnel in self._file_tunnels.items()])
finally:
self._main_loop_task.cancel()
try:
await self._main_loop_task
except asyncio.CancelledError:
pass
@util._check_socket
async def __aenter__(self):
@@ -482,6 +482,10 @@ class Session(object):
for node in nodes:
if node["node"].get("meshid", None):
node["node"]["mesh"] = mesh.Mesh(node["node"].get("meshid"), self)
if "lastConnect" in node and isinstance(node["lastConnect"], dict):
node["node"]["lastconnect"] = node["lastConnect"].get("time")
node["node"]["lastaddr"] = node["lastConnect"].get("addr")
del node["lastConnect"]
details = {}
for key, val in node.items():
if key != "node":
@@ -533,7 +537,7 @@ class Session(object):
data = await event_queue.get()
yield data
finally:
self._eventer.off("server_event", _)
self._eventer.off("raw", _)
async def events(self, filter=None):
'''
@@ -1341,6 +1345,14 @@ class Session(object):
if sysinfo is not None and sysinfo.get("node", None):
# Node information came with system information
node = sysinfo.get("node", None)
for meshid, _nodes in nodes["nodes"].items():
for _mesh in meshes:
if _mesh.meshid == meshid:
break
else:
break
if meshid == node["meshid"]:
node["mesh"] = _mesh
else:
# This device does not have system information, get node information from the nodes list.
for meshid, _nodes in nodes["nodes"].items():
@@ -1355,10 +1367,10 @@ class Session(object):
node["meshid"] = meshid
if _mesh is not None:
node["mesh"] = _mesh
sysinfo["node"] = node
sysinfo["nodeid"] = nodeid
del sysinfo["result"]
del sysinfo["noinfo"]
break
else:
continue
break
if node is None:
raise ValueError("Invalid device id")
if lastconnect is not None:
@@ -1455,6 +1467,7 @@ class Session(object):
result.setdefault(node, {})["complete"] = True
if all(_["complete"] for key, _ in result.items()):
break
continue
elif (event["value"].startswith("Run commands")):
continue
result[node]["result"].append(event["value"])
@@ -1728,10 +1741,11 @@ class Session(object):
tasks.append(tg.create_task(asyncio.wait_for(_(), timeout=timeout)))
tasks.append({ "action": 'msg', "type": 'openUrl', "nodeid": nodeid, "url": url }, "device_open_url", timeout=timeout)
res = tasks[1].result()
success = tasks[2].result()
if data.get("result", "ok").lower() != "ok":
success = tasks[0].result()
res = tasks[1].result()
if res.get("result", "ok").lower() != "ok":
raise exceptions.ServerError(data["result"])
if not success:
@@ -1814,15 +1828,16 @@ class Session(object):
raise ValueError("No user or session given")
await self._message_queue.put(json.dumps({"action": "interuser", "data": data, "sessionid": session, "userid": user}))
async def upload(self, nodeid, source, target, unique_file_tunnel=False, timeout=None):
async def upload(self, node, source, target, unique_file_tunnel=False, timeout=None):
'''
Upload a stream to a device. This creates an _File and destroys it every call. If you need to upload multiple files, use {@link Session#file_explorer} instead.
Upload a stream to a device.
Args:
nodeid (str): Unique id to upload stream to
node (~meshctrl.device.Device|str): Device or id of device to which to upload the file. If it is a device, it must have a ~meshctrl.mesh.Mesh device associated with it (the default). If it is a string, the device will be fetched prior to tunnel creation.
source (io.IOBase): An IO instance from which to read the data. Must be open for reading.
target (str): Path which to upload stream to on remote device
unique_file_tunnel (bool): True: Create a unique :py:class:`~meshctrl.files.Files` for this call, which will be cleaned up on return, else use cached or cache :py:class:`~meshctrl.files.Files`
timeout (int): duration in seconds to wait for a response before throwing an error
Raises:
:py:class:`~meshctrl.exceptions.FileTransferError`: File transfer failed. Info available on the `stats` property
@@ -1831,23 +1846,26 @@ class Session(object):
Returns:
dict: {result: bool whether upload succeeded, size: number of bytes uploaded}
'''
if not isinstance(node, device.Device):
node = await self.device_info(node)
if unique_file_tunnel:
async with self.file_explorer(nodeid) as files:
return await files.upload(source, target)
async with self.file_explorer(node) as files:
return await files.upload(source, target, timeout=timeout)
else:
files = await self._cached_file_explorer(nodeid, nodeid)
files = await self._cached_file_explorer(node, node.nodeid)
return await files.upload(source, target, timeout=timeout)
async def upload_file(self, nodeid, filepath, target, unique_file_tunnel=False, timeout=None):
async def upload_file(self, node, filepath, target, unique_file_tunnel=False, timeout=None):
'''
Friendly wrapper around :py:class:`~meshctrl.session.Session.upload` to upload from a filepath. Creates a ReadableStream and calls upload.
Args:
nodeid (str): Unique id to upload file to
node (~meshctrl.device.Device|str): Device or id of device to which to upload the file. If it is a device, it must have a ~meshctrl.mesh.Mesh device associated with it (the default). If it is a string, the device will be fetched prior to tunnel creation.
filepath (str): Path from which to read the data
target (str): Path which to upload file to on remote device
unique_file_tunnel (bool): True: Create a unique :py:class:`~meshctrl.files.Files` for this call, which will be cleaned up on return, else use cached or cache :py:class:`~meshctrl.files.Files`
timeout (int): duration in seconds to wait for a response before throwing an error
Raises:
:py:class:`~meshctrl.exceptions.FileTransferError`: File transfer failed. Info available on the `stats` property
@@ -1857,17 +1875,20 @@ class Session(object):
dict: {result: bool whether upload succeeded, size: number of bytes uploaded}
'''
with open(filepath, "rb") as f:
return await self.upload(nodeid, f, target, unique_file_tunnel, timeout=timeout)
return await self.upload(node, f, target, unique_file_tunnel, timeout=timeout)
async def download(self, nodeid, source, target=None, unique_file_tunnel=False, timeout=None):
async def download(self, node, source, target=None, skip_http_attempt=False, skip_ws_attempt=False, unique_file_tunnel=False, timeout=None):
'''
Download a file from a device into a writable stream. This creates an :py:class:`~meshctrl.files.Files` and destroys it every call. If you need to transfer multiple files, use :py:class:`~meshctrl.session.Session.file_explorer` instead.
Args:
nodeid (str): Unique id to download file from
node (~meshctrl.device.Device|str): Device or id of device from which to download the file. If it is a device, it must have a ~meshctrl.mesh.Mesh device associated with it (the default). If it is a string, the device will be fetched prior to tunnel creation.
source (str): Path from which to download from device
target (io.IOBase): Stream to which to write data. If None, create new BytesIO which is both readable and writable.
skip_http_attempt (bool): Meshcentral has a way to download files through http(s) instead of through the websocket. This method tends to be much faster than using the websocket, so we try it first. Setting this to True will skip that attempt and just use the established websocket connection.
skip_ws_attempt (bool): Like skip_http_attempt, except just throw an error if the http attempt fails instead of trying with the websocket
unique_file_tunnel (bool): True: Create a unique :py:class:`~meshctrl.files.Files` for this call, which will be cleaned up on return, else use cached or cache :py:class:`~meshctrl.files.Files`
timeout (int): duration in seconds to wait for a response before throwing an error
Raises:
:py:class:`~meshctrl.exceptions.FileTransferError`: File transfer failed. Info available on the `stats` property
@@ -1876,29 +1897,34 @@ class Session(object):
Returns:
io.IOBase: The stream which has been downloaded into. Cursor will be at the beginning of where the file is downloaded.
'''
if not isinstance(node, device.Device):
node = await self.device_info(node)
if target is None:
target = io.BytesIO()
start = target.tell()
if unique_file_tunnel:
async with self.file_explorer(nodeid) as files:
await files.download(source, target)
async with self.file_explorer(node) as files:
await files.download(source, target, skip_http_attempt=skip_http_attempt, skip_ws_attempt=skip_ws_attempt, timeout=timeout)
target.seek(start)
return target
else:
files = await self._cached_file_explorer(nodeid, nodeid)
await files.download(source, target, timeout=timeout)
files = await self._cached_file_explorer(node, node.nodeid)
await files.download(source, target, skip_http_attempt=skip_http_attempt, skip_ws_attempt=skip_ws_attempt, timeout=timeout)
target.seek(start)
return target
async def download_file(self, nodeid, source, filepath, unique_file_tunnel=False, timeout=None):
async def download_file(self, node, source, filepath, skip_http_attempt=False, skip_ws_attempt=False, unique_file_tunnel=False, timeout=None):
'''
Friendly wrapper around :py:class:`~meshctrl.session.Session.download` to download to a filepath. Creates a WritableStream and calls download.
Args:
nodeid (str): Unique id to download file from
node (~meshctrl.device.Device|str): Device or id of device from which to download the file. If it is a device, it must have a ~meshctrl.mesh.Mesh device associated with it (the default). If it is a string, the device will be fetched prior to tunnel creation.
source (str): Path from which to download from device
filepath (str): Path to which to download data
skip_http_attempt (bool): Meshcentral has a way to download files through http(s) instead of through the websocket. This method tends to be much faster than using the websocket, so we try it first. Setting this to True will skip that attempt and just use the established websocket connection.
skip_ws_attempt (bool): Like skip_http_attempt, except just throw an error if the http attempt fails instead of trying with the websocket
unique_file_tunnel (bool): True: Create a unique :py:class:`~meshctrl.files.Files` for this call, which will be cleaned up on return, else use cached or cache :py:class:`~meshctrl.files.Files`
timeout (int): duration in seconds to wait for a response before throwing an error
Raises:
:py:class:`~meshctrl.exceptions.FileTransferError`: File transfer failed. Info available on the `stats` property
@@ -1908,24 +1934,39 @@ class Session(object):
None
'''
with open(filepath, "wb") as f:
await self.download(nodeid, source, f, unique_file_tunnel, timeout=timeout)
await self.download(node, source, f, skip_http_attempt=skip_http_attempt, skip_ws_attempt=skip_ws_attempt, unique_file_tunnel=unique_file_tunnel, timeout=timeout)
async def _cached_file_explorer(self, nodeid, _id):
async def _cached_file_explorer(self, node, _id):
if (_id not in self._file_tunnels or not self._file_tunnels[_id].alive):
self._file_tunnels[_id] = self.file_explorer(nodeid)
self._file_tunnels[_id] = await self.file_explorer(node).__aenter__()
await self._file_tunnels[_id].initialized.wait()
return self._file_tunnels[_id]
def file_explorer(self, nodeid):
def file_explorer(self, node):
'''
Create, initialize, and return an :py:class:`~meshctrl.files.Files` object for the given node
Args:
nodeid (str): Unique id on which to open file explorer
node (~meshctrl.device.Device|str): Device or id of device on which to open file explorer. If it is a device, it must have a ~meshctrl.mesh.Mesh device associated with it (the default). If it is a string, the device will be fetched prior to tunnel creation.
Returns:
:py:class:`~meshctrl.files.Files`: A newly initialized file explorer.
'''
return files.Files(self, nodeid)
'''
return _FileExplorerWrapper(self, node)
# This is a little yucky, but I can't get a good API otherwise. Since Tunnel objects are only usable as context managers anyway, this should be fine.
class _FileExplorerWrapper:
def __init__(self, session, node):
self.session = session
self.node = node
self._files = None
async def __aenter__(self):
if not isinstance(self.node, device.Device):
self.node = await self.session.device_info(self.node)
self._files = files.Files(self.session, self.node)
return await self._files.__aenter__()
async def __aexit__(self, exc_t, exc_v, exc_tb):
return await self._files.__aexit__(exc_t, exc_v, exc_tb)
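
A hedged sketch of the session-level helpers after this change, which now accept either a Device or a node id; local and remote paths are placeholders::

    async def round_trip(session, node):
        r = await session.upload_file(node, "local.bin", "/home/user/remote.bin",
                                      unique_file_tunnel=True, timeout=30)
        # r == {"result": True, "size": <bytes uploaded>} on success.
        await session.download_file(node, "/home/user/remote.bin", "local_copy.bin",
                                    unique_file_tunnel=True, timeout=30)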

View File

@@ -78,7 +78,6 @@ class Shell(tunnel.Tunnel):
read_bytes = 0
while True:
d = self._buffer.read1(length-read_bytes if length is not None else -1)
# print(f"read: {d}")
read_bytes += len(d)
ret.append(d)
if length is not None and read_bytes >= length:
@@ -163,7 +162,6 @@ class SmartShell(object):
command += "\n"
await self._shell.write(command)
data = await self._shell.expect(self._regex, timeout=timeout)
print(repr(data))
return data[:self._compiled_regex.search(data).span()[0]]
@property
@@ -178,14 +176,18 @@ class SmartShell(object):
def initialized(self):
return self._shell.initialized
@property
def _socket_open(self):
return self._shell._socket_open
async def close(self):
await self._init_task
await asyncio.wait_for(self._init_task, 10)
return await self._shell.close()
async def __aenter__(self):
await self._init_task
await self._shell.__aenter__()
await asyncio.wait_for(self._init_task, 10)
return self
async def __aexit__(self, *args):
return await self._shell.__aexit__(*args)
await self.close()

View File

@@ -45,22 +45,18 @@ class Tunnel(object):
async def _main_loop(self):
try:
authcookie = await self._session._send_command_no_response_id({ "action":"authcookie" })
self._authcookie = await self._session._send_command_no_response_id({ "action":"authcookie" })
options = {}
if self._session._ignore_ssl:
ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ssl_context.check_hostname = False
ssl_context.verify_mode = ssl.CERT_NONE
options = { "ssl": ssl_context }
if self._session._ssl_context is not None:
options["ssl"] = self._session._ssl_context
if (self.node_id.split('/') != 3) and (self._session._currentDomain is not None):
self.node_id = f"node/{self._session._currentDomain}/{self.node_id}"
if (len(self.node_id.split('/')) != 3):
self.node_id = f"node/{self._session._currentDomain or ''}/{self.node_id}"
self._tunnel_id = util._get_random_hex(6)
initialize_tunnel_response = await self._session._send_command({ "action": 'msg', "nodeid": self.node_id, "type": 'tunnel', "usage": 1, "value": '*/meshrelay.ashx?p=' + str(self._protocol) + '&nodeid=' + self.node_id + '&id=' + self._tunnel_id + '&rauth=' + authcookie["rcookie"] }, "initialize_tunnel")
initialize_tunnel_response = await self._session._send_command({ "action": 'msg', "nodeid": self.node_id, "type": 'tunnel', "usage": 1, "value": '*/meshrelay.ashx?p=' + str(self._protocol) + '&nodeid=' + self.node_id + '&id=' + self._tunnel_id + '&rauth=' + self._authcookie["rcookie"] }, "initialize_tunnel")
if initialize_tunnel_response.get("result", None) != "OK":
self._main_loop_error = exceptions.ServerError(initialize_tunnel_response.get("result", "Failed to initialize remote tunnel"))
self._socket_open.clear()
@@ -68,14 +64,10 @@ class Tunnel(object):
self.initialized.set()
return
self.url = self._session.url.replace('/control.ashx', '/meshrelay.ashx?browser=1&p=' + str(self._protocol) + '&nodeid=' + self.node_id + '&id=' + self._tunnel_id + '&auth=' + authcookie["cookie"])
self.url = self._session.url.replace('/control.ashx', '/meshrelay.ashx?browser=1&p=' + str(self._protocol) + '&nodeid=' + self.node_id + '&id=' + self._tunnel_id + '&auth=' + self._authcookie["cookie"])
if self._session._proxy:
proxy = Proxy.from_url(self._session._proxy)
parsed = urllib.parse.urlparse(self.url)
options["sock"] = await proxy.connect(dest_host=parsed.hostname, dest_port=parsed.port)
async for websocket in websockets.asyncio.client.connect(self.url, process_exception=util._process_websocket_exception, **options):
async for websocket in websockets.asyncio.client.connect(self.url, proxy=self._session._proxy, process_exception=util._process_websocket_exception, **options):
self.alive = True
self._socket_open.set()
try:
@@ -85,7 +77,6 @@ class Tunnel(object):
except* websockets.ConnectionClosed as e:
self._socket_open.clear()
if not self.auto_reconnect:
self.alive = False
raise
except* Exception as eg:
self.alive = False

View File

@@ -9,6 +9,8 @@ import re
import websockets
import ssl
import functools
import urllib
import python_socks
from . import exceptions
def _encode_cookie(o, key):
@@ -137,22 +139,31 @@ def compare_dict(dict1, dict2):
return False
def _check_socket(f):
@functools.wraps(f)
async def wrapper(self, *args, **kwargs):
await asyncio.wait_for(self.initialized.wait(), 10)
async def _check_errs(self):
if not self.alive and self._main_loop_error is not None:
raise self._main_loop_error
elif not self.alive:
elif not self.alive and self.initialized.is_set():
raise exceptions.SocketError("Socket Closed")
return await f(self, *args, **kwargs)
@functools.wraps(f)
async def wrapper(self, *args, **kwargs):
try:
await asyncio.wait_for(self.initialized.wait(), 10)
await _check_errs(self)
await asyncio.wait_for(self._socket_open.wait(), 10)
finally:
await _check_errs(self)
return await f(self, *args, **kwargs)
return wrapper
def _process_websocket_exception(exc):
tmp = websockets.asyncio.client.process_exception(exc)
# SSLVerification error is a subclass of OSError, but doesn't make sense no retry, so we need to handle it separately.
# SSLVerification error is a subclass of OSError, but doesn't make sense to retry, so we need to handle it separately.
if isinstance(exc, (ssl.SSLCertVerificationError, TimeoutError)):
return exc
if isinstance(exc, python_socks._errors.ProxyError):
return None
# Proxy errors show up like this now, and the default is to error out. Handle them explicitly.
if isinstance(exc, websockets.exceptions.InvalidProxyMessage):
return None
return tmp
class Proxy(object):
pass

tests/.gitignore vendored
View File

@@ -1,2 +1,2 @@
data
environment/scripts/meshcentral/users.json
/data
/environment/scripts/meshcentral/users.json

View File

@@ -43,10 +43,8 @@ class Agent(object):
return self
def __exit__(self, exc_t, exc_v, exc_tb):
try:
requests.post("{self._clienturl}/remove-agent/{self.nodeid}")
except:
pass
requests.post(f"{self._clienturl}/remove-agent/{self.nodeid}")
class TestEnvironment(object):
def __init__(self):
@@ -64,8 +62,13 @@ class TestEnvironment(object):
return self
# Destroy the env in case it wasn't killed correctly last time.
subprocess.check_call(["docker", "compose", "down"], stdout=subprocess.DEVNULL, cwd=thisdir)
self._subp = _docker_process = subprocess.Popen(["docker", "compose", "up", "--build", "--force-recreate", "--no-deps"], stdout=subprocess.DEVNULL, cwd=thisdir)
timeout = 30
self._subp = _docker_process = subprocess.Popen(["docker", "compose", "up", "--build", "--force-recreate", "--no-deps"], cwd=thisdir)
if not self._wait_for_meshcentral():
self.__exit__(None, None, None)
raise Exception("Failed to create docker instance")
return self
def _wait_for_meshcentral(self, timeout=30):
start = time.time()
while time.time() - start < timeout:
try:
@@ -82,10 +85,8 @@ class TestEnvironment(object):
pass
time.sleep(1)
else:
self.__exit__(None, None, None)
raise Exception("Failed to create docker instance")
return self
return False
return True
def __exit__(self, exc_t, exc_v, exc_tb):
pass
@@ -93,6 +94,15 @@ class TestEnvironment(object):
def create_agent(self, meshid):
return Agent(meshid, self.mcurl, self.clienturl, self.dockerurl)
# Restart our docker instances, to test reconnect code.
def restart_mesh(self):
subprocess.check_call(["docker", "container", "restart", "meshctrl-meshcentral"], stdout=subprocess.DEVNULL, cwd=thisdir)
assert self._wait_for_meshcentral(), "Failed to restart docker instance"
def restart_proxy(self):
subprocess.check_call(["docker", "container", "restart", "meshctrl-squid"], stdout=subprocess.DEVNULL, cwd=thisdir)
def _kill_docker_process():
if _docker_process is not None:
_docker_process.kill()

View File

@@ -0,0 +1,4 @@
# Ignore everything in this directory
*
# Except this file
!.gitignore

View File

@@ -3,5 +3,6 @@ RUN apk add curl
RUN apk add python3
WORKDIR /opt/meshcentral/
COPY ./scripts/meshcentral ./scripts
COPY ./meshcentral/data /opt/meshcentral/meshcentral-data
COPY ./config/meshcentral/data /opt/meshcentral/meshcentral-data
COPY ./config/meshcentral/overrides /opt/meshcentral/meshcentral
CMD ["python3", "/opt/meshcentral/scripts/create_users.py"]

View File

@@ -1,6 +1,6 @@
requests
pytest-asyncio
cffi==1.17.1
cryptography==43.0.3
cryptography~=44.0.1
pycparser==2.22
websockets==13.1
websockets~=15.0.0

View File

@@ -5,6 +5,7 @@ import meshctrl
import requests
import io
import random
import time
async def test_commands(env):
async with meshctrl.Session("wss://" + env.dockerurl, user="admin", password=env.users["admin"], ignore_ssl=True, proxy=env.proxyurl) as admin_session:
@@ -52,6 +53,18 @@ async def test_commands(env):
finally:
assert (await admin_session.remove_device_group(mesh.meshid, timeout=10)), "Failed to remove device group"
async def test_os_proxy_bypass():
os.environ["no_proxy"] = "*"
import urllib
import urllib.request
os_proxies = urllib.request.getproxies()
meshctrl_proxies = meshctrl.files.urllib.request.getproxies()
print(f"os_proxies: {os_proxies}")
print(f"meshctrl_proxies: {meshctrl_proxies}")
assert meshctrl_proxies.get("no", None) == None, "Meshctrl is using system proxies"
assert os_proxies.get("no", None) == "*", "System is using meshctrl proxies"
assert os_proxies != meshctrl_proxies, "Override didn't work"
async def test_upload_download(env):
async with meshctrl.Session("wss://" + env.dockerurl, user="admin", password=env.users["admin"], ignore_ssl=True, proxy=env.proxyurl) as admin_session:
mesh = await admin_session.add_device_group("test", description="This is a test group", amtonly=False, features=0, consent=0, timeout=10)
@@ -69,7 +82,7 @@ async def test_upload_download(env):
else:
break
randdata = random.randbytes(2000000)
randdata = random.randbytes(20000000)
upfilestream = io.BytesIO(randdata)
downfilestream = io.BytesIO()
@@ -78,7 +91,7 @@ async def test_upload_download(env):
async with admin_session.file_explorer(agent.nodeid) as files:
r = await files.upload(upfilestream, f"{pwd}/test", timeout=5)
print("\ninfo files_upload: {}\n".format(r))
assert r["result"] == "success", "Upload failed"
assert r["result"] == True, "Upload failed"
assert r["size"] == len(randdata), "Uploaded wrong number of bytes"
for f in await files.ls(pwd, timeout=5):
if f["n"] == "test" and f["t"] == meshctrl.constants.FileType.FILE:
@@ -95,10 +108,23 @@ async def test_upload_download(env):
else:
raise Exception("Uploaded file not found")
r = await files.download(f"{pwd}/test", downfilestream, timeout=5)
start = time.perf_counter()
r = await files.download(f"{pwd}/test", downfilestream, skip_ws_attempt=True, timeout=5)
print("\ninfo files_download: {}\n".format(r))
assert r["result"] == "success", "Domnload failed"
assert r["result"] == True, "Download failed"
assert r["size"] == len(randdata), "Downloaded wrong number of bytes"
print(f"http download time: {time.perf_counter()-start}")
downfilestream.seek(0)
assert downfilestream.read() == randdata, "Got wrong data back"
downfilestream.seek(0)
start = time.perf_counter()
r = await files.download(f"{pwd}/test", downfilestream, skip_http_attempt=True, timeout=5)
print("\ninfo files_download: {}\n".format(r))
assert r["result"] == True, "Download failed"
assert r["size"] == len(randdata), "Downloaded wrong number of bytes"
print(f"ws download time: {time.perf_counter()-start}")
downfilestream.seek(0)
assert downfilestream.read() == randdata, "Got wrong data back"

View File

@@ -43,7 +43,7 @@ async def test_urlparse():
try:
async with meshctrl.Session("wss://localhost", user="unprivileged", password="Not a real password", ignore_ssl=True) as s:
pass
except* TimeoutError:
except* asyncio.TimeoutError:
#We're not running a server, so timeout is our expected outcome
pass
@@ -52,5 +52,4 @@ async def test_urlparse():
async with meshctrl.Session("https://localhost", user="unprivileged", password="Not a real password", ignore_ssl=True) as s:
pass
except* ValueError:
#We're not running a server, so timeout is our expected outcome
pass

View File

@@ -5,6 +5,8 @@ import meshctrl
import requests
import random
import io
import traceback
import time
thisdir = os.path.dirname(os.path.realpath(__file__))
async def test_admin(env):
@@ -29,7 +31,40 @@ async def test_admin(env):
assert len(no_sessions.keys()) == 0, "non-admin has admin access"
assert len(admin_users) == len(env.users.keys()), "Admin cannot see correct number of users"
assert len(admin_sessions) == 2, "Admin cannot see correct number of oser sessions"
assert len(admin_sessions) == 2, "Admin cannot see correct number of user sessions"
async def test_auto_reconnect(env):
async with meshctrl.Session(env.mcurl, user="admin", password=env.users["admin"], ignore_ssl=True, auto_reconnect=True) as admin_session:
env.restart_mesh()
await asyncio.sleep(10)
await admin_session.ping(timeout=10)
# As above, but with proxy
async with meshctrl.Session("wss://" + env.dockerurl, user="admin", password=env.users["admin"], ignore_ssl=True, auto_reconnect=True, proxy=env.proxyurl) as admin_session:
env.restart_mesh()
for i in range(3):
try:
await admin_session.ping(timeout=10)
except* Exception as e:
print("".join(traceback.format_exception(e)))
pass
else:
break
else:
raise Exception("Failed to reconnect")
env.restart_proxy()
for i in range(3):
try:
await admin_session.ping(timeout=10)
except* Exception as e:
print("".join(traceback.format_exception(e)))
pass
else:
break
else:
raise Exception("Failed to reconnect")
async def test_users(env):
@@ -47,6 +82,17 @@ async def test_users(env):
pass
else:
raise Exception("Connected with no password")
start = time.time()
try:
async with meshctrl.Session(env.mcurl, user="admin", password="The wrong password", ignore_ssl=True) as admin_session:
pass
except* meshctrl.exceptions.ServerError as eg:
assert str(eg.exceptions[0]) == "Invalid Auth" or eg.exceptions[0].message == "Invalid Auth", "Didn't get invalid auth message"
assert time.time() - start < 10, "Invalid auth wasn't raised until after timeout"
pass
else:
raise Exception("Connected with bad password")
async with meshctrl.Session(env.mcurl+"/", user="admin", password=env.users["admin"], ignore_ssl=True) as admin_session,\
meshctrl.Session(env.mcurl, user="privileged", password=env.users["privileged"], ignore_ssl=True) as privileged_session,\
meshctrl.Session(env.mcurl, user="unprivileged", password=env.users["unprivileged"], ignore_ssl=True) as unprivileged_session:
@@ -157,21 +203,24 @@ async def test_mesh_device(env):
assert r[0].description == "New description", "Description either failed to change, or was changed by a user without permission to do so"
with env.create_agent(mesh.short_meshid) as agent:
# There once was a bug that occurred whenever running run_commands with multiple meshes. We need to add devices to both meshes to be sure that bug is squashed.
with env.create_agent(mesh.short_meshid) as agent,\
env.create_agent(mesh.short_meshid) as agent2,\
env.create_agent(mesh2.short_meshid) as agent3:
# Test agent added to device group being propagated correctly
# Create agent isn't so good at waiting for the agent to show in the sessions. Give it a couple seconds to appear.
for i in range(3):
try:
r = await admin_session.list_devices(timeout=10)
print("\ninfo list_devices: {}\n".format(r))
assert len(r) == 1, "Incorrect number of agents connected"
assert len(r) == 3, "Incorrect number of agents connected"
except:
if i == 2:
raise
await asyncio.sleep(1)
else:
break
assert len(await privileged_session.list_devices(timeout=10)) == 1, "Incorrect number of agents connected"
assert len(await privileged_session.list_devices(timeout=10)) == 2, "Incorrect number of agents connected"
assert len(await unprivileged_session.list_devices(timeout=10)) == 0, "Unprivileged account has access to agent it should not"
r = await admin_session.list_devices(details=True, timeout=10)
@@ -183,6 +232,9 @@ async def test_mesh_device(env):
r = await admin_session.list_devices(meshid=mesh.meshid, timeout=10)
print("\ninfo list_devices_meshid: {}\n".format(r))
r = await admin_session.device_info(agent.nodeid, timeout=10)
print("\ninfo admin_device_info: {}\n".format(r))
# Test editing device info propagating correctly
assert await admin_session.edit_device(agent.nodeid, name="new_name", description="New Description", tags="device", consent=meshctrl.constants.ConsentFlags.all, timeout=10), "Failed to edit device info"
@@ -191,9 +243,12 @@ async def test_mesh_device(env):
assert await admin_session.edit_device(agent.nodeid, consent=meshctrl.constants.ConsentFlags.none, timeout=10), "Failed to edit device info"
# Test run_commands
r = await admin_session.run_command(agent.nodeid, "ls", timeout=10)
r = await admin_session.run_command([agent.nodeid, agent2.nodeid], "ls", timeout=10)
print("\ninfo run_command: {}\n".format(r))
assert "meshagent" in r[agent.nodeid]["result"], "ls gave incorrect data"
assert "meshagent" in r[agent2.nodeid]["result"], "ls gave incorrect data"
assert "Run commands completed." not in r[agent.nodeid]["result"], "Didn't parse run command ending correctly"
assert "Run commands completed." not in r[agent2.nodeid]["result"], "Didn't parse run command ending correctly"
assert "meshagent" in (await privileged_session.run_command(agent.nodeid, "ls", timeout=10))[agent.nodeid]["result"], "ls gave incorrect data"
# Test run commands with individual device permissions
@@ -222,7 +277,7 @@ async def test_mesh_device(env):
# Test getting individual device info
r = await unprivileged_session.device_info(agent.nodeid, timeout=10)
print("\ninfo device_info: {}\n".format(r))
print("\ninfo unprivileged_device_info: {}\n".format(r))
# This device info includes the mesh ID of the device, even though the user doesn't have access to that mesh. That's odd.
# assert r.meshid is None, "Individual device is exposing its meshid"
@@ -248,21 +303,21 @@ async def test_mesh_device(env):
assert await admin_session.move_to_device_group([agent.nodeid], mesh.name, isname=True, timeout=5), "Failed to move mesh to new device group by name"
# For now, this expects no response. If we ever figure out why the server isn't sending console information te us when it should, fix this.
# For now, this expe namects no response. If we ever figure out why the server isn't sending console information te us when it should, fix this.
# assert "meshagent" in (await unprivileged_session.run_command(agent.nodeid, "ls", timeout=10))[agent.nodeid]["result"], "ls gave incorrect data"
try:
await unprivileged_session.run_command(agent.nodeid, "ls", timeout=10)
except:
raise Exception("Failed to run command on device after it was moved to a new mesh while having individual device permissions")
r = await admin_session.remove_users_from_device_group((await privileged_session.user_info())["_id"], mesh.meshid, timeout=10)
print("\ninfo remove_users_from_device_group: {}\n".format(r))
assert (await admin_session.remove_users_from_device(agent.nodeid, (await unprivileged_session.user_info())["_id"], timeout=10)), "Failed to remove user from device"
r = await admin_session.remove_users_from_device_group((await privileged_session.user_info())["_id"], mesh.meshid, timeout=10)
print("\ninfo remove_users_from_device_group: {}\n".format(r))
assert (r[(await privileged_session.user_info())["_id"]]["success"]), "Failed to remove user from device group"
assert (await admin_session.remove_users_from_device(agent.nodeid, (await unprivileged_session.user_info())["_id"], timeout=10)), "Failed to remove user from device"
assert (r[(await privileged_session.user_info())["_id"]]["success"]), "Failed to remove user from devcie group"
assert (await admin_session.remove_device_group(mesh.meshid, timeout=10)), "Failed to remove device group"
assert (await admin_session.remove_device_group(mesh2.name, isname=True, timeout=10)), "Failed to remove device group"
assert (await admin_session.remove_device_group(mesh2.name, isname=True, timeout=10)), "Failed to remove device group by name"
assert not (await admin_session.add_users_to_device_group((await privileged_session.user_info())["_id"], mesh.meshid, rights=meshctrl.constants.MeshRights.fullrights, timeout=5))[(await privileged_session.user_info())["_id"]]["success"], "Added user to device group which doesn't exist?"
async def test_user_groups(env):
@@ -378,7 +433,7 @@ async def test_session_files(env):
break
pwd = (await admin_session.run_command(agent.nodeid, "pwd", timeout=10))[agent.nodeid]["result"].strip()
randdata = random.randbytes(2000000)
randdata = random.randbytes(20000000)
upfilestream = io.BytesIO(randdata)
downfilestream = io.BytesIO()
os.makedirs(os.path.join(thisdir, "data"), exist_ok=True)
@@ -387,12 +442,12 @@ async def test_session_files(env):
r = await admin_session.upload(agent.nodeid, upfilestream, f"{pwd}/test", timeout=5)
print("\ninfo files_upload: {}\n".format(r))
assert r["result"] == "success", "Upload failed"
assert r["result"] == True, "Upload failed"
assert r["size"] == len(randdata), "Uploaded wrong number of bytes"
r = await admin_session.upload_file(agent.nodeid, os.path.join(thisdir, "data", "test"), f"{pwd}/test2", timeout=5)
print("\ninfo files_upload: {}\n".format(r))
assert r["result"] == "success", "Upload failed"
assert r["result"] == True, "Upload failed"
assert r["size"] == len(randdata), "Uploaded wrong number of bytes"
s = await admin_session.download(agent.nodeid, f"{pwd}/test", timeout=5)
@@ -408,7 +463,7 @@ async def test_session_files(env):
r = await admin_session.upload_file(agent.nodeid, os.path.join(thisdir, "data", "test"), f"{pwd}/test2", unique_file_tunnel=True, timeout=5)
assert r["result"] == "success", "Upload failed"
assert r["result"] == True, "Upload failed"
assert r["size"] == len(randdata), "Uploaded wrong number of bytes"
await admin_session.download_file(agent.nodeid, f"{pwd}/test2", os.path.join(thisdir, "data", "test"), unique_file_tunnel=True, timeout=5)