id
int64 0
458k
| file_name
stringlengths 4
119
| file_path
stringlengths 14
227
| content
stringlengths 24
9.96M
| size
int64 24
9.96M
| language
stringclasses 1
value | extension
stringclasses 14
values | total_lines
int64 1
219k
| avg_line_length
float64 2.52
4.63M
| max_line_length
int64 5
9.91M
| alphanum_fraction
float64 0
1
| repo_name
stringlengths 7
101
| repo_stars
int64 100
139k
| repo_forks
int64 0
26.4k
| repo_open_issues
int64 0
2.27k
| repo_license
stringclasses 12
values | repo_extraction_date
stringclasses 433
values |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
600 | ssh_gss.py | paramiko_paramiko/paramiko/ssh_gss.py | # Copyright (C) 2013-2014 science + computing ag
# Author: Sebastian Deiss <[email protected]>
#
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
This module provides GSS-API / SSPI authentication as defined in :rfc:`4462`.
.. note:: Credential delegation is not supported in server mode.
.. seealso:: :doc:`/api/kex_gss`
.. versionadded:: 1.15
"""
import struct
import os
import sys
#: Whether any GSS-API / SSPI backend could be imported on this system.
GSS_AUTH_AVAILABLE = True
#: A tuple of the exception types used by the underlying GSSAPI implementation.
GSS_EXCEPTIONS = ()
#: Name of the detected API backend: "MIT", "PYTHON-GSSAPI-NEW", "SSPI",
#: or None when no backend is available.
_API = None
try:
    import gssapi

    if hasattr(gssapi, "__title__") and gssapi.__title__ == "python-gssapi":
        # old, unmaintained python-gssapi package
        # (keep the "MIT" identifier for backwards compatibility)
        _API = "MIT"  # keep this for compatibility
        GSS_EXCEPTIONS = (gssapi.GSSException,)
    else:
        # currently maintained "gssapi" package
        _API = "PYTHON-GSSAPI-NEW"
        GSS_EXCEPTIONS = (
            gssapi.exceptions.GeneralError,
            gssapi.raw.misc.GSSError,
        )
# OSError is caught too: importing gssapi can fail at the shared-library
# level, not only at the Python level.
except (ImportError, OSError):
    try:
        # Fall back to Microsoft SSPI (pywin32) on Windows.
        import pywintypes
        import sspicon
        import sspi

        _API = "SSPI"
        GSS_EXCEPTIONS = (pywintypes.error,)
    except ImportError:
        GSS_AUTH_AVAILABLE = False
        _API = None
from paramiko.common import MSG_USERAUTH_REQUEST
from paramiko.ssh_exception import SSHException
from paramiko._version import __version_info__
def GSSAuth(auth_method, gss_deleg_creds=True):
    """
    Provide SSH2 GSS-API / SSPI authentication.

    :param str auth_method: The name of the SSH authentication mechanism
                            (gssapi-with-mic or gss-keyex)
    :param bool gss_deleg_creds: Delegate client credentials or not.
                                 We delegate credentials by default.
    :return: Either an `._SSH_GSSAPI_OLD` or `._SSH_GSSAPI_NEW` (Unix)
             object or an `._SSH_SSPI` (Windows) object
    :rtype: object
    :raises: ``ImportError`` -- If no GSS-API / SSPI module could be
             imported.

    :see: `RFC 4462 <http://www.ietf.org/rfc/rfc4462.txt>`_
    :note: Checks which API was detected at import time and instantiates
           the matching backend: `._SSH_GSSAPI_OLD` (MIT GSSAPI via the
           old python-gssapi package), `._SSH_GSSAPI_NEW` (MIT GSSAPI via
           the maintained gssapi package) or `._SSH_SSPI` (MS SSPI,
           Windows only).
    """
    # Pick the backend class matching the module detected at import time;
    # SSPI additionally requires that we are actually running on Windows.
    if _API == "MIT":
        backend = _SSH_GSSAPI_OLD
    elif _API == "PYTHON-GSSAPI-NEW":
        backend = _SSH_GSSAPI_NEW
    elif _API == "SSPI" and os.name == "nt":
        backend = _SSH_SSPI
    else:
        raise ImportError("Unable to import a GSS-API / SSPI module!")
    return backend(auth_method, gss_deleg_creds)
class _SSH_GSSAuth:
"""
Contains the shared variables and methods of `._SSH_GSSAPI_OLD`,
`._SSH_GSSAPI_NEW` and `._SSH_SSPI`.
"""
def __init__(self, auth_method, gss_deleg_creds):
"""
:param str auth_method: The name of the SSH authentication mechanism
(gssapi-with-mic or gss-keyex)
:param bool gss_deleg_creds: Delegate client credentials or not
"""
self._auth_method = auth_method
self._gss_deleg_creds = gss_deleg_creds
self._gss_host = None
self._username = None
self._session_id = None
self._service = "ssh-connection"
"""
OpenSSH supports Kerberos V5 mechanism only for GSS-API authentication,
so we also support the krb5 mechanism only.
"""
self._krb5_mech = "1.2.840.113554.1.2.2"
# client mode
self._gss_ctxt = None
self._gss_ctxt_status = False
# server mode
self._gss_srv_ctxt = None
self._gss_srv_ctxt_status = False
self.cc_file = None
def set_service(self, service):
"""
This is just a setter to use a non default service.
I added this method, because RFC 4462 doesn't specify "ssh-connection"
as the only service value.
:param str service: The desired SSH service
"""
if service.find("ssh-"):
self._service = service
def set_username(self, username):
"""
Setter for C{username}. If GSS-API Key Exchange is performed, the
username is not set by C{ssh_init_sec_context}.
:param str username: The name of the user who attempts to login
"""
self._username = username
def ssh_gss_oids(self, mode="client"):
"""
This method returns a single OID, because we only support the
Kerberos V5 mechanism.
:param str mode: Client for client mode and server for server mode
:return: A byte sequence containing the number of supported
OIDs, the length of the OID and the actual OID encoded with
DER
:note: In server mode we just return the OID length and the DER encoded
OID.
"""
from pyasn1.type.univ import ObjectIdentifier
from pyasn1.codec.der import encoder
OIDs = self._make_uint32(1)
krb5_OID = encoder.encode(ObjectIdentifier(self._krb5_mech))
OID_len = self._make_uint32(len(krb5_OID))
if mode == "server":
return OID_len + krb5_OID
return OIDs + OID_len + krb5_OID
def ssh_check_mech(self, desired_mech):
"""
Check if the given OID is the Kerberos V5 OID (server mode).
:param str desired_mech: The desired GSS-API mechanism of the client
:return: ``True`` if the given OID is supported, otherwise C{False}
"""
from pyasn1.codec.der import decoder
mech, __ = decoder.decode(desired_mech)
if mech.__str__() != self._krb5_mech:
return False
return True
# Internals
# -------------------------------------------------------------------------
def _make_uint32(self, integer):
"""
Create a 32 bit unsigned integer (The byte sequence of an integer).
:param int integer: The integer value to convert
:return: The byte sequence of an 32 bit integer
"""
return struct.pack("!I", integer)
def _ssh_build_mic(self, session_id, username, service, auth_method):
"""
Create the SSH2 MIC filed for gssapi-with-mic.
:param str session_id: The SSH session ID
:param str username: The name of the user who attempts to login
:param str service: The requested SSH service
:param str auth_method: The requested SSH authentication mechanism
:return: The MIC as defined in RFC 4462. The contents of the
MIC field are:
string session_identifier,
byte SSH_MSG_USERAUTH_REQUEST,
string user-name,
string service (ssh-connection),
string authentication-method
(gssapi-with-mic or gssapi-keyex)
"""
mic = self._make_uint32(len(session_id))
mic += session_id
mic += struct.pack("B", MSG_USERAUTH_REQUEST)
mic += self._make_uint32(len(username))
mic += username.encode()
mic += self._make_uint32(len(service))
mic += service.encode()
mic += self._make_uint32(len(auth_method))
mic += auth_method.encode()
return mic
class _SSH_GSSAPI_OLD(_SSH_GSSAuth):
    """
    Implementation of the GSS-API MIT Kerberos Authentication for SSH2,
    using the older (unmaintained) python-gssapi package.

    :see: `.GSSAuth`
    """

    def __init__(self, auth_method, gss_deleg_creds):
        """
        :param str auth_method: The name of the SSH authentication mechanism
                                (gssapi-with-mic or gss-keyex)
        :param bool gss_deleg_creds: Delegate client credentials or not
        """
        _SSH_GSSAuth.__init__(self, auth_method, gss_deleg_creds)

        # Request credential delegation only when the caller asked for it;
        # the other three flags are always requested.
        if self._gss_deleg_creds:
            self._gss_flags = (
                gssapi.C_PROT_READY_FLAG,
                gssapi.C_INTEG_FLAG,
                gssapi.C_MUTUAL_FLAG,
                gssapi.C_DELEG_FLAG,
            )
        else:
            self._gss_flags = (
                gssapi.C_PROT_READY_FLAG,
                gssapi.C_INTEG_FLAG,
                gssapi.C_MUTUAL_FLAG,
            )

    def ssh_init_sec_context(
        self, target, desired_mech=None, username=None, recv_token=None
    ):
        """
        Initialize a GSS-API context.

        :param str username: The name of the user who attempts to login
        :param str target: The hostname of the target to connect to
        :param str desired_mech: The negotiated GSS-API mechanism
                                 ("pseudo negotiated" mechanism, because we
                                 support just the krb5 mechanism :-))
        :param str recv_token: The GSS-API token received from the Server
        :raises:
            `.SSHException` -- Is raised if the desired mechanism of the client
            is not supported
        :return: A ``String`` if the GSS-API has returned a token or
            ``None`` if no token was returned
        """
        from pyasn1.codec.der import decoder

        self._username = username
        self._gss_host = target
        targ_name = gssapi.Name(
            "host@" + self._gss_host, gssapi.C_NT_HOSTBASED_SERVICE
        )
        # Context object is used only as a container for the requested flags.
        ctx = gssapi.Context()
        ctx.flags = self._gss_flags
        # Either branch resolves to the krb5 OID; a non-krb5 mechanism is
        # rejected outright.
        if desired_mech is None:
            krb5_mech = gssapi.OID.mech_from_string(self._krb5_mech)
        else:
            mech, __ = decoder.decode(desired_mech)
            if mech.__str__() != self._krb5_mech:
                raise SSHException("Unsupported mechanism OID.")
            else:
                krb5_mech = gssapi.OID.mech_from_string(self._krb5_mech)
        token = None
        try:
            if recv_token is None:
                # First call: create the context and step with no input token.
                self._gss_ctxt = gssapi.InitContext(
                    peer_name=targ_name,
                    mech_type=krb5_mech,
                    req_flags=ctx.flags,
                )
                token = self._gss_ctxt.step(token)
            else:
                # Subsequent call: feed the server's token into the context.
                token = self._gss_ctxt.step(recv_token)
        except gssapi.GSSException:
            # Re-raise with the target host appended for easier diagnosis.
            message = "{} Target: {}".format(sys.exc_info()[1], self._gss_host)
            raise gssapi.GSSException(message)
        self._gss_ctxt_status = self._gss_ctxt.established
        return token

    def ssh_get_mic(self, session_id, gss_kex=False):
        """
        Create the MIC token for a SSH2 message.

        :param str session_id: The SSH session ID
        :param bool gss_kex: Generate the MIC for GSS-API Key Exchange or not
        :return: gssapi-with-mic:
                 Returns the MIC token from GSS-API for the message we created
                 with ``_ssh_build_mic``.
                 gssapi-keyex:
                 Returns the MIC token from GSS-API with the SSH session ID as
                 message.
        """
        self._session_id = session_id
        if not gss_kex:
            mic_field = self._ssh_build_mic(
                self._session_id,
                self._username,
                self._service,
                self._auth_method,
            )
            mic_token = self._gss_ctxt.get_mic(mic_field)
        else:
            # for key exchange with gssapi-keyex:
            # sign the raw session ID with the server context
            mic_token = self._gss_srv_ctxt.get_mic(self._session_id)
        return mic_token

    def ssh_accept_sec_context(self, hostname, recv_token, username=None):
        """
        Accept a GSS-API context (server mode).

        :param str hostname: The servers hostname
        :param str username: The name of the user who attempts to login
        :param str recv_token: The GSS-API Token received from the server,
                               if it's not the initial call.
        :return: A ``String`` if the GSS-API has returned a token or ``None``
                 if no token was returned
        """
        # hostname and username are not required for GSSAPI, but for SSPI;
        # they are stored anyway to keep the backends interchangeable.
        self._gss_host = hostname
        self._username = username
        if self._gss_srv_ctxt is None:
            self._gss_srv_ctxt = gssapi.AcceptContext()
        token = self._gss_srv_ctxt.step(recv_token)
        self._gss_srv_ctxt_status = self._gss_srv_ctxt.established
        return token

    def ssh_check_mic(self, mic_token, session_id, username=None):
        """
        Verify the MIC token for a SSH2 message.

        :param str mic_token: The MIC token received from the client
        :param str session_id: The SSH session ID
        :param str username: The name of the user who attempts to login
        :return: None if the MIC check was successful
        :raises: ``gssapi.GSSException`` -- if the MIC check failed
        """
        self._session_id = session_id
        self._username = username
        # A username distinguishes the server-side (gssapi-with-mic) check
        # from the client-side (gssapi-keyex) check.
        if self._username is not None:
            # server mode
            mic_field = self._ssh_build_mic(
                self._session_id,
                self._username,
                self._service,
                self._auth_method,
            )
            self._gss_srv_ctxt.verify_mic(mic_field, mic_token)
        else:
            # for key exchange with gssapi-keyex
            # client mode
            self._gss_ctxt.verify_mic(self._session_id, mic_token)

    @property
    def credentials_delegated(self):
        """
        Checks if credentials are delegated (server mode).

        :return: ``True`` if credentials are delegated, otherwise ``False``
        """
        if self._gss_srv_ctxt.delegated_cred is not None:
            return True
        return False

    def save_client_creds(self, client_token):
        """
        Save the Client token in a file. This is used by the SSH server
        to store the client credentials if credentials are delegated
        (server mode).

        :param str client_token: The GSS-API token received from the client
        :raises:
            ``NotImplementedError`` -- Credential delegation is currently not
            supported in server mode
        """
        raise NotImplementedError
if __version_info__ < (2, 5):
    # Provide the pre-2.5 class name for strict backward compatibility;
    # callers on older paramiko referenced ``_SSH_GSSAPI`` directly.
    _SSH_GSSAPI = _SSH_GSSAPI_OLD
class _SSH_GSSAPI_NEW(_SSH_GSSAuth):
    """
    Implementation of the GSS-API MIT Kerberos Authentication for SSH2,
    using the newer, currently maintained gssapi package.

    :see: `.GSSAuth`
    """

    def __init__(self, auth_method, gss_deleg_creds):
        """
        :param str auth_method: The name of the SSH authentication mechanism
                                (gssapi-with-mic or gss-keyex)
        :param bool gss_deleg_creds: Delegate client credentials or not
        """
        _SSH_GSSAuth.__init__(self, auth_method, gss_deleg_creds)

        # Same flag sets as `._SSH_GSSAPI_OLD`, spelled with the new
        # package's RequirementFlag enum.
        if self._gss_deleg_creds:
            self._gss_flags = (
                gssapi.RequirementFlag.protection_ready,
                gssapi.RequirementFlag.integrity,
                gssapi.RequirementFlag.mutual_authentication,
                gssapi.RequirementFlag.delegate_to_peer,
            )
        else:
            self._gss_flags = (
                gssapi.RequirementFlag.protection_ready,
                gssapi.RequirementFlag.integrity,
                gssapi.RequirementFlag.mutual_authentication,
            )

    def ssh_init_sec_context(
        self, target, desired_mech=None, username=None, recv_token=None
    ):
        """
        Initialize a GSS-API context.

        :param str username: The name of the user who attempts to login
        :param str target: The hostname of the target to connect to
        :param str desired_mech: The negotiated GSS-API mechanism
                                 ("pseudo negotiated" mechanism, because we
                                 support just the krb5 mechanism :-))
        :param str recv_token: The GSS-API token received from the Server
        :raises: `.SSHException` -- Is raised if the desired mechanism of the
                 client is not supported
        :raises: ``gssapi.exceptions.GSSError`` if there is an error signaled
                 by the GSS-API implementation
        :return: A ``String`` if the GSS-API has returned a token or ``None``
                 if no token was returned
        """
        from pyasn1.codec.der import decoder

        self._username = username
        self._gss_host = target
        targ_name = gssapi.Name(
            "host@" + self._gss_host,
            name_type=gssapi.NameType.hostbased_service,
        )
        # Only the krb5 mechanism is accepted; anything else is rejected.
        if desired_mech is not None:
            mech, __ = decoder.decode(desired_mech)
            if mech.__str__() != self._krb5_mech:
                raise SSHException("Unsupported mechanism OID.")
        krb5_mech = gssapi.MechType.kerberos
        token = None
        if recv_token is None:
            # First call: create the initiator context and step with no
            # input token.
            self._gss_ctxt = gssapi.SecurityContext(
                name=targ_name,
                flags=self._gss_flags,
                mech=krb5_mech,
                usage="initiate",
            )
            token = self._gss_ctxt.step(token)
        else:
            # Subsequent call: feed the server's token into the context.
            token = self._gss_ctxt.step(recv_token)
        self._gss_ctxt_status = self._gss_ctxt.complete
        return token

    def ssh_get_mic(self, session_id, gss_kex=False):
        """
        Create the MIC token for a SSH2 message.

        :param str session_id: The SSH session ID
        :param bool gss_kex: Generate the MIC for GSS-API Key Exchange or not
        :return: gssapi-with-mic:
                 Returns the MIC token from GSS-API for the message we created
                 with ``_ssh_build_mic``.
                 gssapi-keyex:
                 Returns the MIC token from GSS-API with the SSH session ID as
                 message.
        :rtype: str
        """
        self._session_id = session_id
        if not gss_kex:
            mic_field = self._ssh_build_mic(
                self._session_id,
                self._username,
                self._service,
                self._auth_method,
            )
            # get_signature is the new package's equivalent of get_mic.
            mic_token = self._gss_ctxt.get_signature(mic_field)
        else:
            # for key exchange with gssapi-keyex
            mic_token = self._gss_srv_ctxt.get_signature(self._session_id)
        return mic_token

    def ssh_accept_sec_context(self, hostname, recv_token, username=None):
        """
        Accept a GSS-API context (server mode).

        :param str hostname: The servers hostname
        :param str username: The name of the user who attempts to login
        :param str recv_token: The GSS-API Token received from the server,
                               if it's not the initial call.
        :return: A ``String`` if the GSS-API has returned a token or ``None``
                 if no token was returned
        """
        # hostname and username are not required for GSSAPI, but for SSPI;
        # they are stored anyway to keep the backends interchangeable.
        self._gss_host = hostname
        self._username = username
        if self._gss_srv_ctxt is None:
            self._gss_srv_ctxt = gssapi.SecurityContext(usage="accept")
        token = self._gss_srv_ctxt.step(recv_token)
        self._gss_srv_ctxt_status = self._gss_srv_ctxt.complete
        return token

    def ssh_check_mic(self, mic_token, session_id, username=None):
        """
        Verify the MIC token for a SSH2 message.

        :param str mic_token: The MIC token received from the client
        :param str session_id: The SSH session ID
        :param str username: The name of the user who attempts to login
        :return: None if the MIC check was successful
        :raises: ``gssapi.exceptions.GSSError`` -- if the MIC check failed
        """
        self._session_id = session_id
        self._username = username
        # A username distinguishes the server-side (gssapi-with-mic) check
        # from the client-side (gssapi-keyex) check.
        if self._username is not None:
            # server mode
            mic_field = self._ssh_build_mic(
                self._session_id,
                self._username,
                self._service,
                self._auth_method,
            )
            self._gss_srv_ctxt.verify_signature(mic_field, mic_token)
        else:
            # for key exchange with gssapi-keyex
            # client mode
            self._gss_ctxt.verify_signature(self._session_id, mic_token)

    @property
    def credentials_delegated(self):
        """
        Checks if credentials are delegated (server mode).

        :return: ``True`` if credentials are delegated, otherwise ``False``
        :rtype: bool
        """
        # Note: attribute is ``delegated_creds`` here, vs ``delegated_cred``
        # in the old python-gssapi package.
        if self._gss_srv_ctxt.delegated_creds is not None:
            return True
        return False

    def save_client_creds(self, client_token):
        """
        Save the Client token in a file. This is used by the SSH server
        to store the client credentials if credentials are delegated
        (server mode).

        :param str client_token: The GSS-API token received from the client
        :raises: ``NotImplementedError`` -- Credential delegation is currently
                 not supported in server mode
        """
        raise NotImplementedError
class _SSH_SSPI(_SSH_GSSAuth):
    """
    Implementation of the Microsoft SSPI Kerberos Authentication for SSH2.

    :see: `.GSSAuth`
    """

    def __init__(self, auth_method, gss_deleg_creds):
        """
        :param str auth_method: The name of the SSH authentication mechanism
                                (gssapi-with-mic or gss-keyex)
        :param bool gss_deleg_creds: Delegate client credentials or not
        """
        _SSH_GSSAuth.__init__(self, auth_method, gss_deleg_creds)

        # SSPI flags are a bitmask (unlike the tuples used by the GSSAPI
        # backends); ISC_REQ_DELEGATE is OR-ed in only on request.
        if self._gss_deleg_creds:
            self._gss_flags = (
                sspicon.ISC_REQ_INTEGRITY
                | sspicon.ISC_REQ_MUTUAL_AUTH
                | sspicon.ISC_REQ_DELEGATE
            )
        else:
            self._gss_flags = (
                sspicon.ISC_REQ_INTEGRITY | sspicon.ISC_REQ_MUTUAL_AUTH
            )

    def ssh_init_sec_context(
        self, target, desired_mech=None, username=None, recv_token=None
    ):
        """
        Initialize a SSPI context.

        :param str username: The name of the user who attempts to login
        :param str target: The FQDN of the target to connect to
        :param str desired_mech: The negotiated SSPI mechanism
                                 ("pseudo negotiated" mechanism, because we
                                 support just the krb5 mechanism :-))
        :param recv_token: The SSPI token received from the Server
        :raises:
            `.SSHException` -- Is raised if the desired mechanism of the client
            is not supported
        :return: A ``String`` if the SSPI has returned a token or ``None`` if
                 no token was returned
        """
        from pyasn1.codec.der import decoder

        self._username = username
        self._gss_host = target
        error = 0
        # SSPI uses "host/<fqdn>" SPN syntax instead of GSSAPI's "host@".
        targ_name = "host/" + self._gss_host
        # Only the krb5 mechanism is accepted; anything else is rejected.
        if desired_mech is not None:
            mech, __ = decoder.decode(desired_mech)
            if mech.__str__() != self._krb5_mech:
                raise SSHException("Unsupported mechanism OID.")
        try:
            if recv_token is None:
                # First call: create the client-side security context.
                self._gss_ctxt = sspi.ClientAuth(
                    "Kerberos", scflags=self._gss_flags, targetspn=targ_name
                )
            error, token = self._gss_ctxt.authorize(recv_token)
            token = token[0].Buffer
        except pywintypes.error as e:
            # Append the target host for easier diagnosis, then re-raise.
            e.strerror += ", Target: {}".format(self._gss_host)
            raise
        if error == 0:
            # If the status is GSS_COMPLETE (error == 0) the context is fully
            # established and we can set _gss_ctxt_status to True.
            self._gss_ctxt_status = True
            # You won't get another token if the context is fully established,
            # so token is set to None instead of "".
            token = None
        return token

    def ssh_get_mic(self, session_id, gss_kex=False):
        """
        Create the MIC token for a SSH2 message.

        :param str session_id: The SSH session ID
        :param bool gss_kex: Generate the MIC for Key Exchange with SSPI or not
        :return: gssapi-with-mic:
                 Returns the MIC token from SSPI for the message we created
                 with ``_ssh_build_mic``.
                 gssapi-keyex:
                 Returns the MIC token from SSPI with the SSH session ID as
                 message.
        """
        self._session_id = session_id
        if not gss_kex:
            mic_field = self._ssh_build_mic(
                self._session_id,
                self._username,
                self._service,
                self._auth_method,
            )
            mic_token = self._gss_ctxt.sign(mic_field)
        else:
            # for key exchange with gssapi-keyex
            mic_token = self._gss_srv_ctxt.sign(self._session_id)
        return mic_token

    def ssh_accept_sec_context(self, hostname, username, recv_token):
        """
        Accept a SSPI context (server mode).

        :param str hostname: The servers FQDN
        :param str username: The name of the user who attempts to login
        :param str recv_token: The SSPI Token received from the server,
                               if it's not the initial call.
        :return: A ``String`` if the SSPI has returned a token or ``None`` if
                 no token was returned
        """
        self._gss_host = hostname
        self._username = username
        targ_name = "host/" + self._gss_host
        self._gss_srv_ctxt = sspi.ServerAuth("Kerberos", spn=targ_name)
        error, token = self._gss_srv_ctxt.authorize(recv_token)
        token = token[0].Buffer
        if error == 0:
            # error == 0 means the context is fully established; no further
            # token will be produced (see ssh_init_sec_context).
            self._gss_srv_ctxt_status = True
            token = None
        return token

    def ssh_check_mic(self, mic_token, session_id, username=None):
        """
        Verify the MIC token for a SSH2 message.

        :param str mic_token: The MIC token received from the client
        :param str session_id: The SSH session ID
        :param str username: The name of the user who attempts to login
        :return: None if the MIC check was successful
        :raises: ``sspi.error`` -- if the MIC check failed
        """
        self._session_id = session_id
        self._username = username
        # A username distinguishes the server-side (gssapi-with-mic) check
        # from the client-side (gssapi-keyex) check.
        if username is not None:
            # server mode
            mic_field = self._ssh_build_mic(
                self._session_id,
                self._username,
                self._service,
                self._auth_method,
            )
            # Verifies data and its signature.  If verification fails, an
            # sspi.error will be raised.
            self._gss_srv_ctxt.verify(mic_field, mic_token)
        else:
            # for key exchange with gssapi-keyex
            # client mode
            # Verifies data and its signature.  If verification fails, an
            # sspi.error will be raised.
            self._gss_ctxt.verify(self._session_id, mic_token)

    @property
    def credentials_delegated(self):
        """
        Checks if credentials are delegated (server mode).

        :return: ``True`` if credentials are delegated, otherwise ``False``
        """
        # NOTE(review): the second operand of ``and`` looks suspicious --
        # ``self._gss_flags`` is a non-zero bitmask and therefore always
        # truthy, so ``(... or self._gss_flags)`` always succeeds and the
        # expression reduces to the ISC_REQ_DELEGATE check alone. The author
        # may have intended ``self._gss_ctxt_status`` there; confirm before
        # changing. Preserved as-is.
        return self._gss_flags & sspicon.ISC_REQ_DELEGATE and (
            self._gss_srv_ctxt_status or self._gss_flags
        )

    def save_client_creds(self, client_token):
        """
        Save the Client token in a file. This is used by the SSH server
        to store the client credentials if credentials are delegated
        (server mode).

        :param str client_token: The SSPI token received from the client
        :raises:
            ``NotImplementedError`` -- Credential delegation is currently not
            supported in server mode
        """
        raise NotImplementedError
| 28,887 | Python | .py | 681 | 31.839941 | 79 | 0.590451 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
601 | ed25519key.py | paramiko_paramiko/paramiko/ed25519key.py | # This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import bcrypt
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.ciphers import Cipher
import nacl.signing
from paramiko.message import Message
from paramiko.pkey import PKey, OPENSSH_AUTH_MAGIC, _unpad_openssh
from paramiko.util import b
from paramiko.ssh_exception import SSHException, PasswordRequiredException
class Ed25519Key(PKey):
    """
    Representation of an `Ed25519 <https://ed25519.cr.yp.to/>`_ key.

    .. note::
        Ed25519 key support was added to OpenSSH in version 6.5.

    .. versionadded:: 2.2
    .. versionchanged:: 2.3
        Added a ``file_obj`` parameter to match other key classes.
    """

    name = "ssh-ed25519"

    def __init__(
        self, msg=None, data=None, filename=None, password=None, file_obj=None
    ):
        self.public_blob = None
        verifying_key = signing_key = None
        # Exactly one source is used, in priority order: msg/data (public
        # key or cert blob), then filename, then file_obj (private key).
        if msg is None and data is not None:
            msg = Message(data)
        if msg is not None:
            self._check_type_and_load_cert(
                msg=msg,
                key_type=self.name,
                cert_type="[email protected]",
            )
            verifying_key = nacl.signing.VerifyKey(msg.get_binary())
        elif filename is not None:
            with open(filename, "r") as f:
                pkformat, data = self._read_private_key("OPENSSH", f)
        elif file_obj is not None:
            pkformat, data = self._read_private_key("OPENSSH", file_obj)
        if filename or file_obj:
            signing_key = self._parse_signing_key_data(data, password)
        if signing_key is None and verifying_key is None:
            raise ValueError("need a key")
        # Private key (may be None for public-only instances) and the
        # corresponding public verification key.
        self._signing_key = signing_key
        self._verifying_key = verifying_key

    def _parse_signing_key_data(self, data, password):
        """
        Parse an OpenSSH-format private key blob and return the signing key.

        :param bytes data: decoded body of an OPENSSH private key file
        :param password: passphrase for "bcrypt"-KDF encrypted keys, or None
        :raises SSHException: on any structural problem with the key
        :raises PasswordRequiredException: if encrypted and no password given
        """
        from paramiko.transport import Transport

        # We may eventually want this to be usable for other key types, as
        # OpenSSH moves to it, but for now this is just for Ed25519 keys.
        # This format is described here:
        # https://github.com/openssh/openssh-portable/blob/master/PROTOCOL.key
        # The description isn't totally complete, and I had to refer to the
        # source for a full implementation.
        message = Message(data)
        if message.get_bytes(len(OPENSSH_AUTH_MAGIC)) != OPENSSH_AUTH_MAGIC:
            raise SSHException("Invalid key")

        ciphername = message.get_text()
        kdfname = message.get_text()
        kdfoptions = message.get_binary()
        num_keys = message.get_int()

        if kdfname == "none":
            # kdfname of "none" must have an empty kdfoptions, the ciphername
            # must be "none"
            if kdfoptions or ciphername != "none":
                raise SSHException("Invalid key")
        elif kdfname == "bcrypt":
            if not password:
                raise PasswordRequiredException(
                    "Private key file is encrypted"
                )
            # kdfoptions itself is a nested message: salt + round count.
            kdf = Message(kdfoptions)
            bcrypt_salt = kdf.get_binary()
            bcrypt_rounds = kdf.get_int()
        else:
            raise SSHException("Invalid key")

        if ciphername != "none" and ciphername not in Transport._cipher_info:
            raise SSHException("Invalid key")

        # Collect the public-key blobs so they can be cross-checked against
        # the private section below.
        public_keys = []
        for _ in range(num_keys):
            pubkey = Message(message.get_binary())
            if pubkey.get_text() != self.name:
                raise SSHException("Invalid key")
            public_keys.append(pubkey.get_binary())

        private_ciphertext = message.get_binary()
        if ciphername == "none":
            private_data = private_ciphertext
        else:
            # Derive cipher key + IV from the passphrase via bcrypt's KDF,
            # then decrypt the private section.
            cipher = Transport._cipher_info[ciphername]
            key = bcrypt.kdf(
                password=b(password),
                salt=bcrypt_salt,
                desired_key_bytes=cipher["key-size"] + cipher["block-size"],
                rounds=bcrypt_rounds,
                # We can't control how many rounds are on disk, so no sense
                # warning about it.
                ignore_few_rounds=True,
            )
            decryptor = Cipher(
                cipher["class"](key[: cipher["key-size"]]),
                cipher["mode"](key[cipher["key-size"] :]),
                backend=default_backend(),
            ).decryptor()
            private_data = (
                decryptor.update(private_ciphertext) + decryptor.finalize()
            )

        message = Message(_unpad_openssh(private_data))
        # The private section starts with two identical "check" integers;
        # a mismatch means a wrong passphrase or corrupt data.
        if message.get_int() != message.get_int():
            raise SSHException("Invalid key")

        signing_keys = []
        for i in range(num_keys):
            if message.get_text() != self.name:
                raise SSHException("Invalid key")
            # A copy of the public key, again, ignore.
            public = message.get_binary()
            key_data = message.get_binary()
            # The second half of the key data is yet another copy of the public
            # key...
            signing_key = nacl.signing.SigningKey(key_data[:32])
            # Verify that all the public keys are the same...
            assert (
                signing_key.verify_key.encode()
                == public
                == public_keys[i]
                == key_data[32:]
            )
            signing_keys.append(signing_key)
            # Comment, ignore.
            message.get_binary()

        if len(signing_keys) != 1:
            raise SSHException("Invalid key")
        return signing_keys[0]

    def asbytes(self):
        # Serialize the public half (derived from the signing key when we
        # hold a private key) in SSH wire format.
        if self.can_sign():
            v = self._signing_key.verify_key
        else:
            v = self._verifying_key
        m = Message()
        m.add_string(self.name)
        m.add_string(v.encode())
        return m.asbytes()

    @property
    def _fields(self):
        # Tuple used by the base class for equality/hashing.
        if self.can_sign():
            v = self._signing_key.verify_key
        else:
            v = self._verifying_key
        return (self.get_name(), v)

    # TODO 4.0: remove
    def get_name(self):
        return self.name

    def get_bits(self):
        # Ed25519 keys have a fixed size.
        return 256

    def can_sign(self):
        # Only instances loaded from a private key can sign.
        return self._signing_key is not None

    def sign_ssh_data(self, data, algorithm=None):
        # ``algorithm`` is accepted for interface parity but unused here.
        m = Message()
        m.add_string(self.name)
        m.add_string(self._signing_key.sign(data).signature)
        return m

    def verify_ssh_sig(self, data, msg):
        if msg.get_text() != self.name:
            return False
        try:
            self._verifying_key.verify(data, msg.get_binary())
        except nacl.exceptions.BadSignatureError:
            return False
        else:
            return True
| 7,457 | Python | .py | 181 | 31.232044 | 79 | 0.600138 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
602 | kex_group16.py | paramiko_paramiko/paramiko/kex_group16.py | # Copyright (C) 2019 Edgar Sousa <https://github.com/edgsousa>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Standard SSH key exchange ("kex" if you wanna sound cool). Diffie-Hellman of
4096 bit key halves, using a known "p" prime and "g" generator.
"""
from paramiko.kex_group1 import KexGroup1
from hashlib import sha512
class KexGroup16SHA512(KexGroup1):
    """
    Diffie-Hellman key exchange over the 4096-bit "group 16" MODP group,
    hashed with SHA-512 (``diffie-hellman-group16-sha512``).

    Reuses `KexGroup1`'s exchange logic; only the prime, the generator and
    the hash algorithm differ.
    """

    # 4096-bit safe prime from RFC 3526, section 5:
    # http://tools.ietf.org/html/rfc3526#section-5
    P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6BF12FFA06D98A0864D87602733EC86A64521F2B18177B200CBBE117577A615D6C770988C0BAD946E208E24FA074E5AB3143DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D788719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA993B4EA988D8FDDC186FFB7DC90A6C08F4DF435C934063199FFFFFFFFFFFFFFFF  # noqa
    G = 2
    # Fix: ``name`` was previously assigned twice with the same value
    # (once before P/G and once after); the redundant duplicate is removed.
    name = "diffie-hellman-group16-sha512"
    hash_algo = sha512
| 2,288 | Python | .py | 30 | 74.3 | 1,042 | 0.869507 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
603 | ssh_exception.py | paramiko_paramiko/paramiko/ssh_exception.py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import socket
class SSHException(Exception):
    """
    Raised on failures in SSH2 protocol negotiation or on logic errors.
    """
class AuthenticationException(SSHException):
    """
    Raised when authentication fails for some reason. Retrying with other
    credentials may be possible; subclasses give more specific reasons.

    .. versionadded:: 1.6
    """
class PasswordRequiredException(AuthenticationException):
    """
    Raised when a private key file is encrypted and a password is needed to
    unlock it.
    """
class BadAuthenticationType(AuthenticationException):
    """
    Raised when an authentication type (like password) is used, but the
    server isn't allowing that type. (It may only allow public-key, for
    example.)

    .. versionadded:: 1.1
    """

    allowed_types = []

    # TODO 4.0: remove explanation kwarg
    def __init__(self, explanation, types):
        # TODO 4.0: remove this supercall unless it's actually required for
        # pickling (after fixing pickling)
        super().__init__(explanation, types)
        self.explanation = explanation
        self.allowed_types = types

    def __str__(self):
        return f"{self.explanation}; allowed types: {self.allowed_types!r}"
class PartialAuthentication(AuthenticationException):
    """
    Internal exception signalling that authentication only partially
    succeeded; ``allowed_types`` lists the methods that may continue it.
    """

    allowed_types = []

    def __init__(self, types):
        super().__init__(types)
        self.allowed_types = types

    def __str__(self):
        return f"Partial authentication; allowed types: {self.allowed_types!r}"
# TODO 4.0: stop inheriting from SSHException, move to auth.py
class UnableToAuthenticate(AuthenticationException):
    """Raised when authentication cannot be performed at all."""
    pass
class ChannelException(SSHException):
    """
    Raised when an attempt to open a new `.Channel` fails.

    :param int code: the error code returned by the server
    :param str text: the error text returned by the server

    .. versionadded:: 1.6
    """

    def __init__(self, code, text):
        super().__init__(code, text)
        self.code = code
        self.text = text

    def __str__(self):
        return f"ChannelException({self.code!r}, {self.text!r})"
class BadHostKeyException(SSHException):
    """
    The host key given by the SSH server did not match what we were expecting.

    :param str hostname: the hostname of the SSH server
    :param PKey got_key: the host key presented by the server
    :param PKey expected_key: the host key expected

    .. versionadded:: 1.6
    """

    def __init__(self, hostname, got_key, expected_key):
        super().__init__(hostname, got_key, expected_key)
        self.hostname = hostname
        self.key = got_key
        self.expected_key = expected_key

    def __str__(self):
        got = self.key.get_base64()
        expected = self.expected_key.get_base64()
        return (
            f"Host key for server '{self.hostname}' does not match: "
            f"got '{got}', expected '{expected}'"
        )
class IncompatiblePeer(SSHException):
    """
    A disagreement arose regarding an algorithm required for key exchange.

    Raised when the two sides of the connection cannot agree on one of the
    algorithm categories needed to complete kex.

    .. versionadded:: 2.9
    """

    # TODO 4.0: consider making this annotate w/ 1..N 'missing' algorithms,
    # either just the first one that would halt kex, or even updating the
    # Transport logic so we record /all/ that /could/ halt kex.
    # TODO: update docstrings where this may end up raised so they are more
    # specific.
    pass
class ProxyCommandFailure(SSHException):
    """
    The "ProxyCommand" found in the .ssh/config file returned an error.

    :param str command: The command line that is generating this exception.
    :param str error: The error captured from the proxy command output.
    """

    def __init__(self, command, error):
        super().__init__(command, error)
        self.command = command
        self.error = error

    def __str__(self):
        return (
            f'ProxyCommand("{self.command}") returned nonzero exit '
            f"status: {self.error}"
        )
class NoValidConnectionsError(socket.error):
    """
    Multiple connection attempts were made and no families succeeded.

    Wraps multiple "real" underlying connection errors, all representing
    failed connection attempts. Because these errors are not guaranteed to be
    of the same type (different errno, `socket.error` subclass, message,
    etc.), a single unified error message and a ``None`` errno are exposed so
    instances match normal handling of `socket.error` objects.

    The wrapped exceptions live in the ``errors`` attribute: a dict mapping
    address tuples (e.g. ``('127.0.0.1', 22)``) to the exception raised while
    trying to connect to that address.

    All errors handed to one instance are assumed to come from connecting to
    the same hostname + port (differing only in hostname resolution, e.g.
    IPv4 vs v6).

    .. versionadded:: 1.16
    """

    def __init__(self, errors):
        """
        :param dict errors:
            The errors dict to store, as described by class docstring.
        """
        addrs = sorted(errors.keys())
        hosts = [addr[0] for addr in addrs]
        leading, last = hosts[:-1], hosts[-1]
        port = addrs[0][1]
        if leading:
            msg = "Unable to connect to port {} on {} or {}".format(
                port, ", ".join(leading), last
            )
        else:
            msg = "Unable to connect to port {} on {}".format(port, last)
        # First arg is a stand-in for errno, kept None on purpose.
        super().__init__(None, msg)
        self.errors = errors

    def __reduce__(self):
        # Reconstruct from the errors dict when pickling.
        return (self.__class__, (self.errors,))
class CouldNotCanonicalize(SSHException):
    """
    Raised when hostname canonicalization fails & fallback is disabled.

    .. versionadded:: 2.7
    """
class ConfigParseError(SSHException):
    """
    A fatal error was encountered while parsing SSH config data.

    Typically the config file violated the ``ssh_config`` specification in a
    way requiring an immediate exit, such as breaking ``key = value`` syntax
    or misusing certain ``Match`` keywords.

    .. versionadded:: 2.7
    """
class MessageOrderError(SSHException):
    """
    Out-of-order protocol messages were received, violating "strict kex" mode.

    .. versionadded:: 3.4
    """
| 7,494 | Python | .py | 184 | 34.804348 | 88 | 0.679597 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
604 | primes.py | paramiko_paramiko/paramiko/primes.py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Utility functions for dealing with primes.
"""
import os
from paramiko import util
from paramiko.common import byte_mask
from paramiko.ssh_exception import SSHException
def _roll_random(n):
    """Return a uniformly random integer in ``[0, n-1]``."""
    bits = util.bit_length(n - 1)
    byte_count = (bits + 7) // 8
    hbyte_mask = pow(2, bits % 8) - 1
    # Rejection sampling: draw exactly enough random bytes to represent n-1,
    # mask off the unused high bits of the top byte, and retry until the
    # value fits. In the worst case (n-1 a power of 2) each draw succeeds
    # with probability slightly better than 1/2, so although termination
    # can't be *guaranteed*, the odds of looping long are infinitesimal.
    while True:
        raw = os.urandom(byte_count)
        if hbyte_mask > 0:
            raw = byte_mask(raw[0], hbyte_mask) + raw[1:]
        candidate = util.inflate_long(raw, 1)
        if candidate < n:
            return candidate
class ModulusPack:
    """
    Convenience object for holding the contents of the /etc/ssh/moduli file,
    on systems that have such a file.
    """

    def __init__(self):
        # pack is a hash of: bits -> [ (generator, modulus) ... ]
        self.pack = {}
        # (modulus, reason) pairs for entries we refused to keep
        self.discarded = []

    def _parse_modulus(self, line):
        """
        Parse a single line of an OpenSSH ``moduli`` file, recording the
        prime in ``self.pack`` (keyed by bit length) or, if it fails the
        screening below, in ``self.discarded`` with a reason.
        """
        (
            timestamp,
            mod_type,
            tests,
            tries,
            size,
            generator,
            modulus,
        ) = line.split()
        mod_type = int(mod_type)
        tests = int(tests)
        tries = int(tries)
        size = int(size)
        generator = int(generator)
        modulus = int(modulus, 16)
        # weed out primes that aren't at least:
        # type 2 (meets basic structural requirements)
        # test 4 (more than just a small-prime sieve)
        # tries < 100 if test & 4 (at least 100 tries of miller-rabin)
        if (
            mod_type < 2
            or tests < 4
            or (tests & 4 and tests < 8 and tries < 100)
        ):
            self.discarded.append(
                (modulus, "does not meet basic requirements")
            )
            return
        if generator == 0:
            generator = 2
        # there's a bug in the ssh "moduli" file (yeah, i know: shock! dismay!
        # call cnn!) where it understates the bit lengths of these primes by 1.
        # this is okay.
        bl = util.bit_length(modulus)
        if (bl != size) and (bl != size + 1):
            self.discarded.append(
                (modulus, "incorrectly reported bit length {}".format(size))
            )
            return
        if bl not in self.pack:
            self.pack[bl] = []
        self.pack[bl].append((generator, modulus))

    def read_file(self, filename):
        """
        Load and parse a ``moduli``-format file, skipping blank lines,
        comments, and malformed entries.

        :raises IOError: passed from any file operations that fail.
        """
        self.pack = {}
        with open(filename, "r") as f:
            for line in f:
                line = line.strip()
                if (len(line) == 0) or (line[0] == "#"):
                    continue
                try:
                    self._parse_modulus(line)
                # Was a bare `except:`, which also swallowed
                # KeyboardInterrupt/SystemExit; only skip real parse errors.
                except Exception:
                    continue

    def get_modulus(self, min, prefer, max):
        """
        Return a random ``(generator, modulus)`` pair whose bit size best
        satisfies the requested ``(min, prefer, max)`` range.

        (``min``/``max`` shadow builtins, but are kept for keyword-argument
        compatibility with existing callers.)

        :raises SSHException: if no moduli have been loaded.
        """
        bitsizes = sorted(self.pack.keys())
        if len(bitsizes) == 0:
            raise SSHException("no moduli available")
        good = -1
        # find nearest bitsize >= preferred
        for b in bitsizes:
            if (b >= prefer) and (b <= max) and (b < good or good == -1):
                good = b
        # if that failed, find greatest bitsize >= min
        if good == -1:
            for b in bitsizes:
                if (b >= min) and (b <= max) and (b > good):
                    good = b
        if good == -1:
            # their entire (min, max) range has no intersection with our range.
            # if their range is below ours, pick the smallest. otherwise pick
            # the largest. it'll be out of their range requirement either way,
            # but we'll be sending them the closest one we have.
            good = bitsizes[0]
            if min > good:
                good = bitsizes[-1]
        # now pick a random modulus of this bitsize
        n = _roll_random(len(self.pack[good]))
        return self.pack[good][n]
| 5,107 | Python | .py | 134 | 29.365672 | 79 | 0.578342 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
605 | auth_handler.py | paramiko_paramiko/paramiko/auth_handler.py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
`.AuthHandler`
"""
import weakref
import threading
import time
import re
from paramiko.common import (
cMSG_SERVICE_REQUEST,
cMSG_DISCONNECT,
DISCONNECT_SERVICE_NOT_AVAILABLE,
DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE,
cMSG_USERAUTH_REQUEST,
cMSG_SERVICE_ACCEPT,
DEBUG,
AUTH_SUCCESSFUL,
INFO,
cMSG_USERAUTH_SUCCESS,
cMSG_USERAUTH_FAILURE,
AUTH_PARTIALLY_SUCCESSFUL,
cMSG_USERAUTH_INFO_REQUEST,
WARNING,
AUTH_FAILED,
cMSG_USERAUTH_PK_OK,
cMSG_USERAUTH_INFO_RESPONSE,
MSG_SERVICE_REQUEST,
MSG_SERVICE_ACCEPT,
MSG_USERAUTH_REQUEST,
MSG_USERAUTH_SUCCESS,
MSG_USERAUTH_FAILURE,
MSG_USERAUTH_BANNER,
MSG_USERAUTH_INFO_REQUEST,
MSG_USERAUTH_INFO_RESPONSE,
cMSG_USERAUTH_GSSAPI_RESPONSE,
cMSG_USERAUTH_GSSAPI_TOKEN,
cMSG_USERAUTH_GSSAPI_MIC,
MSG_USERAUTH_GSSAPI_RESPONSE,
MSG_USERAUTH_GSSAPI_TOKEN,
MSG_USERAUTH_GSSAPI_ERROR,
MSG_USERAUTH_GSSAPI_ERRTOK,
MSG_USERAUTH_GSSAPI_MIC,
MSG_NAMES,
cMSG_USERAUTH_BANNER,
)
from paramiko.message import Message
from paramiko.util import b, u
from paramiko.ssh_exception import (
SSHException,
AuthenticationException,
BadAuthenticationType,
PartialAuthentication,
)
from paramiko.server import InteractiveQuery
from paramiko.ssh_gss import GSSAuth, GSS_EXCEPTIONS
class AuthHandler:
"""
Internal class to handle the mechanics of authentication.
"""
    def __init__(self, transport):
        # Weak proxy: avoids a reference cycle with the owning Transport.
        self.transport = weakref.proxy(transport)
        self.username = None  # client-side username being authenticated
        self.authenticated = False
        self.auth_event = None  # set when an auth round completes
        self.auth_method = ""  # current method name, e.g. "publickey"
        self.banner = None  # server banner, if one was received
        self.password = None
        self.private_key = None
        self.interactive_handler = None  # keyboard-interactive callback
        self.submethods = None
        # for server mode:
        self.auth_username = None  # username claimed by the client
        self.auth_fail_count = 0  # disconnect after too many failures
        # for GSSAPI
        self.gss_host = None
        self.gss_deleg_creds = True
    def _log(self, *args):
        # Delegate all logging to the owning transport's logger.
        return self.transport._log(*args)
    def is_authenticated(self):
        """Return True once authentication has completed successfully."""
        return self.authenticated
def get_username(self):
if self.transport.server_mode:
return self.auth_username
else:
return self.username
def auth_none(self, username, event):
self.transport.lock.acquire()
try:
self.auth_event = event
self.auth_method = "none"
self.username = username
self._request_auth()
finally:
self.transport.lock.release()
def auth_publickey(self, username, key, event):
self.transport.lock.acquire()
try:
self.auth_event = event
self.auth_method = "publickey"
self.username = username
self.private_key = key
self._request_auth()
finally:
self.transport.lock.release()
def auth_password(self, username, password, event):
self.transport.lock.acquire()
try:
self.auth_event = event
self.auth_method = "password"
self.username = username
self.password = password
self._request_auth()
finally:
self.transport.lock.release()
def auth_interactive(self, username, handler, event, submethods=""):
"""
response_list = handler(title, instructions, prompt_list)
"""
self.transport.lock.acquire()
try:
self.auth_event = event
self.auth_method = "keyboard-interactive"
self.username = username
self.interactive_handler = handler
self.submethods = submethods
self._request_auth()
finally:
self.transport.lock.release()
def auth_gssapi_with_mic(self, username, gss_host, gss_deleg_creds, event):
self.transport.lock.acquire()
try:
self.auth_event = event
self.auth_method = "gssapi-with-mic"
self.username = username
self.gss_host = gss_host
self.gss_deleg_creds = gss_deleg_creds
self._request_auth()
finally:
self.transport.lock.release()
def auth_gssapi_keyex(self, username, event):
self.transport.lock.acquire()
try:
self.auth_event = event
self.auth_method = "gssapi-keyex"
self.username = username
self._request_auth()
finally:
self.transport.lock.release()
    def abort(self):
        """Wake any thread blocked waiting on the current auth round."""
        if self.auth_event is not None:
            self.auth_event.set()
    # ...internals...
    def _request_auth(self):
        # Ask the server for the "ssh-userauth" service; the actual userauth
        # request is sent once the service is accepted (_parse_service_accept).
        m = Message()
        m.add_byte(cMSG_SERVICE_REQUEST)
        m.add_string("ssh-userauth")
        self.transport._send_message(m)
    def _disconnect_service_not_available(self):
        # Send SSH_MSG_DISCONNECT (service not available), then close.
        m = Message()
        m.add_byte(cMSG_DISCONNECT)
        m.add_int(DISCONNECT_SERVICE_NOT_AVAILABLE)
        m.add_string("Service not available")
        m.add_string("en")
        self.transport._send_message(m)
        self.transport.close()
    def _disconnect_no_more_auth(self):
        # Send SSH_MSG_DISCONNECT (no more auth methods), then close.
        m = Message()
        m.add_byte(cMSG_DISCONNECT)
        m.add_int(DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE)
        m.add_string("No more auth methods available")
        m.add_string("en")
        self.transport._send_message(m)
        self.transport.close()
def _get_key_type_and_bits(self, key):
"""
Given any key, return its type/algorithm & bits-to-sign.
Intended for input to or verification of, key signatures.
"""
# Use certificate contents, if available, plain pubkey otherwise
if key.public_blob:
return key.public_blob.key_type, key.public_blob.key_blob
else:
return key.get_name(), key
    def _get_session_blob(self, key, service, username, algorithm):
        """
        Build the byte blob that gets signed (client side) or verified
        (server side) for a publickey auth request: session id plus the
        userauth-request fields.
        """
        m = Message()
        m.add_string(self.transport.session_id)
        m.add_byte(cMSG_USERAUTH_REQUEST)
        m.add_string(username)
        m.add_string(service)
        m.add_string("publickey")
        m.add_boolean(True)
        _, bits = self._get_key_type_and_bits(key)
        m.add_string(algorithm)
        m.add_string(bits)
        return m.asbytes()
    def wait_for_response(self, event):
        """
        Block until ``event`` fires, the transport dies, or the configured
        auth timeout elapses.

        :return: list of allowed auth types on partial success, else ``[]``.
        :raises AuthenticationException:
            on failure, timeout, or transport shutdown/EOF.
        """
        max_ts = None
        if self.transport.auth_timeout is not None:
            max_ts = time.time() + self.transport.auth_timeout
        while True:
            event.wait(0.1)
            if not self.transport.is_active():
                e = self.transport.get_exception()
                if (e is None) or issubclass(e.__class__, EOFError):
                    e = AuthenticationException(
                        "Authentication failed: transport shut down or saw EOF"
                    )
                raise e
            if event.is_set():
                break
            if max_ts is not None and max_ts <= time.time():
                raise AuthenticationException("Authentication timeout.")
        if not self.is_authenticated():
            e = self.transport.get_exception()
            if e is None:
                e = AuthenticationException("Authentication failed.")
            # this is horrible. Python Exception isn't yet descended from
            # object, so type(e) won't work. :(
            # TODO 4.0: lol. just lmao.
            if issubclass(e.__class__, PartialAuthentication):
                return e.allowed_types
            raise e
        return []
    def _parse_service_request(self, m):
        """
        (Server mode) Handle MSG_SERVICE_REQUEST: accept "ssh-userauth"
        (then send the configured banner, if any); disconnect on anything
        else.
        """
        service = m.get_text()
        if self.transport.server_mode and (service == "ssh-userauth"):
            # accepted
            m = Message()
            m.add_byte(cMSG_SERVICE_ACCEPT)
            m.add_string(service)
            self.transport._send_message(m)
            banner, language = self.transport.server_object.get_banner()
            if banner:
                m = Message()
                m.add_byte(cMSG_USERAUTH_BANNER)
                m.add_string(banner)
                m.add_string(language)
                self.transport._send_message(m)
            return
        # dunno this one
        self._disconnect_service_not_available()
    def _generate_key_from_request(self, algorithm, keyblob):
        # For use in server mode.
        # Reconstruct a key object from the client's claimed algorithm and
        # key blob; returns None (after logging) if the algorithm - modulo
        # any "-cert-v01" suffix - isn't in our allowed pubkey list.
        options = self.transport.preferred_pubkeys
        if algorithm.replace("[email protected]", "") not in options:
            err = (
                "Auth rejected: pubkey algorithm '{}' unsupported or disabled"
            )
            self._log(INFO, err.format(algorithm))
            return None
        return self.transport._key_info[algorithm](Message(keyblob))
def _choose_fallback_pubkey_algorithm(self, key_type, my_algos):
# Fallback: first one in our (possibly tweaked by caller) list
pubkey_algo = my_algos[0]
msg = "Server did not send a server-sig-algs list; defaulting to our first preferred algo ({!r})" # noqa
self._log(DEBUG, msg.format(pubkey_algo))
self._log(
DEBUG,
"NOTE: you may use the 'disabled_algorithms' SSHClient/Transport init kwarg to disable that or other algorithms if your server does not support them!", # noqa
)
return pubkey_algo
    def _finalize_pubkey_algorithm(self, key_type):
        """
        Select the signature algorithm to use for the given key type.

        Non-RSA types pass through unchanged. RSA types are negotiated
        against the server's ``server-sig-algs`` extension when present,
        falling back to our first preferred RSA algorithm otherwise; the
        choice is recorded on ``transport._agreed_pubkey_algorithm``.

        :raises SSHException: if all RSA algorithms were disabled locally.
        :raises AuthenticationException: if no common RSA algorithm exists.
        """
        # Short-circuit for non-RSA keys
        if "rsa" not in key_type:
            return key_type
        self._log(
            DEBUG,
            "Finalizing pubkey algorithm for key of type {!r}".format(
                key_type
            ),
        )
        # NOTE re #2017: When the key is an RSA cert and the remote server is
        # OpenSSH 7.7 or earlier, always use [email protected].
        # Those versions of the server won't support rsa-sha2 family sig algos
        # for certs specifically, and in tandem with various server bugs
        # regarding server-sig-algs, it's impossible to fit this into the rest
        # of the logic here.
        if key_type.endswith("[email protected]") and re.search(
            r"-OpenSSH_(?:[1-6]|7\.[0-7])", self.transport.remote_version
        ):
            pubkey_algo = "[email protected]"
            self.transport._agreed_pubkey_algorithm = pubkey_algo
            self._log(DEBUG, "OpenSSH<7.8 + RSA cert = forcing ssh-rsa!")
            self._log(
                DEBUG, "Agreed upon {!r} pubkey algorithm".format(pubkey_algo)
            )
            return pubkey_algo
        # Normal attempts to handshake follow from here.
        # Only consider RSA algos from our list, lest we agree on another!
        my_algos = [x for x in self.transport.preferred_pubkeys if "rsa" in x]
        self._log(DEBUG, "Our pubkey algorithm list: {}".format(my_algos))
        # Short-circuit negatively if user disabled all RSA algos (heh)
        if not my_algos:
            raise SSHException(
                "An RSA key was specified, but no RSA pubkey algorithms are configured!"  # noqa
            )
        # Check for server-sig-algs if supported & sent
        server_algo_str = u(
            self.transport.server_extensions.get("server-sig-algs", b(""))
        )
        pubkey_algo = None
        # Prefer to match against server-sig-algs
        if server_algo_str:
            server_algos = server_algo_str.split(",")
            self._log(
                DEBUG, "Server-side algorithm list: {}".format(server_algos)
            )
            # Only use algos from our list that the server likes, in our own
            # preference order. (NOTE: purposefully using same style as in
            # Transport...expect to refactor later)
            agreement = list(filter(server_algos.__contains__, my_algos))
            if agreement:
                pubkey_algo = agreement[0]
                self._log(
                    DEBUG,
                    "Agreed upon {!r} pubkey algorithm".format(pubkey_algo),
                )
            else:
                self._log(DEBUG, "No common pubkey algorithms exist! Dying.")
                # TODO: MAY want to use IncompatiblePeer again here but that's
                # technically for initial key exchange, not pubkey auth.
                err = "Unable to agree on a pubkey algorithm for signing a {!r} key!"  # noqa
                raise AuthenticationException(err.format(key_type))
        # Fallback to something based purely on the key & our configuration
        else:
            pubkey_algo = self._choose_fallback_pubkey_algorithm(
                key_type, my_algos
            )
        if key_type.endswith("[email protected]"):
            pubkey_algo += "[email protected]"
        self.transport._agreed_pubkey_algorithm = pubkey_algo
        return pubkey_algo
def _parse_service_accept(self, m):
service = m.get_text()
if service == "ssh-userauth":
self._log(DEBUG, "userauth is OK")
m = Message()
m.add_byte(cMSG_USERAUTH_REQUEST)
m.add_string(self.username)
m.add_string("ssh-connection")
m.add_string(self.auth_method)
if self.auth_method == "password":
m.add_boolean(False)
password = b(self.password)
m.add_string(password)
elif self.auth_method == "publickey":
m.add_boolean(True)
key_type, bits = self._get_key_type_and_bits(self.private_key)
algorithm = self._finalize_pubkey_algorithm(key_type)
m.add_string(algorithm)
m.add_string(bits)
blob = self._get_session_blob(
self.private_key,
"ssh-connection",
self.username,
algorithm,
)
sig = self.private_key.sign_ssh_data(blob, algorithm)
m.add_string(sig)
elif self.auth_method == "keyboard-interactive":
m.add_string("")
m.add_string(self.submethods)
elif self.auth_method == "gssapi-with-mic":
sshgss = GSSAuth(self.auth_method, self.gss_deleg_creds)
m.add_bytes(sshgss.ssh_gss_oids())
# send the supported GSSAPI OIDs to the server
self.transport._send_message(m)
ptype, m = self.transport.packetizer.read_message()
if ptype == MSG_USERAUTH_BANNER:
self._parse_userauth_banner(m)
ptype, m = self.transport.packetizer.read_message()
if ptype == MSG_USERAUTH_GSSAPI_RESPONSE:
# Read the mechanism selected by the server. We send just
# the Kerberos V5 OID, so the server can only respond with
# this OID.
mech = m.get_string()
m = Message()
m.add_byte(cMSG_USERAUTH_GSSAPI_TOKEN)
try:
m.add_string(
sshgss.ssh_init_sec_context(
self.gss_host, mech, self.username
)
)
except GSS_EXCEPTIONS as e:
return self._handle_local_gss_failure(e)
self.transport._send_message(m)
while True:
ptype, m = self.transport.packetizer.read_message()
if ptype == MSG_USERAUTH_GSSAPI_TOKEN:
srv_token = m.get_string()
try:
next_token = sshgss.ssh_init_sec_context(
self.gss_host,
mech,
self.username,
srv_token,
)
except GSS_EXCEPTIONS as e:
return self._handle_local_gss_failure(e)
# After this step the GSSAPI should not return any
# token. If it does, we keep sending the token to
# the server until no more token is returned.
if next_token is None:
break
else:
m = Message()
m.add_byte(cMSG_USERAUTH_GSSAPI_TOKEN)
m.add_string(next_token)
self.transport.send_message(m)
else:
raise SSHException(
"Received Package: {}".format(MSG_NAMES[ptype])
)
m = Message()
m.add_byte(cMSG_USERAUTH_GSSAPI_MIC)
# send the MIC to the server
m.add_string(sshgss.ssh_get_mic(self.transport.session_id))
elif ptype == MSG_USERAUTH_GSSAPI_ERRTOK:
# RFC 4462 says we are not required to implement GSS-API
# error messages.
# See RFC 4462 Section 3.8 in
# http://www.ietf.org/rfc/rfc4462.txt
raise SSHException("Server returned an error token")
elif ptype == MSG_USERAUTH_GSSAPI_ERROR:
maj_status = m.get_int()
min_status = m.get_int()
err_msg = m.get_string()
m.get_string() # Lang tag - discarded
raise SSHException(
"""GSS-API Error:
Major Status: {}
Minor Status: {}
Error Message: {}
""".format(
maj_status, min_status, err_msg
)
)
elif ptype == MSG_USERAUTH_FAILURE:
self._parse_userauth_failure(m)
return
else:
raise SSHException(
"Received Package: {}".format(MSG_NAMES[ptype])
)
elif (
self.auth_method == "gssapi-keyex"
and self.transport.gss_kex_used
):
kexgss = self.transport.kexgss_ctxt
kexgss.set_username(self.username)
mic_token = kexgss.ssh_get_mic(self.transport.session_id)
m.add_string(mic_token)
elif self.auth_method == "none":
pass
else:
raise SSHException(
'Unknown auth method "{}"'.format(self.auth_method)
)
self.transport._send_message(m)
else:
self._log(
DEBUG, 'Service request "{}" accepted (?)'.format(service)
)
    def _send_auth_result(self, username, method, result):
        # okay, send result
        # (Server mode) Send USERAUTH_SUCCESS or USERAUTH_FAILURE based on
        # `result`; disconnects after 10 failures, triggers the transport's
        # post-auth hook on success.
        m = Message()
        if result == AUTH_SUCCESSFUL:
            self._log(INFO, "Auth granted ({}).".format(method))
            m.add_byte(cMSG_USERAUTH_SUCCESS)
            self.authenticated = True
        else:
            self._log(INFO, "Auth rejected ({}).".format(method))
            m.add_byte(cMSG_USERAUTH_FAILURE)
            m.add_string(
                self.transport.server_object.get_allowed_auths(username)
            )
            if result == AUTH_PARTIALLY_SUCCESSFUL:
                m.add_boolean(True)
            else:
                m.add_boolean(False)
                self.auth_fail_count += 1
        self.transport._send_message(m)
        if self.auth_fail_count >= 10:
            self._disconnect_no_more_auth()
        if result == AUTH_SUCCESSFUL:
            self.transport._auth_trigger()
    def _interactive_query(self, q):
        # make interactive query instead of response
        # (Server mode) Send a USERAUTH_INFO_REQUEST built from an
        # InteractiveQuery's name, instructions, and (prompt, echo) pairs.
        m = Message()
        m.add_byte(cMSG_USERAUTH_INFO_REQUEST)
        m.add_string(q.name)
        m.add_string(q.instructions)
        m.add_string(bytes())
        m.add_int(len(q.prompts))
        for p in q.prompts:
            m.add_string(p[0])
            m.add_boolean(p[1])
        self.transport._send_message(m)
def _parse_userauth_request(self, m):
if not self.transport.server_mode:
# er, uh... what?
m = Message()
m.add_byte(cMSG_USERAUTH_FAILURE)
m.add_string("none")
m.add_boolean(False)
self.transport._send_message(m)
return
if self.authenticated:
# ignore
return
username = m.get_text()
service = m.get_text()
method = m.get_text()
self._log(
DEBUG,
"Auth request (type={}) service={}, username={}".format(
method, service, username
),
)
if service != "ssh-connection":
self._disconnect_service_not_available()
return
if (self.auth_username is not None) and (
self.auth_username != username
):
self._log(
WARNING,
"Auth rejected because the client attempted to change username in mid-flight", # noqa
)
self._disconnect_no_more_auth()
return
self.auth_username = username
# check if GSS-API authentication is enabled
gss_auth = self.transport.server_object.enable_auth_gssapi()
if method == "none":
result = self.transport.server_object.check_auth_none(username)
elif method == "password":
changereq = m.get_boolean()
password = m.get_binary()
try:
password = password.decode("UTF-8")
except UnicodeError:
# some clients/servers expect non-utf-8 passwords!
# in this case, just return the raw byte string.
pass
if changereq:
# always treated as failure, since we don't support changing
# passwords, but collect the list of valid auth types from
# the callback anyway
self._log(DEBUG, "Auth request to change passwords (rejected)")
newpassword = m.get_binary()
try:
newpassword = newpassword.decode("UTF-8", "replace")
except UnicodeError:
pass
result = AUTH_FAILED
else:
result = self.transport.server_object.check_auth_password(
username, password
)
elif method == "publickey":
sig_attached = m.get_boolean()
# NOTE: server never wants to guess a client's algo, they're
# telling us directly. No need for _finalize_pubkey_algorithm
# anywhere in this flow.
algorithm = m.get_text()
keyblob = m.get_binary()
try:
key = self._generate_key_from_request(algorithm, keyblob)
except SSHException as e:
self._log(INFO, "Auth rejected: public key: {}".format(str(e)))
key = None
except Exception as e:
msg = "Auth rejected: unsupported or mangled public key ({}: {})" # noqa
self._log(INFO, msg.format(e.__class__.__name__, e))
key = None
if key is None:
self._disconnect_no_more_auth()
return
# first check if this key is okay... if not, we can skip the verify
result = self.transport.server_object.check_auth_publickey(
username, key
)
if result != AUTH_FAILED:
# key is okay, verify it
if not sig_attached:
# client wants to know if this key is acceptable, before it
# signs anything... send special "ok" message
m = Message()
m.add_byte(cMSG_USERAUTH_PK_OK)
m.add_string(algorithm)
m.add_string(keyblob)
self.transport._send_message(m)
return
sig = Message(m.get_binary())
blob = self._get_session_blob(
key, service, username, algorithm
)
if not key.verify_ssh_sig(blob, sig):
self._log(INFO, "Auth rejected: invalid signature")
result = AUTH_FAILED
elif method == "keyboard-interactive":
submethods = m.get_string()
result = self.transport.server_object.check_auth_interactive(
username, submethods
)
if isinstance(result, InteractiveQuery):
# make interactive query instead of response
self._interactive_query(result)
return
elif method == "gssapi-with-mic" and gss_auth:
sshgss = GSSAuth(method)
# Read the number of OID mechanisms supported by the client.
# OpenSSH sends just one OID. It's the Kerveros V5 OID and that's
# the only OID we support.
mechs = m.get_int()
# We can't accept more than one OID, so if the SSH client sends
# more than one, disconnect.
if mechs > 1:
self._log(
INFO,
"Disconnect: Received more than one GSS-API OID mechanism",
)
self._disconnect_no_more_auth()
desired_mech = m.get_string()
mech_ok = sshgss.ssh_check_mech(desired_mech)
# if we don't support the mechanism, disconnect.
if not mech_ok:
self._log(
INFO,
"Disconnect: Received an invalid GSS-API OID mechanism",
)
self._disconnect_no_more_auth()
# send the Kerberos V5 GSSAPI OID to the client
supported_mech = sshgss.ssh_gss_oids("server")
# RFC 4462 says we are not required to implement GSS-API error
# messages. See section 3.8 in http://www.ietf.org/rfc/rfc4462.txt
m = Message()
m.add_byte(cMSG_USERAUTH_GSSAPI_RESPONSE)
m.add_bytes(supported_mech)
self.transport.auth_handler = GssapiWithMicAuthHandler(
self, sshgss
)
self.transport._expected_packet = (
MSG_USERAUTH_GSSAPI_TOKEN,
MSG_USERAUTH_REQUEST,
MSG_SERVICE_REQUEST,
)
self.transport._send_message(m)
return
elif method == "gssapi-keyex" and gss_auth:
mic_token = m.get_string()
sshgss = self.transport.kexgss_ctxt
if sshgss is None:
# If there is no valid context, we reject the authentication
result = AUTH_FAILED
self._send_auth_result(username, method, result)
try:
sshgss.ssh_check_mic(
mic_token, self.transport.session_id, self.auth_username
)
except Exception:
result = AUTH_FAILED
self._send_auth_result(username, method, result)
raise
result = AUTH_SUCCESSFUL
self.transport.server_object.check_auth_gssapi_keyex(
username, result
)
else:
result = self.transport.server_object.check_auth_none(username)
# okay, send result
self._send_auth_result(username, method, result)
    def _parse_userauth_success(self, m):
        # (Client mode) Server accepted our auth: mark authenticated, fire
        # the transport's post-auth hook, and wake any waiter.
        self._log(
            INFO, "Authentication ({}) successful!".format(self.auth_method)
        )
        self.authenticated = True
        self.transport._auth_trigger()
        if self.auth_event is not None:
            self.auth_event.set()
    def _parse_userauth_failure(self, m):
        # (Client mode) Server rejected (or only partially accepted) our
        # auth attempt; stash the appropriate exception on the transport
        # and wake any waiter.
        authlist = m.get_list()
        # TODO 4.0: we aren't giving callers access to authlist _unless_ it's
        # partial authentication, so eg authtype=none can't work unless we
        # tweak this.
        partial = m.get_boolean()
        if partial:
            self._log(INFO, "Authentication continues...")
            self._log(DEBUG, "Methods: " + str(authlist))
            self.transport.saved_exception = PartialAuthentication(authlist)
        elif self.auth_method not in authlist:
            for msg in (
                "Authentication type ({}) not permitted.".format(
                    self.auth_method
                ),
                "Allowed methods: {}".format(authlist),
            ):
                self._log(DEBUG, msg)
            self.transport.saved_exception = BadAuthenticationType(
                "Bad authentication type", authlist
            )
        else:
            self._log(
                INFO, "Authentication ({}) failed.".format(self.auth_method)
            )
        self.authenticated = False
        self.username = None
        if self.auth_event is not None:
            self.auth_event.set()
    def _parse_userauth_banner(self, m):
        # (Client mode) Record the server's auth banner for later retrieval.
        # NOTE(review): get_string() yields raw bytes here; the banner is
        # stored and logged un-decoded - confirm callers expect bytes.
        banner = m.get_string()
        self.banner = banner
        self._log(INFO, "Auth banner: {}".format(banner))
        # who cares.
def _parse_userauth_info_request(self, m):
if self.auth_method != "keyboard-interactive":
raise SSHException("Illegal info request from server")
title = m.get_text()
instructions = m.get_text()
m.get_binary() # lang
prompts = m.get_int()
prompt_list = []
for i in range(prompts):
prompt_list.append((m.get_text(), m.get_boolean()))
response_list = self.interactive_handler(
title, instructions, prompt_list
)
m = Message()
m.add_byte(cMSG_USERAUTH_INFO_RESPONSE)
m.add_int(len(response_list))
for r in response_list:
m.add_string(r)
self.transport._send_message(m)
def _parse_userauth_info_response(self, m):
if not self.transport.server_mode:
raise SSHException("Illegal info response from server")
n = m.get_int()
responses = []
for i in range(n):
responses.append(m.get_text())
result = self.transport.server_object.check_auth_interactive_response(
responses
)
if isinstance(result, InteractiveQuery):
# make interactive query instead of response
self._interactive_query(result)
return
self._send_auth_result(
self.auth_username, "keyboard-interactive", result
)
def _handle_local_gss_failure(self, e):
self.transport.saved_exception = e
self._log(DEBUG, "GSSAPI failure: {}".format(e))
self._log(INFO, "Authentication ({}) failed.".format(self.auth_method))
self.authenticated = False
self.username = None
if self.auth_event is not None:
self.auth_event.set()
return
# TODO 4.0: MAY make sense to make these tables into actual
# classes/instances that can be fed a mode bool or whatever. Or,
# alternately (both?) make the message types small classes or enums that
# embed this info within themselves (which could also then tidy up the
# current 'integer -> human readable short string' stuff in common.py).
# TODO: if we do that, also expose 'em publicly.
# Messages which should be handled _by_ servers (sent by clients)
@property
def _server_handler_table(self):
return {
# TODO 4.0: MSG_SERVICE_REQUEST ought to eventually move into
# Transport's server mode like the client side did, just for
# consistency.
MSG_SERVICE_REQUEST: self._parse_service_request,
MSG_USERAUTH_REQUEST: self._parse_userauth_request,
MSG_USERAUTH_INFO_RESPONSE: self._parse_userauth_info_response,
}
# Messages which should be handled _by_ clients (sent by servers)
@property
def _client_handler_table(self):
return {
MSG_SERVICE_ACCEPT: self._parse_service_accept,
MSG_USERAUTH_SUCCESS: self._parse_userauth_success,
MSG_USERAUTH_FAILURE: self._parse_userauth_failure,
MSG_USERAUTH_BANNER: self._parse_userauth_banner,
MSG_USERAUTH_INFO_REQUEST: self._parse_userauth_info_request,
}
# NOTE: prior to the fix for #1283, this was a static dict instead of a
# property. Should be backwards compatible in most/all cases.
@property
def _handler_table(self):
if self.transport.server_mode:
return self._server_handler_table
else:
return self._client_handler_table
class GssapiWithMicAuthHandler:
    """A specialized Auth handler for gssapi-with-mic

    During the GSSAPI token exchange we need a modified dispatch table,
    because the packet type numbers are not unique.

    Instances temporarily replace the transport's regular ``AuthHandler``
    (the ``delegate``) for the duration of the token exchange, then restore
    it via `_restore_delegate_auth_handler`.
    """

    method = "gssapi-with-mic"

    def __init__(self, delegate, sshgss):
        # delegate: the normal AuthHandler we stand in for on the transport.
        # sshgss: the GSS-API / SSPI security context wrapper object.
        self._delegate = delegate
        self.sshgss = sshgss

    def abort(self):
        # Put the original handler back before aborting on its behalf.
        self._restore_delegate_auth_handler()
        return self._delegate.abort()

    @property
    def transport(self):
        # Proxied straight through to the wrapped handler.
        return self._delegate.transport

    @property
    def _send_auth_result(self):
        # Proxied bound method of the wrapped handler.
        return self._delegate._send_auth_result

    @property
    def auth_username(self):
        return self._delegate.auth_username

    @property
    def gss_host(self):
        return self._delegate.gss_host

    def _restore_delegate_auth_handler(self):
        # Re-install the wrapped AuthHandler as the transport's handler.
        self.transport.auth_handler = self._delegate

    def _parse_userauth_gssapi_token(self, m):
        # Server side: consume one client token and either send our reply
        # token or (on failure) restore the delegate and report AUTH_FAILED.
        client_token = m.get_string()
        # use the client token as input to establish a secure
        # context.
        sshgss = self.sshgss
        try:
            token = sshgss.ssh_accept_sec_context(
                self.gss_host, client_token, self.auth_username
            )
        except Exception as e:
            self.transport.saved_exception = e
            result = AUTH_FAILED
            self._restore_delegate_auth_handler()
            self._send_auth_result(self.auth_username, self.method, result)
            raise
        if token is not None:
            m = Message()
            m.add_byte(cMSG_USERAUTH_GSSAPI_TOKEN)
            m.add_string(token)
            self.transport._expected_packet = (
                MSG_USERAUTH_GSSAPI_TOKEN,
                MSG_USERAUTH_GSSAPI_MIC,
                MSG_USERAUTH_REQUEST,
            )
            self.transport._send_message(m)

    def _parse_userauth_gssapi_mic(self, m):
        # Server side: final MIC verification. The delegate is restored
        # first so subsequent messages use the normal handler either way.
        mic_token = m.get_string()
        sshgss = self.sshgss
        username = self.auth_username
        self._restore_delegate_auth_handler()
        try:
            sshgss.ssh_check_mic(
                mic_token, self.transport.session_id, username
            )
        except Exception as e:
            self.transport.saved_exception = e
            result = AUTH_FAILED
            self._send_auth_result(username, self.method, result)
            raise
        # TODO: Implement client credential saving.
        # The OpenSSH server is able to create a TGT with the delegated
        # client credentials, but this is not supported by GSS-API.
        result = AUTH_SUCCESSFUL
        self.transport.server_object.check_auth_gssapi_with_mic(
            username, result
        )
        # okay, send result
        self._send_auth_result(username, self.method, result)

    def _parse_service_request(self, m):
        # Unexpected during the token exchange: hand back to the delegate.
        self._restore_delegate_auth_handler()
        return self._delegate._parse_service_request(m)

    def _parse_userauth_request(self, m):
        # Unexpected during the token exchange: hand back to the delegate.
        self._restore_delegate_auth_handler()
        return self._delegate._parse_userauth_request(m)

    # NOTE(review): unlike AuthHandler's tables, these values are plain
    # (unbound) functions captured at class-definition time — presumably the
    # transport's dispatch loop passes the handler instance explicitly;
    # confirm against Transport.run before changing.
    __handler_table = {
        MSG_SERVICE_REQUEST: _parse_service_request,
        MSG_USERAUTH_REQUEST: _parse_userauth_request,
        MSG_USERAUTH_GSSAPI_TOKEN: _parse_userauth_gssapi_token,
        MSG_USERAUTH_GSSAPI_MIC: _parse_userauth_gssapi_mic,
    }

    @property
    def _handler_table(self):
        # TODO: determine if we can cut this up like we did for the primary
        # AuthHandler class.
        return self.__handler_table
class AuthOnlyHandler(AuthHandler):
    """
    AuthHandler, and just auth, no service requests!

    .. versionadded:: 3.2
    """

    # NOTE: this purposefully duplicates some of the parent class in order to
    # modernize, refactor, etc. The intent is that eventually we will collapse
    # this one onto the parent in a backwards incompatible release.

    @property
    def _client_handler_table(self):
        # Start from the parent's table but drop MSG_SERVICE_ACCEPT: this
        # handler never sends a service request, so it must not expect one.
        my_table = super()._client_handler_table.copy()
        del my_table[MSG_SERVICE_ACCEPT]
        return my_table

    def send_auth_request(self, username, method, finish_message=None):
        """
        Submit a userauth request message & wait for response.

        Performs the transport message send call, sets self.auth_event, and
        will lock-n-block as necessary to both send, and wait for response to,
        the USERAUTH_REQUEST.

        Most callers will want to supply a callback to ``finish_message``,
        which accepts a Message ``m`` and may call mutator methods on it to
        add more fields.

        :param str username: account name to authenticate as
        :param str method: SSH authentication method name (eg ``"password"``)
        :param callable finish_message:
            appends method-specific fields to the outgoing request
        """
        # Store a few things for reference in handlers, including auth failure
        # handler (which needs to know if we were using a bad method, etc)
        self.auth_method = method
        self.username = username
        # Generic userauth request fields
        m = Message()
        m.add_byte(cMSG_USERAUTH_REQUEST)
        m.add_string(username)
        m.add_string("ssh-connection")
        m.add_string(method)
        # Caller usually has more to say, such as injecting password, key etc
        finish_message(m)
        # TODO 4.0: seems odd to have the client handle the lock and not
        # Transport; that _may_ have been an artifact of allowing user
        # threading event injection? Regardless, we don't want to move _this_
        # locking into Transport._send_message now, because lots of other
        # untouched code also uses that method and we might end up
        # double-locking (?) but 4.0 would be a good time to revisit.
        with self.transport.lock:
            self.transport._send_message(m)
        # We have cut out the higher level event args, but self.auth_event is
        # still required for self.wait_for_response to function correctly
        # (it's the mechanism used by the auth success/failure handlers, the
        # abort handler, and a few other spots like in gssapi.
        # TODO: interestingly, wait_for_response itself doesn't actually
        # enforce that its event argument and self.auth_event are the same...
        self.auth_event = threading.Event()
        return self.wait_for_response(self.auth_event)

    def auth_none(self, username):
        """Attempt ``none`` authentication (commonly used to probe methods)."""
        return self.send_auth_request(username, "none")

    def auth_publickey(self, username, key):
        """Attempt public-key authentication with the given ``key``."""
        key_type, bits = self._get_key_type_and_bits(key)
        algorithm = self._finalize_pubkey_algorithm(key_type)
        blob = self._get_session_blob(
            key,
            "ssh-connection",
            username,
            algorithm,
        )

        def finish(m):
            # This field doesn't appear to be named, but is False when
            # querying for permission (ie knowing whether to even prompt a
            # user for passphrase, etc) or True when just going for it.
            # Paramiko has never bothered with the former type of message,
            # apparently.
            m.add_boolean(True)
            m.add_string(algorithm)
            m.add_string(bits)
            m.add_string(key.sign_ssh_data(blob, algorithm))

        return self.send_auth_request(username, "publickey", finish)

    def auth_password(self, username, password):
        """Attempt password authentication."""

        def finish(m):
            # Unnamed field that equates to "I am changing my password",
            # which Paramiko clientside never supported and serverside only
            # sort of supported.
            m.add_boolean(False)
            m.add_string(b(password))

        return self.send_auth_request(username, "password", finish)

    def auth_interactive(self, username, handler, submethods=""):
        """
        response_list = handler(title, instructions, prompt_list)
        """
        # Unlike most siblings, this auth method _does_ require other
        # superclass handlers (eg userauth info request) to understand
        # what's going on, so we still set some self attributes.
        # Fixed: use the hyphenated wire-protocol name here. The parent's
        # _parse_userauth_info_request rejects info requests unless
        # self.auth_method == "keyboard-interactive"; the old underscored
        # value only worked because send_auth_request overwrote it.
        self.auth_method = "keyboard-interactive"
        self.interactive_handler = handler

        def finish(m):
            # Empty string for deprecated language tag field, per RFC 4256:
            # https://www.rfc-editor.org/rfc/rfc4256#section-3.1
            m.add_string("")
            m.add_string(submethods)

        return self.send_auth_request(username, "keyboard-interactive", finish)

    # NOTE: not strictly 'auth only' related, but allows users to opt-in.
    def _choose_fallback_pubkey_algorithm(self, key_type, my_algos):
        """Pick a pubkey algorithm when the server sent no server-sig-algs."""
        msg = "Server did not send a server-sig-algs list; defaulting to something in our preferred algorithms list"  # noqa
        self._log(DEBUG, msg)
        # Fixed: strip the OpenSSH certificate suffix (the literal had been
        # mangled by an email-address redaction).
        noncert_key_type = key_type.replace("[email protected]", "")
        if key_type in my_algos or noncert_key_type in my_algos:
            actual = key_type if key_type in my_algos else noncert_key_type
            msg = f"Current key type, {actual!r}, is in our preferred list; using that"  # noqa
            algo = actual
        else:
            algo = my_algos[0]
            msg = f"{key_type!r} not in our list - trying first list item instead, {algo!r}"  # noqa
        self._log(DEBUG, msg)
        return algo
| 43,006 | Python | .py | 1,012 | 30.618577 | 171 | 0.575703 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
606 | sftp_client.py | paramiko_paramiko/paramiko/sftp_client.py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of Paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from binascii import hexlify
import errno
import os
import stat
import threading
import time
import weakref
from paramiko import util
from paramiko.channel import Channel
from paramiko.message import Message
from paramiko.common import INFO, DEBUG, o777
from paramiko.sftp import (
BaseSFTP,
CMD_OPENDIR,
CMD_HANDLE,
SFTPError,
CMD_READDIR,
CMD_NAME,
CMD_CLOSE,
SFTP_FLAG_READ,
SFTP_FLAG_WRITE,
SFTP_FLAG_CREATE,
SFTP_FLAG_TRUNC,
SFTP_FLAG_APPEND,
SFTP_FLAG_EXCL,
CMD_OPEN,
CMD_REMOVE,
CMD_RENAME,
CMD_MKDIR,
CMD_RMDIR,
CMD_STAT,
CMD_ATTRS,
CMD_LSTAT,
CMD_SYMLINK,
CMD_SETSTAT,
CMD_READLINK,
CMD_REALPATH,
CMD_STATUS,
CMD_EXTENDED,
SFTP_OK,
SFTP_EOF,
SFTP_NO_SUCH_FILE,
SFTP_PERMISSION_DENIED,
int64,
)
from paramiko.sftp_attr import SFTPAttributes
from paramiko.ssh_exception import SSHException
from paramiko.sftp_file import SFTPFile
from paramiko.util import ClosingContextManager, b, u
def _to_unicode(s):
"""
decode a string as ascii or utf8 if possible (as required by the sftp
protocol). if neither works, just return a byte string because the server
probably doesn't know the filename's encoding.
"""
try:
return s.encode("ascii")
except (UnicodeError, AttributeError):
try:
return s.decode("utf-8")
except UnicodeError:
return s
b_slash = b"/"
class SFTPClient(BaseSFTP, ClosingContextManager):
"""
SFTP client object.
Used to open an SFTP session across an open SSH `.Transport` and perform
remote file operations.
Instances of this class may be used as context managers.
"""
    def __init__(self, sock):
        """
        Create an SFTP client from an existing `.Channel`. The channel
        should already have requested the ``"sftp"`` subsystem.

        An alternate way to create an SFTP client context is by using
        `from_transport`.

        :param .Channel sock: an open `.Channel` using the ``"sftp"`` subsystem

        :raises:
            `.SSHException` -- if there's an exception while negotiating sftp
        """
        BaseSFTP.__init__(self)
        self.sock = sock
        self.ultra_debug = False
        # Monotonically increasing id assigned to outgoing requests.
        self.request_number = 1
        # lock for request_number
        self._lock = threading.Lock()
        # Emulated current working directory; None until chdir() is called.
        self._cwd = None
        # request # -> SFTPFile
        # (weak values: a garbage-collected SFTPFile silently stops
        # receiving responses)
        self._expecting = weakref.WeakValueDictionary()
        # NOTE: exact type check (not isinstance) — only a real Channel
        # carries a transport whose logging settings we can borrow.
        if type(sock) is Channel:
            # override default logger
            transport = self.sock.get_transport()
            self.logger = util.get_logger(
                transport.get_log_channel() + ".sftp"
            )
            self.ultra_debug = transport.get_hexdump()
        try:
            server_version = self._send_version()
        except EOFError:
            raise SSHException("EOF during negotiation")
        self._log(
            INFO,
            "Opened sftp connection (server version {})".format(
                server_version
            ),
        )
@classmethod
def from_transport(cls, t, window_size=None, max_packet_size=None):
"""
Create an SFTP client channel from an open `.Transport`.
Setting the window and packet sizes might affect the transfer speed.
The default settings in the `.Transport` class are the same as in
OpenSSH and should work adequately for both files transfers and
interactive sessions.
:param .Transport t: an open `.Transport` which is already
authenticated
:param int window_size:
optional window size for the `.SFTPClient` session.
:param int max_packet_size:
optional max packet size for the `.SFTPClient` session..
:return:
a new `.SFTPClient` object, referring to an sftp session (channel)
across the transport
.. versionchanged:: 1.15
Added the ``window_size`` and ``max_packet_size`` arguments.
"""
chan = t.open_session(
window_size=window_size, max_packet_size=max_packet_size
)
if chan is None:
return None
chan.invoke_subsystem("sftp")
return cls(chan)
def _log(self, level, msg, *args):
if isinstance(msg, list):
for m in msg:
self._log(level, m, *args)
else:
# NOTE: these bits MUST continue using %-style format junk because
# logging.Logger.log() explicitly requires it. Grump.
# escape '%' in msg (they could come from file or directory names)
# before logging
msg = msg.replace("%", "%%")
super()._log(
level,
"[chan %s] " + msg,
*([self.sock.get_name()] + list(args))
)
def close(self):
"""
Close the SFTP session and its underlying channel.
.. versionadded:: 1.4
"""
self._log(INFO, "sftp session closed.")
self.sock.close()
def get_channel(self):
"""
Return the underlying `.Channel` object for this SFTP session. This
might be useful for doing things like setting a timeout on the channel.
.. versionadded:: 1.7.1
"""
return self.sock
def listdir(self, path="."):
"""
Return a list containing the names of the entries in the given
``path``.
The list is in arbitrary order. It does not include the special
entries ``'.'`` and ``'..'`` even if they are present in the folder.
This method is meant to mirror ``os.listdir`` as closely as possible.
For a list of full `.SFTPAttributes` objects, see `listdir_attr`.
:param str path: path to list (defaults to ``'.'``)
"""
return [f.filename for f in self.listdir_attr(path)]
def listdir_attr(self, path="."):
"""
Return a list containing `.SFTPAttributes` objects corresponding to
files in the given ``path``. The list is in arbitrary order. It does
not include the special entries ``'.'`` and ``'..'`` even if they are
present in the folder.
The returned `.SFTPAttributes` objects will each have an additional
field: ``longname``, which may contain a formatted string of the file's
attributes, in unix format. The content of this string will probably
depend on the SFTP server implementation.
:param str path: path to list (defaults to ``'.'``)
:return: list of `.SFTPAttributes` objects
.. versionadded:: 1.2
"""
path = self._adjust_cwd(path)
self._log(DEBUG, "listdir({!r})".format(path))
t, msg = self._request(CMD_OPENDIR, path)
if t != CMD_HANDLE:
raise SFTPError("Expected handle")
handle = msg.get_binary()
filelist = []
while True:
try:
t, msg = self._request(CMD_READDIR, handle)
except EOFError:
# done with handle
break
if t != CMD_NAME:
raise SFTPError("Expected name response")
count = msg.get_int()
for i in range(count):
filename = msg.get_text()
longname = msg.get_text()
attr = SFTPAttributes._from_msg(msg, filename, longname)
if (filename != ".") and (filename != ".."):
filelist.append(attr)
self._request(CMD_CLOSE, handle)
return filelist
    def listdir_iter(self, path=".", read_aheads=50):
        """
        Generator version of `.listdir_attr`.

        See the API docs for `.listdir_attr` for overall details.

        This function adds one more kwarg on top of `.listdir_attr`:
        ``read_aheads``, an integer controlling how many
        ``SSH_FXP_READDIR`` requests are made to the server. The default of 50
        should suffice for most file listings as each request/response cycle
        may contain multiple files (dependent on server implementation.)

        .. versionadded:: 1.15
        """
        path = self._adjust_cwd(path)
        self._log(DEBUG, "listdir({!r})".format(path))
        t, msg = self._request(CMD_OPENDIR, path)
        if t != CMD_HANDLE:
            raise SFTPError("Expected handle")
        handle = msg.get_string()
        # Request ids of the READDIRs currently in flight.
        nums = list()
        while True:
            try:
                # Send out a bunch of readdir requests so that we can read the
                # responses later on Section 6.7 of the SSH file transfer RFC
                # explains this
                # http://filezilla-project.org/specs/draft-ietf-secsh-filexfer-02.txt
                for i in range(read_aheads):
                    num = self._async_request(type(None), CMD_READDIR, handle)
                    nums.append(num)
                # For each of our sent requests
                # Read and parse the corresponding packets
                # If we're at the end of our queued requests, then fire off
                # some more requests
                # Exit the loop when we've reached the end of the directory
                # handle
                for num in nums:
                    t, pkt_data = self._read_packet()
                    msg = Message(pkt_data)
                    new_num = msg.get_int()
                    # NOTE(review): responses whose id does not match are
                    # silently dropped — confirm that is intended.
                    if num == new_num:
                        if t == CMD_STATUS:
                            # Presumably raises EOFError at end-of-directory,
                            # landing in the except below — see
                            # _convert_status to confirm.
                            self._convert_status(msg)
                        count = msg.get_int()
                        for i in range(count):
                            filename = msg.get_text()
                            longname = msg.get_text()
                            attr = SFTPAttributes._from_msg(
                                msg, filename, longname
                            )
                            if (filename != ".") and (filename != ".."):
                                yield attr
                # If we've hit the end of our queued requests, reset nums.
                nums = list()
            except EOFError:
                # End of directory: release the server-side handle.
                self._request(CMD_CLOSE, handle)
                return
def open(self, filename, mode="r", bufsize=-1):
"""
Open a file on the remote server. The arguments are the same as for
Python's built-in `python:file` (aka `python:open`). A file-like
object is returned, which closely mimics the behavior of a normal
Python file object, including the ability to be used as a context
manager.
The mode indicates how the file is to be opened: ``'r'`` for reading,
``'w'`` for writing (truncating an existing file), ``'a'`` for
appending, ``'r+'`` for reading/writing, ``'w+'`` for reading/writing
(truncating an existing file), ``'a+'`` for reading/appending. The
Python ``'b'`` flag is ignored, since SSH treats all files as binary.
The ``'U'`` flag is supported in a compatible way.
Since 1.5.2, an ``'x'`` flag indicates that the operation should only
succeed if the file was created and did not previously exist. This has
no direct mapping to Python's file flags, but is commonly known as the
``O_EXCL`` flag in posix.
The file will be buffered in standard Python style by default, but
can be altered with the ``bufsize`` parameter. ``<=0`` turns off
buffering, ``1`` uses line buffering, and any number greater than 1
(``>1``) uses that specific buffer size.
:param str filename: name of the file to open
:param str mode: mode (Python-style) to open in
:param int bufsize: desired buffering (default: ``-1``)
:return: an `.SFTPFile` object representing the open file
:raises: ``IOError`` -- if the file could not be opened.
"""
filename = self._adjust_cwd(filename)
self._log(DEBUG, "open({!r}, {!r})".format(filename, mode))
imode = 0
if ("r" in mode) or ("+" in mode):
imode |= SFTP_FLAG_READ
if ("w" in mode) or ("+" in mode) or ("a" in mode):
imode |= SFTP_FLAG_WRITE
if "w" in mode:
imode |= SFTP_FLAG_CREATE | SFTP_FLAG_TRUNC
if "a" in mode:
imode |= SFTP_FLAG_CREATE | SFTP_FLAG_APPEND
if "x" in mode:
imode |= SFTP_FLAG_CREATE | SFTP_FLAG_EXCL
attrblock = SFTPAttributes()
t, msg = self._request(CMD_OPEN, filename, imode, attrblock)
if t != CMD_HANDLE:
raise SFTPError("Expected handle")
handle = msg.get_binary()
self._log(
DEBUG,
"open({!r}, {!r}) -> {}".format(
filename, mode, u(hexlify(handle))
),
)
return SFTPFile(self, handle, mode, bufsize)
# Python continues to vacillate about "open" vs "file"...
file = open
def remove(self, path):
"""
Remove the file at the given path. This only works on files; for
removing folders (directories), use `rmdir`.
:param str path: path (absolute or relative) of the file to remove
:raises: ``IOError`` -- if the path refers to a folder (directory)
"""
path = self._adjust_cwd(path)
self._log(DEBUG, "remove({!r})".format(path))
self._request(CMD_REMOVE, path)
unlink = remove
def rename(self, oldpath, newpath):
"""
Rename a file or folder from ``oldpath`` to ``newpath``.
.. note::
This method implements 'standard' SFTP ``RENAME`` behavior; those
seeking the OpenSSH "POSIX rename" extension behavior should use
`posix_rename`.
:param str oldpath:
existing name of the file or folder
:param str newpath:
new name for the file or folder, must not exist already
:raises:
``IOError`` -- if ``newpath`` is a folder, or something else goes
wrong
"""
oldpath = self._adjust_cwd(oldpath)
newpath = self._adjust_cwd(newpath)
self._log(DEBUG, "rename({!r}, {!r})".format(oldpath, newpath))
self._request(CMD_RENAME, oldpath, newpath)
def posix_rename(self, oldpath, newpath):
"""
Rename a file or folder from ``oldpath`` to ``newpath``, following
posix conventions.
:param str oldpath: existing name of the file or folder
:param str newpath: new name for the file or folder, will be
overwritten if it already exists
:raises:
``IOError`` -- if ``newpath`` is a folder, posix-rename is not
supported by the server or something else goes wrong
:versionadded: 2.2
"""
oldpath = self._adjust_cwd(oldpath)
newpath = self._adjust_cwd(newpath)
self._log(DEBUG, "posix_rename({!r}, {!r})".format(oldpath, newpath))
self._request(
CMD_EXTENDED, "[email protected]", oldpath, newpath
)
def mkdir(self, path, mode=o777):
"""
Create a folder (directory) named ``path`` with numeric mode ``mode``.
The default mode is 0777 (octal). On some systems, mode is ignored.
Where it is used, the current umask value is first masked out.
:param str path: name of the folder to create
:param int mode: permissions (posix-style) for the newly-created folder
"""
path = self._adjust_cwd(path)
self._log(DEBUG, "mkdir({!r}, {!r})".format(path, mode))
attr = SFTPAttributes()
attr.st_mode = mode
self._request(CMD_MKDIR, path, attr)
def rmdir(self, path):
"""
Remove the folder named ``path``.
:param str path: name of the folder to remove
"""
path = self._adjust_cwd(path)
self._log(DEBUG, "rmdir({!r})".format(path))
self._request(CMD_RMDIR, path)
def stat(self, path):
"""
Retrieve information about a file on the remote system. The return
value is an object whose attributes correspond to the attributes of
Python's ``stat`` structure as returned by ``os.stat``, except that it
contains fewer fields. An SFTP server may return as much or as little
info as it wants, so the results may vary from server to server.
Unlike a Python `python:stat` object, the result may not be accessed as
a tuple. This is mostly due to the author's slack factor.
The fields supported are: ``st_mode``, ``st_size``, ``st_uid``,
``st_gid``, ``st_atime``, and ``st_mtime``.
:param str path: the filename to stat
:return:
an `.SFTPAttributes` object containing attributes about the given
file
"""
path = self._adjust_cwd(path)
self._log(DEBUG, "stat({!r})".format(path))
t, msg = self._request(CMD_STAT, path)
if t != CMD_ATTRS:
raise SFTPError("Expected attributes")
return SFTPAttributes._from_msg(msg)
def lstat(self, path):
"""
Retrieve information about a file on the remote system, without
following symbolic links (shortcuts). This otherwise behaves exactly
the same as `stat`.
:param str path: the filename to stat
:return:
an `.SFTPAttributes` object containing attributes about the given
file
"""
path = self._adjust_cwd(path)
self._log(DEBUG, "lstat({!r})".format(path))
t, msg = self._request(CMD_LSTAT, path)
if t != CMD_ATTRS:
raise SFTPError("Expected attributes")
return SFTPAttributes._from_msg(msg)
def symlink(self, source, dest):
"""
Create a symbolic link to the ``source`` path at ``destination``.
:param str source: path of the original file
:param str dest: path of the newly created symlink
"""
dest = self._adjust_cwd(dest)
self._log(DEBUG, "symlink({!r}, {!r})".format(source, dest))
source = b(source)
self._request(CMD_SYMLINK, source, dest)
def chmod(self, path, mode):
"""
Change the mode (permissions) of a file. The permissions are
unix-style and identical to those used by Python's `os.chmod`
function.
:param str path: path of the file to change the permissions of
:param int mode: new permissions
"""
path = self._adjust_cwd(path)
self._log(DEBUG, "chmod({!r}, {!r})".format(path, mode))
attr = SFTPAttributes()
attr.st_mode = mode
self._request(CMD_SETSTAT, path, attr)
def chown(self, path, uid, gid):
"""
Change the owner (``uid``) and group (``gid``) of a file. As with
Python's `os.chown` function, you must pass both arguments, so if you
only want to change one, use `stat` first to retrieve the current
owner and group.
:param str path: path of the file to change the owner and group of
:param int uid: new owner's uid
:param int gid: new group id
"""
path = self._adjust_cwd(path)
self._log(DEBUG, "chown({!r}, {!r}, {!r})".format(path, uid, gid))
attr = SFTPAttributes()
attr.st_uid, attr.st_gid = uid, gid
self._request(CMD_SETSTAT, path, attr)
def utime(self, path, times):
"""
Set the access and modified times of the file specified by ``path``.
If ``times`` is ``None``, then the file's access and modified times
are set to the current time. Otherwise, ``times`` must be a 2-tuple
of numbers, of the form ``(atime, mtime)``, which is used to set the
access and modified times, respectively. This bizarre API is mimicked
from Python for the sake of consistency -- I apologize.
:param str path: path of the file to modify
:param tuple times:
``None`` or a tuple of (access time, modified time) in standard
internet epoch time (seconds since 01 January 1970 GMT)
"""
path = self._adjust_cwd(path)
if times is None:
times = (time.time(), time.time())
self._log(DEBUG, "utime({!r}, {!r})".format(path, times))
attr = SFTPAttributes()
attr.st_atime, attr.st_mtime = times
self._request(CMD_SETSTAT, path, attr)
def truncate(self, path, size):
"""
Change the size of the file specified by ``path``. This usually
extends or shrinks the size of the file, just like the `~file.truncate`
method on Python file objects.
:param str path: path of the file to modify
:param int size: the new size of the file
"""
path = self._adjust_cwd(path)
self._log(DEBUG, "truncate({!r}, {!r})".format(path, size))
attr = SFTPAttributes()
attr.st_size = size
self._request(CMD_SETSTAT, path, attr)
def readlink(self, path):
"""
Return the target of a symbolic link (shortcut). You can use
`symlink` to create these. The result may be either an absolute or
relative pathname.
:param str path: path of the symbolic link file
:return: target path, as a `str`
"""
path = self._adjust_cwd(path)
self._log(DEBUG, "readlink({!r})".format(path))
t, msg = self._request(CMD_READLINK, path)
if t != CMD_NAME:
raise SFTPError("Expected name response")
count = msg.get_int()
if count == 0:
return None
if count != 1:
raise SFTPError("Readlink returned {} results".format(count))
return _to_unicode(msg.get_string())
def normalize(self, path):
"""
Return the normalized path (on the server) of a given path. This
can be used to quickly resolve symbolic links or determine what the
server is considering to be the "current folder" (by passing ``'.'``
as ``path``).
:param str path: path to be normalized
:return: normalized form of the given path (as a `str`)
:raises: ``IOError`` -- if the path can't be resolved on the server
"""
path = self._adjust_cwd(path)
self._log(DEBUG, "normalize({!r})".format(path))
t, msg = self._request(CMD_REALPATH, path)
if t != CMD_NAME:
raise SFTPError("Expected name response")
count = msg.get_int()
if count != 1:
raise SFTPError("Realpath returned {} results".format(count))
return msg.get_text()
def chdir(self, path=None):
"""
Change the "current directory" of this SFTP session. Since SFTP
doesn't really have the concept of a current working directory, this is
emulated by Paramiko. Once you use this method to set a working
directory, all operations on this `.SFTPClient` object will be relative
to that path. You can pass in ``None`` to stop using a current working
directory.
:param str path: new current working directory
:raises:
``IOError`` -- if the requested path doesn't exist on the server
.. versionadded:: 1.4
"""
if path is None:
self._cwd = None
return
if not stat.S_ISDIR(self.stat(path).st_mode):
code = errno.ENOTDIR
raise SFTPError(code, "{}: {}".format(os.strerror(code), path))
self._cwd = b(self.normalize(path))
def getcwd(self):
"""
Return the "current working directory" for this SFTP session, as
emulated by Paramiko. If no directory has been set with `chdir`,
this method will return ``None``.
.. versionadded:: 1.4
"""
# TODO: make class initialize with self._cwd set to self.normalize('.')
return self._cwd and u(self._cwd)
def _transfer_with_callback(self, reader, writer, file_size, callback):
size = 0
while True:
data = reader.read(32768)
writer.write(data)
size += len(data)
if len(data) == 0:
break
if callback is not None:
callback(size, file_size)
return size
    def putfo(self, fl, remotepath, file_size=0, callback=None, confirm=True):
        """
        Copy the contents of an open file object (``fl``) to the SFTP server
        as ``remotepath``. Any exception raised by operations will be passed
        through.

        The SFTP operations use pipelining for speed.

        :param fl: opened file or file-like object to copy
        :param str remotepath: the destination path on the SFTP server
        :param int file_size:
            optional size parameter passed to callback. If none is specified,
            size defaults to 0
        :param callable callback:
            optional callback function (form: ``func(int, int)``) that accepts
            the bytes transferred so far and the total bytes to be transferred
            (since 1.7.4)
        :param bool confirm:
            whether to do a stat() on the file afterwards to confirm the file
            size (since 1.7.7)

        :return:
            an `.SFTPAttributes` object containing attributes about the given
            file.

        .. versionadded:: 1.10
        """
        with self.file(remotepath, "wb") as fr:
            # Pipelined writes: don't wait for per-write server confirmation.
            fr.set_pipelined(True)
            size = self._transfer_with_callback(
                reader=fl, writer=fr, file_size=file_size, callback=callback
            )
            if confirm:
                # NOTE(review): this stat() happens while the remote handle
                # is still open — confirm servers report the final size here.
                s = self.stat(remotepath)
                if s.st_size != size:
                    raise IOError(
                        "size mismatch in put! {} != {}".format(s.st_size, size)
                    )
            else:
                # Caller opted out of verification; return empty attributes.
                s = SFTPAttributes()
            return s
def put(self, localpath, remotepath, callback=None, confirm=True):
"""
Copy a local file (``localpath``) to the SFTP server as ``remotepath``.
Any exception raised by operations will be passed through. This
method is primarily provided as a convenience.
The SFTP operations use pipelining for speed.
:param str localpath: the local file to copy
:param str remotepath: the destination path on the SFTP server. Note
that the filename should be included. Only specifying a directory
may result in an error.
:param callable callback:
optional callback function (form: ``func(int, int)``) that accepts
the bytes transferred so far and the total bytes to be transferred
:param bool confirm:
whether to do a stat() on the file afterwards to confirm the file
size
:return: an `.SFTPAttributes` object containing attributes about the
given file
.. versionadded:: 1.4
.. versionchanged:: 1.7.4
``callback`` and rich attribute return value added.
.. versionchanged:: 1.7.7
``confirm`` param added.
"""
file_size = os.stat(localpath).st_size
with open(localpath, "rb") as fl:
return self.putfo(fl, remotepath, file_size, callback, confirm)
def getfo(
self,
remotepath,
fl,
callback=None,
prefetch=True,
max_concurrent_prefetch_requests=None,
):
"""
Copy a remote file (``remotepath``) from the SFTP server and write to
an open file or file-like object, ``fl``. Any exception raised by
operations will be passed through. This method is primarily provided
as a convenience.
:param object remotepath: opened file or file-like object to copy to
:param str fl:
the destination path on the local host or open file object
:param callable callback:
optional callback function (form: ``func(int, int)``) that accepts
the bytes transferred so far and the total bytes to be transferred
:param bool prefetch:
controls whether prefetching is performed (default: True)
:param int max_concurrent_prefetch_requests:
The maximum number of concurrent read requests to prefetch. See
`.SFTPClient.get` (its ``max_concurrent_prefetch_requests`` param)
for details.
:return: the `number <int>` of bytes written to the opened file object
.. versionadded:: 1.10
.. versionchanged:: 2.8
Added the ``prefetch`` keyword argument.
.. versionchanged:: 3.3
Added ``max_concurrent_prefetch_requests``.
"""
file_size = self.stat(remotepath).st_size
with self.open(remotepath, "rb") as fr:
if prefetch:
fr.prefetch(file_size, max_concurrent_prefetch_requests)
return self._transfer_with_callback(
reader=fr, writer=fl, file_size=file_size, callback=callback
)
def get(
self,
remotepath,
localpath,
callback=None,
prefetch=True,
max_concurrent_prefetch_requests=None,
):
"""
Copy a remote file (``remotepath``) from the SFTP server to the local
host as ``localpath``. Any exception raised by operations will be
passed through. This method is primarily provided as a convenience.
:param str remotepath: the remote file to copy
:param str localpath: the destination path on the local host
:param callable callback:
optional callback function (form: ``func(int, int)``) that accepts
the bytes transferred so far and the total bytes to be transferred
:param bool prefetch:
controls whether prefetching is performed (default: True)
:param int max_concurrent_prefetch_requests:
The maximum number of concurrent read requests to prefetch.
When this is ``None`` (the default), do not limit the number of
concurrent prefetch requests. Note: OpenSSH's sftp internally
imposes a limit of 64 concurrent requests, while Paramiko imposes
no limit by default; consider setting a limit if a file can be
successfully received with sftp but hangs with Paramiko.
.. versionadded:: 1.4
.. versionchanged:: 1.7.4
Added the ``callback`` param
.. versionchanged:: 2.8
Added the ``prefetch`` keyword argument.
.. versionchanged:: 3.3
Added ``max_concurrent_prefetch_requests``.
"""
with open(localpath, "wb") as fl:
size = self.getfo(
remotepath,
fl,
callback,
prefetch,
max_concurrent_prefetch_requests,
)
s = os.stat(localpath)
if s.st_size != size:
raise IOError(
"size mismatch in get! {} != {}".format(s.st_size, size)
)
# ...internals...
def _request(self, t, *args):
num = self._async_request(type(None), t, *args)
return self._read_response(num)
def _async_request(self, fileobj, t, *args):
# this method may be called from other threads (prefetch)
self._lock.acquire()
try:
msg = Message()
msg.add_int(self.request_number)
for item in args:
if isinstance(item, int64):
msg.add_int64(item)
elif isinstance(item, int):
msg.add_int(item)
elif isinstance(item, SFTPAttributes):
item._pack(msg)
else:
# For all other types, rely on as_string() to either coerce
# to bytes before writing or raise a suitable exception.
msg.add_string(item)
num = self.request_number
self._expecting[num] = fileobj
self.request_number += 1
finally:
self._lock.release()
self._send_packet(t, msg)
return num
    def _read_response(self, waitfor=None):
        """
        Read and dispatch incoming SFTP responses.

        If ``waitfor`` is a request number, keep reading packets (handing
        asynchronous responses to their file objects) until that request's
        response arrives, then return its ``(type, Message)`` pair; error
        statuses are converted to exceptions via `_convert_status`.  If
        ``waitfor`` is ``None``, process at most one packet and return
        ``(None, None)``.
        """
        while True:
            try:
                t, data = self._read_packet()
            except EOFError as e:
                raise SSHException("Server connection dropped: {}".format(e))
            msg = Message(data)
            num = msg.get_int()
            # Lock guards self._expecting, which is shared with the threads
            # issuing requests via _async_request.
            self._lock.acquire()
            try:
                if num not in self._expecting:
                    # might be response for a file that was closed before
                    # responses came back
                    self._log(DEBUG, "Unexpected response #{}".format(num))
                    if waitfor is None:
                        # just doing a single check
                        break
                    continue
                fileobj = self._expecting[num]
                del self._expecting[num]
            finally:
                self._lock.release()
            if num == waitfor:
                # synchronous
                if t == CMD_STATUS:
                    self._convert_status(msg)
                return t, msg
            # can not rewrite this to deal with E721, either as a None check
            # nor as not an instance of None or NoneType
            if fileobj is not type(None):  # noqa
                fileobj._async_response(t, msg, num)
            if waitfor is None:
                # just doing a single check
                break
        return None, None
    def _finish_responses(self, fileobj):
        """
        Drain responses until none remain outstanding for ``fileobj``, then
        re-raise any exception the file object recorded asynchronously.
        """
        while fileobj in self._expecting.values():
            self._read_response()
        fileobj._check_exception()
def _convert_status(self, msg):
"""
Raises EOFError or IOError on error status; otherwise does nothing.
"""
code = msg.get_int()
text = msg.get_text()
if code == SFTP_OK:
return
elif code == SFTP_EOF:
raise EOFError(text)
elif code == SFTP_NO_SUCH_FILE:
# clever idea from john a. meinel: map the error codes to errno
raise IOError(errno.ENOENT, text)
elif code == SFTP_PERMISSION_DENIED:
raise IOError(errno.EACCES, text)
else:
raise IOError(text)
def _adjust_cwd(self, path):
"""
Return an adjusted path if we're emulating a "current working
directory" for the server.
"""
path = b(path)
if self._cwd is None:
return path
if len(path) and path[0:1] == b_slash:
# absolute path
return path
if self._cwd == b_slash:
return self._cwd + path
return self._cwd + b_slash + path
class SFTP(SFTPClient):
    """
    Alias of `.SFTPClient`, retained for backwards compatibility.
    """

    pass
| 35,855 | Python | .py | 845 | 32.19645 | 85 | 0.588593 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
607 | win_pageant.py | paramiko_paramiko/paramiko/win_pageant.py | # Copyright (C) 2005 John Arbash-Meinel <[email protected]>
# Modified up by: Todd Whiteman <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Functions for communicating with Pageant, the basic windows ssh agent program.
"""
import array
import ctypes.wintypes
import platform
import struct
from paramiko.common import zero_byte
from paramiko.util import b
import _thread as thread
from . import _winapi
_AGENT_COPYDATA_ID = 0x804E50BA
_AGENT_MAX_MSGLEN = 8192
# Note: The WM_COPYDATA value is pulled from win32con, as a workaround
# so we do not have to import this huge library just for this one variable.
win32con_WM_COPYDATA = 74
def _get_pageant_window_object():
    # Pageant registers a hidden window whose class and title are both
    # "Pageant"; FindWindowA returns its handle, or 0 when none exists.
    user32 = ctypes.windll.user32
    return user32.FindWindowA(b"Pageant", b"Pageant")
def can_talk_to_agent():
    """
    Return whether a "Pageant" agent appears to be reachable.

    True only when the required ctypes machinery is usable and a Pageant
    window currently exists.
    """
    hwnd = _get_pageant_window_object()
    return bool(hwnd)
# COPYDATASTRUCT's first field is pointer-sized, so pick the integer type
# matching the interpreter's bitness.
ULONG_PTR = (
    ctypes.c_uint64
    if platform.architecture()[0] == "64bit"
    else ctypes.c_uint32
)
class COPYDATASTRUCT(ctypes.Structure):
    """
    ctypes implementation of
    http://msdn.microsoft.com/en-us/library/windows/desktop/ms649010%28v=vs.85%29.aspx
    """

    # Field names/types must match the Win32 COPYDATASTRUCT layout exactly:
    # dwData is pointer-sized (hence ULONG_PTR), cbData a DWORD byte count,
    # and lpData a raw pointer to the payload.
    _fields_ = [
        ("num_data", ULONG_PTR),
        ("data_size", ctypes.wintypes.DWORD),
        ("data_loc", ctypes.c_void_p),
    ]
def _query_pageant(msg):
    """
    Communication with the Pageant process is done through a shared
    memory-mapped file.

    Returns the raw response bytes (length prefix included), or ``None``
    when Pageant is not running or rejected the request.
    """
    hwnd = _get_pageant_window_object()
    if not hwnd:
        # Pageant isn't running anymore; signal failure to the caller.
        return None

    # create a name for the mmap; include the thread id so concurrent
    # requests from different threads don't collide
    map_name = f"PageantRequest{thread.get_ident():08x}"

    pymap = _winapi.MemoryMap(
        map_name, _AGENT_MAX_MSGLEN, _winapi.get_security_attributes_for_user()
    )
    with pymap:
        pymap.write(msg)
        # Create an array buffer containing the mapped filename
        # (NUL-terminated, as Pageant expects a C string)
        char_buffer = array.array("b", b(map_name) + zero_byte)  # noqa
        char_buffer_address, char_buffer_size = char_buffer.buffer_info()
        # Create a string to use for the SendMessage function call
        cds = COPYDATASTRUCT(
            _AGENT_COPYDATA_ID, char_buffer_size, char_buffer_address
        )

        # SendMessage blocks until Pageant has processed the request and
        # written its reply back into the shared memory map.
        response = ctypes.windll.user32.SendMessageA(
            hwnd, win32con_WM_COPYDATA, ctypes.sizeof(cds), ctypes.byref(cds)
        )

        if response > 0:
            pymap.seek(0)
            # First 4 bytes are a big-endian length prefix for the payload.
            datalen = pymap.read(4)
            retlen = struct.unpack(">I", datalen)[0]
            return datalen + pymap.read(retlen)
        return None
class PageantConnection:
    """
    Mock "connection" to an agent which roughly approximates the behavior of
    a unix local-domain socket (as used by Agent). Requests are sent to the
    pageant daemon via special Windows magick, and responses are buffered back
    for subsequent reads.
    """

    def __init__(self):
        # Buffered response bytes, or None when nothing is pending.
        self._response = None

    def send(self, data):
        self._response = _query_pageant(data)

    def recv(self, n):
        """
        Return up to ``n`` bytes of the buffered response (``b""`` when
        nothing is buffered), consuming them from the buffer.
        """
        if self._response is None:
            # Bug fix: previously returned the str "" from a bytes stream;
            # callers expect bytes, matching socket.recv semantics.
            return b""
        ret = self._response[:n]
        self._response = self._response[n:]
        if self._response == b"":
            # Fully consumed; reset the sentinel. (The old comparison
            # against the str "" never matched a bytes buffer on Python 3.)
            self._response = None
        return ret

    def close(self):
        pass
| 4,177 | Python | .py | 110 | 32.718182 | 86 | 0.69126 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
608 | rsakey.py | paramiko_paramiko/paramiko/rsakey.py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
RSA keys.
"""
from cryptography.exceptions import InvalidSignature, UnsupportedAlgorithm
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import rsa, padding
from paramiko.message import Message
from paramiko.pkey import PKey
from paramiko.ssh_exception import SSHException
class RSAKey(PKey):
    """
    Representation of an RSA key which can be used to sign and verify SSH2
    data.
    """

    name = "ssh-rsa"
    # Map of SSH signature algorithm identifiers (plain and cert variants)
    # to the hash class each one uses.
    HASHES = {
        "ssh-rsa": hashes.SHA1,
        "[email protected]": hashes.SHA1,
        "rsa-sha2-256": hashes.SHA256,
        "[email protected]": hashes.SHA256,
        "rsa-sha2-512": hashes.SHA512,
        "[email protected]": hashes.SHA512,
    }

    def __init__(
        self,
        msg=None,
        data=None,
        filename=None,
        password=None,
        key=None,
        file_obj=None,
    ):
        """
        Load an RSA key from one of several mutually exclusive sources:
        an open private-key file object, a private-key file path, an
        already-constructed ``cryptography`` key object, or an SSH wire
        blob (as a `.Message` or raw bytes).
        """
        self.key = None
        self.public_blob = None
        if file_obj is not None:
            self._from_private_key(file_obj, password)
            return
        if filename is not None:
            self._from_private_key_file(filename, password)
            return
        if (msg is None) and (data is not None):
            msg = Message(data)
        if key is not None:
            self.key = key
        else:
            self._check_type_and_load_cert(
                msg=msg,
                # NOTE: this does NOT change when using rsa2 signatures; it's
                # purely about key loading, not exchange or verification
                key_type=self.name,
                cert_type="[email protected]",
            )
            # Wire format carries e then n as mpints.
            self.key = rsa.RSAPublicNumbers(
                e=msg.get_mpint(), n=msg.get_mpint()
            ).public_key(default_backend())

    @classmethod
    def identifiers(cls):
        """Return all SSH algorithm identifiers this key type handles."""
        return list(cls.HASHES.keys())

    @property
    def size(self):
        # Modulus size in bits.
        return self.key.key_size

    @property
    def public_numbers(self):
        """Return the RSA public numbers (e, n) for this key."""
        if isinstance(self.key, rsa.RSAPrivateKey):
            return self.key.private_numbers().public_numbers
        else:
            return self.key.public_numbers()

    def asbytes(self):
        """Return the SSH wire-format public-key blob."""
        m = Message()
        m.add_string(self.name)
        m.add_mpint(self.public_numbers.e)
        m.add_mpint(self.public_numbers.n)
        return m.asbytes()

    def __str__(self):
        # NOTE: see #853 to explain some legacy behavior.
        # TODO 4.0: replace with a nice clean fingerprint display or something
        return self.asbytes().decode("utf8", errors="ignore")

    @property
    def _fields(self):
        # Tuple used by PKey for equality/hashing.
        return (self.get_name(), self.public_numbers.e, self.public_numbers.n)

    def get_name(self):
        return self.name

    def get_bits(self):
        return self.size

    def can_sign(self):
        # Only private keys can produce signatures.
        return isinstance(self.key, rsa.RSAPrivateKey)

    def sign_ssh_data(self, data, algorithm=None):
        """
        Sign ``data`` and return the signature wrapped in an SSH `.Message`
        (algorithm identifier + raw signature).
        """
        if algorithm is None:
            algorithm = self.name
        sig = self.key.sign(
            data,
            padding=padding.PKCS1v15(),
            # HASHES being just a map from long identifier to either SHA1 or
            # SHA256 - cert'ness is not truly relevant.
            algorithm=self.HASHES[algorithm](),
        )
        m = Message()
        # And here again, cert'ness is irrelevant, so it is stripped out.
        m.add_string(algorithm.replace("[email protected]", ""))
        m.add_string(sig)
        return m

    def verify_ssh_sig(self, data, msg):
        """
        Return True if ``msg`` contains a valid signature over ``data``
        by this key; False for unknown algorithms or bad signatures.
        """
        sig_algorithm = msg.get_text()
        if sig_algorithm not in self.HASHES:
            return False
        key = self.key
        if isinstance(key, rsa.RSAPrivateKey):
            key = key.public_key()

        # NOTE: pad received signature with leading zeros, key.verify()
        # expects a signature of key size (e.g. PuTTY doesn't pad)
        sign = msg.get_binary()
        diff = key.key_size - len(sign) * 8
        if diff > 0:
            sign = b"\x00" * ((diff + 7) // 8) + sign

        try:
            key.verify(
                sign, data, padding.PKCS1v15(), self.HASHES[sig_algorithm]()
            )
        except InvalidSignature:
            return False
        else:
            return True

    def write_private_key_file(self, filename, password=None):
        """Write this private key to ``filename`` in PEM format."""
        self._write_private_key_file(
            filename,
            self.key,
            serialization.PrivateFormat.TraditionalOpenSSL,
            password=password,
        )

    def write_private_key(self, file_obj, password=None):
        """Write this private key to an open file object in PEM format."""
        self._write_private_key(
            file_obj,
            self.key,
            serialization.PrivateFormat.TraditionalOpenSSL,
            password=password,
        )

    @staticmethod
    def generate(bits, progress_func=None):
        """
        Generate a new private RSA key.  This factory function can be used to
        generate a new host key or authentication key.

        :param int bits: number of bits the generated key should be.
        :param progress_func: Unused
        :return: new `.RSAKey` private key
        """
        key = rsa.generate_private_key(
            public_exponent=65537, key_size=bits, backend=default_backend()
        )
        return RSAKey(key=key)

    # ...internals...

    def _from_private_key_file(self, filename, password):
        # Read + decrypt the on-disk key, then decode into self.key.
        data = self._read_private_key_file("RSA", filename, password)
        self._decode_key(data)

    def _from_private_key(self, file_obj, password):
        # Same as _from_private_key_file, but from an open file object.
        data = self._read_private_key("RSA", file_obj, password)
        self._decode_key(data)

    def _decode_key(self, data):
        """
        Decode a ``(format_id, bytes)`` pair from the PKey readers into a
        ``cryptography`` private-key object stored on ``self.key``.
        """
        pkformat, data = data
        if pkformat == self._PRIVATE_KEY_FORMAT_ORIGINAL:
            # Classic PEM/DER encoding: let cryptography parse it.
            try:
                key = serialization.load_der_private_key(
                    data, password=None, backend=default_backend()
                )
            except (ValueError, TypeError, UnsupportedAlgorithm) as e:
                raise SSHException(str(e))
        elif pkformat == self._PRIVATE_KEY_FORMAT_OPENSSH:
            # OpenSSH format: unpack raw integers and derive the CRT
            # parameters (dmp1/dmq1) that the encoding omits.
            n, e, d, iqmp, p, q = self._uint32_cstruct_unpack(data, "iiiiii")
            public_numbers = rsa.RSAPublicNumbers(e=e, n=n)
            key = rsa.RSAPrivateNumbers(
                p=p,
                q=q,
                d=d,
                dmp1=d % (p - 1),
                dmq1=d % (q - 1),
                iqmp=iqmp,
                public_numbers=public_numbers,
            ).private_key(default_backend())
        else:
            self._got_bad_key_format_id(pkformat)
        assert isinstance(key, rsa.RSAPrivateKey)
        self.key = key
| 7,546 | Python | .py | 199 | 28.979899 | 79 | 0.611422 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
609 | pipe.py | paramiko_paramiko/paramiko/pipe.py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Abstraction of a one-way pipe where the read end can be used in
`select.select`. Normally this is trivial, but Windows makes it nearly
impossible.
The pipe acts like an Event, which can be set or cleared. When set, the pipe
will trigger as readable in `select <select.select>`.
"""
import sys
import os
import socket
def make_pipe():
    """Return the pipe implementation appropriate for this platform."""
    if sys.platform[:3] == "win":
        return WindowsPipe()
    return PosixPipe()
class PosixPipe:
    """
    Event-like one-way pipe for POSIX systems: ``set`` makes the read end
    readable (so it triggers in select()), ``clear`` drains it again.
    """

    def __init__(self):
        self._rfd, self._wfd = os.pipe()
        self._set = False
        self._forever = False
        self._closed = False

    def close(self):
        os.close(self._rfd)
        os.close(self._wfd)
        # used for unit tests:
        self._closed = True

    def fileno(self):
        """Return the read-end descriptor, suitable for select()."""
        return self._rfd

    def clear(self):
        # A "forever" pipe stays set; an unset pipe has nothing to drain.
        if self._set and not self._forever:
            os.read(self._rfd, 1)
            self._set = False

    def set(self):
        # Setting is idempotent, and a no-op once closed.
        if not self._set and not self._closed:
            self._set = True
            os.write(self._wfd, b"*")

    def set_forever(self):
        self._forever = True
        self.set()
class WindowsPipe:
    """
    On Windows, only an OS-level "WinSock" may be used in select(), but reads
    and writes must be to the actual socket object.
    """

    def __init__(self):
        # Build a loopback socket pair by hand: listen, connect, accept.
        listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        listener.bind(("127.0.0.1", 0))
        listener.listen(1)
        # need to save sockets in _rsock/_wsock so they don't get closed
        self._rsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self._rsock.connect(("127.0.0.1", listener.getsockname()[1]))
        self._wsock, _ = listener.accept()
        listener.close()
        self._set = False
        self._forever = False
        self._closed = False

    def close(self):
        self._rsock.close()
        self._wsock.close()
        # used for unit tests:
        self._closed = True

    def fileno(self):
        """Return the read socket's descriptor, suitable for select()."""
        return self._rsock.fileno()

    def clear(self):
        # A "forever" pipe stays set; an unset pipe has nothing to drain.
        if self._set and not self._forever:
            self._rsock.recv(1)
            self._set = False

    def set(self):
        # Setting is idempotent, and a no-op once closed.
        if not self._set and not self._closed:
            self._set = True
            self._wsock.send(b"*")

    def set_forever(self):
        self._forever = True
        self.set()
class OrPipe:
    """
    One half of an "or"d pair wrapping a real pipe (see `make_or_pipe`).
    The underlying pipe is only touched when the partner half wouldn't
    keep it set on its own.
    """

    def __init__(self, pipe):
        self._set = False
        self._partner = None
        self._pipe = pipe

    def set(self):
        partner_covers = self._partner._set
        self._set = True
        if not partner_covers:
            self._pipe.set()

    def clear(self):
        self._set = False
        if self._partner._set:
            # Partner still needs the pipe set; leave it alone.
            return
        self._pipe.clear()
def make_or_pipe(pipe):
    """
    Wrap ``pipe`` in two pipe-like halves that are "or"d together: setting
    either half sets the wrapped pipe, and the wrapped pipe clears only
    once both halves have been cleared.
    """
    left, right = OrPipe(pipe), OrPipe(pipe)
    left._partner = right
    right._partner = left
    return left, right
| 3,902 | Python | .py | 120 | 26.283333 | 79 | 0.628663 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
610 | transport.py | paramiko_paramiko/paramiko/transport.py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
# Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Core protocol implementation
"""
import os
import socket
import sys
import threading
import time
import weakref
from hashlib import md5, sha1, sha256, sha512
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.ciphers import (
algorithms,
Cipher,
modes,
aead,
)
import paramiko
from paramiko import util
from paramiko.auth_handler import AuthHandler, AuthOnlyHandler
from paramiko.ssh_gss import GSSAuth
from paramiko.channel import Channel
from paramiko.common import (
xffffffff,
cMSG_CHANNEL_OPEN,
cMSG_IGNORE,
cMSG_GLOBAL_REQUEST,
DEBUG,
MSG_KEXINIT,
MSG_IGNORE,
MSG_DISCONNECT,
MSG_DEBUG,
ERROR,
WARNING,
cMSG_UNIMPLEMENTED,
INFO,
cMSG_KEXINIT,
cMSG_NEWKEYS,
MSG_NEWKEYS,
cMSG_REQUEST_SUCCESS,
cMSG_REQUEST_FAILURE,
CONNECTION_FAILED_CODE,
OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED,
OPEN_SUCCEEDED,
cMSG_CHANNEL_OPEN_FAILURE,
cMSG_CHANNEL_OPEN_SUCCESS,
MSG_GLOBAL_REQUEST,
MSG_REQUEST_SUCCESS,
MSG_REQUEST_FAILURE,
cMSG_SERVICE_REQUEST,
MSG_SERVICE_ACCEPT,
MSG_CHANNEL_OPEN_SUCCESS,
MSG_CHANNEL_OPEN_FAILURE,
MSG_CHANNEL_OPEN,
MSG_CHANNEL_SUCCESS,
MSG_CHANNEL_FAILURE,
MSG_CHANNEL_DATA,
MSG_CHANNEL_EXTENDED_DATA,
MSG_CHANNEL_WINDOW_ADJUST,
MSG_CHANNEL_REQUEST,
MSG_CHANNEL_EOF,
MSG_CHANNEL_CLOSE,
MIN_WINDOW_SIZE,
MIN_PACKET_SIZE,
MAX_WINDOW_SIZE,
DEFAULT_WINDOW_SIZE,
DEFAULT_MAX_PACKET_SIZE,
HIGHEST_USERAUTH_MESSAGE_ID,
MSG_UNIMPLEMENTED,
MSG_NAMES,
MSG_EXT_INFO,
cMSG_EXT_INFO,
byte_ord,
)
from paramiko.compress import ZlibCompressor, ZlibDecompressor
from paramiko.dsskey import DSSKey
from paramiko.ed25519key import Ed25519Key
from paramiko.kex_curve25519 import KexCurve25519
from paramiko.kex_gex import KexGex, KexGexSHA256
from paramiko.kex_group1 import KexGroup1
from paramiko.kex_group14 import KexGroup14, KexGroup14SHA256
from paramiko.kex_group16 import KexGroup16SHA512
from paramiko.kex_ecdh_nist import KexNistp256, KexNistp384, KexNistp521
from paramiko.kex_gss import KexGSSGex, KexGSSGroup1, KexGSSGroup14
from paramiko.message import Message
from paramiko.packet import Packetizer, NeedRekeyException
from paramiko.primes import ModulusPack
from paramiko.rsakey import RSAKey
from paramiko.ecdsakey import ECDSAKey
from paramiko.server import ServerInterface
from paramiko.sftp_client import SFTPClient
from paramiko.ssh_exception import (
BadAuthenticationType,
ChannelException,
IncompatiblePeer,
MessageOrderError,
ProxyCommandFailure,
SSHException,
)
from paramiko.util import (
ClosingContextManager,
clamp_value,
b,
)
# TripleDES is moving from `cryptography.hazmat.primitives.ciphers.algorithms`
# in cryptography>=43.0.0 to `cryptography.hazmat.decrepit.ciphers.algorithms`
# It will be removed from `cryptography.hazmat.primitives.ciphers.algorithms`
# in cryptography==48.0.0.
#
# Source References:
# - https://github.com/pyca/cryptography/commit/722a6393e61b3ac
# - https://github.com/pyca/cryptography/pull/11407/files
try:
from cryptography.hazmat.decrepit.ciphers.algorithms import TripleDES
except ImportError:
from cryptography.hazmat.primitives.ciphers.algorithms import TripleDES
# for thread cleanup
_active_threads = []
def _join_lingering_threads():
    # Ask every still-registered Transport thread to shut down at
    # interpreter exit (registered below via atexit).
    for lingering in _active_threads:
        lingering.stop_thread()
import atexit
atexit.register(_join_lingering_threads)
class Transport(threading.Thread, ClosingContextManager):
"""
An SSH Transport attaches to a stream (usually a socket), negotiates an
encrypted session, authenticates, and then creates stream tunnels, called
`channels <.Channel>`, across the session. Multiple channels can be
multiplexed across a single session (and often are, in the case of port
forwardings).
Instances of this class may be used as context managers.
"""
_ENCRYPT = object()
_DECRYPT = object()
_PROTO_ID = "2.0"
_CLIENT_ID = "paramiko_{}".format(paramiko.__version__)
# These tuples of algorithm identifiers are in preference order; do not
# reorder without reason!
# NOTE: if you need to modify these, we suggest leveraging the
# `disabled_algorithms` constructor argument (also available in SSHClient)
# instead of monkeypatching or subclassing.
_preferred_ciphers = (
"aes128-ctr",
"aes192-ctr",
"aes256-ctr",
"aes128-cbc",
"aes192-cbc",
"aes256-cbc",
"3des-cbc",
"[email protected]",
"[email protected]",
)
_preferred_macs = (
"hmac-sha2-256",
"hmac-sha2-512",
"[email protected]",
"[email protected]",
"hmac-sha1",
"hmac-md5",
"hmac-sha1-96",
"hmac-md5-96",
)
# ~= HostKeyAlgorithms in OpenSSH land
_preferred_keys = (
"ssh-ed25519",
"ecdsa-sha2-nistp256",
"ecdsa-sha2-nistp384",
"ecdsa-sha2-nistp521",
"rsa-sha2-512",
"rsa-sha2-256",
"ssh-rsa",
"ssh-dss",
)
# ~= PubKeyAcceptedAlgorithms
_preferred_pubkeys = (
"ssh-ed25519",
"ecdsa-sha2-nistp256",
"ecdsa-sha2-nistp384",
"ecdsa-sha2-nistp521",
"rsa-sha2-512",
"rsa-sha2-256",
"ssh-rsa",
"ssh-dss",
)
_preferred_kex = (
"ecdh-sha2-nistp256",
"ecdh-sha2-nistp384",
"ecdh-sha2-nistp521",
"diffie-hellman-group16-sha512",
"diffie-hellman-group-exchange-sha256",
"diffie-hellman-group14-sha256",
"diffie-hellman-group-exchange-sha1",
"diffie-hellman-group14-sha1",
"diffie-hellman-group1-sha1",
)
if KexCurve25519.is_available():
_preferred_kex = ("[email protected]",) + _preferred_kex
_preferred_gsskex = (
"gss-gex-sha1-toWM5Slw5Ew8Mqkay+al2g==",
"gss-group14-sha1-toWM5Slw5Ew8Mqkay+al2g==",
"gss-group1-sha1-toWM5Slw5Ew8Mqkay+al2g==",
)
_preferred_compression = ("none",)
_cipher_info = {
"aes128-ctr": {
"class": algorithms.AES,
"mode": modes.CTR,
"block-size": 16,
"key-size": 16,
},
"aes192-ctr": {
"class": algorithms.AES,
"mode": modes.CTR,
"block-size": 16,
"key-size": 24,
},
"aes256-ctr": {
"class": algorithms.AES,
"mode": modes.CTR,
"block-size": 16,
"key-size": 32,
},
"aes128-cbc": {
"class": algorithms.AES,
"mode": modes.CBC,
"block-size": 16,
"key-size": 16,
},
"aes192-cbc": {
"class": algorithms.AES,
"mode": modes.CBC,
"block-size": 16,
"key-size": 24,
},
"aes256-cbc": {
"class": algorithms.AES,
"mode": modes.CBC,
"block-size": 16,
"key-size": 32,
},
"3des-cbc": {
"class": TripleDES,
"mode": modes.CBC,
"block-size": 8,
"key-size": 24,
},
"[email protected]": {
"class": aead.AESGCM,
"block-size": 16,
"iv-size": 12,
"key-size": 16,
"is_aead": True,
},
"[email protected]": {
"class": aead.AESGCM,
"block-size": 16,
"iv-size": 12,
"key-size": 32,
"is_aead": True,
},
}
_mac_info = {
"hmac-sha1": {"class": sha1, "size": 20},
"hmac-sha1-96": {"class": sha1, "size": 12},
"hmac-sha2-256": {"class": sha256, "size": 32},
"[email protected]": {"class": sha256, "size": 32},
"hmac-sha2-512": {"class": sha512, "size": 64},
"[email protected]": {"class": sha512, "size": 64},
"hmac-md5": {"class": md5, "size": 16},
"hmac-md5-96": {"class": md5, "size": 12},
}
_key_info = {
# TODO: at some point we will want to drop this as it's no longer
# considered secure due to using SHA-1 for signatures. OpenSSH 8.8 no
# longer supports it. Question becomes at what point do we want to
# prevent users with older setups from using this?
"ssh-rsa": RSAKey,
"[email protected]": RSAKey,
"rsa-sha2-256": RSAKey,
"[email protected]": RSAKey,
"rsa-sha2-512": RSAKey,
"[email protected]": RSAKey,
"ssh-dss": DSSKey,
"[email protected]": DSSKey,
"ecdsa-sha2-nistp256": ECDSAKey,
"[email protected]": ECDSAKey,
"ecdsa-sha2-nistp384": ECDSAKey,
"[email protected]": ECDSAKey,
"ecdsa-sha2-nistp521": ECDSAKey,
"[email protected]": ECDSAKey,
"ssh-ed25519": Ed25519Key,
"[email protected]": Ed25519Key,
}
_kex_info = {
"diffie-hellman-group1-sha1": KexGroup1,
"diffie-hellman-group14-sha1": KexGroup14,
"diffie-hellman-group-exchange-sha1": KexGex,
"diffie-hellman-group-exchange-sha256": KexGexSHA256,
"diffie-hellman-group14-sha256": KexGroup14SHA256,
"diffie-hellman-group16-sha512": KexGroup16SHA512,
"gss-group1-sha1-toWM5Slw5Ew8Mqkay+al2g==": KexGSSGroup1,
"gss-group14-sha1-toWM5Slw5Ew8Mqkay+al2g==": KexGSSGroup14,
"gss-gex-sha1-toWM5Slw5Ew8Mqkay+al2g==": KexGSSGex,
"ecdh-sha2-nistp256": KexNistp256,
"ecdh-sha2-nistp384": KexNistp384,
"ecdh-sha2-nistp521": KexNistp521,
}
if KexCurve25519.is_available():
_kex_info["[email protected]"] = KexCurve25519
_compression_info = {
# [email protected] is just zlib, but only turned on after a successful
# authentication. openssh servers may only offer this type because
# they've had troubles with security holes in zlib in the past.
"[email protected]": (ZlibCompressor, ZlibDecompressor),
"zlib": (ZlibCompressor, ZlibDecompressor),
"none": (None, None),
}
_modulus_pack = None
_active_check_timeout = 0.1
    def __init__(
        self,
        sock,
        default_window_size=DEFAULT_WINDOW_SIZE,
        default_max_packet_size=DEFAULT_MAX_PACKET_SIZE,
        gss_kex=False,
        gss_deleg_creds=True,
        disabled_algorithms=None,
        server_sig_algs=True,
        strict_kex=True,
        packetizer_class=None,
    ):
        """
        Create a new SSH session over an existing socket, or socket-like
        object. This only creates the `.Transport` object; it doesn't begin
        the SSH session yet. Use `connect` or `start_client` to begin a client
        session, or `start_server` to begin a server session.

        If the object is not actually a socket, it must have the following
        methods:

        - ``send(bytes)``: Writes from 1 to ``len(bytes)`` bytes, and returns
          an int representing the number of bytes written. Returns
          0 or raises ``EOFError`` if the stream has been closed.
        - ``recv(int)``: Reads from 1 to ``int`` bytes and returns them as a
          string. Returns 0 or raises ``EOFError`` if the stream has been
          closed.
        - ``close()``: Closes the socket.
        - ``settimeout(n)``: Sets a (float) timeout on I/O operations.

        For ease of use, you may also pass in an address (as a tuple) or a host
        string as the ``sock`` argument. (A host string is a hostname with an
        optional port (separated by ``":"``) which will be converted into a
        tuple of ``(hostname, port)``.) A socket will be connected to this
        address and used for communication. Exceptions from the ``socket``
        call may be thrown in this case.

        .. note::
            Modifying the window and packet sizes might have adverse
            effects on your channels created from this transport. The default
            values are the same as in the OpenSSH code base and have been
            battle tested.

        :param socket sock:
            a socket or socket-like object to create the session over.
        :param int default_window_size:
            sets the default window size on the transport. (defaults to
            2097152)
        :param int default_max_packet_size:
            sets the default max packet size on the transport. (defaults to
            32768)
        :param bool gss_kex:
            Whether to enable GSSAPI key exchange when GSSAPI is in play.
            Default: ``False``.
        :param bool gss_deleg_creds:
            Whether to enable GSSAPI credential delegation when GSSAPI is in
            play. Default: ``True``.
        :param dict disabled_algorithms:
            If given, must be a dictionary mapping algorithm type to an
            iterable of algorithm identifiers, which will be disabled for the
            lifetime of the transport.
            Keys should match the last word in the class' builtin algorithm
            tuple attributes, such as ``"ciphers"`` to disable names within
            ``_preferred_ciphers``; or ``"kex"`` to disable something defined
            inside ``_preferred_kex``. Values should exactly match members of
            the matching attribute.
            For example, if you need to disable
            ``diffie-hellman-group16-sha512`` key exchange (perhaps because
            your code talks to a server which implements it differently from
            Paramiko), specify ``disabled_algorithms={"kex":
            ["diffie-hellman-group16-sha512"]}``.
        :param bool server_sig_algs:
            Whether to send an extra message to compatible clients, in server
            mode, with a list of supported pubkey algorithms. Default:
            ``True``.
        :param bool strict_kex:
            Whether to advertise (and implement, if client also advertises
            support for) a "strict kex" mode for safer handshaking. Default:
            ``True``.
        :param packetizer_class:
            Which class to use for instantiating the internal packet handler.
            Default: ``None`` (i.e.: use `Packetizer` as normal).

        .. versionchanged:: 1.15
            Added the ``default_window_size`` and ``default_max_packet_size``
            arguments.
        .. versionchanged:: 1.15
            Added the ``gss_kex`` and ``gss_deleg_creds`` kwargs.
        .. versionchanged:: 2.6
            Added the ``disabled_algorithms`` kwarg.
        .. versionchanged:: 2.9
            Added the ``server_sig_algs`` kwarg.
        .. versionchanged:: 3.4
            Added the ``strict_kex`` kwarg.
        .. versionchanged:: 3.4
            Added the ``packetizer_class`` kwarg.
        """
        self.active = False
        self.hostname = None
        # Extension info received from the peer (MSG_EXT_INFO), if any.
        self.server_extensions = {}
        # "strict kex" negotiation state: what we offer vs. what was agreed.
        self.advertise_strict_kex = strict_kex
        self.agreed_on_strict_kex = False
        # TODO: these two overrides on sock's type should go away sometime, too
        # many ways to do it!
        if isinstance(sock, str):
            # convert "host:port" into (host, port)
            hl = sock.split(":", 1)
            self.hostname = hl[0]
            if len(hl) == 1:
                sock = (hl[0], 22)
            else:
                sock = (hl[0], int(hl[1]))
        if type(sock) is tuple:
            # connect to the given (host, port)
            hostname, port = sock
            self.hostname = hostname
            reason = "No suitable address family"
            addrinfos = socket.getaddrinfo(
                hostname, port, socket.AF_UNSPEC, socket.SOCK_STREAM
            )
            for family, socktype, proto, canonname, sockaddr in addrinfos:
                if socktype == socket.SOCK_STREAM:
                    af = family
                    # addr = sockaddr
                    sock = socket.socket(af, socket.SOCK_STREAM)
                    try:
                        sock.connect((hostname, port))
                    except socket.error as e:
                        # remember the last failure; try the next addrinfo
                        reason = str(e)
                    else:
                        break
            else:
                # exhausted every addrinfo without a successful connect()
                raise SSHException(
                    "Unable to connect to {}: {}".format(hostname, reason)
                )
        # okay, normal socket-ish flow here...
        threading.Thread.__init__(self)
        self.daemon = True
        self.sock = sock
        # we set the timeout so we can check self.active periodically to
        # see if we should bail. socket.timeout exception is never propagated.
        self.sock.settimeout(self._active_check_timeout)
        # negotiated crypto parameters
        self.packetizer = (packetizer_class or Packetizer)(sock)
        self.local_version = "SSH-" + self._PROTO_ID + "-" + self._CLIENT_ID
        self.remote_version = ""
        self.local_cipher = self.remote_cipher = ""
        self.local_kex_init = self.remote_kex_init = None
        self.local_mac = self.remote_mac = None
        self.local_compression = self.remote_compression = None
        self.session_id = None
        self.host_key_type = None
        self.host_key = None
        # GSS-API / SSPI Key Exchange
        self.use_gss_kex = gss_kex
        # This will be set to True if GSS-API Key Exchange was performed
        self.gss_kex_used = False
        self.kexgss_ctxt = None
        self.gss_host = None
        if self.use_gss_kex:
            self.kexgss_ctxt = GSSAuth("gssapi-keyex", gss_deleg_creds)
            # GSS kex algorithms take priority over the regular ones
            self._preferred_kex = self._preferred_gsskex + self._preferred_kex
        # state used during negotiation
        self.kex_engine = None
        self.H = None
        self.K = None
        self.initial_kex_done = False
        self.in_kex = False
        self.authenticated = False
        self._expected_packet = tuple()
        # synchronization (always higher level than write_lock)
        self.lock = threading.Lock()
        # tracking open channels
        self._channels = ChannelMap()
        self.channel_events = {}  # (id -> Event)
        self.channels_seen = {}  # (id -> True)
        self._channel_counter = 0
        self.default_max_packet_size = default_max_packet_size
        self.default_window_size = default_window_size
        self._forward_agent_handler = None
        self._x11_handler = None
        self._tcp_handler = None
        self.saved_exception = None
        self.clear_to_send = threading.Event()
        self.clear_to_send_lock = threading.Lock()
        self.clear_to_send_timeout = 30.0
        self.log_name = "paramiko.transport"
        self.logger = util.get_logger(self.log_name)
        self.packetizer.set_log(self.logger)
        self.auth_handler = None
        # response Message from an arbitrary global request
        self.global_response = None
        # user-defined event callbacks
        self.completion_event = None
        # how long (seconds) to wait for the SSH banner
        self.banner_timeout = 15
        # how long (seconds) to wait for the handshake to finish after SSH
        # banner sent.
        self.handshake_timeout = 15
        # how long (seconds) to wait for the auth response.
        self.auth_timeout = 30
        # how long (seconds) to wait for opening a channel
        self.channel_timeout = 60 * 60
        self.disabled_algorithms = disabled_algorithms or {}
        self.server_sig_algs = server_sig_algs
        # server mode:
        self.server_mode = False
        self.server_object = None
        self.server_key_dict = {}
        self.server_accepts = []
        self.server_accept_cv = threading.Condition(self.lock)
        self.subsystem_table = {}
        # Handler table, now set at init time for easier per-instance
        # manipulation and subclass twiddling.
        self._handler_table = {
            MSG_EXT_INFO: self._parse_ext_info,
            MSG_NEWKEYS: self._parse_newkeys,
            MSG_GLOBAL_REQUEST: self._parse_global_request,
            MSG_REQUEST_SUCCESS: self._parse_request_success,
            MSG_REQUEST_FAILURE: self._parse_request_failure,
            MSG_CHANNEL_OPEN_SUCCESS: self._parse_channel_open_success,
            MSG_CHANNEL_OPEN_FAILURE: self._parse_channel_open_failure,
            MSG_CHANNEL_OPEN: self._parse_channel_open,
            MSG_KEXINIT: self._negotiate_keys,
        }
def _filter_algorithm(self, type_):
default = getattr(self, "_preferred_{}".format(type_))
return tuple(
x
for x in default
if x not in self.disabled_algorithms.get(type_, [])
)
    @property
    def preferred_ciphers(self):
        """Preference-ordered cipher names, minus any user-disabled ones."""
        return self._filter_algorithm("ciphers")

    @property
    def preferred_macs(self):
        """Preference-ordered MAC names, minus any user-disabled ones."""
        return self._filter_algorithm("macs")

    @property
    def preferred_keys(self):
        """Preference-ordered host key algorithm names, plus cert variants."""
        # Interleave cert variants here; resistant to various background
        # overwriting of _preferred_keys, and necessary as hostkeys can't use
        # the logic pubkey auth does re: injecting/checking for certs at
        # runtime
        filtered = self._filter_algorithm("keys")
        return tuple(
            filtered
            + tuple("{}[email protected]".format(x) for x in filtered)
        )

    @property
    def preferred_pubkeys(self):
        """Preference-ordered pubkey algorithm names, minus disabled ones."""
        return self._filter_algorithm("pubkeys")

    @property
    def preferred_kex(self):
        """Preference-ordered kex algorithm names, minus disabled ones."""
        return self._filter_algorithm("kex")

    @property
    def preferred_compression(self):
        """Preference-ordered compression names, minus disabled ones."""
        return self._filter_algorithm("compression")
def __repr__(self):
"""
Returns a string representation of this object, for debugging.
"""
id_ = hex(id(self) & xffffffff)
out = "<paramiko.Transport at {}".format(id_)
if not self.active:
out += " (unconnected)"
else:
if self.local_cipher != "":
out += " (cipher {}, {:d} bits)".format(
self.local_cipher,
self._cipher_info[self.local_cipher]["key-size"] * 8,
)
if self.is_authenticated():
out += " (active; {} open channel(s))".format(
len(self._channels)
)
elif self.initial_kex_done:
out += " (connected; awaiting auth)"
else:
out += " (connecting)"
out += ">"
return out
    def atfork(self):
        """
        Terminate this Transport without closing the session. On posix
        systems, if a Transport is open during process forking, both parent
        and child will share the underlying socket, but only one process can
        use the connection (without corrupting the session). Use this method
        to clean up a Transport object without disrupting the other process.

        .. versionadded:: 1.5.3
        """
        # Drop this process's handle on the shared socket first, then tear
        # down our Transport bookkeeping via close().
        self.sock.close()
        self.close()
def get_security_options(self):
"""
Return a `.SecurityOptions` object which can be used to tweak the
encryption algorithms this transport will permit (for encryption,
digest/hash operations, public keys, and key exchanges) and the order
of preference for them.
"""
return SecurityOptions(self)
def set_gss_host(self, gss_host, trust_dns=True, gssapi_requested=True):
"""
Normalize/canonicalize ``self.gss_host`` depending on various factors.
:param str gss_host:
The explicitly requested GSS-oriented hostname to connect to (i.e.
what the host's name is in the Kerberos database.) Defaults to
``self.hostname`` (which will be the 'real' target hostname and/or
host portion of given socket object.)
:param bool trust_dns:
Indicates whether or not DNS is trusted; if true, DNS will be used
to canonicalize the GSS hostname (which again will either be
``gss_host`` or the transport's default hostname.)
(Defaults to True due to backwards compatibility.)
:param bool gssapi_requested:
Whether GSSAPI key exchange or authentication was even requested.
If not, this is a no-op and nothing happens
(and ``self.gss_host`` is not set.)
(Defaults to True due to backwards compatibility.)
:returns: ``None``.
"""
# No GSSAPI in play == nothing to do
if not gssapi_requested:
return
# Obtain the correct host first - did user request a GSS-specific name
# to use that is distinct from the actual SSH target hostname?
if gss_host is None:
gss_host = self.hostname
# Finally, canonicalize via DNS if DNS is trusted.
if trust_dns and gss_host is not None:
gss_host = socket.getfqdn(gss_host)
# And set attribute for reference later.
self.gss_host = gss_host
    def start_client(self, event=None, timeout=None):
        """
        Negotiate a new SSH2 session as a client. This is the first step after
        creating a new `.Transport`. A separate thread is created for protocol
        negotiation.

        If an event is passed in, this method returns immediately. When
        negotiation is done (successful or not), the given ``Event`` will
        be triggered. On failure, `is_active` will return ``False``.

        (Since 1.4) If ``event`` is ``None``, this method will not return until
        negotiation is done. On success, the method returns normally.
        Otherwise an SSHException is raised.

        After a successful negotiation, you will usually want to authenticate,
        calling `auth_password <Transport.auth_password>` or
        `auth_publickey <Transport.auth_publickey>`.

        .. note:: `connect` is a simpler method for connecting as a client.

        .. note::
            After calling this method (or `start_server` or `connect`), you
            should no longer directly read from or write to the original socket
            object.

        :param .threading.Event event:
            an event to trigger when negotiation is complete (optional)
        :param float timeout:
            a timeout, in seconds, for SSH2 session negotiation (optional)

        :raises:
            `.SSHException` -- if negotiation fails (and no ``event`` was
            passed in)
        """
        self.active = True
        if event is not None:
            # async, return immediately and let the app poll for completion
            self.completion_event = event
            self.start()
            return
        # synchronous, wait for a result
        self.completion_event = event = threading.Event()
        self.start()
        # Absolute deadline for the negotiation, if a timeout was given.
        max_time = time.time() + timeout if timeout is not None else None
        while True:
            event.wait(0.1)
            if not self.active:
                # Negotiation thread died; surface its saved exception if any.
                e = self.get_exception()
                if e is not None:
                    raise e
                raise SSHException("Negotiation failed.")
            # Stop on completion, or when the deadline has passed.
            if event.is_set() or (
                timeout is not None and time.time() >= max_time
            ):
                break
    def start_server(self, event=None, server=None):
        """
        Negotiate a new SSH2 session as a server. This is the first step after
        creating a new `.Transport` and setting up your server host key(s). A
        separate thread is created for protocol negotiation.

        If an event is passed in, this method returns immediately. When
        negotiation is done (successful or not), the given ``Event`` will
        be triggered. On failure, `is_active` will return ``False``.

        (Since 1.4) If ``event`` is ``None``, this method will not return until
        negotiation is done. On success, the method returns normally.
        Otherwise an SSHException is raised.

        After a successful negotiation, the client will need to authenticate.
        Override the methods `get_allowed_auths
        <.ServerInterface.get_allowed_auths>`, `check_auth_none
        <.ServerInterface.check_auth_none>`, `check_auth_password
        <.ServerInterface.check_auth_password>`, and `check_auth_publickey
        <.ServerInterface.check_auth_publickey>` in the given ``server`` object
        to control the authentication process.

        After a successful authentication, the client should request to open a
        channel. Override `check_channel_request
        <.ServerInterface.check_channel_request>` in the given ``server``
        object to allow channels to be opened.

        .. note::
            After calling this method (or `start_client` or `connect`), you
            should no longer directly read from or write to the original socket
            object.

        :param .threading.Event event:
            an event to trigger when negotiation is complete.
        :param .ServerInterface server:
            an object used to perform authentication and create `channels
            <.Channel>`

        :raises:
            `.SSHException` -- if negotiation fails (and no ``event`` was
            passed in)
        """
        if server is None:
            # default ServerInterface rejects everything; still lets kex run
            server = ServerInterface()
        self.server_mode = True
        self.server_object = server
        self.active = True
        if event is not None:
            # async, return immediately and let the app poll for completion
            self.completion_event = event
            self.start()
            return
        # synchronous, wait for a result
        self.completion_event = event = threading.Event()
        self.start()
        while True:
            event.wait(0.1)
            if not self.active:
                # Negotiation thread died; surface its saved exception if any.
                e = self.get_exception()
                if e is not None:
                    raise e
                raise SSHException("Negotiation failed.")
            if event.is_set():
                break
def add_server_key(self, key):
"""
Add a host key to the list of keys used for server mode. When behaving
as a server, the host key is used to sign certain packets during the
SSH2 negotiation, so that the client can trust that we are who we say
we are. Because this is used for signing, the key must contain private
key info, not just the public half. Only one key of each type (RSA or
DSS) is kept.
:param .PKey key:
the host key to add, usually an `.RSAKey` or `.DSSKey`.
"""
self.server_key_dict[key.get_name()] = key
# Handle SHA-2 extensions for RSA by ensuring that lookups into
# self.server_key_dict will yield this key for any of the algorithm
# names.
if isinstance(key, RSAKey):
self.server_key_dict["rsa-sha2-256"] = key
self.server_key_dict["rsa-sha2-512"] = key
def get_server_key(self):
"""
Return the active host key, in server mode. After negotiating with the
client, this method will return the negotiated host key. If only one
type of host key was set with `add_server_key`, that's the only key
that will ever be returned. But in cases where you have set more than
one type of host key (for example, an RSA key and a DSS key), the key
type will be negotiated by the client, and this method will return the
key of the type agreed on. If the host key has not been negotiated
yet, ``None`` is returned. In client mode, the behavior is undefined.
:return:
host key (`.PKey`) of the type negotiated by the client, or
``None``.
"""
try:
return self.server_key_dict[self.host_key_type]
except KeyError:
pass
return None
@staticmethod
def load_server_moduli(filename=None):
"""
(optional)
Load a file of prime moduli for use in doing group-exchange key
negotiation in server mode. It's a rather obscure option and can be
safely ignored.
In server mode, the remote client may request "group-exchange" key
negotiation, which asks the server to send a random prime number that
fits certain criteria. These primes are pretty difficult to compute,
so they can't be generated on demand. But many systems contain a file
of suitable primes (usually named something like ``/etc/ssh/moduli``).
If you call `load_server_moduli` and it returns ``True``, then this
file of primes has been loaded and we will support "group-exchange" in
server mode. Otherwise server mode will just claim that it doesn't
support that method of key negotiation.
:param str filename:
optional path to the moduli file, if you happen to know that it's
not in a standard location.
:return:
True if a moduli file was successfully loaded; False otherwise.
.. note:: This has no effect when used in client mode.
"""
Transport._modulus_pack = ModulusPack()
# places to look for the openssh "moduli" file
file_list = ["/etc/ssh/moduli", "/usr/local/etc/moduli"]
if filename is not None:
file_list.insert(0, filename)
for fn in file_list:
try:
Transport._modulus_pack.read_file(fn)
return True
except IOError:
pass
# none succeeded
Transport._modulus_pack = None
return False
def close(self):
"""
Close this session, and any open channels that are tied to it.
"""
if not self.active:
return
self.stop_thread()
for chan in list(self._channels.values()):
chan._unlink()
self.sock.close()
def get_remote_server_key(self):
"""
Return the host key of the server (in client mode).
.. note::
Previously this call returned a tuple of ``(key type, key
string)``. You can get the same effect by calling `.PKey.get_name`
for the key type, and ``str(key)`` for the key string.
:raises: `.SSHException` -- if no session is currently active.
:return: public key (`.PKey`) of the remote server
"""
if (not self.active) or (not self.initial_kex_done):
raise SSHException("No existing session")
return self.host_key
def is_active(self):
"""
Return true if this session is active (open).
:return:
True if the session is still active (open); False if the session is
closed
"""
return self.active
def open_session(
self, window_size=None, max_packet_size=None, timeout=None
):
"""
Request a new channel to the server, of type ``"session"``. This is
just an alias for calling `open_channel` with an argument of
``"session"``.
.. note:: Modifying the the window and packet sizes might have adverse
effects on the session created. The default values are the same
as in the OpenSSH code base and have been battle tested.
:param int window_size:
optional window size for this session.
:param int max_packet_size:
optional max packet size for this session.
:return: a new `.Channel`
:raises:
`.SSHException` -- if the request is rejected or the session ends
prematurely
.. versionchanged:: 1.13.4/1.14.3/1.15.3
Added the ``timeout`` argument.
.. versionchanged:: 1.15
Added the ``window_size`` and ``max_packet_size`` arguments.
"""
return self.open_channel(
"session",
window_size=window_size,
max_packet_size=max_packet_size,
timeout=timeout,
)
def open_x11_channel(self, src_addr=None):
"""
Request a new channel to the client, of type ``"x11"``. This
is just an alias for ``open_channel('x11', src_addr=src_addr)``.
:param tuple src_addr:
the source address (``(str, int)``) of the x11 server (port is the
x11 port, ie. 6010)
:return: a new `.Channel`
:raises:
`.SSHException` -- if the request is rejected or the session ends
prematurely
"""
return self.open_channel("x11", src_addr=src_addr)
def open_forward_agent_channel(self):
"""
Request a new channel to the client, of type
``"[email protected]"``.
This is just an alias for ``open_channel('[email protected]')``.
:return: a new `.Channel`
:raises: `.SSHException` --
if the request is rejected or the session ends prematurely
"""
return self.open_channel("[email protected]")
def open_forwarded_tcpip_channel(self, src_addr, dest_addr):
"""
Request a new channel back to the client, of type ``forwarded-tcpip``.
This is used after a client has requested port forwarding, for sending
incoming connections back to the client.
:param src_addr: originator's address
:param dest_addr: local (server) connected address
"""
return self.open_channel("forwarded-tcpip", dest_addr, src_addr)
    def open_channel(
        self,
        kind,
        dest_addr=None,
        src_addr=None,
        window_size=None,
        max_packet_size=None,
        timeout=None,
    ):
        """
        Request a new channel to the server. `Channels <.Channel>` are
        socket-like objects used for the actual transfer of data across the
        session. You may only request a channel after negotiating encryption
        (using `connect` or `start_client`) and authenticating.

        .. note:: Modifying the window and packet sizes might have adverse
            effects on the channel created. The default values are the same
            as in the OpenSSH code base and have been battle tested.

        :param str kind:
            the kind of channel requested (usually ``"session"``,
            ``"forwarded-tcpip"``, ``"direct-tcpip"``, or ``"x11"``)
        :param tuple dest_addr:
            the destination address (address + port tuple) of this port
            forwarding, if ``kind`` is ``"forwarded-tcpip"`` or
            ``"direct-tcpip"`` (ignored for other channel types)
        :param src_addr: the source address of this port forwarding, if
            ``kind`` is ``"forwarded-tcpip"``, ``"direct-tcpip"``, or ``"x11"``
        :param int window_size:
            optional window size for this session.
        :param int max_packet_size:
            optional max packet size for this session.
        :param float timeout:
            optional timeout opening a channel, default 3600s (1h)

        :return: a new `.Channel` on success

        :raises:
            `.SSHException` -- if the request is rejected, the session ends
            prematurely or there is a timeout opening a channel

        .. versionchanged:: 1.15
            Added the ``window_size`` and ``max_packet_size`` arguments.
        """
        if not self.active:
            raise SSHException("SSH session not active")
        timeout = self.channel_timeout if timeout is None else timeout
        self.lock.acquire()
        try:
            # Allocate the channel id and register the Channel while holding
            # the lock, so the dispatcher can find it when the reply arrives.
            window_size = self._sanitize_window_size(window_size)
            max_packet_size = self._sanitize_packet_size(max_packet_size)
            chanid = self._next_channel()
            m = Message()
            m.add_byte(cMSG_CHANNEL_OPEN)
            m.add_string(kind)
            m.add_int(chanid)
            m.add_int(window_size)
            m.add_int(max_packet_size)
            # Port-forwarding kinds carry the endpoint addresses in the
            # CHANNEL_OPEN payload; x11 carries only the originator.
            if (kind == "forwarded-tcpip") or (kind == "direct-tcpip"):
                m.add_string(dest_addr[0])
                m.add_int(dest_addr[1])
                m.add_string(src_addr[0])
                m.add_int(src_addr[1])
            elif kind == "x11":
                m.add_string(src_addr[0])
                m.add_int(src_addr[1])
            chan = Channel(chanid)
            self._channels.put(chanid, chan)
            self.channel_events[chanid] = event = threading.Event()
            self.channels_seen[chanid] = True
            chan._set_transport(self)
            chan._set_window(window_size, max_packet_size)
        finally:
            self.lock.release()
        self._send_user_message(m)
        start_ts = time.time()
        # Wait for the reply (which sets our event), a dead transport, or
        # the timeout -- whichever comes first.
        while True:
            event.wait(0.1)
            if not self.active:
                e = self.get_exception()
                if e is None:
                    e = SSHException("Unable to open channel.")
                raise e
            if event.is_set():
                break
            elif start_ts + timeout < time.time():
                raise SSHException("Timeout opening channel.")
        # NOTE(review): a missing map entry here means the open did not
        # succeed (presumably removed by the open-failure handler).
        chan = self._channels.get(chanid)
        if chan is not None:
            return chan
        e = self.get_exception()
        if e is None:
            e = SSHException("Unable to open channel.")
        raise e
def request_port_forward(self, address, port, handler=None):
"""
Ask the server to forward TCP connections from a listening port on
the server, across this SSH session.
If a handler is given, that handler is called from a different thread
whenever a forwarded connection arrives. The handler parameters are::
handler(
channel,
(origin_addr, origin_port),
(server_addr, server_port),
)
where ``server_addr`` and ``server_port`` are the address and port that
the server was listening on.
If no handler is set, the default behavior is to send new incoming
forwarded connections into the accept queue, to be picked up via
`accept`.
:param str address: the address to bind when forwarding
:param int port:
the port to forward, or 0 to ask the server to allocate any port
:param callable handler:
optional handler for incoming forwarded connections, of the form
``func(Channel, (str, int), (str, int))``.
:return: the port number (`int`) allocated by the server
:raises:
`.SSHException` -- if the server refused the TCP forward request
"""
if not self.active:
raise SSHException("SSH session not active")
port = int(port)
response = self.global_request(
"tcpip-forward", (address, port), wait=True
)
if response is None:
raise SSHException("TCP forwarding request denied")
if port == 0:
port = response.get_int()
if handler is None:
def default_handler(channel, src_addr, dest_addr_port):
# src_addr, src_port = src_addr_port
# dest_addr, dest_port = dest_addr_port
self._queue_incoming_channel(channel)
handler = default_handler
self._tcp_handler = handler
return port
def cancel_port_forward(self, address, port):
"""
Ask the server to cancel a previous port-forwarding request. No more
connections to the given address & port will be forwarded across this
ssh connection.
:param str address: the address to stop forwarding
:param int port: the port to stop forwarding
"""
if not self.active:
return
self._tcp_handler = None
self.global_request("cancel-tcpip-forward", (address, port), wait=True)
def open_sftp_client(self):
"""
Create an SFTP client channel from an open transport. On success, an
SFTP session will be opened with the remote host, and a new
`.SFTPClient` object will be returned.
:return:
a new `.SFTPClient` referring to an sftp session (channel) across
this transport
"""
return SFTPClient.from_transport(self)
def send_ignore(self, byte_count=None):
"""
Send a junk packet across the encrypted link. This is sometimes used
to add "noise" to a connection to confuse would-be attackers. It can
also be used as a keep-alive for long lived connections traversing
firewalls.
:param int byte_count:
the number of random bytes to send in the payload of the ignored
packet -- defaults to a random number from 10 to 41.
"""
m = Message()
m.add_byte(cMSG_IGNORE)
if byte_count is None:
byte_count = (byte_ord(os.urandom(1)) % 32) + 10
m.add_bytes(os.urandom(byte_count))
self._send_user_message(m)
    def renegotiate_keys(self):
        """
        Force this session to switch to new keys. Normally this is done
        automatically after the session hits a certain number of packets or
        bytes sent or received, but this method gives you the option of forcing
        new keys whenever you want. Negotiating new keys causes a pause in
        traffic both ways as the two sides swap keys and do computations. This
        method returns when the session has switched to new keys.

        :raises:
            `.SSHException` -- if the key renegotiation failed (which causes
            the session to end)
        """
        # completion_event is set once the new keys are in effect.
        self.completion_event = threading.Event()
        self._send_kex_init()
        while True:
            self.completion_event.wait(0.1)
            if not self.active:
                # Transport died mid-rekey; surface its saved exception.
                e = self.get_exception()
                if e is not None:
                    raise e
                raise SSHException("Negotiation failed.")
            if self.completion_event.is_set():
                break
        return
def set_keepalive(self, interval):
"""
Turn on/off keepalive packets (default is off). If this is set, after
``interval`` seconds without sending any data over the connection, a
"keepalive" packet will be sent (and ignored by the remote host). This
can be useful to keep connections alive over a NAT, for example.
:param int interval:
seconds to wait before sending a keepalive packet (or
0 to disable keepalives).
"""
def _request(x=weakref.proxy(self)):
return x.global_request("[email protected]", wait=False)
self.packetizer.set_keepalive(interval, _request)
    def global_request(self, kind, data=None, wait=True):
        """
        Make a global request to the remote host. These are normally
        extensions to the SSH2 protocol.

        :param str kind: name of the request.
        :param tuple data:
            an optional tuple containing additional data to attach to the
            request.
        :param bool wait:
            ``True`` if this method should not return until a response is
            received; ``False`` otherwise.
        :return:
            a `.Message` containing possible additional data if the request was
            successful (or an empty `.Message` if ``wait`` was ``False``);
            ``None`` if the request was denied.
        """
        if wait:
            # completion_event is set once a success/failure reply arrives.
            self.completion_event = threading.Event()
        m = Message()
        m.add_byte(cMSG_GLOBAL_REQUEST)
        m.add_string(kind)
        # want-reply flag: only ask the server to respond if we will wait.
        m.add_boolean(wait)
        if data is not None:
            m.add(*data)
        self._log(DEBUG, 'Sending global request "{}"'.format(kind))
        self._send_user_message(m)
        if not wait:
            return None
        while True:
            self.completion_event.wait(0.1)
            if not self.active:
                # Transport died while waiting; treat as a denied request.
                return None
            if self.completion_event.is_set():
                break
        return self.global_response
def accept(self, timeout=None):
"""
Return the next channel opened by the client over this transport, in
server mode. If no channel is opened before the given timeout,
``None`` is returned.
:param int timeout:
seconds to wait for a channel, or ``None`` to wait forever
:return: a new `.Channel` opened by the client
"""
self.lock.acquire()
try:
if len(self.server_accepts) > 0:
chan = self.server_accepts.pop(0)
else:
self.server_accept_cv.wait(timeout)
if len(self.server_accepts) > 0:
chan = self.server_accepts.pop(0)
else:
# timeout
chan = None
finally:
self.lock.release()
return chan
    def connect(
        self,
        hostkey=None,
        username="",
        password=None,
        pkey=None,
        gss_host=None,
        gss_auth=False,
        gss_kex=False,
        gss_deleg_creds=True,
        gss_trust_dns=True,
    ):
        """
        Negotiate an SSH2 session, and optionally verify the server's host key
        and authenticate using a password or private key. This is a shortcut
        for `start_client`, `get_remote_server_key`, and
        `Transport.auth_password` or `Transport.auth_publickey`. Use those
        methods if you want more control.

        You can use this method immediately after creating a Transport to
        negotiate encryption with a server. If it fails, an exception will be
        thrown. On success, the method will return cleanly, and an encrypted
        session exists. You may immediately call `open_channel` or
        `open_session` to get a `.Channel` object, which is used for data
        transfer.

        .. note::
            If you fail to supply a password or private key, this method may
            succeed, but a subsequent `open_channel` or `open_session` call may
            fail because you haven't authenticated yet.

        :param .PKey hostkey:
            the host key expected from the server, or ``None`` if you don't
            want to do host key verification.
        :param str username: the username to authenticate as.
        :param str password:
            a password to use for authentication, if you want to use password
            authentication; otherwise ``None``.
        :param .PKey pkey:
            a private key to use for authentication, if you want to use private
            key authentication; otherwise ``None``.
        :param str gss_host:
            The target's name in the kerberos database. Default: hostname
        :param bool gss_auth:
            ``True`` if you want to use GSS-API authentication.
        :param bool gss_kex:
            Perform GSS-API Key Exchange and user authentication.
        :param bool gss_deleg_creds:
            Whether to delegate GSS-API client credentials.
        :param gss_trust_dns:
            Indicates whether or not the DNS is trusted to securely
            canonicalize the name of the host being connected to (default
            ``True``).
        :raises: `.SSHException` -- if the SSH2 negotiation fails, the host key
            supplied by the server is incorrect, or authentication fails.

        .. versionchanged:: 2.3
            Added the ``gss_trust_dns`` argument.
        """
        if hostkey is not None:
            # TODO: a more robust implementation would be to ask each key class
            # for its nameS plural, and just use that.
            # TODO: that could be used in a bunch of other spots too
            # Restrict our preferred host-key algorithms to those matching
            # the caller-supplied key, so the server presents that key type.
            if isinstance(hostkey, RSAKey):
                self._preferred_keys = [
                    "rsa-sha2-512",
                    "rsa-sha2-256",
                    "ssh-rsa",
                ]
            else:
                self._preferred_keys = [hostkey.get_name()]
        self.set_gss_host(
            gss_host=gss_host,
            trust_dns=gss_trust_dns,
            gssapi_requested=gss_kex or gss_auth,
        )
        self.start_client()
        # check host key if we were given one
        # If GSS-API Key Exchange was performed, we are not required to check
        # the host key.
        if (hostkey is not None) and not gss_kex:
            key = self.get_remote_server_key()
            if (
                key.get_name() != hostkey.get_name()
                or key.asbytes() != hostkey.asbytes()
            ):
                self._log(DEBUG, "Bad host key from server")
                self._log(
                    DEBUG,
                    "Expected: {}: {}".format(
                        hostkey.get_name(), repr(hostkey.asbytes())
                    ),
                )
                self._log(
                    DEBUG,
                    "Got : {}: {}".format(
                        key.get_name(), repr(key.asbytes())
                    ),
                )
                raise SSHException("Bad host key from server")
            self._log(
                DEBUG, "Host key verified ({})".format(hostkey.get_name())
            )
        # Only attempt auth if the caller gave us something to auth with.
        if (pkey is not None) or (password is not None) or gss_auth or gss_kex:
            if gss_auth:
                self._log(
                    DEBUG, "Attempting GSS-API auth... (gssapi-with-mic)"
                ) # noqa
                self.auth_gssapi_with_mic(
                    username, self.gss_host, gss_deleg_creds
                )
            elif gss_kex:
                self._log(DEBUG, "Attempting GSS-API auth... (gssapi-keyex)")
                self.auth_gssapi_keyex(username)
            elif pkey is not None:
                self._log(DEBUG, "Attempting public-key auth...")
                self.auth_publickey(username, pkey)
            else:
                self._log(DEBUG, "Attempting password auth...")
                self.auth_password(username, password)
        return
def get_exception(self):
"""
Return any exception that happened during the last server request.
This can be used to fetch more specific error information after using
calls like `start_client`. The exception (if any) is cleared after
this call.
:return:
an exception, or ``None`` if there is no stored exception.
.. versionadded:: 1.1
"""
self.lock.acquire()
try:
e = self.saved_exception
self.saved_exception = None
return e
finally:
self.lock.release()
def set_subsystem_handler(self, name, handler, *args, **kwargs):
"""
Set the handler class for a subsystem in server mode. If a request
for this subsystem is made on an open ssh channel later, this handler
will be constructed and called -- see `.SubsystemHandler` for more
detailed documentation.
Any extra parameters (including keyword arguments) are saved and
passed to the `.SubsystemHandler` constructor later.
:param str name: name of the subsystem.
:param handler:
subclass of `.SubsystemHandler` that handles this subsystem.
"""
try:
self.lock.acquire()
self.subsystem_table[name] = (handler, args, kwargs)
finally:
self.lock.release()
def is_authenticated(self):
"""
Return true if this session is active and authenticated.
:return:
True if the session is still open and has been authenticated
successfully; False if authentication failed and/or the session is
closed.
"""
return (
self.active
and self.auth_handler is not None
and self.auth_handler.is_authenticated()
)
def get_username(self):
"""
Return the username this connection is authenticated for. If the
session is not authenticated (or authentication failed), this method
returns ``None``.
:return: username that was authenticated (a `str`), or ``None``.
"""
if not self.active or (self.auth_handler is None):
return None
return self.auth_handler.get_username()
def get_banner(self):
"""
Return the banner supplied by the server upon connect. If no banner is
supplied, this method returns ``None``.
:returns: server supplied banner (`str`), or ``None``.
.. versionadded:: 1.13
"""
if not self.active or (self.auth_handler is None):
return None
return self.auth_handler.banner
def auth_none(self, username):
"""
Try to authenticate to the server using no authentication at all.
This will almost always fail. It may be useful for determining the
list of authentication types supported by the server, by catching the
`.BadAuthenticationType` exception raised.
:param str username: the username to authenticate as
:return:
list of auth types permissible for the next stage of
authentication (normally empty)
:raises:
`.BadAuthenticationType` -- if "none" authentication isn't allowed
by the server for this user
:raises:
`.SSHException` -- if the authentication failed due to a network
error
.. versionadded:: 1.5
"""
if (not self.active) or (not self.initial_kex_done):
raise SSHException("No existing session")
my_event = threading.Event()
self.auth_handler = AuthHandler(self)
self.auth_handler.auth_none(username, my_event)
return self.auth_handler.wait_for_response(my_event)
    def auth_password(self, username, password, event=None, fallback=True):
        """
        Authenticate to the server using a password. The username and password
        are sent over an encrypted link.

        If an ``event`` is passed in, this method will return immediately, and
        the event will be triggered once authentication succeeds or fails. On
        success, `is_authenticated` will return ``True``. On failure, you may
        use `get_exception` to get more detailed error information.

        Since 1.1, if no event is passed, this method will block until the
        authentication succeeds or fails. On failure, an exception is raised.
        Otherwise, the method simply returns.

        Since 1.5, if no event is passed and ``fallback`` is ``True`` (the
        default), if the server doesn't support plain password authentication
        but does support so-called "keyboard-interactive" mode, an attempt
        will be made to authenticate using this interactive mode. If it fails,
        the normal exception will be thrown as if the attempt had never been
        made. This is useful for some recent Gentoo and Debian distributions,
        which turn off plain password authentication in a misguided belief
        that interactive authentication is "more secure". (It's not.)

        If the server requires multi-step authentication (which is very rare),
        this method will return a list of auth types permissible for the next
        step. Otherwise, in the normal case, an empty list is returned.

        :param str username: the username to authenticate as
        :param basestring password: the password to authenticate with
        :param .threading.Event event:
            an event to trigger when the authentication attempt is complete
            (whether it was successful or not)
        :param bool fallback:
            ``True`` if an attempt at an automated "interactive" password auth
            should be made if the server doesn't support normal password auth
        :return:
            list of auth types permissible for the next stage of
            authentication (normally empty)

        :raises:
            `.BadAuthenticationType` -- if password authentication isn't
            allowed by the server for this user (and no event was passed in)
        :raises:
            `.AuthenticationException` -- if the authentication failed (and no
            event was passed in)
        :raises: `.SSHException` -- if there was a network error
        """
        if (not self.active) or (not self.initial_kex_done):
            # we should never try to send the password unless we're on a secure
            # link
            raise SSHException("No existing session")
        if event is None:
            my_event = threading.Event()
        else:
            my_event = event
        self.auth_handler = AuthHandler(self)
        self.auth_handler.auth_password(username, password, my_event)
        if event is not None:
            # caller wants to wait for event themselves
            return []
        try:
            return self.auth_handler.wait_for_response(my_event)
        except BadAuthenticationType as e:
            # if password auth isn't allowed, but keyboard-interactive *is*,
            # try to fudge it
            if not fallback or ("keyboard-interactive" not in e.allowed_types):
                raise
            try:
                # Answer the server's interactive prompts with the password;
                # anything more than one prompt means this trick won't work.
                def handler(title, instructions, fields):
                    if len(fields) > 1:
                        raise SSHException("Fallback authentication failed.")
                    if len(fields) == 0:
                        # for some reason, at least on os x, a 2nd request will
                        # be made with zero fields requested. maybe it's just
                        # to try to fake out automated scripting of the exact
                        # type we're doing here. *shrug* :)
                        return []
                    return [password]
                return self.auth_interactive(username, handler)
            except SSHException:
                # attempt failed; just raise the original exception
                raise e
    def auth_publickey(self, username, key, event=None):
        """
        Authenticate to the server using a private key. The key is used to
        sign data from the server, so it must include the private part.

        If an ``event`` is passed in, this method will return immediately, and
        the event will be triggered once authentication succeeds or fails. On
        success, `is_authenticated` will return ``True``. On failure, you may
        use `get_exception` to get more detailed error information.

        Since 1.1, if no event is passed, this method will block until the
        authentication succeeds or fails. On failure, an exception is raised.
        Otherwise, the method simply returns.

        If the server requires multi-step authentication (which is very rare),
        this method will return a list of auth types permissible for the next
        step. Otherwise, in the normal case, an empty list is returned.

        :param str username: the username to authenticate as
        :param .PKey key: the private key to authenticate with
        :param .threading.Event event:
            an event to trigger when the authentication attempt is complete
            (whether it was successful or not)
        :return:
            list of auth types permissible for the next stage of
            authentication (normally empty)

        :raises:
            `.BadAuthenticationType` -- if public-key authentication isn't
            allowed by the server for this user (and no event was passed in)
        :raises:
            `.AuthenticationException` -- if the authentication failed (and no
            event was passed in)
        :raises: `.SSHException` -- if there was a network error
        """
        if (not self.active) or (not self.initial_kex_done):
            # we should never try to authenticate unless we're on a secure link
            raise SSHException("No existing session")
        if event is None:
            # No caller event: we'll block on our own event below.
            my_event = threading.Event()
        else:
            my_event = event
        self.auth_handler = AuthHandler(self)
        self.auth_handler.auth_publickey(username, key, my_event)
        if event is not None:
            # caller wants to wait for event themselves
            return []
        return self.auth_handler.wait_for_response(my_event)
def auth_interactive(self, username, handler, submethods=""):
"""
Authenticate to the server interactively. A handler is used to answer
arbitrary questions from the server. On many servers, this is just a
dumb wrapper around PAM.
This method will block until the authentication succeeds or fails,
periodically calling the handler asynchronously to get answers to
authentication questions. The handler may be called more than once
if the server continues to ask questions.
The handler is expected to be a callable that will handle calls of the
form: ``handler(title, instructions, prompt_list)``. The ``title`` is
meant to be a dialog-window title, and the ``instructions`` are user
instructions (both are strings). ``prompt_list`` will be a list of
prompts, each prompt being a tuple of ``(str, bool)``. The string is
the prompt and the boolean indicates whether the user text should be
echoed.
A sample call would thus be:
``handler('title', 'instructions', [('Password:', False)])``.
The handler should return a list or tuple of answers to the server's
questions.
If the server requires multi-step authentication (which is very rare),
this method will return a list of auth types permissible for the next
step. Otherwise, in the normal case, an empty list is returned.
:param str username: the username to authenticate as
:param callable handler: a handler for responding to server questions
:param str submethods: a string list of desired submethods (optional)
:return:
list of auth types permissible for the next stage of
authentication (normally empty).
:raises: `.BadAuthenticationType` -- if public-key authentication isn't
allowed by the server for this user
:raises: `.AuthenticationException` -- if the authentication failed
:raises: `.SSHException` -- if there was a network error
.. versionadded:: 1.5
"""
if (not self.active) or (not self.initial_kex_done):
# we should never try to authenticate unless we're on a secure link
raise SSHException("No existing session")
my_event = threading.Event()
self.auth_handler = AuthHandler(self)
self.auth_handler.auth_interactive(
username, handler, my_event, submethods
)
return self.auth_handler.wait_for_response(my_event)
def auth_interactive_dumb(self, username, handler=None, submethods=""):
"""
Authenticate to the server interactively but dumber.
Just print the prompt and / or instructions to stdout and send back
the response. This is good for situations where partial auth is
achieved by key and then the user has to enter a 2fac token.
"""
if not handler:
def handler(title, instructions, prompt_list):
answers = []
if title:
print(title.strip())
if instructions:
print(instructions.strip())
for prompt, show_input in prompt_list:
print(prompt.strip(), end=" ")
answers.append(input())
return answers
return self.auth_interactive(username, handler, submethods)
def auth_gssapi_with_mic(self, username, gss_host, gss_deleg_creds):
"""
Authenticate to the Server using GSS-API / SSPI.
:param str username: The username to authenticate as
:param str gss_host: The target host
:param bool gss_deleg_creds: Delegate credentials or not
:return: list of auth types permissible for the next stage of
authentication (normally empty)
:raises: `.BadAuthenticationType` -- if gssapi-with-mic isn't
allowed by the server (and no event was passed in)
:raises:
`.AuthenticationException` -- if the authentication failed (and no
event was passed in)
:raises: `.SSHException` -- if there was a network error
"""
if (not self.active) or (not self.initial_kex_done):
# we should never try to authenticate unless we're on a secure link
raise SSHException("No existing session")
my_event = threading.Event()
self.auth_handler = AuthHandler(self)
self.auth_handler.auth_gssapi_with_mic(
username, gss_host, gss_deleg_creds, my_event
)
return self.auth_handler.wait_for_response(my_event)
def auth_gssapi_keyex(self, username):
"""
Authenticate to the server with GSS-API/SSPI if GSS-API kex is in use.
:param str username: The username to authenticate as.
:returns:
a list of auth types permissible for the next stage of
authentication (normally empty)
:raises: `.BadAuthenticationType` --
if GSS-API Key Exchange was not performed (and no event was passed
in)
:raises: `.AuthenticationException` --
if the authentication failed (and no event was passed in)
:raises: `.SSHException` -- if there was a network error
"""
if (not self.active) or (not self.initial_kex_done):
# we should never try to authenticate unless we're on a secure link
raise SSHException("No existing session")
my_event = threading.Event()
self.auth_handler = AuthHandler(self)
self.auth_handler.auth_gssapi_keyex(username, my_event)
return self.auth_handler.wait_for_response(my_event)
def set_log_channel(self, name):
"""
Set the channel for this transport's logging. The default is
``"paramiko.transport"`` but it can be set to anything you want. (See
the `.logging` module for more info.) SSH Channels will log to a
sub-channel of the one specified.
:param str name: new channel name for logging
.. versionadded:: 1.1
"""
self.log_name = name
self.logger = util.get_logger(name)
self.packetizer.set_log(self.logger)
def get_log_channel(self):
"""
Return the channel name used for this transport's logging.
:return: channel name as a `str`
.. versionadded:: 1.2
"""
return self.log_name
def set_hexdump(self, hexdump):
"""
Turn on/off logging a hex dump of protocol traffic at DEBUG level in
the logs. Normally you would want this off (which is the default),
but if you are debugging something, it may be useful.
:param bool hexdump:
``True`` to log protocol traffix (in hex) to the log; ``False``
otherwise.
"""
self.packetizer.set_hexdump(hexdump)
def get_hexdump(self):
"""
Return ``True`` if the transport is currently logging hex dumps of
protocol traffic.
:return: ``True`` if hex dumps are being logged, else ``False``.
.. versionadded:: 1.4
"""
return self.packetizer.get_hexdump()
def use_compression(self, compress=True):
"""
Turn on/off compression. This will only have an affect before starting
the transport (ie before calling `connect`, etc). By default,
compression is off since it negatively affects interactive sessions.
:param bool compress:
``True`` to ask the remote client/server to compress traffic;
``False`` to refuse compression
.. versionadded:: 1.5.2
"""
if compress:
self._preferred_compression = ("[email protected]", "zlib", "none")
else:
self._preferred_compression = ("none",)
def getpeername(self):
"""
Return the address of the remote side of this Transport, if possible.
This is effectively a wrapper around ``getpeername`` on the underlying
socket. If the socket-like object has no ``getpeername`` method, then
``("unknown", 0)`` is returned.
:return:
the address of the remote host, if known, as a ``(str, int)``
tuple.
"""
gp = getattr(self.sock, "getpeername", None)
if gp is None:
return "unknown", 0
return gp()
    def stop_thread(self):
        # Shut down the transport: mark it inactive, close the packetizer,
        # then wait for the worker thread to exit (in short join() slices).
        self.active = False
        self.packetizer.close()
        # Keep trying to join() our main thread, quickly, until:
        # * We join()ed successfully (self.is_alive() == False)
        # * Or it looks like we've hit issue #520 (socket.recv hitting some
        # race condition preventing it from timing out correctly), wherein
        # our socket and packetizer are both closed (but where we'd
        # otherwise be sitting forever on that recv()).
        # Also bail out if called *from* the worker thread itself, since a
        # thread cannot join() itself.
        while (
            self.is_alive()
            and self is not threading.current_thread()
            and not self.sock._closed
            and not self.packetizer.closed
        ):
            self.join(0.1)
# internals...
# TODO 4.0: make a public alias for this because multiple other classes
# already explicitly rely on it...or just rewrite logging :D
def _log(self, level, msg, *args):
if issubclass(type(msg), list):
for m in msg:
self.logger.log(level, m)
else:
self.logger.log(level, msg, *args)
def _get_modulus_pack(self):
"""used by KexGex to find primes for group exchange"""
return self._modulus_pack
def _next_channel(self):
"""you are holding the lock"""
chanid = self._channel_counter
while self._channels.get(chanid) is not None:
self._channel_counter = (self._channel_counter + 1) & 0xFFFFFF
chanid = self._channel_counter
self._channel_counter = (self._channel_counter + 1) & 0xFFFFFF
return chanid
def _unlink_channel(self, chanid):
"""used by a Channel to remove itself from the active channel list"""
self._channels.delete(chanid)
def _send_message(self, data):
self.packetizer.send_message(data)
    def _send_user_message(self, data):
        """
        send a message, but block if we're in key negotiation. this is used
        for user-initiated requests.
        """
        start = time.time()
        while True:
            # Wait briefly for key (re)negotiation to finish; loop so we can
            # also notice a dead connection or an overall timeout.
            self.clear_to_send.wait(0.1)
            if not self.active:
                self._log(
                    DEBUG, "Dropping user packet because connection is dead."
                ) # noqa
                return
            self.clear_to_send_lock.acquire()
            if self.clear_to_send.is_set():
                # Deliberately break while still HOLDING the lock, so a new
                # kex cannot start mid-send; released in the finally below.
                break
            self.clear_to_send_lock.release()
            if time.time() > start + self.clear_to_send_timeout:
                raise SSHException(
                    "Key-exchange timed out waiting for key negotiation"
                ) # noqa
        try:
            self._send_message(data)
        finally:
            self.clear_to_send_lock.release()
def _set_K_H(self, k, h):
"""
Used by a kex obj to set the K (root key) and H (exchange hash).
"""
self.K = k
self.H = h
if self.session_id is None:
self.session_id = h
def _expect_packet(self, *ptypes):
"""
Used by a kex obj to register the next packet type it expects to see.
"""
self._expected_packet = tuple(ptypes)
def _verify_key(self, host_key, sig):
key = self._key_info[self.host_key_type](Message(host_key))
if key is None:
raise SSHException("Unknown host key type")
if not key.verify_ssh_sig(self.H, Message(sig)):
raise SSHException(
"Signature verification ({}) failed.".format(
self.host_key_type
)
) # noqa
self.host_key = key
    def _compute_key(self, id, nbytes):
        """id is 'A' - 'F' for the various keys used by ssh"""
        # Derive nbytes of key material as HASH(K || H || id || session_id),
        # then extend by hashing K || H || <output so far> until long enough.
        m = Message()
        m.add_mpint(self.K)
        m.add_bytes(self.H)
        m.add_byte(b(id))
        m.add_bytes(self.session_id)
        # Fallback to SHA1 for kex engines that fail to specify a hex
        # algorithm, or for e.g. transport tests that don't run kexinit.
        hash_algo = getattr(self.kex_engine, "hash_algo", None)
        hash_select_msg = "kex engine {} specified hash_algo {!r}".format(
            self.kex_engine.__class__.__name__, hash_algo
        )
        if hash_algo is None:
            hash_algo = sha1
            hash_select_msg += ", falling back to sha1"
        if not hasattr(self, "_logged_hash_selection"):
            # Only log the hash-algorithm selection once per transport.
            self._log(DEBUG, hash_select_msg)
            setattr(self, "_logged_hash_selection", True)
        out = sofar = hash_algo(m.asbytes()).digest()
        while len(out) < nbytes:
            m = Message()
            m.add_mpint(self.K)
            m.add_bytes(self.H)
            m.add_bytes(sofar)
            digest = hash_algo(m.asbytes()).digest()
            out += digest
            sofar += digest
        # Truncate to exactly the requested amount of key material.
        return out[:nbytes]
    def _get_engine(self, name, key, iv=None, operation=None, aead=False):
        # Build the encrypt/decrypt engine for cipher ``name`` from the
        # _cipher_info table; raises SSHException for unknown cipher names.
        if name not in self._cipher_info:
            raise SSHException("Unknown cipher " + name)
        info = self._cipher_info[name]
        algorithm = info["class"](key)
        # AEAD types (eg GCM) use their algorithm class /as/ the encryption
        # engine (they expose the same encrypt/decrypt API as a CipherContext)
        if aead:
            return algorithm
        # All others go through the Cipher class.
        cipher = Cipher(
            algorithm=algorithm,
            # TODO: why is this getting tickled in aesgcm mode???
            mode=info["mode"](iv),
            backend=default_backend(),
        )
        # ``operation`` selects direction; anything other than _ENCRYPT
        # (including None) yields a decryptor.
        if operation is self._ENCRYPT:
            return cipher.encryptor()
        else:
            return cipher.decryptor()
def _set_forward_agent_handler(self, handler):
if handler is None:
def default_handler(channel):
self._queue_incoming_channel(channel)
self._forward_agent_handler = default_handler
else:
self._forward_agent_handler = handler
def _set_x11_handler(self, handler):
# only called if a channel has turned on x11 forwarding
if handler is None:
# by default, use the same mechanism as accept()
def default_handler(channel, src_addr_port):
self._queue_incoming_channel(channel)
self._x11_handler = default_handler
else:
self._x11_handler = handler
def _queue_incoming_channel(self, channel):
self.lock.acquire()
try:
self.server_accepts.append(channel)
self.server_accept_cv.notify()
finally:
self.lock.release()
def _sanitize_window_size(self, window_size):
if window_size is None:
window_size = self.default_window_size
return clamp_value(MIN_WINDOW_SIZE, window_size, MAX_WINDOW_SIZE)
    def _sanitize_packet_size(self, max_packet_size):
        # Clamp the requested max packet size; None selects the default.
        if max_packet_size is None:
            max_packet_size = self.default_max_packet_size
        # NOTE(review): the upper bound here is MAX_WINDOW_SIZE rather than a
        # dedicated max-packet constant -- mirrors _sanitize_window_size and
        # looks deliberate, but worth confirming.
        return clamp_value(MIN_PACKET_SIZE, max_packet_size, MAX_WINDOW_SIZE)
    def _ensure_authed(self, ptype, message):
        """
        Checks message type against current auth state.

        If server mode, and auth has not succeeded, and the message is of a
        post-auth type (channel open or global request) an appropriate error
        response Message is crafted and returned to caller for sending.

        Otherwise (client mode, authed, or pre-auth message) returns None.
        """
        if (
            not self.server_mode
            or ptype <= HIGHEST_USERAUTH_MESSAGE_ID
            or self.is_authenticated()
        ):
            return None
        # WELP. We must be dealing with someone trying to do non-auth things
        # without being authed. Tell them off, based on message class.
        reply = Message()
        # Global requests have no details, just failure.
        if ptype == MSG_GLOBAL_REQUEST:
            reply.add_byte(cMSG_REQUEST_FAILURE)
        # Channel opens let us reject w/ a specific type + message.
        elif ptype == MSG_CHANNEL_OPEN:
            # Consume the kind field to reach the channel id we must echo.
            kind = message.get_text() # noqa
            chanid = message.get_int()
            reply.add_byte(cMSG_CHANNEL_OPEN_FAILURE)
            reply.add_int(chanid)
            reply.add_int(OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED)
            reply.add_string("")
            reply.add_string("en")
        # NOTE: Post-open channel messages do not need checking; the above will
        # reject attempts to open channels, meaning that even if a malicious
        # user tries to send a MSG_CHANNEL_REQUEST, it will simply fall under
        # the logic that handles unknown channel IDs (as the channel list will
        # be empty.)
        return reply
def _enforce_strict_kex(self, ptype):
"""
Conditionally raise `MessageOrderError` during strict initial kex.
This method should only be called inside code that handles non-KEXINIT
messages; it does not interrogate ``ptype`` besides using it to log
more accurately.
"""
if self.agreed_on_strict_kex and not self.initial_kex_done:
name = MSG_NAMES.get(ptype, f"msg {ptype}")
raise MessageOrderError(
f"In strict-kex mode, but was sent {name!r}!"
)
    def run(self):
        """
        Main loop of the transport thread: send our version banner, read the
        remote one, kick off key exchange, then dispatch inbound messages
        until the connection dies or is closed.
        """
        # (use the exposed "run" method, because if we specify a thread target
        # of a private method, threading.Thread will keep a reference to it
        # indefinitely, creating a GC cycle and not letting Transport ever be
        # GC'd. it's a bug in Thread.)
        # Hold reference to 'sys' so we can test sys.modules to detect
        # interpreter shutdown.
        self.sys = sys
        # active=True occurs before the thread is launched, to avoid a race
        _active_threads.append(self)
        tid = hex(id(self) & xffffffff)
        if self.server_mode:
            self._log(DEBUG, "starting thread (server mode): {}".format(tid))
        else:
            self._log(DEBUG, "starting thread (client mode): {}".format(tid))
        try:
            try:
                self.packetizer.write_all(b(self.local_version + "\r\n"))
                self._log(
                    DEBUG,
                    "Local version/idstring: {}".format(self.local_version),
                ) # noqa
                self._check_banner()
                # The above is actually very much part of the handshake, but
                # sometimes the banner can be read but the machine is not
                # responding, for example when the remote ssh daemon is loaded
                # in to memory but we can not read from the disk/spawn a new
                # shell.
                # Make sure we can specify a timeout for the initial handshake.
                # Re-use the banner timeout for now.
                self.packetizer.start_handshake(self.handshake_timeout)
                self._send_kex_init()
                self._expect_packet(MSG_KEXINIT)
                while self.active:
                    # Kick off a rekey if the packetizer says limits were hit.
                    if self.packetizer.need_rekey() and not self.in_kex:
                        self._send_kex_init()
                    try:
                        ptype, m = self.packetizer.read_message()
                    except NeedRekeyException:
                        continue
                    if ptype == MSG_IGNORE:
                        self._enforce_strict_kex(ptype)
                        continue
                    elif ptype == MSG_DISCONNECT:
                        self._parse_disconnect(m)
                        break
                    elif ptype == MSG_DEBUG:
                        self._enforce_strict_kex(ptype)
                        self._parse_debug(m)
                        continue
                    if len(self._expected_packet) > 0:
                        if ptype not in self._expected_packet:
                            exc_class = SSHException
                            if self.agreed_on_strict_kex:
                                exc_class = MessageOrderError
                            raise exc_class(
                                "Expecting packet from {!r}, got {:d}".format(
                                    self._expected_packet, ptype
                                )
                            ) # noqa
                        self._expected_packet = tuple()
                        # These message IDs indicate key exchange & will differ
                        # depending on exact exchange algorithm
                        if (ptype >= 30) and (ptype <= 41):
                            self.kex_engine.parse_next(ptype, m)
                            continue
                    if ptype in self._handler_table:
                        # Reject post-auth messages sent pre-auth (server).
                        error_msg = self._ensure_authed(ptype, m)
                        if error_msg:
                            self._send_message(error_msg)
                        else:
                            self._handler_table[ptype](m)
                    elif ptype in self._channel_handler_table:
                        chanid = m.get_int()
                        chan = self._channels.get(chanid)
                        if chan is not None:
                            self._channel_handler_table[ptype](chan, m)
                        elif chanid in self.channels_seen:
                            self._log(
                                DEBUG,
                                "Ignoring message for dead channel {:d}".format( # noqa
                                    chanid
                                ),
                            )
                        else:
                            self._log(
                                ERROR,
                                "Channel request for unknown channel {:d}".format( # noqa
                                    chanid
                                ),
                            )
                            break
                    elif (
                        self.auth_handler is not None
                        and ptype in self.auth_handler._handler_table
                    ):
                        handler = self.auth_handler._handler_table[ptype]
                        handler(m)
                        if len(self._expected_packet) > 0:
                            continue
                    else:
                        # Respond with "I don't implement this particular
                        # message type" message (unless the message type was
                        # itself literally MSG_UNIMPLEMENTED, in which case, we
                        # just shut up to avoid causing a useless loop).
                        name = MSG_NAMES[ptype]
                        warning = "Oops, unhandled type {} ({!r})".format(
                            ptype, name
                        )
                        self._log(WARNING, warning)
                        if ptype != MSG_UNIMPLEMENTED:
                            msg = Message()
                            msg.add_byte(cMSG_UNIMPLEMENTED)
                            msg.add_int(m.seqno)
                            self._send_message(msg)
                    self.packetizer.complete_handshake()
            except SSHException as e:
                self._log(
                    ERROR,
                    "Exception ({}): {}".format(
                        "server" if self.server_mode else "client", e
                    ),
                )
                self._log(ERROR, util.tb_strings())
                self.saved_exception = e
            except EOFError as e:
                self._log(DEBUG, "EOF in transport thread")
                self.saved_exception = e
            except socket.error as e:
                if type(e.args) is tuple:
                    if e.args:
                        emsg = "{} ({:d})".format(e.args[1], e.args[0])
                    else: # empty tuple, e.g. socket.timeout
                        emsg = str(e) or repr(e)
                else:
                    emsg = e.args
                self._log(ERROR, "Socket exception: " + emsg)
                self.saved_exception = e
            except Exception as e:
                self._log(ERROR, "Unknown exception: " + str(e))
                self._log(ERROR, util.tb_strings())
                self.saved_exception = e
            # Teardown: unlink channels, wake every waiter, close the socket.
            _active_threads.remove(self)
            for chan in list(self._channels.values()):
                chan._unlink()
            if self.active:
                self.active = False
                self.packetizer.close()
                if self.completion_event is not None:
                    self.completion_event.set()
                if self.auth_handler is not None:
                    self.auth_handler.abort()
                for event in self.channel_events.values():
                    event.set()
                try:
                    self.lock.acquire()
                    self.server_accept_cv.notify()
                finally:
                    self.lock.release()
            self.sock.close()
        except:
            # Don't raise spurious 'NoneType has no attribute X' errors when we
            # wake up during interpreter shutdown. Or rather -- raise
            # everything *if* sys.modules (used as a convenient sentinel)
            # appears to still exist.
            if self.sys.modules is not None:
                raise
def _log_agreement(self, which, local, remote):
# Log useful, non-duplicative line re: an agreed-upon algorithm.
# Old code implied algorithms could be asymmetrical (different for
# inbound vs outbound) so we preserve that possibility.
msg = "{}: ".format(which)
if local == remote:
msg += local
else:
msg += "local={}, remote={}".format(local, remote)
self._log(DEBUG, msg)
# protocol stages
    def _negotiate_keys(self, m):
        # throws SSHException on anything unusual
        # Block user-initiated packets during (re)keying by clearing the
        # clear_to_send flag under its lock.
        self.clear_to_send_lock.acquire()
        try:
            self.clear_to_send.clear()
        finally:
            self.clear_to_send_lock.release()
        if self.local_kex_init is None:
            # remote side wants to renegotiate
            self._send_kex_init()
        self._parse_kex_init(m)
        # _parse_kex_init selected self.kex_engine; hand control to it.
        self.kex_engine.start_kex()
def _check_banner(self):
# this is slow, but we only have to do it once
for i in range(100):
# give them 15 seconds for the first line, then just 2 seconds
# each additional line. (some sites have very high latency.)
if i == 0:
timeout = self.banner_timeout
else:
timeout = 2
try:
buf = self.packetizer.readline(timeout)
except ProxyCommandFailure:
raise
except Exception as e:
raise SSHException(
"Error reading SSH protocol banner" + str(e)
)
if buf[:4] == "SSH-":
break
self._log(DEBUG, "Banner: " + buf)
if buf[:4] != "SSH-":
raise SSHException('Indecipherable protocol version "' + buf + '"')
# save this server version string for later
self.remote_version = buf
self._log(DEBUG, "Remote version/idstring: {}".format(buf))
# pull off any attached comment
# NOTE: comment used to be stored in a variable and then...never used.
# since 2003. ca 877cd974b8182d26fa76d566072917ea67b64e67
i = buf.find(" ")
if i >= 0:
buf = buf[:i]
# parse out version string and make sure it matches
segs = buf.split("-", 2)
if len(segs) < 3:
raise SSHException("Invalid SSH banner")
version = segs[1]
client = segs[2]
if version != "1.99" and version != "2.0":
msg = "Incompatible version ({} instead of 2.0)"
raise IncompatiblePeer(msg.format(version))
msg = "Connected (version {}, client {})".format(version, client)
self._log(INFO, msg)
    def _send_kex_init(self):
        """
        announce to the other side that we'd like to negotiate keys, and what
        kind of key negotiation we support.

        Builds and sends a KEXINIT packet advertising every preferred
        algorithm family, and records the raw bytes sent (needed later to
        compute the exchange hash). Also flips ``in_kex`` on and blocks
        outbound user data until NEWKEYS.
        """
        # Hold senders for the duration of the (re)negotiation.
        self.clear_to_send_lock.acquire()
        try:
            self.clear_to_send.clear()
        finally:
            self.clear_to_send_lock.release()
        self.gss_kex_used = False
        self.in_kex = True
        kex_algos = list(self.preferred_kex)
        if self.server_mode:
            mp_required_prefix = "diffie-hellman-group-exchange-sha"
            kex_mp = [k for k in kex_algos if k.startswith(mp_required_prefix)]
            if (self._modulus_pack is None) and (len(kex_mp) > 0):
                # can't do group-exchange if we don't have a pack of potential
                # primes
                pkex = [
                    k
                    for k in self.get_security_options().kex
                    if not k.startswith(mp_required_prefix)
                ]
                self.get_security_options().kex = pkex
            # Only advertise host key types we actually hold a key for.
            available_server_keys = list(
                filter(
                    list(self.server_key_dict.keys()).__contains__,
                    # TODO: ensure tests will catch if somebody streamlines
                    # this by mistake - case is the admittedly silly one where
                    # the only calls to add_server_key() contain keys which
                    # were filtered out of the below via disabled_algorithms.
                    # If this is streamlined, we would then be allowing the
                    # disabled algorithm(s) for hostkey use
                    # TODO: honestly this prob just wants to get thrown out
                    # when we make kex configuration more straightforward
                    self.preferred_keys,
                )
            )
        else:
            available_server_keys = self.preferred_keys
            # Signal support for MSG_EXT_INFO so server will send it to us.
            # NOTE: doing this here handily means we don't even consider this
            # value when agreeing on real kex algo to use (which is a common
            # pitfall when adding this apparently).
            kex_algos.append("ext-info-c")
        # Similar to ext-info, but used in both server modes, so done outside
        # of above if/else.
        if self.advertise_strict_kex:
            which = "s" if self.server_mode else "c"
            kex_algos.append(f"kex-strict-{which}[email protected]")
        # KEXINIT wire layout (RFC 4253 sec. 7.1): cookie, then ten
        # name-lists (each cipher/mac/compression list appears twice: once
        # per direction), two language strings, the guess flag, and a
        # reserved uint32.
        m = Message()
        m.add_byte(cMSG_KEXINIT)
        m.add_bytes(os.urandom(16))
        m.add_list(kex_algos)
        m.add_list(available_server_keys)
        m.add_list(self.preferred_ciphers)
        m.add_list(self.preferred_ciphers)
        m.add_list(self.preferred_macs)
        m.add_list(self.preferred_macs)
        m.add_list(self.preferred_compression)
        m.add_list(self.preferred_compression)
        m.add_string(bytes())
        m.add_string(bytes())
        m.add_boolean(False)
        m.add_int(0)
        # save a copy for later (needed to compute a hash)
        self.local_kex_init = self._latest_kex_init = m.asbytes()
        self._send_message(m)
def _really_parse_kex_init(self, m, ignore_first_byte=False):
parsed = {}
if ignore_first_byte:
m.get_byte()
m.get_bytes(16) # cookie, discarded
parsed["kex_algo_list"] = m.get_list()
parsed["server_key_algo_list"] = m.get_list()
parsed["client_encrypt_algo_list"] = m.get_list()
parsed["server_encrypt_algo_list"] = m.get_list()
parsed["client_mac_algo_list"] = m.get_list()
parsed["server_mac_algo_list"] = m.get_list()
parsed["client_compress_algo_list"] = m.get_list()
parsed["server_compress_algo_list"] = m.get_list()
parsed["client_lang_list"] = m.get_list()
parsed["server_lang_list"] = m.get_list()
parsed["kex_follows"] = m.get_boolean()
m.get_int() # unused
return parsed
def _get_latest_kex_init(self):
return self._really_parse_kex_init(
Message(self._latest_kex_init),
ignore_first_byte=True,
)
    def _parse_kex_init(self, m):
        """
        Negotiate every algorithm family against the peer's KEXINIT.

        Parses the peer's proposal, strips the ext-info / strict-kex pseudo
        algorithms, then agrees on kex algorithm, host key type, ciphers,
        MACs and compression for both directions, storing the winners on
        ``self`` (``kex_engine``, ``host_key_type``, ``local_cipher``, etc.).

        :raises IncompatiblePeer: some family had no common algorithm.
        :raises MessageOrderError:
            strict-kex was agreed but this KEXINIT was not the first packet.
        """
        parsed = self._really_parse_kex_init(m)
        kex_algo_list = parsed["kex_algo_list"]
        server_key_algo_list = parsed["server_key_algo_list"]
        client_encrypt_algo_list = parsed["client_encrypt_algo_list"]
        server_encrypt_algo_list = parsed["server_encrypt_algo_list"]
        client_mac_algo_list = parsed["client_mac_algo_list"]
        server_mac_algo_list = parsed["server_mac_algo_list"]
        client_compress_algo_list = parsed["client_compress_algo_list"]
        server_compress_algo_list = parsed["server_compress_algo_list"]
        client_lang_list = parsed["client_lang_list"]
        server_lang_list = parsed["server_lang_list"]
        kex_follows = parsed["kex_follows"]
        self._log(DEBUG, "=== Key exchange possibilities ===")
        for prefix, value in (
            ("kex algos", kex_algo_list),
            ("server key", server_key_algo_list),
            # TODO: shouldn't these two lines say "cipher" to match usual
            # terminology (including elsewhere in paramiko!)?
            ("client encrypt", client_encrypt_algo_list),
            ("server encrypt", server_encrypt_algo_list),
            ("client mac", client_mac_algo_list),
            ("server mac", server_mac_algo_list),
            ("client compress", client_compress_algo_list),
            ("server compress", server_compress_algo_list),
            ("client lang", client_lang_list),
            ("server lang", server_lang_list),
        ):
            if value == [""]:
                value = ["<none>"]
            value = ", ".join(value)
            self._log(DEBUG, "{}: {}".format(prefix, value))
        self._log(DEBUG, "kex follows: {}".format(kex_follows))
        self._log(DEBUG, "=== Key exchange agreements ===")
        # Record, and strip out, ext-info and/or strict-kex non-algorithms
        self._remote_ext_info = None
        self._remote_strict_kex = None
        to_pop = []
        for i, algo in enumerate(kex_algo_list):
            if algo.startswith("ext-info-"):
                self._remote_ext_info = algo
                to_pop.insert(0, i)
            elif algo.startswith("kex-strict-"):
                # NOTE: this is what we are expecting from the /remote/ end.
                which = "c" if self.server_mode else "s"
                expected = f"kex-strict-{which}[email protected]"
                # Set strict mode if agreed.
                self.agreed_on_strict_kex = (
                    algo == expected and self.advertise_strict_kex
                )
                self._log(
                    DEBUG, f"Strict kex mode: {self.agreed_on_strict_kex}"
                )
                to_pop.insert(0, i)
        # to_pop was built back-to-front, so popping in that order never
        # shifts an index that is still pending removal.
        for i in to_pop:
            kex_algo_list.pop(i)
        # CVE mitigation: expect zeroed-out seqno anytime we are performing kex
        # init phase, if strict mode was negotiated.
        if (
            self.agreed_on_strict_kex
            and not self.initial_kex_done
            and m.seqno != 0
        ):
            raise MessageOrderError(
                "In strict-kex mode, but KEXINIT was not the first packet!"
            )
        # as a server, we pick the first item in the client's list that we
        # support.
        # as a client, we pick the first item in our list that the server
        # supports.
        if self.server_mode:
            agreed_kex = list(
                filter(self.preferred_kex.__contains__, kex_algo_list)
            )
        else:
            agreed_kex = list(
                filter(kex_algo_list.__contains__, self.preferred_kex)
            )
        if len(agreed_kex) == 0:
            # TODO: do an auth-overhaul style aggregate exception here?
            # TODO: would let us streamline log output & show all failures up
            # front
            raise IncompatiblePeer(
                "Incompatible ssh peer (no acceptable kex algorithm)"
            )  # noqa
        self.kex_engine = self._kex_info[agreed_kex[0]](self)
        self._log(DEBUG, "Kex: {}".format(agreed_kex[0]))
        # Host key type: servers may only offer types they hold keys for.
        if self.server_mode:
            available_server_keys = list(
                filter(
                    list(self.server_key_dict.keys()).__contains__,
                    self.preferred_keys,
                )
            )
            agreed_keys = list(
                filter(
                    available_server_keys.__contains__, server_key_algo_list
                )
            )
        else:
            agreed_keys = list(
                filter(server_key_algo_list.__contains__, self.preferred_keys)
            )
        if len(agreed_keys) == 0:
            raise IncompatiblePeer(
                "Incompatible ssh peer (no acceptable host key)"
            )  # noqa
        self.host_key_type = agreed_keys[0]
        if self.server_mode and (self.get_server_key() is None):
            raise IncompatiblePeer(
                "Incompatible ssh peer (can't match requested host key type)"
            )  # noqa
        self._log_agreement("HostKey", agreed_keys[0], agreed_keys[0])
        # Ciphers: "local" = what we send, "remote" = what we receive, so
        # which peer list applies depends on which side we are.
        if self.server_mode:
            agreed_local_ciphers = list(
                filter(
                    self.preferred_ciphers.__contains__,
                    server_encrypt_algo_list,
                )
            )
            agreed_remote_ciphers = list(
                filter(
                    self.preferred_ciphers.__contains__,
                    client_encrypt_algo_list,
                )
            )
        else:
            agreed_local_ciphers = list(
                filter(
                    client_encrypt_algo_list.__contains__,
                    self.preferred_ciphers,
                )
            )
            agreed_remote_ciphers = list(
                filter(
                    server_encrypt_algo_list.__contains__,
                    self.preferred_ciphers,
                )
            )
        if len(agreed_local_ciphers) == 0 or len(agreed_remote_ciphers) == 0:
            raise IncompatiblePeer(
                "Incompatible ssh server (no acceptable ciphers)"
            )  # noqa
        self.local_cipher = agreed_local_ciphers[0]
        self.remote_cipher = agreed_remote_ciphers[0]
        self._log_agreement(
            "Cipher", local=self.local_cipher, remote=self.remote_cipher
        )
        # MACs, same directional logic as ciphers.
        if self.server_mode:
            agreed_remote_macs = list(
                filter(self.preferred_macs.__contains__, client_mac_algo_list)
            )
            agreed_local_macs = list(
                filter(self.preferred_macs.__contains__, server_mac_algo_list)
            )
        else:
            agreed_local_macs = list(
                filter(client_mac_algo_list.__contains__, self.preferred_macs)
            )
            agreed_remote_macs = list(
                filter(server_mac_algo_list.__contains__, self.preferred_macs)
            )
        if (len(agreed_local_macs) == 0) or (len(agreed_remote_macs) == 0):
            raise IncompatiblePeer(
                "Incompatible ssh server (no acceptable macs)"
            )
        self.local_mac = agreed_local_macs[0]
        self.remote_mac = agreed_remote_macs[0]
        self._log_agreement(
            "MAC", local=self.local_mac, remote=self.remote_mac
        )
        # Compression, same directional logic again.
        if self.server_mode:
            agreed_remote_compression = list(
                filter(
                    self.preferred_compression.__contains__,
                    client_compress_algo_list,
                )
            )
            agreed_local_compression = list(
                filter(
                    self.preferred_compression.__contains__,
                    server_compress_algo_list,
                )
            )
        else:
            agreed_local_compression = list(
                filter(
                    client_compress_algo_list.__contains__,
                    self.preferred_compression,
                )
            )
            agreed_remote_compression = list(
                filter(
                    server_compress_algo_list.__contains__,
                    self.preferred_compression,
                )
            )
        if (
            len(agreed_local_compression) == 0
            or len(agreed_remote_compression) == 0
        ):
            msg = "Incompatible ssh server (no acceptable compression)"
            msg += " {!r} {!r} {!r}"
            raise IncompatiblePeer(
                msg.format(
                    agreed_local_compression,
                    agreed_remote_compression,
                    self.preferred_compression,
                )
            )
        self.local_compression = agreed_local_compression[0]
        self.remote_compression = agreed_remote_compression[0]
        self._log_agreement(
            "Compression",
            local=self.local_compression,
            remote=self.remote_compression,
        )
        self._log(DEBUG, "=== End of kex handshake ===")
        # save for computing hash later...
        # now wait! openssh has a bug (and others might too) where there are
        # actually some extra bytes (one NUL byte in openssh's case) added to
        # the end of the packet but not parsed. turns out we need to throw
        # away those bytes because they aren't part of the hash.
        self.remote_kex_init = cMSG_KEXINIT + m.get_so_far()
    def _activate_inbound(self):
        """switch on newly negotiated encryption parameters for
        inbound traffic

        Derives the inbound IV, cipher key and MAC key from the shared
        secret and hands the packetizer a configured decryption engine.
        """
        info = self._cipher_info[self.remote_cipher]
        aead = info.get("is_aead", False)
        block_size = info["block-size"]
        key_size = info["key-size"]
        # Non-AEAD/GCM type ciphers' IV size is their block size.
        iv_size = info.get("iv-size", block_size)
        # Key-derivation letters follow RFC 4253 sec. 7.2: the server's
        # inbound stream is keyed client-to-server ("A"/"C"), the client's
        # inbound stream server-to-client ("B"/"D").
        if self.server_mode:
            iv_in = self._compute_key("A", iv_size)
            key_in = self._compute_key("C", key_size)
        else:
            iv_in = self._compute_key("B", iv_size)
            key_in = self._compute_key("D", key_size)
        engine = self._get_engine(
            name=self.remote_cipher,
            key=key_in,
            iv=iv_in,
            operation=self._DECRYPT,
            aead=aead,
        )
        # encrypt-then-mac only applies to non-AEAD MAC-based modes
        etm = (not aead) and "[email protected]" in self.remote_mac
        mac_size = self._mac_info[self.remote_mac]["size"]
        mac_engine = self._mac_info[self.remote_mac]["class"]
        # initial mac keys are done in the hash's natural size (not the
        # potentially truncated transmission size)
        if self.server_mode:
            mac_key = self._compute_key("E", mac_engine().digest_size)
        else:
            mac_key = self._compute_key("F", mac_engine().digest_size)
        # AEAD modes carry a 16-byte tag instead of a separate MAC.
        self.packetizer.set_inbound_cipher(
            block_engine=engine,
            block_size=block_size,
            mac_engine=None if aead else mac_engine,
            mac_size=16 if aead else mac_size,
            mac_key=None if aead else mac_key,
            etm=etm,
            aead=aead,
            iv_in=iv_in if aead else None,
        )
        compress_in = self._compression_info[self.remote_compression][1]
        # [email protected] waits for successful auth before compressing.
        if compress_in is not None and (
            self.remote_compression != "[email protected]" or self.authenticated
        ):
            self._log(DEBUG, "Switching on inbound compression ...")
            self.packetizer.set_inbound_compressor(compress_in())
        # Reset inbound sequence number if strict mode.
        if self.agreed_on_strict_kex:
            self._log(
                DEBUG,
                "Resetting inbound seqno after NEWKEYS due to strict mode",
            )
            self.packetizer.reset_seqno_in()
    def _activate_outbound(self):
        """switch on newly negotiated encryption parameters for
        outbound traffic

        Sends NEWKEYS, derives the outbound IV/cipher/MAC keys, configures
        the packetizer's encryption side, and (in server mode, when the
        client advertised ext-info-c) sends MSG_EXT_INFO right after.
        """
        m = Message()
        m.add_byte(cMSG_NEWKEYS)
        self._send_message(m)
        # Reset outbound sequence number if strict mode.
        if self.agreed_on_strict_kex:
            self._log(
                DEBUG,
                "Resetting outbound seqno after NEWKEYS due to strict mode",
            )
            self.packetizer.reset_seqno_out()
        info = self._cipher_info[self.local_cipher]
        aead = info.get("is_aead", False)
        block_size = info["block-size"]
        key_size = info["key-size"]
        # Non-AEAD/GCM type ciphers' IV size is their block size.
        iv_size = info.get("iv-size", block_size)
        # Mirror image of _activate_inbound: the server's outbound stream
        # is keyed server-to-client ("B"/"D"), the client's client-to-server
        # ("A"/"C") -- per RFC 4253 sec. 7.2.
        if self.server_mode:
            iv_out = self._compute_key("B", iv_size)
            key_out = self._compute_key("D", key_size)
        else:
            iv_out = self._compute_key("A", iv_size)
            key_out = self._compute_key("C", key_size)
        engine = self._get_engine(
            name=self.local_cipher,
            key=key_out,
            iv=iv_out,
            operation=self._ENCRYPT,
            aead=aead,
        )
        etm = (not aead) and "[email protected]" in self.local_mac
        mac_size = self._mac_info[self.local_mac]["size"]
        mac_engine = self._mac_info[self.local_mac]["class"]
        # initial mac keys are done in the hash's natural size (not the
        # potentially truncated transmission size)
        if self.server_mode:
            mac_key = self._compute_key("F", mac_engine().digest_size)
        else:
            mac_key = self._compute_key("E", mac_engine().digest_size)
        sdctr = self.local_cipher.endswith("-ctr")
        self.packetizer.set_outbound_cipher(
            block_engine=engine,
            block_size=block_size,
            mac_engine=None if aead else mac_engine,
            mac_size=16 if aead else mac_size,
            mac_key=None if aead else mac_key,
            sdctr=sdctr,
            etm=etm,
            aead=aead,
            iv_out=iv_out if aead else None,
        )
        compress_out = self._compression_info[self.local_compression][0]
        # [email protected] defers compression until after auth.
        if compress_out is not None and (
            self.local_compression != "[email protected]" or self.authenticated
        ):
            self._log(DEBUG, "Switching on outbound compression ...")
            self.packetizer.set_outbound_compressor(compress_out())
        if not self.packetizer.need_rekey():
            self.in_kex = False
        # If client indicated extension support, send that packet immediately
        if (
            self.server_mode
            and self.server_sig_algs
            and self._remote_ext_info == "ext-info-c"
        ):
            extensions = {"server-sig-algs": ",".join(self.preferred_pubkeys)}
            m = Message()
            m.add_byte(cMSG_EXT_INFO)
            m.add_int(len(extensions))
            for name, value in sorted(extensions.items()):
                m.add_string(name)
                m.add_string(value)
            self._send_message(m)
        # we always expect to receive NEWKEYS now
        self._expect_packet(MSG_NEWKEYS)
def _auth_trigger(self):
self.authenticated = True
# delayed initiation of compression
if self.local_compression == "[email protected]":
compress_out = self._compression_info[self.local_compression][0]
self._log(DEBUG, "Switching on outbound compression ...")
self.packetizer.set_outbound_compressor(compress_out())
if self.remote_compression == "[email protected]":
compress_in = self._compression_info[self.remote_compression][1]
self._log(DEBUG, "Switching on inbound compression ...")
self.packetizer.set_inbound_compressor(compress_in())
def _parse_ext_info(self, msg):
# Packet is a count followed by that many key-string to possibly-bytes
# pairs.
extensions = {}
for _ in range(msg.get_int()):
name = msg.get_text()
value = msg.get_string()
extensions[name] = value
self._log(DEBUG, "Got EXT_INFO: {}".format(extensions))
# NOTE: this should work ok in cases where a server sends /two/ such
# messages; the RFC explicitly states a 2nd one should overwrite the
# 1st.
self.server_extensions = extensions
def _parse_newkeys(self, m):
self._log(DEBUG, "Switch to new keys ...")
self._activate_inbound()
# can also free a bunch of stuff here
self.local_kex_init = self.remote_kex_init = None
self.K = None
self.kex_engine = None
if self.server_mode and (self.auth_handler is None):
# create auth handler for server mode
self.auth_handler = AuthHandler(self)
if not self.initial_kex_done:
# this was the first key exchange
# (also signal to packetizer as it sometimes wants to know this
# status as well, eg when seqnos rollover)
self.initial_kex_done = self.packetizer._initial_kex_done = True
# send an event?
if self.completion_event is not None:
self.completion_event.set()
# it's now okay to send data again (if this was a re-key)
if not self.packetizer.need_rekey():
self.in_kex = False
self.clear_to_send_lock.acquire()
try:
self.clear_to_send.set()
finally:
self.clear_to_send_lock.release()
return
def _parse_disconnect(self, m):
code = m.get_int()
desc = m.get_text()
self._log(INFO, "Disconnect (code {:d}): {}".format(code, desc))
def _parse_global_request(self, m):
kind = m.get_text()
self._log(DEBUG, 'Received global request "{}"'.format(kind))
want_reply = m.get_boolean()
if not self.server_mode:
self._log(
DEBUG,
'Rejecting "{}" global request from server.'.format(kind),
)
ok = False
elif kind == "tcpip-forward":
address = m.get_text()
port = m.get_int()
ok = self.server_object.check_port_forward_request(address, port)
if ok:
ok = (ok,)
elif kind == "cancel-tcpip-forward":
address = m.get_text()
port = m.get_int()
self.server_object.cancel_port_forward_request(address, port)
ok = True
else:
ok = self.server_object.check_global_request(kind, m)
extra = ()
if type(ok) is tuple:
extra = ok
ok = True
if want_reply:
msg = Message()
if ok:
msg.add_byte(cMSG_REQUEST_SUCCESS)
msg.add(*extra)
else:
msg.add_byte(cMSG_REQUEST_FAILURE)
self._send_message(msg)
def _parse_request_success(self, m):
self._log(DEBUG, "Global request successful.")
self.global_response = m
if self.completion_event is not None:
self.completion_event.set()
def _parse_request_failure(self, m):
self._log(DEBUG, "Global request denied.")
self.global_response = None
if self.completion_event is not None:
self.completion_event.set()
def _parse_channel_open_success(self, m):
chanid = m.get_int()
server_chanid = m.get_int()
server_window_size = m.get_int()
server_max_packet_size = m.get_int()
chan = self._channels.get(chanid)
if chan is None:
self._log(WARNING, "Success for unrequested channel! [??]")
return
self.lock.acquire()
try:
chan._set_remote_channel(
server_chanid, server_window_size, server_max_packet_size
)
self._log(DEBUG, "Secsh channel {:d} opened.".format(chanid))
if chanid in self.channel_events:
self.channel_events[chanid].set()
del self.channel_events[chanid]
finally:
self.lock.release()
return
def _parse_channel_open_failure(self, m):
chanid = m.get_int()
reason = m.get_int()
reason_str = m.get_text()
m.get_text() # ignored language
reason_text = CONNECTION_FAILED_CODE.get(reason, "(unknown code)")
self._log(
ERROR,
"Secsh channel {:d} open FAILED: {}: {}".format(
chanid, reason_str, reason_text
),
)
self.lock.acquire()
try:
self.saved_exception = ChannelException(reason, reason_text)
if chanid in self.channel_events:
self._channels.delete(chanid)
if chanid in self.channel_events:
self.channel_events[chanid].set()
del self.channel_events[chanid]
finally:
self.lock.release()
return
    def _parse_channel_open(self, m):
        """
        Handle an incoming CHANNEL_OPEN request.

        Accepts agent-forward, x11 and forwarded-tcpip opens when the
        matching handler was registered; in server mode, consults the
        server object for direct-tcpip and any other kind. Rejected
        requests get a CHANNEL_OPEN_FAILURE reply; accepted ones get a new
        `.Channel` plus a CHANNEL_OPEN_SUCCESS reply, after which the
        channel is handed to its handler (or queued for `accept`).
        """
        kind = m.get_text()
        chanid = m.get_int()
        initial_window_size = m.get_int()
        max_packet_size = m.get_int()
        reject = False
        if (
            kind == "[email protected]"
            and self._forward_agent_handler is not None
        ):
            self._log(DEBUG, "Incoming forward agent connection")
            self.lock.acquire()
            try:
                my_chanid = self._next_channel()
            finally:
                self.lock.release()
        elif (kind == "x11") and (self._x11_handler is not None):
            origin_addr = m.get_text()
            origin_port = m.get_int()
            self._log(
                DEBUG,
                "Incoming x11 connection from {}:{:d}".format(
                    origin_addr, origin_port
                ),
            )
            self.lock.acquire()
            try:
                my_chanid = self._next_channel()
            finally:
                self.lock.release()
        elif (kind == "forwarded-tcpip") and (self._tcp_handler is not None):
            server_addr = m.get_text()
            server_port = m.get_int()
            origin_addr = m.get_text()
            origin_port = m.get_int()
            self._log(
                DEBUG,
                "Incoming tcp forwarded connection from {}:{:d}".format(
                    origin_addr, origin_port
                ),
            )
            self.lock.acquire()
            try:
                my_chanid = self._next_channel()
            finally:
                self.lock.release()
        elif not self.server_mode:
            # clients reject any other server-initiated channel kind
            self._log(
                DEBUG,
                'Rejecting "{}" channel request from server.'.format(kind),
            )
            reject = True
            reason = OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED
        else:
            # server mode: allocate an id, then let the server object decide
            self.lock.acquire()
            try:
                my_chanid = self._next_channel()
            finally:
                self.lock.release()
            if kind == "direct-tcpip":
                # handle direct-tcpip requests coming from the client
                dest_addr = m.get_text()
                dest_port = m.get_int()
                origin_addr = m.get_text()
                origin_port = m.get_int()
                reason = self.server_object.check_channel_direct_tcpip_request(
                    my_chanid,
                    (origin_addr, origin_port),
                    (dest_addr, dest_port),
                )
            else:
                reason = self.server_object.check_channel_request(
                    kind, my_chanid
                )
            if reason != OPEN_SUCCEEDED:
                self._log(
                    DEBUG,
                    'Rejecting "{}" channel request from client.'.format(kind),
                )
                reject = True
        if reject:
            msg = Message()
            msg.add_byte(cMSG_CHANNEL_OPEN_FAILURE)
            msg.add_int(chanid)
            msg.add_int(reason)
            msg.add_string("")
            msg.add_string("en")
            self._send_message(msg)
            return
        # accepted: register the new channel under the lock
        chan = Channel(my_chanid)
        self.lock.acquire()
        try:
            self._channels.put(my_chanid, chan)
            self.channels_seen[my_chanid] = True
            chan._set_transport(self)
            chan._set_window(
                self.default_window_size, self.default_max_packet_size
            )
            chan._set_remote_channel(
                chanid, initial_window_size, max_packet_size
            )
        finally:
            self.lock.release()
        m = Message()
        m.add_byte(cMSG_CHANNEL_OPEN_SUCCESS)
        m.add_int(chanid)
        m.add_int(my_chanid)
        m.add_int(self.default_window_size)
        m.add_int(self.default_max_packet_size)
        self._send_message(m)
        self._log(
            DEBUG, "Secsh channel {:d} ({}) opened.".format(my_chanid, kind)
        )
        # hand the new channel to its registered handler, if any
        if kind == "[email protected]":
            self._forward_agent_handler(chan)
        elif kind == "x11":
            self._x11_handler(chan, (origin_addr, origin_port))
        elif kind == "forwarded-tcpip":
            chan.origin_addr = (origin_addr, origin_port)
            self._tcp_handler(
                chan, (origin_addr, origin_port), (server_addr, server_port)
            )
        else:
            self._queue_incoming_channel(chan)
def _parse_debug(self, m):
m.get_boolean() # always_display
msg = m.get_string()
m.get_string() # language
self._log(DEBUG, "Debug msg: {}".format(util.safe_string(msg)))
def _get_subsystem_handler(self, name):
try:
self.lock.acquire()
if name not in self.subsystem_table:
return None, [], {}
return self.subsystem_table[name]
finally:
self.lock.release()
_channel_handler_table = {
MSG_CHANNEL_SUCCESS: Channel._request_success,
MSG_CHANNEL_FAILURE: Channel._request_failed,
MSG_CHANNEL_DATA: Channel._feed,
MSG_CHANNEL_EXTENDED_DATA: Channel._feed_extended,
MSG_CHANNEL_WINDOW_ADJUST: Channel._window_adjust,
MSG_CHANNEL_REQUEST: Channel._handle_request,
MSG_CHANNEL_EOF: Channel._handle_eof,
MSG_CHANNEL_CLOSE: Channel._handle_close,
}
# TODO 4.0: drop this, we barely use it ourselves, it badly replicates the
# Transport-internal algorithm management, AND does so in a way which doesn't
# honor newer things like disabled_algorithms!
class SecurityOptions:
    """
    Simple object containing the security preferences of an ssh transport.
    These are tuples of acceptable ciphers, digests, key types, and key
    exchange algorithms, listed in order of preference.

    Changing the contents and/or order of these fields affects the
    underlying `.Transport` (but only if you change them before starting
    the session). Assigning anything but a tuple or list raises
    ``TypeError``; naming an algorithm paramiko doesn't recognize raises
    ``ValueError``.
    """

    __slots__ = "_transport"

    def __init__(self, transport):
        self._transport = transport

    def __repr__(self):
        """Debug representation naming the wrapped transport."""
        return "<paramiko.SecurityOptions for {!r}>".format(self._transport)

    def _set(self, name, orig, x):
        # Lists are accepted for convenience but always stored as tuples.
        if type(x) is list:
            x = tuple(x)
        if type(x) is not tuple:
            raise TypeError("expected tuple or list")
        # Every requested algorithm must be one the transport knows about.
        known = getattr(self._transport, orig)
        if any(algo not in known for algo in x):
            raise ValueError("unknown cipher")
        setattr(self._transport, name, x)

    @property
    def ciphers(self):
        """Symmetric encryption ciphers"""
        return self._transport._preferred_ciphers

    @ciphers.setter
    def ciphers(self, x):
        self._set("_preferred_ciphers", "_cipher_info", x)

    @property
    def digests(self):
        """Digest (one-way hash) algorithms"""
        return self._transport._preferred_macs

    @digests.setter
    def digests(self, x):
        self._set("_preferred_macs", "_mac_info", x)

    @property
    def key_types(self):
        """Public-key algorithms"""
        return self._transport._preferred_keys

    @key_types.setter
    def key_types(self, x):
        self._set("_preferred_keys", "_key_info", x)

    @property
    def kex(self):
        """Key exchange algorithms"""
        return self._transport._preferred_kex

    @kex.setter
    def kex(self, x):
        self._set("_preferred_kex", "_kex_info", x)

    @property
    def compression(self):
        """Compression algorithms"""
        return self._transport._preferred_compression

    @compression.setter
    def compression(self, x):
        self._set("_preferred_compression", "_compression_info", x)
class ChannelMap:
    """
    Thread-safe mapping of channel id -> `.Channel`, holding channels only
    weakly so a channel dropped everywhere else can be garbage-collected.
    """

    def __init__(self):
        # weak values: this map alone never keeps a Channel alive
        self._map = weakref.WeakValueDictionary()
        self._lock = threading.Lock()

    def put(self, chanid, chan):
        """Register *chan* under *chanid*."""
        with self._lock:
            self._map[chanid] = chan

    def get(self, chanid):
        """Return the channel for *chanid*, or ``None`` when absent."""
        with self._lock:
            return self._map.get(chanid, None)

    def delete(self, chanid):
        """Remove *chanid*; a missing id is silently ignored."""
        with self._lock:
            self._map.pop(chanid, None)

    def values(self):
        """Return a snapshot list of the currently live channels."""
        with self._lock:
            return list(self._map.values())

    def __len__(self):
        with self._lock:
            return len(self._map)
class ServiceRequestingTransport(Transport):
    """
    Transport, but also handling service requests, like it oughtta!

    Unlike the parent class, this one explicitly sends
    MSG_SERVICE_REQUEST("ssh-userauth") and waits for the server's
    MSG_SERVICE_ACCEPT before attempting any authentication.

    .. versionadded:: 3.2
    """
    # NOTE: this purposefully duplicates some of the parent class in order to
    # modernize, refactor, etc. The intent is that eventually we will collapse
    # this one onto the parent in a backwards incompatible release.
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Becomes True once the server accepts our "ssh-userauth" request.
        self._service_userauth_accepted = False
        self._handler_table[MSG_SERVICE_ACCEPT] = self._parse_service_accept
    def _parse_service_accept(self, m):
        # Handler for MSG_SERVICE_ACCEPT; only "ssh-userauth" unlocks auth.
        service = m.get_text()
        # Short-circuit for any service name not ssh-userauth.
        # NOTE: it's technically possible for 'service name' in
        # SERVICE_REQUEST/ACCEPT messages to be "ssh-connection" --
        # but I don't see evidence of Paramiko ever initiating or expecting to
        # receive one of these. We /do/ see the 'service name' field in
        # MSG_USERAUTH_REQUEST/ACCEPT/FAILURE set to this string, but that is a
        # different set of handlers, so...!
        if service != "ssh-userauth":
            # TODO 4.0: consider erroring here (with an ability to opt out?)
            # instead as it probably means something went Very Wrong.
            self._log(
                DEBUG, 'Service request "{}" accepted (?)'.format(service)
            )
            return
        # Record that we saw a service-userauth acceptance, meaning we are free
        # to submit auth requests.
        self._service_userauth_accepted = True
        self._log(DEBUG, "MSG_SERVICE_ACCEPT received; auth may begin")
    def ensure_session(self):
        # Make sure we're not trying to auth on a not-yet-open or
        # already-closed transport session; that's our responsibility, not that
        # of AuthHandler.
        if (not self.active) or (not self.initial_kex_done):
            # TODO: better error message? this can happen in many places, eg
            # user error (authing before connecting) or developer error (some
            # improperly handled pre/mid auth shutdown didn't become fatal
            # enough). The latter is much more common & should ideally be fixed
            # by terminating things harder?
            raise SSHException("No existing session")
        # Also make sure we've actually been told we are allowed to auth.
        if self._service_userauth_accepted:
            return
        # Or request to do so, otherwise.
        m = Message()
        m.add_byte(cMSG_SERVICE_REQUEST)
        m.add_string("ssh-userauth")
        self._log(DEBUG, "Sending MSG_SERVICE_REQUEST: ssh-userauth")
        self._send_message(m)
        # Now we wait to hear back; the user is expecting a blocking-style auth
        # request so there's no point giving control back anywhere.
        while not self._service_userauth_accepted:
            # TODO: feels like we're missing an AuthHandler Event like
            # 'self.auth_event' which is set when AuthHandler shuts down in
            # ways good AND bad. Transport only seems to have completion_event
            # which is unclear re: intent, eg it's set by newkeys which always
            # happens on connection, so it'll always be set by the time we get
            # here.
            # NOTE: this copies the timing of event.wait() in
            # AuthHandler.wait_for_response, re: 1/10 of a second. Could
            # presumably be smaller, but seems unlikely this period is going to
            # be "too long" for any code doing ssh networking...
            time.sleep(0.1)
        self.auth_handler = self.get_auth_handler()
    def get_auth_handler(self):
        # NOTE: using new sibling subclass instead of classic AuthHandler
        return AuthOnlyHandler(self)
    def auth_none(self, username):
        # TODO 4.0: merge to parent, preserving (most of) docstring
        self.ensure_session()
        return self.auth_handler.auth_none(username)
    def auth_password(self, username, password, fallback=True):
        # TODO 4.0: merge to parent, preserving (most of) docstring
        self.ensure_session()
        try:
            return self.auth_handler.auth_password(username, password)
        except BadAuthenticationType as e:
            # if password auth isn't allowed, but keyboard-interactive *is*,
            # try to fudge it
            if not fallback or ("keyboard-interactive" not in e.allowed_types):
                raise
            try:
                def handler(title, instructions, fields):
                    # Fake keyboard-interactive: only answerable when the
                    # server asks for exactly one (password) field.
                    if len(fields) > 1:
                        raise SSHException("Fallback authentication failed.")
                    if len(fields) == 0:
                        # for some reason, at least on os x, a 2nd request will
                        # be made with zero fields requested. maybe it's just
                        # to try to fake out automated scripting of the exact
                        # type we're doing here. *shrug* :)
                        return []
                    return [password]
                return self.auth_interactive(username, handler)
            except SSHException:
                # attempt to fudge failed; just raise the original exception
                raise e
    def auth_publickey(self, username, key):
        # TODO 4.0: merge to parent, preserving (most of) docstring
        self.ensure_session()
        return self.auth_handler.auth_publickey(username, key)
    def auth_interactive(self, username, handler, submethods=""):
        # TODO 4.0: merge to parent, preserving (most of) docstring
        self.ensure_session()
        return self.auth_handler.auth_interactive(
            username, handler, submethods
        )
    def auth_interactive_dumb(self, username, handler=None, submethods=""):
        # TODO 4.0: merge to parent, preserving (most of) docstring
        # NOTE: legacy impl omitted equiv of ensure_session since it just wraps
        # another call to an auth method. however we reinstate it for
        # consistency reasons.
        self.ensure_session()
        if not handler:
            # Default "dumb" handler: print prompts to stdout, read answers
            # from stdin.
            def handler(title, instructions, prompt_list):
                answers = []
                if title:
                    print(title.strip())
                if instructions:
                    print(instructions.strip())
                for prompt, show_input in prompt_list:
                    print(prompt.strip(), end=" ")
                    answers.append(input())
                return answers
        return self.auth_interactive(username, handler, submethods)
    def auth_gssapi_with_mic(self, username, gss_host, gss_deleg_creds):
        # TODO 4.0: merge to parent, preserving (most of) docstring
        self.ensure_session()
        self.auth_handler = self.get_auth_handler()
        return self.auth_handler.auth_gssapi_with_mic(
            username, gss_host, gss_deleg_creds
        )
    def auth_gssapi_keyex(self, username):
        # TODO 4.0: merge to parent, preserving (most of) docstring
        self.ensure_session()
        self.auth_handler = self.get_auth_handler()
        return self.auth_handler.auth_gssapi_keyex(username)
| 135,632 | Python | .py | 3,132 | 32.076948 | 90 | 0.584187 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
611 | kex_gex.py | paramiko_paramiko/paramiko/kex_gex.py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Variant on `KexGroup1 <paramiko.kex_group1.KexGroup1>` where the prime "p" and
generator "g" are provided by the server. A bit more work is required on the
client side, and a **lot** more on the server side.
"""
import os
from hashlib import sha1, sha256
from paramiko import util
from paramiko.common import DEBUG, byte_chr, byte_ord, byte_mask
from paramiko.message import Message
from paramiko.ssh_exception import SSHException
# SSH message numbers 30-34 used by group-exchange kex, as plain ints
# (used for packet dispatch in KexGex.parse_next)...
(
    _MSG_KEXDH_GEX_REQUEST_OLD,
    _MSG_KEXDH_GEX_GROUP,
    _MSG_KEXDH_GEX_INIT,
    _MSG_KEXDH_GEX_REPLY,
    _MSG_KEXDH_GEX_REQUEST,
) = range(30, 35)
# ...and the same numbers as single bytes (used when building outgoing
# packets via Message.add_byte).
(
    c_MSG_KEXDH_GEX_REQUEST_OLD,
    c_MSG_KEXDH_GEX_GROUP,
    c_MSG_KEXDH_GEX_INIT,
    c_MSG_KEXDH_GEX_REPLY,
    c_MSG_KEXDH_GEX_REQUEST,
) = [byte_chr(c) for c in range(30, 35)]
class KexGex:
    """
    Diffie-Hellman group-exchange key exchange (SHA-1 flavor): instead of a
    fixed, well-known group, the server picks the prime ``p`` and generator
    ``g`` at runtime, within a bit range requested by the client.
    """

    name = "diffie-hellman-group-exchange-sha1"
    # client-advertised acceptable modulus sizes (bits)
    min_bits = 1024
    max_bits = 8192
    preferred_bits = 2048
    hash_algo = sha1

    def __init__(self, transport):
        self.transport = transport
        # DH group parameters and ephemeral values, filled in as the
        # exchange progresses
        self.p = None
        self.q = None
        self.g = None
        self.x = None
        self.e = None
        self.f = None
        # True when the peer used the legacy single-integer GEX request
        # (which changes which fields go into the exchange hash)
        self.old_style = False

    def start_kex(self, _test_old_style=False):
        """Kick off the exchange: servers wait for a request packet;
        clients send the desired bit range for the group."""
        if self.transport.server_mode:
            self.transport._expect_packet(
                _MSG_KEXDH_GEX_REQUEST, _MSG_KEXDH_GEX_REQUEST_OLD
            )
            return
        # request a bit range: we accept (min_bits) to (max_bits), but prefer
        # (preferred_bits). according to the spec, we shouldn't pull the
        # minimum up above 1024.
        m = Message()
        if _test_old_style:
            # only used for unit tests: we shouldn't ever send this
            m.add_byte(c_MSG_KEXDH_GEX_REQUEST_OLD)
            m.add_int(self.preferred_bits)
            self.old_style = True
        else:
            m.add_byte(c_MSG_KEXDH_GEX_REQUEST)
            m.add_int(self.min_bits)
            m.add_int(self.preferred_bits)
            m.add_int(self.max_bits)
        self.transport._send_message(m)
        self.transport._expect_packet(_MSG_KEXDH_GEX_GROUP)

    def parse_next(self, ptype, m):
        """Dispatch an incoming kex packet by message number."""
        if ptype == _MSG_KEXDH_GEX_REQUEST:
            return self._parse_kexdh_gex_request(m)
        elif ptype == _MSG_KEXDH_GEX_GROUP:
            return self._parse_kexdh_gex_group(m)
        elif ptype == _MSG_KEXDH_GEX_INIT:
            return self._parse_kexdh_gex_init(m)
        elif ptype == _MSG_KEXDH_GEX_REPLY:
            return self._parse_kexdh_gex_reply(m)
        elif ptype == _MSG_KEXDH_GEX_REQUEST_OLD:
            return self._parse_kexdh_gex_request_old(m)
        msg = "KexGex {} asked to handle packet type {:d}"
        raise SSHException(msg.format(self.name, ptype))

    # ...internals...

    def _generate_x(self):
        # generate an "x" (1 < x < (p-1)/2).
        q = (self.p - 1) // 2
        qnorm = util.deflate_long(q, 0)
        qhbyte = byte_ord(qnorm[0])
        byte_count = len(qnorm)
        qmask = 0xFF
        # shrink the mask so random candidates don't overshoot q's top byte
        while not (qhbyte & 0x80):
            qhbyte <<= 1
            qmask >>= 1
        # rejection-sample until a candidate falls in the open range (1, q)
        while True:
            x_bytes = os.urandom(byte_count)
            x_bytes = byte_mask(x_bytes[0], qmask) + x_bytes[1:]
            x = util.inflate_long(x_bytes, 1)
            if (x > 1) and (x < q):
                break
        self.x = x

    def _parse_kexdh_gex_request(self, m):
        """(server mode) Handle the client's (min, preferred, max) bit-size
        request and reply with a chosen group (p, g)."""
        minbits = m.get_int()
        preferredbits = m.get_int()
        maxbits = m.get_int()
        # smoosh the user's preferred size into our own limits
        if preferredbits > self.max_bits:
            preferredbits = self.max_bits
        if preferredbits < self.min_bits:
            preferredbits = self.min_bits
        # fix min/max if they're inconsistent. technically, we could just pout
        # and hang up, but there's no harm in giving them the benefit of the
        # doubt and just picking a bitsize for them.
        if minbits > preferredbits:
            minbits = preferredbits
        if maxbits < preferredbits:
            maxbits = preferredbits
        # now save a copy (these also feed the exchange hash later)
        self.min_bits = minbits
        self.preferred_bits = preferredbits
        self.max_bits = maxbits
        # generate prime
        pack = self.transport._get_modulus_pack()
        if pack is None:
            raise SSHException("Can't do server-side gex with no modulus pack")
        self.transport._log(
            DEBUG,
            "Picking p ({} <= {} <= {} bits)".format(
                minbits, preferredbits, maxbits
            ),
        )
        self.g, self.p = pack.get_modulus(minbits, preferredbits, maxbits)
        m = Message()
        m.add_byte(c_MSG_KEXDH_GEX_GROUP)
        m.add_mpint(self.p)
        m.add_mpint(self.g)
        self.transport._send_message(m)
        self.transport._expect_packet(_MSG_KEXDH_GEX_INIT)

    def _parse_kexdh_gex_request_old(self, m):
        # same as above, but without min_bits or max_bits (used by older
        # clients like putty)
        self.preferred_bits = m.get_int()
        # smoosh the user's preferred size into our own limits
        if self.preferred_bits > self.max_bits:
            self.preferred_bits = self.max_bits
        if self.preferred_bits < self.min_bits:
            self.preferred_bits = self.min_bits
        # generate prime
        pack = self.transport._get_modulus_pack()
        if pack is None:
            raise SSHException("Can't do server-side gex with no modulus pack")
        self.transport._log(
            DEBUG, "Picking p (~ {} bits)".format(self.preferred_bits)
        )
        self.g, self.p = pack.get_modulus(
            self.min_bits, self.preferred_bits, self.max_bits
        )
        m = Message()
        m.add_byte(c_MSG_KEXDH_GEX_GROUP)
        m.add_mpint(self.p)
        m.add_mpint(self.g)
        self.transport._send_message(m)
        self.transport._expect_packet(_MSG_KEXDH_GEX_INIT)
        # remember the legacy request format for the exchange-hash layout
        self.old_style = True

    def _parse_kexdh_gex_group(self, m):
        """(client mode) Receive the server's (p, g), sanity-check p's size,
        and send our public value e = g^x mod p."""
        self.p = m.get_mpint()
        self.g = m.get_mpint()
        # reject if p's bit length < 1024 or > 8192
        bitlen = util.bit_length(self.p)
        if (bitlen < 1024) or (bitlen > 8192):
            raise SSHException(
                "Server-generated gex p (don't ask) is out of range "
                "({} bits)".format(bitlen)
            )
        self.transport._log(DEBUG, "Got server p ({} bits)".format(bitlen))
        self._generate_x()
        # now compute e = g^x mod p
        self.e = pow(self.g, self.x, self.p)
        m = Message()
        m.add_byte(c_MSG_KEXDH_GEX_INIT)
        m.add_mpint(self.e)
        self.transport._send_message(m)
        self.transport._expect_packet(_MSG_KEXDH_GEX_REPLY)

    def _parse_kexdh_gex_init(self, m):
        """(server mode) Receive the client's e, compute the shared secret
        and exchange hash, and send the signed reply."""
        self.e = m.get_mpint()
        if (self.e < 1) or (self.e > self.p - 1):
            raise SSHException('Client kex "e" is out of range')
        self._generate_x()
        self.f = pow(self.g, self.x, self.p)
        K = pow(self.e, self.x, self.p)
        key = self.transport.get_server_key().asbytes()
        # okay, build up the hash H of
        # (V_C || V_S || I_C || I_S || K_S || min || n || max || p || g || e || f || K) # noqa
        hm = Message()
        hm.add(
            self.transport.remote_version,
            self.transport.local_version,
            self.transport.remote_kex_init,
            self.transport.local_kex_init,
            key,
        )
        # old-style requests carried only "n", so min/max are omitted from H
        if not self.old_style:
            hm.add_int(self.min_bits)
        hm.add_int(self.preferred_bits)
        if not self.old_style:
            hm.add_int(self.max_bits)
        hm.add_mpint(self.p)
        hm.add_mpint(self.g)
        hm.add_mpint(self.e)
        hm.add_mpint(self.f)
        hm.add_mpint(K)
        H = self.hash_algo(hm.asbytes()).digest()
        self.transport._set_K_H(K, H)
        # sign it
        sig = self.transport.get_server_key().sign_ssh_data(
            H, self.transport.host_key_type
        )
        # send reply
        m = Message()
        m.add_byte(c_MSG_KEXDH_GEX_REPLY)
        m.add_string(key)
        m.add_mpint(self.f)
        m.add_string(sig)
        self.transport._send_message(m)
        self.transport._activate_outbound()

    def _parse_kexdh_gex_reply(self, m):
        """(client mode) Receive the server's reply, recompute the exchange
        hash, and verify the host key's signature over it."""
        host_key = m.get_string()
        self.f = m.get_mpint()
        sig = m.get_string()
        if (self.f < 1) or (self.f > self.p - 1):
            raise SSHException('Server kex "f" is out of range')
        K = pow(self.f, self.x, self.p)
        # okay, build up the hash H of
        # (V_C || V_S || I_C || I_S || K_S || min || n || max || p || g || e || f || K) # noqa
        hm = Message()
        hm.add(
            self.transport.local_version,
            self.transport.remote_version,
            self.transport.local_kex_init,
            self.transport.remote_kex_init,
            host_key,
        )
        # old-style requests carried only "n", so min/max are omitted from H
        if not self.old_style:
            hm.add_int(self.min_bits)
        hm.add_int(self.preferred_bits)
        if not self.old_style:
            hm.add_int(self.max_bits)
        hm.add_mpint(self.p)
        hm.add_mpint(self.g)
        hm.add_mpint(self.e)
        hm.add_mpint(self.f)
        hm.add_mpint(K)
        self.transport._set_K_H(K, self.hash_algo(hm.asbytes()).digest())
        self.transport._verify_key(host_key, sig)
        self.transport._activate_outbound()
class KexGexSHA256(KexGex):
    """Group-exchange kex using SHA-256 for the exchange hash; everything
    else is inherited from `KexGex`."""

    hash_algo = sha256
    name = "diffie-hellman-group-exchange-sha256"
| 10,320 | Python | .py | 267 | 30.037453 | 95 | 0.59061 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
612 | _winapi.py | paramiko_paramiko/paramiko/_winapi.py | """
Windows API functions implemented as ctypes functions and classes as found
in jaraco.windows (3.4.1).
If you encounter issues with this module, please consider reporting the issues
in jaraco.windows and asking the author to port the fixes back here.
"""
import builtins
import ctypes.wintypes
from paramiko.util import u
######################
# jaraco.windows.error
def format_system_message(errno):
    """
    Call FormatMessage with a system error number to retrieve
    the descriptive error message.

    :param errno: Win32 system error code to look up.
    :returns: the system's message text for ``errno`` (a ``str``).
    """
    # first some flags used by FormatMessageW
    ALLOCATE_BUFFER = 0x100
    FROM_SYSTEM = 0x1000
    # Let FormatMessageW allocate the buffer (we'll free it below)
    # Also, let it know we want a system error message.
    flags = ALLOCATE_BUFFER | FROM_SYSTEM
    source = None
    message_id = errno
    language_id = 0
    result_buffer = ctypes.wintypes.LPWSTR()
    buffer_size = 0
    arguments = None
    # (renamed from ``bytes``, which shadowed the builtin)
    format_result = ctypes.windll.kernel32.FormatMessageW(
        flags,
        source,
        message_id,
        language_id,
        ctypes.byref(result_buffer),
        buffer_size,
        arguments,
    )
    # note the following will cause an infinite loop if GetLastError
    # repeatedly returns an error that cannot be formatted, although
    # this should not happen.
    handle_nonzero_success(format_result)
    message = result_buffer.value
    # free the system-allocated buffer (required with ALLOCATE_BUFFER)
    ctypes.windll.kernel32.LocalFree(result_buffer)
    return message
class WindowsError(builtins.WindowsError):
    """more info about errors at
    http://msdn.microsoft.com/en-us/library/ms681381(VS.85).aspx"""

    def __init__(self, value=None):
        # default to the calling thread's last Win32 error code
        if value is None:
            value = ctypes.windll.kernel32.GetLastError()
        strerror = format_system_message(value)
        # OSError-style args: (errno, strerror, filename, winerror)
        super().__init__(0, strerror, None, value)

    @property
    def message(self):
        return self.strerror

    @property
    def code(self):
        return self.winerror

    def __str__(self):
        return self.message

    def __repr__(self):
        return f"{self.__class__.__name__}({self.winerror})"
def handle_nonzero_success(result):
    """Raise `WindowsError` (built from GetLastError) when a Win32 call
    signals failure by returning exactly zero."""
    if result != 0:
        return
    raise WindowsError()
###########################
# jaraco.windows.api.memory
# movable-memory flag for GlobalAlloc (GMEM_MOVEABLE from winbase.h)
GMEM_MOVEABLE = 0x2
# ctypes signatures for the kernel32 memory/mapping functions used below;
# declaring argtypes/restype tells ctypes the correct C-level types.
GlobalAlloc = ctypes.windll.kernel32.GlobalAlloc
GlobalAlloc.argtypes = ctypes.wintypes.UINT, ctypes.c_size_t
GlobalAlloc.restype = ctypes.wintypes.HANDLE
GlobalLock = ctypes.windll.kernel32.GlobalLock
GlobalLock.argtypes = (ctypes.wintypes.HGLOBAL,)
GlobalLock.restype = ctypes.wintypes.LPVOID
GlobalUnlock = ctypes.windll.kernel32.GlobalUnlock
GlobalUnlock.argtypes = (ctypes.wintypes.HGLOBAL,)
GlobalUnlock.restype = ctypes.wintypes.BOOL
GlobalSize = ctypes.windll.kernel32.GlobalSize
GlobalSize.argtypes = (ctypes.wintypes.HGLOBAL,)
GlobalSize.restype = ctypes.c_size_t
# wide-character variant of CreateFileMapping
CreateFileMapping = ctypes.windll.kernel32.CreateFileMappingW
CreateFileMapping.argtypes = [
    ctypes.wintypes.HANDLE,
    ctypes.c_void_p,
    ctypes.wintypes.DWORD,
    ctypes.wintypes.DWORD,
    ctypes.wintypes.DWORD,
    ctypes.wintypes.LPWSTR,
]
CreateFileMapping.restype = ctypes.wintypes.HANDLE
MapViewOfFile = ctypes.windll.kernel32.MapViewOfFile
MapViewOfFile.restype = ctypes.wintypes.HANDLE
UnmapViewOfFile = ctypes.windll.kernel32.UnmapViewOfFile
UnmapViewOfFile.argtypes = (ctypes.wintypes.HANDLE,)
RtlMoveMemory = ctypes.windll.kernel32.RtlMoveMemory
RtlMoveMemory.argtypes = (ctypes.c_void_p, ctypes.c_void_p, ctypes.c_size_t)
ctypes.windll.kernel32.LocalFree.argtypes = (ctypes.wintypes.HLOCAL,)
#####################
# jaraco.windows.mmap
class MemoryMap:
    """
    A memory map object which can have security attributes overridden.

    Used as a context manager: the mapping is created on ``__enter__`` and
    unmapped/closed on ``__exit__``.
    """

    def __init__(self, name, length, security_attributes=None):
        # name of the (named) file mapping object
        self.name = name
        # mapping size in bytes
        self.length = length
        # optional SECURITY_ATTRIBUTES to apply to the mapping
        self.security_attributes = security_attributes
        # current read/write offset within the view
        self.pos = 0

    def __enter__(self):
        """Create the pagefile-backed mapping and map a writable view."""
        p_SA = (
            ctypes.byref(self.security_attributes)
            if self.security_attributes
            else None
        )
        INVALID_HANDLE_VALUE = -1
        PAGE_READWRITE = 0x4
        FILE_MAP_WRITE = 0x2
        # INVALID_HANDLE_VALUE as the file handle => backed by the pagefile
        filemap = ctypes.windll.kernel32.CreateFileMappingW(
            INVALID_HANDLE_VALUE,
            p_SA,
            PAGE_READWRITE,
            0,
            self.length,
            u(self.name),
        )
        handle_nonzero_success(filemap)
        # NOTE(review): CreateFileMappingW reports failure with a NULL
        # handle (caught by handle_nonzero_success above); this extra
        # comparison against -1 looks redundant -- confirm before relying
        # on it.
        if filemap == INVALID_HANDLE_VALUE:
            raise Exception("Failed to create file mapping")
        self.filemap = filemap
        self.view = MapViewOfFile(filemap, FILE_MAP_WRITE, 0, 0, 0)
        return self

    def seek(self, pos):
        # set the offset used by the next read/write
        self.pos = pos

    def write(self, msg):
        """Copy ``msg`` (bytes) into the view at the current offset."""
        assert isinstance(msg, bytes)
        n = len(msg)
        if self.pos + n >= self.length:  # A little safety.
            raise ValueError(f"Refusing to write {n} bytes")
        dest = self.view + self.pos
        length = ctypes.c_size_t(n)
        ctypes.windll.kernel32.RtlMoveMemory(dest, msg, length)
        self.pos += n

    def read(self, n):
        """
        Read n bytes from mapped view.
        """
        out = ctypes.create_string_buffer(n)
        source = self.view + self.pos
        length = ctypes.c_size_t(n)
        ctypes.windll.kernel32.RtlMoveMemory(out, source, length)
        self.pos += n
        return out.raw

    def __exit__(self, exc_type, exc_val, tb):
        # release the view first, then the mapping handle
        ctypes.windll.kernel32.UnmapViewOfFile(self.view)
        ctypes.windll.kernel32.CloseHandle(self.filemap)
#############################
# jaraco.windows.api.security
# from WinNT.h
# standard access-rights masks (mirroring the WinNT.h definitions)
READ_CONTROL = 0x00020000
STANDARD_RIGHTS_REQUIRED = 0x000F0000
STANDARD_RIGHTS_READ = READ_CONTROL
STANDARD_RIGHTS_WRITE = READ_CONTROL
STANDARD_RIGHTS_EXECUTE = READ_CONTROL
STANDARD_RIGHTS_ALL = 0x001F0000
# from NTSecAPI.h
POLICY_VIEW_LOCAL_INFORMATION = 0x00000001
POLICY_VIEW_AUDIT_INFORMATION = 0x00000002
POLICY_GET_PRIVATE_INFORMATION = 0x00000004
POLICY_TRUST_ADMIN = 0x00000008
POLICY_CREATE_ACCOUNT = 0x00000010
POLICY_CREATE_SECRET = 0x00000020
POLICY_CREATE_PRIVILEGE = 0x00000040
POLICY_SET_DEFAULT_QUOTA_LIMITS = 0x00000080
POLICY_SET_AUDIT_REQUIREMENTS = 0x00000100
POLICY_AUDIT_LOG_ADMIN = 0x00000200
POLICY_SERVER_ADMIN = 0x00000400
POLICY_LOOKUP_NAMES = 0x00000800
POLICY_NOTIFICATION = 0x00001000
# composite masks combining the individual policy bits above
POLICY_ALL_ACCESS = (
    STANDARD_RIGHTS_REQUIRED
    | POLICY_VIEW_LOCAL_INFORMATION
    | POLICY_VIEW_AUDIT_INFORMATION
    | POLICY_GET_PRIVATE_INFORMATION
    | POLICY_TRUST_ADMIN
    | POLICY_CREATE_ACCOUNT
    | POLICY_CREATE_SECRET
    | POLICY_CREATE_PRIVILEGE
    | POLICY_SET_DEFAULT_QUOTA_LIMITS
    | POLICY_SET_AUDIT_REQUIREMENTS
    | POLICY_AUDIT_LOG_ADMIN
    | POLICY_SERVER_ADMIN
    | POLICY_LOOKUP_NAMES
)
POLICY_READ = (
    STANDARD_RIGHTS_READ
    | POLICY_VIEW_AUDIT_INFORMATION
    | POLICY_GET_PRIVATE_INFORMATION
)
POLICY_WRITE = (
    STANDARD_RIGHTS_WRITE
    | POLICY_TRUST_ADMIN
    | POLICY_CREATE_ACCOUNT
    | POLICY_CREATE_SECRET
    | POLICY_CREATE_PRIVILEGE
    | POLICY_SET_DEFAULT_QUOTA_LIMITS
    | POLICY_SET_AUDIT_REQUIREMENTS
    | POLICY_AUDIT_LOG_ADMIN
    | POLICY_SERVER_ADMIN
)
POLICY_EXECUTE = (
    STANDARD_RIGHTS_EXECUTE
    | POLICY_VIEW_LOCAL_INFORMATION
    | POLICY_LOOKUP_NAMES
)
class TokenAccess:
    # access right for querying an access token (TOKEN_QUERY in winnt.h)
    TOKEN_QUERY = 0x8
class TokenInformationClass:
    # TOKEN_INFORMATION_CLASS enumerator for per-token user information
    TokenUser = 1
class TOKEN_USER(ctypes.Structure):
    # information-class number for this structure (TokenUser); read by
    # GetTokenInformation below via ``information_class.num``
    num = 1
    _fields_ = [
        ("SID", ctypes.c_void_p),
        ("ATTRIBUTES", ctypes.wintypes.DWORD),
    ]
class SECURITY_DESCRIPTOR(ctypes.Structure):
    """
    typedef struct _SECURITY_DESCRIPTOR
    {
        UCHAR Revision;
        UCHAR Sbz1;
        SECURITY_DESCRIPTOR_CONTROL Control;
        PSID Owner;
        PSID Group;
        PACL Sacl;
        PACL Dacl;
    } SECURITY_DESCRIPTOR;
    """

    SECURITY_DESCRIPTOR_CONTROL = ctypes.wintypes.USHORT
    # structure revision passed to InitializeSecurityDescriptor
    REVISION = 1
    # field order must match the C struct layout exactly
    _fields_ = [
        ("Revision", ctypes.c_ubyte),
        ("Sbz1", ctypes.c_ubyte),
        ("Control", SECURITY_DESCRIPTOR_CONTROL),
        ("Owner", ctypes.c_void_p),
        ("Group", ctypes.c_void_p),
        ("Sacl", ctypes.c_void_p),
        ("Dacl", ctypes.c_void_p),
    ]
class SECURITY_ATTRIBUTES(ctypes.Structure):
    """
    typedef struct _SECURITY_ATTRIBUTES {
        DWORD nLength;
        LPVOID lpSecurityDescriptor;
        BOOL bInheritHandle;
    } SECURITY_ATTRIBUTES;
    """

    _fields_ = [
        ("nLength", ctypes.wintypes.DWORD),
        ("lpSecurityDescriptor", ctypes.c_void_p),
        ("bInheritHandle", ctypes.wintypes.BOOL),
    ]

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # nLength must be set to the structure's size per the Win32 contract
        self.nLength = ctypes.sizeof(SECURITY_ATTRIBUTES)

    @property
    def descriptor(self):
        return self._descriptor

    @descriptor.setter
    def descriptor(self, value):
        # keep a Python-side reference so the pointed-to descriptor stays
        # alive as long as this attributes structure does
        self._descriptor = value
        self.lpSecurityDescriptor = ctypes.addressof(value)
# ctypes signature for advapi32!SetSecurityDescriptorOwner
# (descriptor pointer, owner SID, bOwnerDefaulted)
ctypes.windll.advapi32.SetSecurityDescriptorOwner.argtypes = (
    ctypes.POINTER(SECURITY_DESCRIPTOR),
    ctypes.c_void_p,
    ctypes.wintypes.BOOL,
)
#########################
# jaraco.windows.security
def GetTokenInformation(token, information_class):
    """
    Given a token, get the token information for it.
    """
    data_size = ctypes.wintypes.DWORD()
    # first call with a null buffer just probes the required buffer size
    # (written into data_size); its failure status is intentionally ignored
    ctypes.windll.advapi32.GetTokenInformation(
        token, information_class.num, 0, 0, ctypes.byref(data_size)
    )
    data = ctypes.create_string_buffer(data_size.value)
    # second call actually fills the buffer; this one must succeed
    handle_nonzero_success(
        ctypes.windll.advapi32.GetTokenInformation(
            token,
            information_class.num,
            ctypes.byref(data),
            ctypes.sizeof(data),
            ctypes.byref(data_size),
        )
    )
    # NOTE(review): the result is always cast to TOKEN_USER, regardless of
    # which information_class was requested -- fine for the single caller
    # here, but confirm before reusing with other classes.
    return ctypes.cast(data, ctypes.POINTER(TOKEN_USER)).contents
def OpenProcessToken(proc_handle, access):
    """Open the access token of the given process handle with the requested
    access rights and return the token as a ctypes HANDLE."""
    result = ctypes.wintypes.HANDLE()
    proc_handle = ctypes.wintypes.HANDLE(proc_handle)
    handle_nonzero_success(
        ctypes.windll.advapi32.OpenProcessToken(
            proc_handle, access, ctypes.byref(result)
        )
    )
    return result
def get_current_user():
    """
    Return a TOKEN_USER for the owner of this process.
    """
    proc = ctypes.windll.kernel32.GetCurrentProcess()
    token = OpenProcessToken(proc, TokenAccess.TOKEN_QUERY)
    return GetTokenInformation(token, TOKEN_USER)
def get_security_attributes_for_user(user=None):
    """
    Build a SECURITY_ATTRIBUTES structure whose owner SID is that of
    *user* (defaulting to the current process owner).
    """
    if user is None:
        user = get_current_user()
    assert isinstance(user, TOKEN_USER), "user must be TOKEN_USER instance"

    descriptor = SECURITY_DESCRIPTOR()
    attributes = SECURITY_ATTRIBUTES()
    # by attaching the actual security descriptor, it will be garbage-
    # collected with the security attributes
    attributes.descriptor = descriptor
    attributes.bInheritHandle = 1

    ctypes.windll.advapi32.InitializeSecurityDescriptor(
        ctypes.byref(descriptor), SECURITY_DESCRIPTOR.REVISION
    )
    ctypes.windll.advapi32.SetSecurityDescriptorOwner(
        ctypes.byref(descriptor), user.SID, 0
    )
    return attributes
| 11,204 | Python | .py | 336 | 27.66369 | 78 | 0.683533 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
613 | file.py | paramiko_paramiko/paramiko/file.py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from io import BytesIO
from paramiko.common import (
linefeed_byte_value,
crlf,
cr_byte,
linefeed_byte,
cr_byte_value,
)
from paramiko.util import ClosingContextManager, u
class BufferedFile(ClosingContextManager):
    """
    Reusable base class to implement Python-style file buffering around a
    simpler stream.

    Subclasses override `_read`, `_write`, and optionally `_get_size`; this
    class supplies buffering, line splitting, newline tracking, and the
    standard file-object API on top.
    """

    _DEFAULT_BUFSIZE = 8192

    # seek() whence values (mirroring os.SEEK_*)
    SEEK_SET = 0
    SEEK_CUR = 1
    SEEK_END = 2

    # bits stored in self._flags describing open mode and buffering
    FLAG_READ = 0x1
    FLAG_WRITE = 0x2
    FLAG_APPEND = 0x4
    FLAG_BINARY = 0x10
    FLAG_BUFFERED = 0x20
    FLAG_LINE_BUFFERED = 0x40
    FLAG_UNIVERSAL_NEWLINE = 0x80

    def __init__(self):
        # kinds of line terminators seen so far (None, bytes, or tuple)
        self.newlines = None
        self._flags = 0
        self._bufsize = self._DEFAULT_BUFSIZE
        # write buffer; flushed by flush()/close()
        self._wbuffer = BytesIO()
        # read buffer: data read from the stream but not yet consumed
        self._rbuffer = bytes()
        # True when the last chunk ended on '\r' and the matching '\n'
        # may still be coming (universal-newline handling)
        self._at_trailing_cr = False
        self._closed = False
        # pos - position within the file, according to the user
        # realpos - position according the OS
        # (these may be different because we buffer for line reading)
        self._pos = self._realpos = 0
        # size only matters for seekable files
        self._size = 0

    def __del__(self):
        # make sure buffered writes are flushed when the object is collected
        self.close()

    def __iter__(self):
        """
        Returns an iterator that can be used to iterate over the lines in this
        file.  This iterator happens to return the file itself, since a file is
        its own iterator.

        :raises: ``ValueError`` -- if the file is closed.
        """
        if self._closed:
            raise ValueError("I/O operation on closed file")
        return self

    def close(self):
        """
        Close the file.  Future read and write operations will fail.
        """
        self.flush()
        self._closed = True

    def flush(self):
        """
        Write out any data in the write buffer.  This may do nothing if write
        buffering is not turned on.
        """
        self._write_all(self._wbuffer.getvalue())
        self._wbuffer = BytesIO()
        return

    def __next__(self):
        """
        Returns the next line from the input, or raises ``StopIteration``
        when EOF is hit.  Unlike python file objects, it's okay to mix
        calls to `.next` and `.readline`.

        :raises: ``StopIteration`` -- when the end of the file is reached.

        :returns:
            a line (`str`, or `bytes` if the file was opened in binary mode)
            read from the file.
        """
        line = self.readline()
        if not line:
            raise StopIteration
        return line

    def readable(self):
        """
        Check if the file can be read from.

        :returns:
            `True` if the file can be read from.  If `False`, `read` will
            raise an exception.
        """
        return (self._flags & self.FLAG_READ) == self.FLAG_READ

    def writable(self):
        """
        Check if the file can be written to.

        :returns:
            `True` if the file can be written to.  If `False`, `write` will
            raise an exception.
        """
        return (self._flags & self.FLAG_WRITE) == self.FLAG_WRITE

    def seekable(self):
        """
        Check if the file supports random access.

        :returns:
            `True` if the file supports random access.  If `False`, `seek`
            will raise an exception.
        """
        return False

    def readinto(self, buff):
        """
        Read up to ``len(buff)`` bytes into ``bytearray`` *buff* and return the
        number of bytes read.

        :returns:
            The number of bytes read.
        """
        data = self.read(len(buff))
        buff[: len(data)] = data
        return len(data)

    def read(self, size=None):
        """
        Read at most ``size`` bytes from the file (less if we hit the end of
        the file first).  If the ``size`` argument is negative or omitted,
        read all the remaining data in the file.

        .. note::
            ``'b'`` mode flag is ignored (``self.FLAG_BINARY`` in
            ``self._flags``), because SSH treats all files as binary, since we
            have no idea what encoding the file is in, or even if the file is
            text data.

        :param int size: maximum number of bytes to read
        :returns:
            data read from the file (as bytes), or an empty string if EOF was
            encountered immediately
        """
        if self._closed:
            raise IOError("File is closed")
        if not (self._flags & self.FLAG_READ):
            raise IOError("File is not open for reading")
        if (size is None) or (size < 0):
            # go for broke
            result = bytearray(self._rbuffer)
            self._rbuffer = bytes()
            self._pos += len(result)
            while True:
                try:
                    new_data = self._read(self._DEFAULT_BUFSIZE)
                except EOFError:
                    new_data = None
                if (new_data is None) or (len(new_data) == 0):
                    break
                result.extend(new_data)
                self._realpos += len(new_data)
                self._pos += len(new_data)
            return bytes(result)
        if size <= len(self._rbuffer):
            # entirely satisfiable from the read buffer
            result = self._rbuffer[:size]
            self._rbuffer = self._rbuffer[size:]
            self._pos += len(result)
            return result
        while len(self._rbuffer) < size:
            read_size = size - len(self._rbuffer)
            if self._flags & self.FLAG_BUFFERED:
                read_size = max(self._bufsize, read_size)
            try:
                new_data = self._read(read_size)
            except EOFError:
                new_data = None
            if (new_data is None) or (len(new_data) == 0):
                break
            self._rbuffer += new_data
            self._realpos += len(new_data)
        result = self._rbuffer[:size]
        self._rbuffer = self._rbuffer[size:]
        self._pos += len(result)
        return result

    def readline(self, size=None):
        """
        Read one entire line from the file.  A trailing newline character is
        kept in the string (but may be absent when a file ends with an
        incomplete line).  If the size argument is present and non-negative, it
        is a maximum byte count (including the trailing newline) and an
        incomplete line may be returned.  An empty string is returned only when
        EOF is encountered immediately.

        .. note::
            Unlike stdio's ``fgets``, the returned string contains null
            characters (``'\\0'``) if they occurred in the input.

        :param int size: maximum length of returned string.
        :returns:
            next line of the file, or an empty string if the end of the
            file has been reached.

            If the file was opened in binary (``'b'``) mode: bytes are returned
            Else: the encoding of the file is assumed to be UTF-8 and character
            strings (`str`) are returned
        """
        # it's almost silly how complex this function is.
        if self._closed:
            raise IOError("File is closed")
        if not (self._flags & self.FLAG_READ):
            raise IOError("File not open for reading")
        line = self._rbuffer
        truncated = False
        while True:
            if (
                self._at_trailing_cr
                and self._flags & self.FLAG_UNIVERSAL_NEWLINE
                and len(line) > 0
            ):
                # edge case: the newline may be '\r\n' and we may have read
                # only the first '\r' last time.
                if line[0] == linefeed_byte_value:
                    line = line[1:]
                    self._record_newline(crlf)
                else:
                    self._record_newline(cr_byte)
                self._at_trailing_cr = False
            # check size before looking for a linefeed, in case we already have
            # enough.
            if (size is not None) and (size >= 0):
                if len(line) >= size:
                    # truncate line
                    self._rbuffer = line[size:]
                    line = line[:size]
                    truncated = True
                    break
                n = size - len(line)
            else:
                n = self._bufsize
            if linefeed_byte in line or (
                self._flags & self.FLAG_UNIVERSAL_NEWLINE and cr_byte in line
            ):
                break
            try:
                new_data = self._read(n)
            except EOFError:
                new_data = None
            if (new_data is None) or (len(new_data) == 0):
                # EOF: return whatever partial line we have
                self._rbuffer = bytes()
                self._pos += len(line)
                return line if self._flags & self.FLAG_BINARY else u(line)
            line += new_data
            self._realpos += len(new_data)
        # find the newline
        pos = line.find(linefeed_byte)
        if self._flags & self.FLAG_UNIVERSAL_NEWLINE:
            rpos = line.find(cr_byte)
            if (rpos >= 0) and (rpos < pos or pos < 0):
                pos = rpos
        if pos == -1:
            # we couldn't find a newline in the truncated string, return it
            self._pos += len(line)
            return line if self._flags & self.FLAG_BINARY else u(line)
        xpos = pos + 1
        if (
            line[pos] == cr_byte_value
            and xpos < len(line)
            and line[xpos] == linefeed_byte_value
        ):
            # consume both bytes of a '\r\n' pair
            xpos += 1
        # if the string was truncated, _rbuffer needs to have the string after
        # the newline character plus the truncated part of the line we stored
        # earlier in _rbuffer
        if truncated:
            self._rbuffer = line[xpos:] + self._rbuffer
        else:
            self._rbuffer = line[xpos:]
        lf = line[pos:xpos]
        line = line[:pos] + linefeed_byte
        if (len(self._rbuffer) == 0) and (lf == cr_byte):
            # we could read the line up to a '\r' and there could still be a
            # '\n' following that we read next time.  note that and eat it.
            self._at_trailing_cr = True
        else:
            self._record_newline(lf)
        self._pos += len(line)
        return line if self._flags & self.FLAG_BINARY else u(line)

    def readlines(self, sizehint=None):
        """
        Read all remaining lines using `readline` and return them as a list.
        If the optional ``sizehint`` argument is present, instead of reading up
        to EOF, whole lines totalling approximately sizehint bytes (possibly
        after rounding up to an internal buffer size) are read.

        :param int sizehint: desired maximum number of bytes to read.
        :returns: list of lines read from the file.
        """
        lines = []
        byte_count = 0
        while True:
            line = self.readline()
            if len(line) == 0:
                break
            lines.append(line)
            byte_count += len(line)
            if (sizehint is not None) and (byte_count >= sizehint):
                break
        return lines

    def seek(self, offset, whence=0):
        """
        Set the file's current position, like stdio's ``fseek``.  Not all file
        objects support seeking.

        .. note::
            If a file is opened in append mode (``'a'`` or ``'a+'``), any seek
            operations will be undone at the next write (as the file position
            will move back to the end of the file).

        :param int offset:
            position to move to within the file, relative to ``whence``.
        :param int whence:
            type of movement: 0 = absolute; 1 = relative to the current
            position; 2 = relative to the end of the file.

        :raises: ``IOError`` -- if the file doesn't support random access.
        """
        raise IOError("File does not support seeking.")

    def tell(self):
        """
        Return the file's current position.  This may not be accurate or
        useful if the underlying file doesn't support random access, or was
        opened in append mode.

        :returns: file position (`number <int>` of bytes).
        """
        return self._pos

    def write(self, data):
        """
        Write data to the file.  If write buffering is on (``bufsize`` was
        specified and non-zero), some or all of the data may not actually be
        written yet.  (Use `flush` or `close` to force buffered data to be
        written out.)

        :param data: ``str``/``bytes`` data to write
        """
        if isinstance(data, str):
            # Accept text and encode as utf-8 for compatibility only.
            data = data.encode("utf-8")
        if self._closed:
            raise IOError("File is closed")
        if not (self._flags & self.FLAG_WRITE):
            raise IOError("File not open for writing")
        if not (self._flags & self.FLAG_BUFFERED):
            self._write_all(data)
            return
        self._wbuffer.write(data)
        if self._flags & self.FLAG_LINE_BUFFERED:
            # only scan the new data for linefeed, to avoid wasting time.
            last_newline_pos = data.rfind(linefeed_byte)
            if last_newline_pos >= 0:
                # flush everything through the last complete line; keep the
                # remainder buffered
                wbuf = self._wbuffer.getvalue()
                last_newline_pos += len(wbuf) - len(data)
                self._write_all(wbuf[: last_newline_pos + 1])
                self._wbuffer = BytesIO()
                self._wbuffer.write(wbuf[last_newline_pos + 1 :])
            return
        # even if we're line buffering, if the buffer has grown past the
        # buffer size, force a flush.
        if self._wbuffer.tell() >= self._bufsize:
            self.flush()
        return

    def writelines(self, sequence):
        """
        Write a sequence of strings to the file.  The sequence can be any
        iterable object producing strings, typically a list of strings.  (The
        name is intended to match `readlines`; `writelines` does not add line
        separators.)

        :param sequence: an iterable sequence of strings.
        """
        for line in sequence:
            self.write(line)
        return

    def xreadlines(self):
        """
        Identical to ``iter(f)``.  This is a deprecated file interface that
        predates Python iterator support.
        """
        return self

    @property
    def closed(self):
        # True once close() has been called
        return self._closed

    # ...overrides...

    def _read(self, size):
        """
        (subclass override)
        Read data from the stream.  Return ``None`` or raise ``EOFError`` to
        indicate EOF.
        """
        raise EOFError()

    def _write(self, data):
        """
        (subclass override)
        Write data into the stream.
        """
        raise IOError("write not implemented")

    def _get_size(self):
        """
        (subclass override)
        Return the size of the file.  This is called from within `_set_mode`
        if the file is opened in append mode, so the file position can be
        tracked and `seek` and `tell` will work correctly.  If the file is
        a stream that can't be randomly accessed, you don't need to override
        this method.
        """
        return 0

    # ...internals...

    def _set_mode(self, mode="r", bufsize=-1):
        """
        Subclasses call this method to initialize the BufferedFile.
        """
        # set bufsize in any event, because it's used for readline().
        self._bufsize = self._DEFAULT_BUFSIZE
        if bufsize < 0:
            # do no buffering by default, because otherwise writes will get
            # buffered in a way that will probably confuse people.
            bufsize = 0
        if bufsize == 1:
            # apparently, line buffering only affects writes.  reads are only
            # buffered if you call readline (directly or indirectly: iterating
            # over a file will indirectly call readline).
            self._flags |= self.FLAG_BUFFERED | self.FLAG_LINE_BUFFERED
        elif bufsize > 1:
            self._bufsize = bufsize
            self._flags |= self.FLAG_BUFFERED
            self._flags &= ~self.FLAG_LINE_BUFFERED
        elif bufsize == 0:
            # unbuffered
            self._flags &= ~(self.FLAG_BUFFERED | self.FLAG_LINE_BUFFERED)
        if ("r" in mode) or ("+" in mode):
            self._flags |= self.FLAG_READ
        if ("w" in mode) or ("+" in mode):
            self._flags |= self.FLAG_WRITE
        if "a" in mode:
            self._flags |= self.FLAG_WRITE | self.FLAG_APPEND
            self._size = self._get_size()
            self._pos = self._realpos = self._size
        if "b" in mode:
            self._flags |= self.FLAG_BINARY
        if "U" in mode:
            self._flags |= self.FLAG_UNIVERSAL_NEWLINE
            # built-in file objects have this attribute to store which kinds
            # of line terminations they've seen:
            # <http://www.python.org/doc/current/lib/built-in-funcs.html>
            self.newlines = None

    def _write_all(self, raw_data):
        # the underlying stream may be something that does partial writes (like
        # a socket).
        data = memoryview(raw_data)
        while len(data) > 0:
            count = self._write(data)
            data = data[count:]
            if self._flags & self.FLAG_APPEND:
                self._size += count
                self._pos = self._realpos = self._size
            else:
                self._pos += count
                self._realpos += count
        return None

    def _record_newline(self, newline):
        # silliness about tracking what kinds of newlines we've seen.
        # i don't understand why it can be None, a string, or a tuple, instead
        # of just always being a tuple, but we'll emulate that behavior anyway.
        if not (self._flags & self.FLAG_UNIVERSAL_NEWLINE):
            return
        if self.newlines is None:
            self.newlines = newline
        elif self.newlines != newline and isinstance(self.newlines, bytes):
            self.newlines = (self.newlines, newline)
        elif newline not in self.newlines:
            self.newlines += (newline,)
| 19,063 | Python | .py | 473 | 29.934461 | 79 | 0.569193 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
614 | dsskey.py | paramiko_paramiko/paramiko/dsskey.py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
DSS keys.
"""
from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import dsa
from cryptography.hazmat.primitives.asymmetric.utils import (
decode_dss_signature,
encode_dss_signature,
)
from paramiko import util
from paramiko.common import zero_byte
from paramiko.ssh_exception import SSHException
from paramiko.message import Message
from paramiko.ber import BER, BERException
from paramiko.pkey import PKey
class DSSKey(PKey):
    """
    Representation of a DSS key which can be used to sign and verify SSH2
    data.
    """

    name = "ssh-dss"

    def __init__(
        self,
        msg=None,
        data=None,
        filename=None,
        password=None,
        vals=None,
        file_obj=None,
    ):
        """
        Load or build a DSS key from exactly one of: an open ``file_obj``,
        a ``filename``, a wire-format ``msg``/``data`` blob, or an explicit
        ``vals`` tuple of ``(p, q, g, y)`` public numbers.
        """
        # Public parameters (p, q, g, y) and private exponent (x); these
        # stay None until one of the loading paths below fills them in.
        self.p = None
        self.q = None
        self.g = None
        self.y = None
        self.x = None
        # Optional OpenSSH certificate blob (populated by cert loading).
        self.public_blob = None
        # Loading precedence: file object, then file path, then raw
        # data/Message, then explicit numbers.
        if file_obj is not None:
            self._from_private_key(file_obj, password)
            return
        if filename is not None:
            self._from_private_key_file(filename, password)
            return
        if (msg is None) and (data is not None):
            msg = Message(data)
        if vals is not None:
            self.p, self.q, self.g, self.y = vals
        else:
            self._check_type_and_load_cert(
                msg=msg,
                key_type=self.name,
                cert_type=f"{self.name}[email protected]",
            )
            # Wire format: four consecutive mpints after the type string.
            self.p = msg.get_mpint()
            self.q = msg.get_mpint()
            self.g = msg.get_mpint()
            self.y = msg.get_mpint()
        # Key size is the bit length of the prime modulus p.
        self.size = util.bit_length(self.p)

    def asbytes(self):
        """Return the public key serialized in SSH wire format."""
        m = Message()
        m.add_string(self.name)
        m.add_mpint(self.p)
        m.add_mpint(self.q)
        m.add_mpint(self.g)
        m.add_mpint(self.y)
        return m.asbytes()

    def __str__(self):
        # NOTE(review): returns bytes, which is not a valid __str__ return
        # type on Python 3 (str() on an instance raises TypeError); kept
        # as-is for backwards compatibility.
        return self.asbytes()

    @property
    def _fields(self):
        # Tuple of public components only; the private exponent x is
        # deliberately excluded.
        return (self.get_name(), self.p, self.q, self.g, self.y)

    # TODO 4.0: remove
    def get_name(self):
        return self.name

    def get_bits(self):
        return self.size

    def can_sign(self):
        # Signing requires the private part, which is only present when a
        # private key (vs just a public blob) was loaded.
        return self.x is not None

    def sign_ssh_data(self, data, algorithm=None):
        """
        Sign ``data`` with SHA-1/DSA and return the signature wrapped in an
        SSH `.Message`. ``algorithm`` is accepted for interface parity with
        other key types and ignored here.
        """
        key = dsa.DSAPrivateNumbers(
            x=self.x,
            public_numbers=dsa.DSAPublicNumbers(
                y=self.y,
                parameter_numbers=dsa.DSAParameterNumbers(
                    p=self.p, q=self.q, g=self.g
                ),
            ),
        ).private_key(backend=default_backend())
        sig = key.sign(data, hashes.SHA1())
        r, s = decode_dss_signature(sig)
        m = Message()
        m.add_string(self.name)
        # apparently, in rare cases, r or s may be shorter than 20 bytes!
        rstr = util.deflate_long(r, 0)
        sstr = util.deflate_long(s, 0)
        if len(rstr) < 20:
            rstr = zero_byte * (20 - len(rstr)) + rstr
        if len(sstr) < 20:
            sstr = zero_byte * (20 - len(sstr)) + sstr
        # The ssh-dss signature blob is r and s as two fixed 20-byte runs.
        m.add_string(rstr + sstr)
        return m

    def verify_ssh_sig(self, data, msg):
        """
        Verify an ssh-dss signature in ``msg`` over ``data``. Returns a
        truthy value on success, a falsy one (``0`` or ``False``) otherwise.
        """
        if len(msg.asbytes()) == 40:
            # spies.com bug: signature has no header
            sig = msg.asbytes()
        else:
            kind = msg.get_text()
            if kind != self.name:
                return 0
            sig = msg.get_binary()
        # pull out (r, s) which are NOT encoded as mpints
        sigR = util.inflate_long(sig[:20], 1)
        sigS = util.inflate_long(sig[20:], 1)
        signature = encode_dss_signature(sigR, sigS)
        key = dsa.DSAPublicNumbers(
            y=self.y,
            parameter_numbers=dsa.DSAParameterNumbers(
                p=self.p, q=self.q, g=self.g
            ),
        ).public_key(backend=default_backend())
        try:
            key.verify(signature, data, hashes.SHA1())
        except InvalidSignature:
            return False
        else:
            return True

    def write_private_key_file(self, filename, password=None):
        """
        Write the private key to ``filename`` in traditional OpenSSL PEM
        format, optionally encrypted with ``password``.
        """
        key = dsa.DSAPrivateNumbers(
            x=self.x,
            public_numbers=dsa.DSAPublicNumbers(
                y=self.y,
                parameter_numbers=dsa.DSAParameterNumbers(
                    p=self.p, q=self.q, g=self.g
                ),
            ),
        ).private_key(backend=default_backend())
        self._write_private_key_file(
            filename,
            key,
            serialization.PrivateFormat.TraditionalOpenSSL,
            password=password,
        )

    def write_private_key(self, file_obj, password=None):
        """
        Write the private key to the open file-like ``file_obj`` in
        traditional OpenSSL PEM format, optionally encrypted.
        """
        key = dsa.DSAPrivateNumbers(
            x=self.x,
            public_numbers=dsa.DSAPublicNumbers(
                y=self.y,
                parameter_numbers=dsa.DSAParameterNumbers(
                    p=self.p, q=self.q, g=self.g
                ),
            ),
        ).private_key(backend=default_backend())
        self._write_private_key(
            file_obj,
            key,
            serialization.PrivateFormat.TraditionalOpenSSL,
            password=password,
        )

    @staticmethod
    def generate(bits=1024, progress_func=None):
        """
        Generate a new private DSS key.  This factory function can be used to
        generate a new host key or authentication key.

        :param int bits: number of bits the generated key should be.
        :param progress_func: Unused
        :return: new `.DSSKey` private key
        """
        numbers = dsa.generate_private_key(
            bits, backend=default_backend()
        ).private_numbers()
        key = DSSKey(
            vals=(
                numbers.public_numbers.parameter_numbers.p,
                numbers.public_numbers.parameter_numbers.q,
                numbers.public_numbers.parameter_numbers.g,
                numbers.public_numbers.y,
            )
        )
        # Attach the private exponent so the new key can sign.
        key.x = numbers.x
        return key

    # ...internals...

    def _from_private_key_file(self, filename, password):
        # Decrypt/parse the on-disk container, then decode the DSA numbers.
        data = self._read_private_key_file("DSA", filename, password)
        self._decode_key(data)

    def _from_private_key(self, file_obj, password):
        data = self._read_private_key("DSA", file_obj, password)
        self._decode_key(data)

    def _decode_key(self, data):
        """
        Decode ``(pkformat, raw bytes)`` into the p/q/g/y/x attributes.

        :raises SSHException: if the key material cannot be parsed.
        """
        pkformat, data = data
        # private key file contains:
        # DSAPrivateKey = { version = 0, p, q, g, y, x }
        if pkformat == self._PRIVATE_KEY_FORMAT_ORIGINAL:
            # Legacy PEM: BER-encoded ASN.1 sequence.
            try:
                keylist = BER(data).decode()
            except BERException as e:
                raise SSHException("Unable to parse key file: {}".format(e))
        elif pkformat == self._PRIVATE_KEY_FORMAT_OPENSSH:
            # openssh-key-v1: five uint32-framed values; prepend the
            # version 0 so both formats share the list layout below.
            keylist = self._uint32_cstruct_unpack(data, "iiiii")
            keylist = [0] + list(keylist)
        else:
            self._got_bad_key_format_id(pkformat)
        if type(keylist) is not list or len(keylist) < 6 or keylist[0] != 0:
            raise SSHException(
                "not a valid DSA private key file (bad ber encoding)"
            )
        self.p = keylist[1]
        self.q = keylist[2]
        self.g = keylist[3]
        self.y = keylist[4]
        self.x = keylist[5]
        self.size = util.bit_length(self.p)
| 8,248 | Python | .py | 229 | 26.716157 | 79 | 0.587735 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
615 | auth_strategy.py | paramiko_paramiko/paramiko/auth_strategy.py | """
Modern, adaptable authentication machinery.
Replaces certain parts of `.SSHClient`. For a concrete implementation, see the
``OpenSSHAuthStrategy`` class in `Fabric <https://fabfile.org>`_.
"""
from collections import namedtuple
from .agent import AgentKey
from .util import get_logger
from .ssh_exception import AuthenticationException
class AuthSource:
    """
    Some SSH authentication source, such as a password, private key, or
    agent.

    See subclasses in this module for concrete implementations.

    All implementations must accept at least a ``username`` (``str``) kwarg.
    """

    def __init__(self, username):
        self.username = username

    def _repr(self, **kwargs):
        # Renders as ClassName(key1=value1, ...); subclasses feed in whatever
        # attributes are worth displaying.
        # TODO: are there any good libs for this? maybe some helper from
        # structlog?
        rendered = ", ".join(f"{key}={value!r}" for key, value in kwargs.items())
        return f"{self.__class__.__name__}({rendered})"

    def __repr__(self):
        return self._repr()

    def authenticate(self, transport):
        """
        Perform authentication.
        """
        raise NotImplementedError
class NoneAuth(AuthSource):
    """
    Auth type "none", ie https://www.rfc-editor.org/rfc/rfc4252#section-5.2 .
    """

    def authenticate(self, transport):
        # Delegate straight to the transport's "none" auth method.
        result = transport.auth_none(self.username)
        return result
class Password(AuthSource):
    """
    Password authentication.

    :param callable password_getter:
        A lazy callable that should return a `str` password value at
        authentication time, such as a `functools.partial` wrapping
        `getpass.getpass`, an API call to a secrets store, or similar.

        If the password is already known at instantiation time, simply use
        something like ``lambda: "my literal"`` (for a literal -- but shame
        on you!) or ``lambda: variable_name`` (for a stored value).
    """

    def __init__(self, username, password_getter):
        super().__init__(username=username)
        self.password_getter = password_getter

    def __repr__(self):
        # Password auth is marginally more 'username-caring' than pkeys, so
        # may as well log that info here.
        return super()._repr(user=self.username)

    def authenticate(self, transport):
        # Defer retrieval to the last moment, in case it prompts a user.
        # TODO: be nice to log source _of_ the password?
        secret = self.password_getter()
        return transport.auth_password(self.username, secret)
# TODO 4.0: twiddle this, or PKey, or both, so they're more obviously distinct.
# TODO 4.0: the obvious is to make this more wordy (PrivateKeyAuth), the
# minimalist approach might be to rename PKey to just Key (esp given all the
# subclasses are WhateverKey and not WhateverPKey)
class PrivateKey(AuthSource):
    """
    Essentially a mixin for private keys.

    Knows how to auth, but leaves key material discovery/loading/decryption
    to subclasses.

    Subclasses **must** ensure that they've set ``self.pkey`` to a decrypted
    `.PKey` instance before calling ``super().authenticate``; typically
    either in their ``__init__``, or in an overridden ``authenticate`` prior
    to its `super` call.
    """

    def authenticate(self, transport):
        return transport.auth_publickey(self.username, self.pkey)
class InMemoryPrivateKey(PrivateKey):
    """
    An in-memory, decrypted `.PKey` object.
    """

    def __init__(self, username, pkey):
        super().__init__(username=username)
        # No decryption (presumably) necessary!
        self.pkey = pkey

    def __repr__(self):
        # PKey's own repr carries most of the interesting detail; we only
        # tack on whether this key lives in an SSH agent.
        # TODO: that's a bit awkward, but, eh?
        rep = super()._repr(pkey=self.pkey)
        return rep + " [agent]" if isinstance(self.pkey, AgentKey) else rep
class OnDiskPrivateKey(PrivateKey):
    """
    Some on-disk private key that needs opening and possibly decrypting.

    :param str source:
        String tracking where this key's path was specified; should be one
        of ``"ssh-config"``, ``"python-config"``, or ``"implicit-home"``.
    :param Path path:
        The filesystem path this key was loaded from.
    :param PKey pkey:
        The `PKey` object this auth source uses/represents.
    """

    def __init__(self, username, source, path, pkey):
        super().__init__(username=username)
        self.source = source
        # Reject unrecognized provenance labels up front.
        allowed = ("ssh-config", "python-config", "implicit-home")
        if source not in allowed:
            raise ValueError(f"source argument must be one of: {allowed!r}")
        self.path = path
        # Superclass wants .pkey; source/path are mostly for display and
        # debugging.
        self.pkey = pkey

    def __repr__(self):
        return self._repr(key=self.pkey, source=self.source, path=str(self.path))
# TODO re sources: is there anything in an OpenSSH config file that doesn't fit
# into what Paramiko already had kwargs for?
SourceResult = namedtuple("SourceResult", ["source", "result"])
# TODO: tempting to make this an OrderedDict, except the keys essentially want
# to be rich objects (AuthSources) which do not make for useful user indexing?
# TODO: members being vanilla tuples is pretty old-school/expedient; they
# "really" want to be something that's type friendlier (unless the tuple's 2nd
# member being a Union of two types is "fine"?), which I assume means yet more
# classes, eg an abstract SourceResult with concrete AuthSuccess and
# AuthFailure children?
# TODO: arguably we want __init__ typechecking of the members (or to leverage
# mypy by classifying this literally as list-of-AuthSource?)
class AuthResult(list):
    """
    Represents a partial or complete SSH authentication attempt.

    This class conceptually extends `AuthStrategy` by pairing the former's
    authentication **sources** with the **results** of trying to
    authenticate with them.

    `AuthResult` is a (subclass of) `list` of `namedtuple`, of the form
    ``namedtuple('SourceResult', 'source', 'result')``, where ``source`` is
    an `AuthSource` and ``result`` is either a return value from the
    relevant `.Transport` method, or an exception object.

    .. note::
        Transport auth method results are always themselves a ``list`` of
        "next allowable authentication methods" -- empty on success,
        otherwise method names like ``pubkey`` or ``password``.

        The ``__str__`` of this class renders the empty-list scenario as the
        word ``success`` for easier human reading.

    Instances also have a `strategy` attribute referencing the
    `AuthStrategy` which was attempted.
    """

    def __init__(self, strategy, *args, **kwargs):
        self.strategy = strategy
        super().__init__(*args, **kwargs)

    def __str__(self):
        # NOTE: meaningfully distinct from __repr__, which stays list-like.
        # TODO: go hog wild, use rich.Table? how is that on degraded term's?
        # TODO: test this lol
        lines = [
            f"{pair.source} -> {pair.result or 'success'}" for pair in self
        ]
        return "\n".join(lines)
# TODO 4.0: descend from SSHException or even just Exception
class AuthFailure(AuthenticationException):
    """
    Basic exception wrapping an `AuthResult` indicating overall auth failure.

    Note that `AuthFailure` descends from `AuthenticationException` but is
    generally "higher level"; the latter is now only raised by individual
    `AuthSource` attempts, so users typically only see it encapsulated in
    this class. The subclassing exists primarily for backwards
    compatibility.
    """

    def __init__(self, result):
        self.result = result

    def __str__(self):
        # Lead with a newline so the multi-line AuthResult lines up.
        return "\n" + str(self.result)
class AuthStrategy:
    """
    This class represents one or more attempts to auth with an SSH server.

    By default, subclasses must at least accept an ``ssh_config``
    (`.SSHConfig`) keyword argument, but may opt to accept more as needed
    for their particular strategy.
    """

    def __init__(
        self,
        ssh_config,
    ):
        self.ssh_config = ssh_config
        self.log = get_logger(__name__)

    def get_sources(self):
        """
        Generator yielding `AuthSource` instances, in the order to try.

        This is the primary override point for subclasses: you figure out
        what sources you need, and ``yield`` them.

        Subclasses _of_ subclasses may find themselves wanting to do things
        like filtering or discarding around a call to `super`.
        """
        raise NotImplementedError

    def authenticate(self, transport):
        """
        Handles attempting `AuthSource` instances yielded from `get_sources`.

        You *normally* won't need to override this, but it's an option for
        advanced users.
        """
        overall_result = AuthResult(strategy=self)
        succeeded = False
        # TODO: arguably we could fit in a "send none auth, record allowed
        # auth types sent back" step here as OpenSSH-client does, but that
        # likely wants to live in fabric.OpenSSHAuthStrategy as not all
        # target servers will implement it!
        # TODO: needs better "server told us too many attempts" checking!
        for source in self.get_sources():
            self.log.debug(f"Trying {source}")
            # NOTE: only the authenticate() call itself belongs inside the
            # try block.
            try:
                result = source.authenticate(transport)
                succeeded = True
            # TODO: 'except PartialAuthentication' is needed for 2FA and
            # similar, as per old SSHClient.connect - it is the only way
            # AuthHandler supplies access to the 'name-list' field from
            # MSG_USERAUTH_FAILURE, at present.
            except Exception as err:
                # TODO: catching Exception is broad -- maybe only
                # SSHException subclasses? Or do we truly want everything
                # recorded so users can inspect afterwards?
                result = err
                # NOTE: log the type, not the message, for tersity & also
                # because it's usually just "Authentication failed."
                kind = err.__class__.__name__
                self.log.info(
                    f"Authentication via {source} failed with {kind}"
                )
            overall_result.append(SourceResult(source, result))
            if succeeded:
                break
        # Gotta die here if nothing worked, otherwise Transport's main loop
        # just kinda hangs out until something times out!
        if not succeeded:
            raise AuthFailure(result=overall_result)
        # Success: give back what was done, in case they care.
        return overall_result
# TODO: is there anything OpenSSH client does which _can't_ cleanly map to
# iterating a generator?
| 11,437 | Python | .py | 243 | 39.584362 | 79 | 0.665888 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
616 | kex_curve25519.py | paramiko_paramiko/paramiko/kex_curve25519.py | import binascii
import hashlib
from cryptography.exceptions import UnsupportedAlgorithm
from cryptography.hazmat.primitives import constant_time, serialization
from cryptography.hazmat.primitives.asymmetric.x25519 import (
X25519PrivateKey,
X25519PublicKey,
)
from paramiko.message import Message
from paramiko.common import byte_chr
from paramiko.ssh_exception import SSHException
# Message numbers 30/31 for the ECDH key-exchange init/reply packets, as
# ints (for dispatch in parse_next) and as single bytes (for building
# outgoing messages).
_MSG_KEXECDH_INIT, _MSG_KEXECDH_REPLY = range(30, 32)
c_MSG_KEXECDH_INIT, c_MSG_KEXECDH_REPLY = [byte_chr(c) for c in range(30, 32)]
class KexCurve25519:
    """
    Elliptic-curve Diffie-Hellman key exchange over Curve25519 (X25519),
    hashing the exchange with SHA-256. Handles both client and server
    sides, switching on ``transport.server_mode``.
    """

    hash_algo = hashlib.sha256

    def __init__(self, transport):
        self.transport = transport
        # Our ephemeral X25519 private key; generated in start_kex().
        self.key = None

    @classmethod
    def is_available(cls):
        """Return True if the crypto backend can generate X25519 keys."""
        try:
            X25519PrivateKey.generate()
        except UnsupportedAlgorithm:
            return False
        else:
            return True

    def _perform_exchange(self, peer_key):
        """
        Compute the shared secret with ``peer_key``, rejecting the all-zero
        result produced by low-order public values.

        :raises SSHException: if the peer's public value has wrong order.
        """
        secret = self.key.exchange(peer_key)
        if constant_time.bytes_eq(secret, b"\x00" * 32):
            raise SSHException(
                "peer's curve25519 public value has wrong order"
            )
        return secret

    def start_kex(self):
        """
        Begin the exchange: generate our ephemeral key, and (client side)
        send the INIT packet; either way, register the packet we expect
        next.
        """
        self.key = X25519PrivateKey.generate()
        if self.transport.server_mode:
            # Server waits for the client's INIT.
            self.transport._expect_packet(_MSG_KEXECDH_INIT)
            return
        # Client sends its raw 32-byte public value.
        m = Message()
        m.add_byte(c_MSG_KEXECDH_INIT)
        m.add_string(
            self.key.public_key().public_bytes(
                serialization.Encoding.Raw, serialization.PublicFormat.Raw
            )
        )
        self.transport._send_message(m)
        self.transport._expect_packet(_MSG_KEXECDH_REPLY)

    def parse_next(self, ptype, m):
        """Dispatch an incoming kex packet to the mode-appropriate handler."""
        if self.transport.server_mode and (ptype == _MSG_KEXECDH_INIT):
            return self._parse_kexecdh_init(m)
        elif not self.transport.server_mode and (ptype == _MSG_KEXECDH_REPLY):
            return self._parse_kexecdh_reply(m)
        raise SSHException(
            "KexCurve25519 asked to handle packet type {:d}".format(ptype)
        )

    def _parse_kexecdh_init(self, m):
        """
        Server side: consume the client's public value, compute K and the
        exchange hash H, then send the signed REPLY.
        """
        peer_key_bytes = m.get_string()
        peer_key = X25519PublicKey.from_public_bytes(peer_key_bytes)
        K = self._perform_exchange(peer_key)
        # Shared secret is used as an integer from here on.
        K = int(binascii.hexlify(K), 16)
        # compute exchange hash; field order here is protocol-critical.
        hm = Message()
        hm.add(
            self.transport.remote_version,
            self.transport.local_version,
            self.transport.remote_kex_init,
            self.transport.local_kex_init,
        )
        server_key_bytes = self.transport.get_server_key().asbytes()
        exchange_key_bytes = self.key.public_key().public_bytes(
            serialization.Encoding.Raw, serialization.PublicFormat.Raw
        )
        hm.add_string(server_key_bytes)
        hm.add_string(peer_key_bytes)
        hm.add_string(exchange_key_bytes)
        hm.add_mpint(K)
        H = self.hash_algo(hm.asbytes()).digest()
        self.transport._set_K_H(K, H)
        # Sign H with the host key so the client can authenticate us.
        sig = self.transport.get_server_key().sign_ssh_data(
            H, self.transport.host_key_type
        )
        # construct reply
        m = Message()
        m.add_byte(c_MSG_KEXECDH_REPLY)
        m.add_string(server_key_bytes)
        m.add_string(exchange_key_bytes)
        m.add_string(sig)
        self.transport._send_message(m)
        self.transport._activate_outbound()

    def _parse_kexecdh_reply(self, m):
        """
        Client side: consume the server's host key, public value and
        signature; compute K and H, then verify the server's signature.
        """
        peer_host_key_bytes = m.get_string()
        peer_key_bytes = m.get_string()
        sig = m.get_binary()
        peer_key = X25519PublicKey.from_public_bytes(peer_key_bytes)
        K = self._perform_exchange(peer_key)
        K = int(binascii.hexlify(K), 16)
        # compute exchange hash and verify signature; note the local/remote
        # ordering is mirrored relative to the server-side computation so
        # both ends hash identical bytes.
        hm = Message()
        hm.add(
            self.transport.local_version,
            self.transport.remote_version,
            self.transport.local_kex_init,
            self.transport.remote_kex_init,
        )
        hm.add_string(peer_host_key_bytes)
        hm.add_string(
            self.key.public_key().public_bytes(
                serialization.Encoding.Raw, serialization.PublicFormat.Raw
            )
        )
        hm.add_string(peer_key_bytes)
        hm.add_mpint(K)
        self.transport._set_K_H(K, self.hash_algo(hm.asbytes()).digest())
        self.transport._verify_key(peer_host_key_bytes, sig)
        self.transport._activate_outbound()
| 4,436 | Python | .py | 115 | 29.608696 | 78 | 0.629036 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
617 | __init__.py | paramiko_paramiko/paramiko/__init__.py | # Copyright (C) 2003-2011 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# flake8: noqa
import sys
from paramiko._version import __version__, __version_info__
from paramiko.transport import (
SecurityOptions,
ServiceRequestingTransport,
Transport,
)
from paramiko.client import (
AutoAddPolicy,
MissingHostKeyPolicy,
RejectPolicy,
SSHClient,
WarningPolicy,
)
from paramiko.auth_handler import AuthHandler
from paramiko.auth_strategy import (
AuthFailure,
AuthStrategy,
AuthResult,
AuthSource,
InMemoryPrivateKey,
NoneAuth,
OnDiskPrivateKey,
Password,
PrivateKey,
SourceResult,
)
from paramiko.ssh_gss import GSSAuth, GSS_AUTH_AVAILABLE, GSS_EXCEPTIONS
from paramiko.channel import (
Channel,
ChannelFile,
ChannelStderrFile,
ChannelStdinFile,
)
from paramiko.ssh_exception import (
AuthenticationException,
BadAuthenticationType,
BadHostKeyException,
ChannelException,
ConfigParseError,
CouldNotCanonicalize,
IncompatiblePeer,
MessageOrderError,
PasswordRequiredException,
ProxyCommandFailure,
SSHException,
)
from paramiko.server import ServerInterface, SubsystemHandler, InteractiveQuery
from paramiko.rsakey import RSAKey
from paramiko.dsskey import DSSKey
from paramiko.ecdsakey import ECDSAKey
from paramiko.ed25519key import Ed25519Key
from paramiko.sftp import SFTPError, BaseSFTP
from paramiko.sftp_client import SFTP, SFTPClient
from paramiko.sftp_server import SFTPServer
from paramiko.sftp_attr import SFTPAttributes
from paramiko.sftp_handle import SFTPHandle
from paramiko.sftp_si import SFTPServerInterface
from paramiko.sftp_file import SFTPFile
from paramiko.message import Message
from paramiko.packet import Packetizer
from paramiko.file import BufferedFile
from paramiko.agent import Agent, AgentKey
from paramiko.pkey import PKey, PublicBlob, UnknownKeyType
from paramiko.hostkeys import HostKeys
from paramiko.config import SSHConfig, SSHConfigDict
from paramiko.proxy import ProxyCommand
from paramiko.common import (
AUTH_SUCCESSFUL,
AUTH_PARTIALLY_SUCCESSFUL,
AUTH_FAILED,
OPEN_SUCCEEDED,
OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED,
OPEN_FAILED_CONNECT_FAILED,
OPEN_FAILED_UNKNOWN_CHANNEL_TYPE,
OPEN_FAILED_RESOURCE_SHORTAGE,
)
from paramiko.sftp import (
SFTP_OK,
SFTP_EOF,
SFTP_NO_SUCH_FILE,
SFTP_PERMISSION_DENIED,
SFTP_FAILURE,
SFTP_BAD_MESSAGE,
SFTP_NO_CONNECTION,
SFTP_CONNECTION_LOST,
SFTP_OP_UNSUPPORTED,
)
from paramiko.common import io_sleep
# TODO: I guess a real plugin system might be nice for future expansion...
# TODO: I guess a real plugin system might be nice for future expansion...
# Registered key classes, consulted in order by PKey.from_type_string.
key_classes = [DSSKey, RSAKey, Ed25519Key, ECDSAKey]
__author__ = "Jeff Forcier <[email protected]>"
__license__ = "GNU Lesser General Public License (LGPL)"
# Explicit public API for `from paramiko import *`.
# TODO 4.0: remove this, jeez
__all__ = [
    "Agent",
    "AgentKey",
    "AuthenticationException",
    "AutoAddPolicy",
    "BadAuthenticationType",
    "BadHostKeyException",
    "BufferedFile",
    "Channel",
    "ChannelException",
    "ConfigParseError",
    "CouldNotCanonicalize",
    "DSSKey",
    "ECDSAKey",
    "Ed25519Key",
    "HostKeys",
    "Message",
    "MissingHostKeyPolicy",
    "PKey",
    "PasswordRequiredException",
    "ProxyCommand",
    "ProxyCommandFailure",
    "RSAKey",
    "RejectPolicy",
    "SFTP",
    "SFTPAttributes",
    "SFTPClient",
    "SFTPError",
    "SFTPFile",
    "SFTPHandle",
    "SFTPServer",
    "SFTPServerInterface",
    "SSHClient",
    "SSHConfig",
    "SSHConfigDict",
    "SSHException",
    "SecurityOptions",
    "ServerInterface",
    "SubsystemHandler",
    "Transport",
    "WarningPolicy",
    "io_sleep",
    "util",
]
| 4,446 | Python | .py | 156 | 25.083333 | 79 | 0.761271 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
618 | pkey.py | paramiko_paramiko/paramiko/pkey.py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Common API for all public keys.
"""
import base64
from base64 import encodebytes, decodebytes
from binascii import unhexlify
import os
from pathlib import Path
from hashlib import md5, sha256
import re
import struct
import bcrypt
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.ciphers import algorithms, modes, Cipher
from cryptography.hazmat.primitives import asymmetric
from paramiko import util
from paramiko.util import u, b
from paramiko.common import o600
from paramiko.ssh_exception import SSHException, PasswordRequiredException
from paramiko.message import Message
# TripleDES is moving from `cryptography.hazmat.primitives.ciphers.algorithms`
# in cryptography>=43.0.0 to `cryptography.hazmat.decrepit.ciphers.algorithms`
# It will be removed from `cryptography.hazmat.primitives.ciphers.algorithms`
# in cryptography==48.0.0.
#
# Source References:
# - https://github.com/pyca/cryptography/commit/722a6393e61b3ac
# - https://github.com/pyca/cryptography/pull/11407/files
try:
from cryptography.hazmat.decrepit.ciphers.algorithms import TripleDES
except ImportError:
from cryptography.hazmat.primitives.ciphers.algorithms import TripleDES
OPENSSH_AUTH_MAGIC = b"openssh-key-v1\x00"
def _unpad_openssh(data):
# At the moment, this is only used for unpadding private keys on disk. This
# really ought to be made constant time (possibly by upstreaming this logic
# into pyca/cryptography).
padding_length = data[-1]
if 0x20 <= padding_length < 0x7F:
return data # no padding, last byte part comment (printable ascii)
if padding_length > 15:
raise SSHException("Invalid key")
for i in range(padding_length):
if data[i - padding_length] != i + 1:
raise SSHException("Invalid key")
return data[:-padding_length]
class UnknownKeyType(Exception):
    """
    An unknown public/private key algorithm was attempted to be read.
    """

    def __init__(self, key_type=None, key_bytes=None):
        self.key_type = key_type
        self.key_bytes = key_bytes

    def __str__(self):
        size = len(self.key_bytes)
        return f"UnknownKeyType(type={self.key_type!r}, bytes=<{size}>)"
class PKey:
"""
Base class for public keys.
Also includes some "meta" level convenience constructors such as
`.from_type_string`.
"""
# known encryption types for private key files:
_CIPHER_TABLE = {
"AES-128-CBC": {
"cipher": algorithms.AES,
"keysize": 16,
"blocksize": 16,
"mode": modes.CBC,
},
"AES-256-CBC": {
"cipher": algorithms.AES,
"keysize": 32,
"blocksize": 16,
"mode": modes.CBC,
},
"DES-EDE3-CBC": {
"cipher": TripleDES,
"keysize": 24,
"blocksize": 8,
"mode": modes.CBC,
},
}
_PRIVATE_KEY_FORMAT_ORIGINAL = 1
_PRIVATE_KEY_FORMAT_OPENSSH = 2
BEGIN_TAG = re.compile(
r"^-{5}BEGIN (RSA|DSA|EC|OPENSSH) PRIVATE KEY-{5}\s*$"
)
END_TAG = re.compile(r"^-{5}END (RSA|DSA|EC|OPENSSH) PRIVATE KEY-{5}\s*$")
    @staticmethod
    def from_path(path, passphrase=None):
        """
        Attempt to instantiate appropriate key subclass from given file path.

        :param Path path: The path to load (may also be a `str`).
        :param bytes passphrase:
            Optional passphrase used to decrypt the key material, handed
            through to the cryptography loaders.

        :returns:
            A `PKey` subclass instance.

        :raises:
            `UnknownKeyType`, if our crypto backend doesn't know this key
            type.

        .. versionadded:: 3.2
        """
        # TODO: make sure sphinx is reading Path right in param list...
        # Lazy import to avoid circular import issues
        from paramiko import DSSKey, RSAKey, Ed25519Key, ECDSAKey

        # Normalize to string, as cert suffix isn't quite an extension, so
        # pathlib isn't useful for this.
        path = str(path)
        # Sort out cert vs key, i.e. it is 'legal' to hand this kind of API
        # /either/ the key /or/ the cert, when there is a key/cert pair.
        cert_suffix = "-cert.pub"
        if str(path).endswith(cert_suffix):
            key_path = path[: -len(cert_suffix)]
            cert_path = path
        else:
            key_path = path
            cert_path = path + cert_suffix
        key_path = Path(key_path).expanduser()
        cert_path = Path(cert_path).expanduser()
        data = key_path.read_bytes()
        # Like OpenSSH, try modern/OpenSSH-specific key load first
        try:
            loaded = serialization.load_ssh_private_key(
                data=data, password=passphrase
            )
        # Then fall back to assuming legacy PEM type
        except ValueError:
            loaded = serialization.load_pem_private_key(
                data=data, password=passphrase
            )
        # TODO Python 3.10: match statement? (NOTE: we cannot use a dict
        # because the results from the loader are literal backend, eg openssl,
        # private classes, so isinstance tests work but exact 'x class is y'
        # tests will not work)
        # TODO: leverage already-parsed/math'd obj to avoid duplicate cpu
        # cycles? seemingly requires most of our key subclasses to be rewritten
        # to be cryptography-object-forward. this is still likely faster than
        # the old SSHClient code that just tried instantiating every class!
        key_class = None
        if isinstance(loaded, asymmetric.dsa.DSAPrivateKey):
            key_class = DSSKey
        elif isinstance(loaded, asymmetric.rsa.RSAPrivateKey):
            key_class = RSAKey
        elif isinstance(loaded, asymmetric.ed25519.Ed25519PrivateKey):
            key_class = Ed25519Key
        elif isinstance(loaded, asymmetric.ec.EllipticCurvePrivateKey):
            key_class = ECDSAKey
        else:
            raise UnknownKeyType(key_bytes=data, key_type=loaded.__class__)
        # Re-load via the subclass' own parser (the cryptography object
        # above was only used for type sniffing).
        with key_path.open() as fd:
            key = key_class.from_private_key(fd, password=passphrase)
        if cert_path.exists():
            # load_certificate can take Message, path-str, or value-str
            key.load_certificate(str(cert_path))
        return key
@staticmethod
def from_type_string(key_type, key_bytes):
"""
Given type `str` & raw `bytes`, return a `PKey` subclass instance.
For example, ``PKey.from_type_string("ssh-ed25519", <public bytes>)``
will (if successful) return a new `.Ed25519Key`.
:param str key_type:
The key type, eg ``"ssh-ed25519"``.
:param bytes key_bytes:
The raw byte data forming the key material, as expected by
subclasses' ``data`` parameter.
:returns:
A `PKey` subclass instance.
:raises:
`UnknownKeyType`, if no registered classes knew about this type.
.. versionadded:: 3.2
"""
from paramiko import key_classes
for key_class in key_classes:
if key_type in key_class.identifiers():
# TODO: needs to passthru things like passphrase
return key_class(data=key_bytes)
raise UnknownKeyType(key_type=key_type, key_bytes=key_bytes)
@classmethod
def identifiers(cls):
"""
returns an iterable of key format/name strings this class can handle.
Most classes only have a single identifier, and thus this default
implementation suffices; see `.ECDSAKey` for one example of an
override.
"""
return [cls.name]
# TODO 4.0: make this and subclasses consistent, some of our own
# classmethods even assume kwargs we don't define!
# TODO 4.0: prob also raise NotImplementedError instead of pass'ing; the
# contract is pretty obviously that you need to handle msg/data/filename
# appropriately. (If 'pass' is a concession to testing, see about doing the
# work to fix the tests instead)
def __init__(self, msg=None, data=None):
"""
Create a new instance of this public key type. If ``msg`` is given,
the key's public part(s) will be filled in from the message. If
``data`` is given, the key's public part(s) will be filled in from
the string.
:param .Message msg:
an optional SSH `.Message` containing a public key of this type.
:param bytes data:
optional, the bytes of a public key of this type
:raises: `.SSHException` --
if a key cannot be created from the ``data`` or ``msg`` given, or
no key was passed in.
"""
pass
# TODO: arguably this might want to be __str__ instead? ehh
# TODO: ditto the interplay between showing class name (currently we just
# say PKey writ large) and algorithm (usually == class name, but not
# always, also sometimes shows certificate-ness)
# TODO: if we do change it, we also want to tweak eg AgentKey, as it
# currently displays agent-ness with a suffix
def __repr__(self):
comment = ""
# Works for AgentKey, may work for others?
if hasattr(self, "comment") and self.comment:
comment = f", comment={self.comment!r}"
return f"PKey(alg={self.algorithm_name}, bits={self.get_bits()}, fp={self.fingerprint}{comment})" # noqa
# TODO 4.0: just merge into __bytes__ (everywhere)
def asbytes(self):
"""
Return a string of an SSH `.Message` made up of the public part(s) of
this key. This string is suitable for passing to `__init__` to
re-create the key object later.
"""
return bytes()
    def __bytes__(self):
        # Modern bytes() protocol simply delegates to the legacy asbytes().
        return self.asbytes()
    def __eq__(self, other):
        # Keys compare equal when their public-component tuples match;
        # non-PKey objects are never equal.
        return isinstance(other, PKey) and self._fields == other._fields
    def __hash__(self):
        # Hash the same tuple used by __eq__, keeping the two consistent.
        return hash(self._fields)
    @property
    def _fields(self):
        # Subclasses must return a hashable tuple of the public components
        # identifying the key; used by __eq__ and __hash__.
        raise NotImplementedError
def get_name(self):
"""
Return the name of this private key implementation.
:return:
name of this private key type, in SSH terminology, as a `str` (for
example, ``"ssh-rsa"``).
"""
return ""
@property
def algorithm_name(self):
"""
Return the key algorithm identifier for this key.
Similar to `get_name`, but aimed at pure algorithm name instead of SSH
protocol field value.
"""
# Nuke the leading 'ssh-'
# TODO in Python 3.9: use .removeprefix()
name = self.get_name().replace("ssh-", "")
# Trim any cert suffix (but leave the -cert, as OpenSSH does)
cert_tail = "[email protected]"
if cert_tail in name:
name = name.replace(cert_tail, "-cert")
# Nuke any eg ECDSA suffix, OpenSSH does basically this too.
else:
name = name.split("-")[0]
return name.upper()
def get_bits(self):
"""
Return the number of significant bits in this key. This is useful
for judging the relative security of a key.
:return: bits in the key (as an `int`)
"""
# TODO 4.0: raise NotImplementedError, 0 is unlikely to ever be
# _correct_ and nothing in the critical path seems to use this.
return 0
def can_sign(self):
"""
Return ``True`` if this key has the private part necessary for signing
data.
"""
return False
def get_fingerprint(self):
"""
Return an MD5 fingerprint of the public part of this key. Nothing
secret is revealed.
:return:
a 16-byte `string <str>` (binary) of the MD5 fingerprint, in SSH
format.
"""
return md5(self.asbytes()).digest()
@property
def fingerprint(self):
"""
Modern fingerprint property designed to be comparable to OpenSSH.
Currently only does SHA256 (the OpenSSH default).
.. versionadded:: 3.2
"""
hashy = sha256(bytes(self))
hash_name = hashy.name.upper()
b64ed = encodebytes(hashy.digest())
cleaned = u(b64ed).strip().rstrip("=") # yes, OpenSSH does this too!
return f"{hash_name}:{cleaned}"
def get_base64(self):
"""
Return a base64 string containing the public part of this key. Nothing
secret is revealed. This format is compatible with that used to store
public key files or recognized host keys.
:return: a base64 `string <str>` containing the public part of the key.
"""
return u(encodebytes(self.asbytes())).replace("\n", "")
def sign_ssh_data(self, data, algorithm=None):
"""
Sign a blob of data with this private key, and return a `.Message`
representing an SSH signature message.
:param bytes data:
the data to sign.
:param str algorithm:
the signature algorithm to use, if different from the key's
internal name. Default: ``None``.
:return: an SSH signature `message <.Message>`.
.. versionchanged:: 2.9
Added the ``algorithm`` kwarg.
"""
return bytes()
def verify_ssh_sig(self, data, msg):
"""
Given a blob of data, and an SSH message representing a signature of
that data, verify that it was signed with this key.
:param bytes data: the data that was signed.
:param .Message msg: an SSH signature message
:return:
``True`` if the signature verifies correctly; ``False`` otherwise.
"""
return False
@classmethod
def from_private_key_file(cls, filename, password=None):
"""
Create a key object by reading a private key file. If the private
key is encrypted and ``password`` is not ``None``, the given password
will be used to decrypt the key (otherwise `.PasswordRequiredException`
is thrown). Through the magic of Python, this factory method will
exist in all subclasses of PKey (such as `.RSAKey` or `.DSSKey`), but
is useless on the abstract PKey class.
:param str filename: name of the file to read
:param str password:
an optional password to use to decrypt the key file, if it's
encrypted
:return: a new `.PKey` based on the given private key
:raises: ``IOError`` -- if there was an error reading the file
:raises: `.PasswordRequiredException` -- if the private key file is
encrypted, and ``password`` is ``None``
:raises: `.SSHException` -- if the key file is invalid
"""
key = cls(filename=filename, password=password)
return key
@classmethod
def from_private_key(cls, file_obj, password=None):
"""
Create a key object by reading a private key from a file (or file-like)
object. If the private key is encrypted and ``password`` is not
``None``, the given password will be used to decrypt the key (otherwise
`.PasswordRequiredException` is thrown).
:param file_obj: the file-like object to read from
:param str password:
an optional password to use to decrypt the key, if it's encrypted
:return: a new `.PKey` based on the given private key
:raises: ``IOError`` -- if there was an error reading the key
:raises: `.PasswordRequiredException` --
if the private key file is encrypted, and ``password`` is ``None``
:raises: `.SSHException` -- if the key file is invalid
"""
key = cls(file_obj=file_obj, password=password)
return key
def write_private_key_file(self, filename, password=None):
"""
Write private key contents into a file. If the password is not
``None``, the key is encrypted before writing.
:param str filename: name of the file to write
:param str password:
an optional password to use to encrypt the key file
:raises: ``IOError`` -- if there was an error writing the file
:raises: `.SSHException` -- if the key is invalid
"""
raise Exception("Not implemented in PKey")
def write_private_key(self, file_obj, password=None):
"""
Write private key contents into a file (or file-like) object. If the
password is not ``None``, the key is encrypted before writing.
:param file_obj: the file-like object to write into
:param str password: an optional password to use to encrypt the key
:raises: ``IOError`` -- if there was an error writing to the file
:raises: `.SSHException` -- if the key is invalid
"""
# TODO 4.0: NotImplementedError (plus everywhere else in here)
raise Exception("Not implemented in PKey")
def _read_private_key_file(self, tag, filename, password=None):
"""
Read an SSH2-format private key file, looking for a string of the type
``"BEGIN xxx PRIVATE KEY"`` for some ``xxx``, base64-decode the text we
find, and return it as a string. If the private key is encrypted and
``password`` is not ``None``, the given password will be used to
decrypt the key (otherwise `.PasswordRequiredException` is thrown).
:param str tag: ``"RSA"`` or ``"DSA"``, the tag used to mark the
data block.
:param str filename: name of the file to read.
:param str password:
an optional password to use to decrypt the key file, if it's
encrypted.
:return: the `bytes` that make up the private key.
:raises: ``IOError`` -- if there was an error reading the file.
:raises: `.PasswordRequiredException` -- if the private key file is
encrypted, and ``password`` is ``None``.
:raises: `.SSHException` -- if the key file is invalid.
"""
with open(filename, "r") as f:
data = self._read_private_key(tag, f, password)
return data
    def _read_private_key(self, tag, f, password=None):
        """
        Parse a private key from an already-open file-like object.

        Scans for the BEGIN/END tag pair, then dispatches to the legacy-PEM
        or OpenSSH-format reader depending on the key type found in the
        BEGIN tag.

        :param str tag: expected key type tag (e.g. ``"RSA"``).
        :param f: file-like object yielding the key text.
        :param str password: optional decryption password, passed through.
        :return: 2-tuple of (private key format constant, key `bytes`).
        :raises: `.SSHException` -- if no valid BEGIN tag is found, or the
            tag's key type matches neither ``tag`` nor ``"OPENSSH"``.
        """
        lines = f.readlines()
        if not lines:
            raise SSHException("no lines in {} private key file".format(tag))
        # find the BEGIN tag
        start = 0
        m = self.BEGIN_TAG.match(lines[start])
        line_range = len(lines) - 1
        while start < line_range and not m:
            start += 1
            m = self.BEGIN_TAG.match(lines[start])
        # Step past the BEGIN line itself; 'start' now indexes the first
        # body line.
        start += 1
        keytype = m.group(1) if m else None
        if start >= len(lines) or keytype is None:
            raise SSHException("not a valid {} private key file".format(tag))
        # find the END tag
        end = start
        m = self.END_TAG.match(lines[end])
        while end < line_range and not m:
            end += 1
            m = self.END_TAG.match(lines[end])
        if keytype == tag:
            # Legacy PEM; note the PEM reader re-scans headers from the top
            # of 'lines', so it receives the full list plus the END index.
            data = self._read_private_key_pem(lines, end, password)
            pkformat = self._PRIVATE_KEY_FORMAT_ORIGINAL
        elif keytype == "OPENSSH":
            # Modern format only needs the base64 body between the tags.
            data = self._read_private_key_openssh(lines[start:end], password)
            pkformat = self._PRIVATE_KEY_FORMAT_OPENSSH
        else:
            raise SSHException(
                "encountered {} key, expected {} key".format(keytype, tag)
            )
        return pkformat, data
    def _got_bad_key_format_id(self, id_):
        # Internal sanity-check helper for subclasses: raised when
        # _read_private_key() produced a format id the caller doesn't know.
        err = "{}._read_private_key() spat out an unknown key format id '{}'"
        raise SSHException(err.format(self.__class__.__name__, id_))
def _read_private_key_pem(self, lines, end, password):
start = 0
# parse any headers first
headers = {}
start += 1
while start < len(lines):
line = lines[start].split(": ")
if len(line) == 1:
break
headers[line[0].lower()] = line[1].strip()
start += 1
# if we trudged to the end of the file, just try to cope.
try:
data = decodebytes(b("".join(lines[start:end])))
except base64.binascii.Error as e:
raise SSHException("base64 decoding error: {}".format(e))
if "proc-type" not in headers:
# unencryped: done
return data
# encrypted keyfile: will need a password
proc_type = headers["proc-type"]
if proc_type != "4,ENCRYPTED":
raise SSHException(
'Unknown private key structure "{}"'.format(proc_type)
)
try:
encryption_type, saltstr = headers["dek-info"].split(",")
except:
raise SSHException("Can't parse DEK-info in private key file")
if encryption_type not in self._CIPHER_TABLE:
raise SSHException(
'Unknown private key cipher "{}"'.format(encryption_type)
)
# if no password was passed in,
# raise an exception pointing out that we need one
if password is None:
raise PasswordRequiredException("Private key file is encrypted")
cipher = self._CIPHER_TABLE[encryption_type]["cipher"]
keysize = self._CIPHER_TABLE[encryption_type]["keysize"]
mode = self._CIPHER_TABLE[encryption_type]["mode"]
salt = unhexlify(b(saltstr))
key = util.generate_key_bytes(md5, salt, password, keysize)
decryptor = Cipher(
cipher(key), mode(salt), backend=default_backend()
).decryptor()
return decryptor.update(data) + decryptor.finalize()
    def _read_private_key_openssh(self, lines, password):
        """
        Read the new OpenSSH SSH2 private key format available
        since OpenSSH version 6.5

        :param lines: the base64 body lines between the BEGIN/END tags.
        :param password: passphrase for encrypted keys, or ``None``.
        :return: the raw (unpadded) private key `bytes` for one key.

        Reference:
        https://github.com/openssh/openssh-portable/blob/master/PROTOCOL.key
        """
        try:
            data = decodebytes(b("".join(lines)))
        except base64.binascii.Error as e:
            raise SSHException("base64 decoding error: {}".format(e))
        # read data struct: magic, cipher, kdf name, kdf options, key count
        auth_magic = data[:15]
        if auth_magic != OPENSSH_AUTH_MAGIC:
            raise SSHException("unexpected OpenSSH key header encountered")
        cstruct = self._uint32_cstruct_unpack(data[15:], "sssur")
        cipher, kdfname, kdf_options, num_pubkeys, remainder = cstruct
        # For now, just support 1 key.
        if num_pubkeys > 1:
            raise SSHException(
                "unsupported: private keyfile has multiple keys"
            )
        # pubkey blob is parsed but unused; the private blob repeats the
        # public numbers anyway.
        pubkey, privkey_blob = self._uint32_cstruct_unpack(remainder, "ss")
        if kdfname == b("bcrypt"):
            if cipher == b("aes256-cbc"):
                mode = modes.CBC
            elif cipher == b("aes256-ctr"):
                mode = modes.CTR
            else:
                raise SSHException(
                    "unknown cipher `{}` used in private key file".format(
                        cipher.decode("utf-8")
                    )
                )
            # Encrypted private key.
            # If no password was passed in, raise an exception pointing
            # out that we need one
            if password is None:
                raise PasswordRequiredException(
                    "private key file is encrypted"
                )
            # Unpack salt and rounds from kdfoptions
            salt, rounds = self._uint32_cstruct_unpack(kdf_options, "su")
            # run bcrypt kdf to derive key and iv/nonce (32 + 16 bytes)
            key_iv = bcrypt.kdf(
                b(password),
                b(salt),
                48,
                rounds,
                # We can't control how many rounds are on disk, so no sense
                # warning about it.
                ignore_few_rounds=True,
            )
            key = key_iv[:32]
            iv = key_iv[32:]
            # decrypt private key blob
            decryptor = Cipher(
                algorithms.AES(key), mode(iv), default_backend()
            ).decryptor()
            decrypted_privkey = decryptor.update(privkey_blob)
            decrypted_privkey += decryptor.finalize()
        elif cipher == b("none") and kdfname == b("none"):
            # Unencrypted private key
            decrypted_privkey = privkey_blob
        else:
            raise SSHException(
                "unknown cipher or kdf used in private key file"
            )
        # Unpack private key and verify checkints; the two checkints only
        # match when decryption succeeded (i.e. the password was right).
        cstruct = self._uint32_cstruct_unpack(decrypted_privkey, "uusr")
        checkint1, checkint2, keytype, keydata = cstruct
        if checkint1 != checkint2:
            raise SSHException(
                "OpenSSH private key file checkints do not match"
            )
        return _unpad_openssh(keydata)
def _uint32_cstruct_unpack(self, data, strformat):
"""
Used to read new OpenSSH private key format.
Unpacks a c data structure containing a mix of 32-bit uints and
variable length strings prefixed by 32-bit uint size field,
according to the specified format. Returns the unpacked vars
in a tuple.
Format strings:
s - denotes a string
i - denotes a long integer, encoded as a byte string
u - denotes a 32-bit unsigned integer
r - the remainder of the input string, returned as a string
"""
arr = []
idx = 0
try:
for f in strformat:
if f == "s":
# string
s_size = struct.unpack(">L", data[idx : idx + 4])[0]
idx += 4
s = data[idx : idx + s_size]
idx += s_size
arr.append(s)
if f == "i":
# long integer
s_size = struct.unpack(">L", data[idx : idx + 4])[0]
idx += 4
s = data[idx : idx + s_size]
idx += s_size
i = util.inflate_long(s, True)
arr.append(i)
elif f == "u":
# 32-bit unsigned int
u = struct.unpack(">L", data[idx : idx + 4])[0]
idx += 4
arr.append(u)
elif f == "r":
# remainder as string
s = data[idx:]
arr.append(s)
break
except Exception as e:
# PKey-consuming code frequently wants to save-and-skip-over issues
# with loading keys, and uses SSHException as the (really friggin
# awful) signal for this. So for now...we do this.
raise SSHException(str(e))
return tuple(arr)
    def _write_private_key_file(self, filename, key, format, password=None):
        """
        Write an SSH2-format private key file in a form that can be read by
        paramiko or openssh. If no password is given, the key is written
        unencrypted; if a password is given, the strongest encryption the
        ``cryptography`` backend offers is used (see ``_write_private_key``).

        :param str filename: name of the file to write.
        :param key: the underlying ``cryptography`` private key object.
        :param format:
            serialization format value, passed through to the key object's
            ``private_bytes``.
        :param str password: an optional password to use to encrypt the file.

        :raises: ``IOError`` -- if there was an error writing the file.
        """
        # Ensure that we create new key files directly with a user-only mode,
        # instead of opening, writing, then chmodding, which leaves us open to
        # CVE-2022-24302.
        with os.fdopen(
            os.open(
                filename,
                # NOTE: O_TRUNC is a noop on new files, and O_CREAT is a noop
                # on existing files, so using all 3 in both cases is fine.
                flags=os.O_WRONLY | os.O_TRUNC | os.O_CREAT,
                # Ditto the use of the 'mode' argument; it should be safe to
                # give even for existing files (though it will not act like a
                # chmod in that case).
                mode=o600,
            ),
            # Yea, you still gotta inform the FLO that it is in "write" mode.
            "w",
        ) as f:
            self._write_private_key(f, key, format, password=password)
def _write_private_key(self, f, key, format, password=None):
if password is None:
encryption = serialization.NoEncryption()
else:
encryption = serialization.BestAvailableEncryption(b(password))
f.write(
key.private_bytes(
serialization.Encoding.PEM, format, encryption
).decode()
)
def _check_type_and_load_cert(self, msg, key_type, cert_type):
"""
Perform message type-checking & optional certificate loading.
This includes fast-forwarding cert ``msg`` objects past the nonce, so
that the subsequent fields are the key numbers; thus the caller may
expect to treat the message as key material afterwards either way.
The obtained key type is returned for classes which need to know what
it was (e.g. ECDSA.)
"""
# Normalization; most classes have a single key type and give a string,
# but eg ECDSA is a 1:N mapping.
key_types = key_type
cert_types = cert_type
if isinstance(key_type, str):
key_types = [key_types]
if isinstance(cert_types, str):
cert_types = [cert_types]
# Can't do much with no message, that should've been handled elsewhere
if msg is None:
raise SSHException("Key object may not be empty")
# First field is always key type, in either kind of object. (make sure
# we rewind before grabbing it - sometimes caller had to do their own
# introspection first!)
msg.rewind()
type_ = msg.get_text()
# Regular public key - nothing special to do besides the implicit
# type check.
if type_ in key_types:
pass
# OpenSSH-compatible certificate - store full copy as .public_blob
# (so signing works correctly) and then fast-forward past the
# nonce.
elif type_ in cert_types:
# This seems the cleanest way to 'clone' an already-being-read
# message; they're *IO objects at heart and their .getvalue()
# always returns the full value regardless of pointer position.
self.load_certificate(Message(msg.asbytes()))
# Read out nonce as it comes before the public numbers - our caller
# is likely going to use the (only borrowed by us, not owned)
# 'msg' object for loading those numbers right after this.
# TODO: usefully interpret it & other non-public-number fields
# (requires going back into per-type subclasses.)
msg.get_string()
else:
err = "Invalid key (class: {}, data type: {}"
raise SSHException(err.format(self.__class__.__name__, type_))
def load_certificate(self, value):
"""
Supplement the private key contents with data loaded from an OpenSSH
public key (``.pub``) or certificate (``-cert.pub``) file, a string
containing such a file, or a `.Message` object.
The .pub contents adds no real value, since the private key
file includes sufficient information to derive the public
key info. For certificates, however, this can be used on
the client side to offer authentication requests to the server
based on certificate instead of raw public key.
See:
https://github.com/openssh/openssh-portable/blob/master/PROTOCOL.certkeys
Note: very little effort is made to validate the certificate contents,
that is for the server to decide if it is good enough to authenticate
successfully.
"""
if isinstance(value, Message):
constructor = "from_message"
elif os.path.isfile(value):
constructor = "from_file"
else:
constructor = "from_string"
blob = getattr(PublicBlob, constructor)(value)
if not blob.key_type.startswith(self.get_name()):
err = "PublicBlob type {} incompatible with key type {}"
raise ValueError(err.format(blob.key_type, self.get_name()))
self.public_blob = blob
# General construct for an OpenSSH style Public Key blob
# readable from a one-line file of the format:
# <key-name> <base64-blob> [<comment>]
# Of little value in the case of standard public keys
# {ssh-rsa, ssh-dss, ssh-ecdsa, ssh-ed25519}, but should
# provide rudimentary support for {*-cert.v01}
class PublicBlob:
    """
    OpenSSH plain public key or OpenSSH signed public key (certificate).

    Tries to be as dumb as possible and barely cares about specific
    per-key-type data.

    .. note::

        Most of the time you'll want to call `from_file`, `from_string` or
        `from_message` for useful instantiation, the main constructor is
        basically "I should be using ``attrs`` for this."
    """

    def __init__(self, type_, blob, comment=None):
        """
        Create a new public blob of given type and contents.

        :param str type_: Type indicator, eg ``ssh-rsa``.
        :param bytes blob: The blob bytes themselves.
        :param str comment: A comment, if one was given (e.g. file-based.)
        """
        self.key_type = type_
        self.key_blob = blob
        self.comment = comment

    @classmethod
    def from_file(cls, filename):
        """
        Create a public blob from a ``-cert.pub``-style file on disk.
        """
        with open(filename) as f:
            string = f.read()
        return cls.from_string(string)

    @classmethod
    def from_string(cls, string):
        """
        Create a public blob from a ``-cert.pub``-style string.

        :raises: ``ValueError`` -- too few fields, or blob/key-type mismatch.
        """
        fields = string.split(None, 2)
        if len(fields) < 2:
            msg = "Not enough fields for public blob: {}"
            raise ValueError(msg.format(fields))
        key_type = fields[0]
        key_blob = decodebytes(b(fields[1]))
        try:
            comment = fields[2].strip()
        except IndexError:
            comment = None
        # Verify that the blob message first (string) field matches the
        # key_type
        m = Message(key_blob)
        blob_type = m.get_text()
        if blob_type != key_type:
            deets = "key type={!r}, but blob type={!r}".format(
                key_type, blob_type
            )
            raise ValueError("Invalid PublicBlob contents: {}".format(deets))
        # All good? All good.
        return cls(type_=key_type, blob=key_blob, comment=comment)

    @classmethod
    def from_message(cls, message):
        """
        Create a public blob from a network `.Message`.

        Specifically, a cert-bearing pubkey auth packet, because by definition
        OpenSSH-style certificates 'are' their own network representation."
        """
        type_ = message.get_text()
        return cls(type_=type_, blob=message.asbytes())

    def __str__(self):
        ret = "{} public key/certificate".format(self.key_type)
        if self.comment:
            ret += "- {}".format(self.comment)
        return ret

    def __eq__(self, other):
        # Just piggyback on Message/BytesIO, since both of these should be
        # one. Coerce to bool: previously comparing against a falsy value
        # (e.g. None) leaked that value straight out of __eq__ instead of
        # returning False.
        return bool(self and other and self.key_blob == other.key_blob)

    def __ne__(self, other):
        return not self == other
| 36,581 | Python | .py | 827 | 34.564692 | 113 | 0.60883 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
619 | hostkeys.py | paramiko_paramiko/paramiko/hostkeys.py | # Copyright (C) 2006-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from base64 import encodebytes, decodebytes
import binascii
import os
import re
from collections.abc import MutableMapping
from hashlib import sha1
from hmac import HMAC
from paramiko.pkey import PKey, UnknownKeyType
from paramiko.util import get_logger, constant_time_bytes_eq, b, u
from paramiko.ssh_exception import SSHException
class HostKeys(MutableMapping):
    """
    Representation of an OpenSSH-style "known hosts" file. Host keys can be
    read from one or more files, and then individual hosts can be looked up to
    verify server keys during SSH negotiation.

    A `.HostKeys` object can be treated like a dict; any dict lookup is
    equivalent to calling `lookup`.

    .. versionadded:: 1.5.3
    """

    def __init__(self, filename=None):
        """
        Create a new HostKeys object, optionally loading keys from an OpenSSH
        style host-key file.

        :param str filename: filename to load host keys from, or ``None``
        """
        # emulate a dict of { hostname: { keytype: PKey } }
        self._entries = []
        if filename is not None:
            self.load(filename)

    def add(self, hostname, keytype, key):
        """
        Add a host key entry to the table. Any existing entry for a
        ``(hostname, keytype)`` pair will be replaced.

        :param str hostname: the hostname (or IP) to add
        :param str keytype: key type (``"ssh-rsa"`` or ``"ssh-dss"``)
        :param .PKey key: the key to add
        """
        for e in self._entries:
            if (hostname in e.hostnames) and (e.key.get_name() == keytype):
                e.key = key
                return
        self._entries.append(HostKeyEntry([hostname], key))

    def load(self, filename):
        """
        Read a file of known SSH host keys, in the format used by OpenSSH.
        This type of file unfortunately doesn't exist on Windows, but on
        posix, it will usually be stored in
        ``os.path.expanduser("~/.ssh/known_hosts")``.

        If this method is called multiple times, the host keys are merged,
        not cleared. So multiple calls to `load` will just call `add`,
        replacing any existing entries and adding new ones.

        :param str filename: name of the file to read host keys from

        :raises: ``IOError`` -- if there was an error reading the file
        """
        with open(filename, "r") as f:
            for lineno, line in enumerate(f, 1):
                line = line.strip()
                if (len(line) == 0) or (line[0] == "#"):
                    continue
                try:
                    entry = HostKeyEntry.from_line(line, lineno)
                except SSHException:
                    continue
                if entry is not None:
                    # Iterate over a *copy*: hostnames are removed from the
                    # entry as we go, and removing from the list being
                    # iterated silently skips the following element.
                    _hostnames = entry.hostnames[:]
                    for h in _hostnames:
                        if self.check(h, entry.key):
                            entry.hostnames.remove(h)
                    if len(entry.hostnames):
                        self._entries.append(entry)

    def save(self, filename):
        """
        Save host keys into a file, in the format used by OpenSSH. The order
        of keys in the file will be preserved when possible (if these keys were
        loaded from a file originally). The single exception is that combined
        lines will be split into individual key lines, which is arguably a bug.

        :param str filename: name of the file to write

        :raises: ``IOError`` -- if there was an error writing the file

        .. versionadded:: 1.6.1
        """
        with open(filename, "w") as f:
            for e in self._entries:
                line = e.to_line()
                if line:
                    f.write(line)

    def lookup(self, hostname):
        """
        Find a hostkey entry for a given hostname or IP. If no entry is found,
        ``None`` is returned. Otherwise a dictionary of keytype to key is
        returned. The keytype will be either ``"ssh-rsa"`` or ``"ssh-dss"``.

        :param str hostname: the hostname (or IP) to lookup
        :return: dict of `str` -> `.PKey` keys associated with this host
            (or ``None``)
        """

        # A live dict-like view: mutations write through to the parent
        # HostKeys' entry list.
        class SubDict(MutableMapping):
            def __init__(self, hostname, entries, hostkeys):
                self._hostname = hostname
                self._entries = entries
                self._hostkeys = hostkeys

            def __iter__(self):
                for k in self.keys():
                    yield k

            def __len__(self):
                return len(self.keys())

            def __delitem__(self, key):
                for e in list(self._entries):
                    if e.key.get_name() == key:
                        self._entries.remove(e)
                        break
                else:
                    raise KeyError(key)

            def __getitem__(self, key):
                for e in self._entries:
                    if e.key.get_name() == key:
                        return e.key
                raise KeyError(key)

            def __setitem__(self, key, val):
                for e in self._entries:
                    if e.key is None:
                        continue
                    if e.key.get_name() == key:
                        # replace
                        e.key = val
                        break
                else:
                    # add a new one
                    e = HostKeyEntry([hostname], val)
                    self._entries.append(e)
                    self._hostkeys._entries.append(e)

            def keys(self):
                return [
                    e.key.get_name()
                    for e in self._entries
                    if e.key is not None
                ]

        entries = []
        for e in self._entries:
            if self._hostname_matches(hostname, e):
                entries.append(e)
        if len(entries) == 0:
            return None
        return SubDict(hostname, entries, self)

    def _hostname_matches(self, hostname, entry):
        """
        Tests whether ``hostname`` string matches given SubDict ``entry``.

        :returns bool:
        """
        # Match either exactly, or — when the entry is a hashed hostname
        # ("|1|salt|hash") and the query is not — by re-hashing the query
        # with the entry's salt and comparing in constant time.
        for h in entry.hostnames:
            if (
                h == hostname
                or h.startswith("|1|")
                and not hostname.startswith("|1|")
                and constant_time_bytes_eq(self.hash_host(hostname, h), h)
            ):
                return True
        return False

    def check(self, hostname, key):
        """
        Return True if the given key is associated with the given hostname
        in this dictionary.

        :param str hostname: hostname (or IP) of the SSH server
        :param .PKey key: the key to check
        :return:
            ``True`` if the key is associated with the hostname; else ``False``
        """
        k = self.lookup(hostname)
        if k is None:
            return False
        host_key = k.get(key.get_name(), None)
        if host_key is None:
            return False
        return host_key.asbytes() == key.asbytes()

    def clear(self):
        """
        Remove all host keys from the dictionary.
        """
        self._entries = []

    def __iter__(self):
        for k in self.keys():
            yield k

    def __len__(self):
        return len(self.keys())

    def __getitem__(self, key):
        ret = self.lookup(key)
        if ret is None:
            raise KeyError(key)
        return ret

    def __delitem__(self, key):
        index = None
        for i, entry in enumerate(self._entries):
            if self._hostname_matches(key, entry):
                index = i
                break
        if index is None:
            raise KeyError(key)
        self._entries.pop(index)

    def __setitem__(self, hostname, entry):
        # don't use this please.
        if len(entry) == 0:
            self._entries.append(HostKeyEntry([hostname], None))
            return
        for key_type in entry.keys():
            found = False
            for e in self._entries:
                if (hostname in e.hostnames) and e.key.get_name() == key_type:
                    # replace
                    e.key = entry[key_type]
                    found = True
            if not found:
                self._entries.append(HostKeyEntry([hostname], entry[key_type]))

    def keys(self):
        # Preserve entry order while de-duplicating hostnames.
        ret = []
        for e in self._entries:
            for h in e.hostnames:
                if h not in ret:
                    ret.append(h)
        return ret

    def values(self):
        ret = []
        for k in self.keys():
            ret.append(self.lookup(k))
        return ret

    @staticmethod
    def hash_host(hostname, salt=None):
        """
        Return a "hashed" form of the hostname, as used by OpenSSH when storing
        hashed hostnames in the known_hosts file.

        :param str hostname: the hostname to hash
        :param str salt: optional salt to use when hashing
            (must be 20 bytes long)
        :return: the hashed hostname as a `str`
        """
        if salt is None:
            salt = os.urandom(sha1().digest_size)
        else:
            if salt.startswith("|1|"):
                salt = salt.split("|")[2]
            salt = decodebytes(b(salt))
        assert len(salt) == sha1().digest_size
        hmac = HMAC(salt, b(hostname), sha1).digest()
        hostkey = "|1|{}|{}".format(u(encodebytes(salt)), u(encodebytes(hmac)))
        return hostkey.replace("\n", "")
class InvalidHostKey(Exception):
    """Raised when a known_hosts line's key data cannot be decoded."""

    def __init__(self, line, exc):
        # Stash both the offending line and the underlying decode error;
        # super().__init__ also sets .args to (line, exc).
        super().__init__(line, exc)
        self.line = line
        self.exc = exc
class HostKeyEntry:
    """
    Representation of a line in an OpenSSH-style "known hosts" file.
    """

    def __init__(self, hostnames=None, key=None):
        # An entry can only be serialized back out (see to_line) when both
        # hostnames and key are present.
        self.valid = (hostnames is not None) and (key is not None)
        self.hostnames = hostnames
        self.key = key

    @classmethod
    def from_line(cls, line, lineno=None):
        """
        Parses the given line of text to find the names for the host,
        the type of key, and the key data. The line is expected to be in the
        format used by the OpenSSH known_hosts file. Fields are separated by
        one or more spaces or tabs.

        Lines are expected to not have leading or trailing whitespace.
        We don't bother to check for comments or empty lines. All of
        that should be taken care of before sending the line to us.

        :param str line: a line from an OpenSSH known_hosts file
        :return:
            a `.HostKeyEntry`, or ``None`` if the line has too few fields or
            an unsupported key type.
        :raises: `InvalidHostKey` -- if the key data is not valid base64.
        """
        log = get_logger("paramiko.hostkeys")
        # Split on *runs* of separators: OpenSSH tolerates multiple
        # consecutive spaces/tabs between fields, whereas splitting on a
        # single separator would yield empty fields for such lines.
        fields = re.split(r"[ \t]+", line)
        if len(fields) < 3:
            # Bad number of fields
            msg = "Not enough fields found in known_hosts in line {} ({!r})"
            log.info(msg.format(lineno, line))
            return None
        fields = fields[:3]
        names, key_type, key = fields
        names = names.split(",")
        # Decide what kind of key we're looking at and create an object
        # to hold it accordingly.
        try:
            # TODO: this grew organically and doesn't seem /wrong/ per se (file
            # read -> unicode str -> bytes for base64 decode -> decoded bytes);
            # but in Python 3 forever land, can we simply use
            # `base64.b64decode(str-from-file)` here?
            key_bytes = decodebytes(b(key))
        except binascii.Error as e:
            raise InvalidHostKey(line, e)
        try:
            return cls(names, PKey.from_type_string(key_type, key_bytes))
        except UnknownKeyType:
            # TODO 4.0: consider changing HostKeys API so this just raises
            # naturally and the exception is muted higher up in the stack?
            log.info("Unable to handle key of type {}".format(key_type))
            return None

    def to_line(self):
        """
        Returns a string in OpenSSH known_hosts file format, or None if
        the object is not in a valid state. A trailing newline is
        included.
        """
        if self.valid:
            return "{} {} {}\n".format(
                ",".join(self.hostnames),
                self.key.get_name(),
                self.key.get_base64(),
            )
        return None

    def __repr__(self):
        return "<HostKeyEntry {!r}: {!r}>".format(self.hostnames, self.key)
| 13,208 | Python | .py | 326 | 29.570552 | 79 | 0.561291 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
620 | sftp_file.py | paramiko_paramiko/paramiko/sftp_file.py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
SFTP file object
"""
from binascii import hexlify
from collections import deque
import socket
import threading
import time
from paramiko.common import DEBUG, io_sleep
from paramiko.file import BufferedFile
from paramiko.util import u
from paramiko.sftp import (
CMD_CLOSE,
CMD_READ,
CMD_DATA,
SFTPError,
CMD_WRITE,
CMD_STATUS,
CMD_FSTAT,
CMD_ATTRS,
CMD_FSETSTAT,
CMD_EXTENDED,
int64,
)
from paramiko.sftp_attr import SFTPAttributes
class SFTPFile(BufferedFile):
    """
    Proxy object for a file on the remote server, in client mode SFTP.
    Instances of this class may be used as context managers in the same way
    that built-in Python file objects are.
    """

    # Some sftp servers will choke if you send read/write requests larger than
    # this size.
    MAX_REQUEST_SIZE = 32768

    def __init__(self, sftp, handle, mode="r", bufsize=-1):
        BufferedFile.__init__(self)
        self.sftp = sftp
        self.handle = handle
        BufferedFile._set_mode(self, mode, bufsize)
        self.pipelined = False
        # Prefetch state: buffered data keyed by file offset, and the extents
        # of requests still in flight keyed by request number.
        self._prefetching = False
        self._prefetch_done = False
        self._prefetch_data = {}
        self._prefetch_extents = {}
        self._prefetch_lock = threading.Lock()
        self._saved_exception = None
        self._reqs = deque()

    def __del__(self):
        self._close(async_=True)

    def close(self):
        """
        Close the file.
        """
        self._close(async_=False)

    def _close(self, async_=False):
        # We allow double-close without signaling an error, because real
        # Python file objects do. However, we must protect against actually
        # sending multiple CMD_CLOSE packets, because after we close our
        # handle, the same handle may be re-allocated by the server, and we
        # may end up mysteriously closing some random other file. (This is
        # especially important because we unconditionally call close() from
        # __del__.)
        if self._closed:
            return
        self.sftp._log(DEBUG, "close({})".format(u(hexlify(self.handle))))
        if self.pipelined:
            self.sftp._finish_responses(self)
        BufferedFile.close(self)
        try:
            if async_:
                # GC'd file handle could be called from an arbitrary thread
                # -- don't wait for a response
                self.sftp._async_request(type(None), CMD_CLOSE, self.handle)
            else:
                self.sftp._request(CMD_CLOSE, self.handle)
        except EOFError:
            # may have outlived the Transport connection
            pass
        except (IOError, socket.error):
            # may have outlived the Transport connection
            pass

    def _data_in_prefetch_requests(self, offset, size):
        # Return True if [offset, offset+size) is fully covered by prefetch
        # requests that are still in flight (possibly across several chunks).
        k = [
            x for x in list(self._prefetch_extents.values()) if x[0] <= offset
        ]
        if len(k) == 0:
            return False
        k.sort(key=lambda x: x[0])
        buf_offset, buf_size = k[-1]
        if buf_offset + buf_size <= offset:
            # prefetch request ends before this one begins
            return False
        if buf_offset + buf_size >= offset + size:
            # inclusive
            return True
        # well, we have part of the request.  see if another chunk has
        # the rest.
        return self._data_in_prefetch_requests(
            buf_offset + buf_size, offset + size - buf_offset - buf_size
        )

    def _data_in_prefetch_buffers(self, offset):
        """
        if a block of data is present in the prefetch buffers, at the given
        offset, return the offset of the relevant prefetch buffer.  otherwise,
        return None.  this guarantees nothing about the number of bytes
        collected in the prefetch buffer so far.
        """
        k = [i for i in self._prefetch_data.keys() if i <= offset]
        if len(k) == 0:
            return None
        index = max(k)
        buf_offset = offset - index
        if buf_offset >= len(self._prefetch_data[index]):
            # it's not here
            return None
        return index

    def _read_prefetch(self, size):
        """
        read data out of the prefetch buffer, if possible.  if the data isn't
        in the buffer, return None.  otherwise, behaves like a normal read.
        """
        # while not closed, and haven't fetched past the current position,
        # and haven't reached EOF...
        while True:
            offset = self._data_in_prefetch_buffers(self._realpos)
            if offset is not None:
                break
            if self._prefetch_done or self._closed:
                break
            self.sftp._read_response()
            self._check_exception()
        if offset is None:
            self._prefetching = False
            return None
        prefetch = self._prefetch_data[offset]
        del self._prefetch_data[offset]
        buf_offset = self._realpos - offset
        if buf_offset > 0:
            self._prefetch_data[offset] = prefetch[:buf_offset]
            prefetch = prefetch[buf_offset:]
        if size < len(prefetch):
            self._prefetch_data[self._realpos + size] = prefetch[size:]
            prefetch = prefetch[:size]
        return prefetch

    def _read(self, size):
        # Satisfy the read from the prefetch buffer when possible; otherwise
        # issue a synchronous CMD_READ (capped at the max request size).
        size = min(size, self.MAX_REQUEST_SIZE)
        if self._prefetching:
            data = self._read_prefetch(size)
            if data is not None:
                return data
        t, msg = self.sftp._request(
            CMD_READ, self.handle, int64(self._realpos), int(size)
        )
        if t != CMD_DATA:
            raise SFTPError("Expected data")
        return msg.get_string()

    def _write(self, data):
        # may write less than requested if it would exceed max packet size
        chunk = min(len(data), self.MAX_REQUEST_SIZE)
        sftp_async_request = self.sftp._async_request(
            type(None),
            CMD_WRITE,
            self.handle,
            int64(self._realpos),
            data[:chunk],
        )
        self._reqs.append(sftp_async_request)
        # When not pipelined (or the queue is long and the server has replies
        # waiting), drain all outstanding write responses now.
        if not self.pipelined or (
            len(self._reqs) > 100 and self.sftp.sock.recv_ready()
        ):
            while len(self._reqs):
                req = self._reqs.popleft()
                t, msg = self.sftp._read_response(req)
                if t != CMD_STATUS:
                    raise SFTPError("Expected status")
                # convert_status already called
        return chunk

    def settimeout(self, timeout):
        """
        Set a timeout on read/write operations on the underlying socket or
        ssh `.Channel`.
        :param float timeout:
            seconds to wait for a pending read/write operation before raising
            ``socket.timeout``, or ``None`` for no timeout
        .. seealso:: `.Channel.settimeout`
        """
        self.sftp.sock.settimeout(timeout)

    def gettimeout(self):
        """
        Returns the timeout in seconds (as a `float`) associated with the
        socket or ssh `.Channel` used for this file.
        .. seealso:: `.Channel.gettimeout`
        """
        return self.sftp.sock.gettimeout()

    def setblocking(self, blocking):
        """
        Set blocking or non-blocking mode on the underiying socket or ssh
        `.Channel`.
        :param int blocking:
            0 to set non-blocking mode; non-0 to set blocking mode.
        .. seealso:: `.Channel.setblocking`
        """
        self.sftp.sock.setblocking(blocking)

    def seekable(self):
        """
        Check if the file supports random access.
        :return:
            `True` if the file supports random access. If `False`,
            :meth:`seek` will raise an exception
        """
        return True

    def seek(self, offset, whence=0):
        """
        Set the file's current position.
        See `file.seek` for details.
        """
        self.flush()
        if whence == self.SEEK_SET:
            self._realpos = self._pos = offset
        elif whence == self.SEEK_CUR:
            self._pos += offset
            self._realpos = self._pos
        else:
            self._realpos = self._pos = self._get_size() + offset
        self._rbuffer = bytes()

    def stat(self):
        """
        Retrieve information about this file from the remote system.  This is
        exactly like `.SFTPClient.stat`, except that it operates on an
        already-open file.
        :returns:
            an `.SFTPAttributes` object containing attributes about this file.
        """
        t, msg = self.sftp._request(CMD_FSTAT, self.handle)
        if t != CMD_ATTRS:
            raise SFTPError("Expected attributes")
        return SFTPAttributes._from_msg(msg)

    def chmod(self, mode):
        """
        Change the mode (permissions) of this file.  The permissions are
        unix-style and identical to those used by Python's `os.chmod`
        function.
        :param int mode: new permissions
        """
        self.sftp._log(
            DEBUG, "chmod({}, {!r})".format(hexlify(self.handle), mode)
        )
        attr = SFTPAttributes()
        attr.st_mode = mode
        self.sftp._request(CMD_FSETSTAT, self.handle, attr)

    def chown(self, uid, gid):
        """
        Change the owner (``uid``) and group (``gid``) of this file.  As with
        Python's `os.chown` function, you must pass both arguments, so if you
        only want to change one, use `stat` first to retrieve the current
        owner and group.
        :param int uid: new owner's uid
        :param int gid: new group id
        """
        self.sftp._log(
            DEBUG,
            "chown({}, {!r}, {!r})".format(hexlify(self.handle), uid, gid),
        )
        attr = SFTPAttributes()
        attr.st_uid, attr.st_gid = uid, gid
        self.sftp._request(CMD_FSETSTAT, self.handle, attr)

    def utime(self, times):
        """
        Set the access and modified times of this file.  If
        ``times`` is ``None``, then the file's access and modified times are
        set to the current time.  Otherwise, ``times`` must be a 2-tuple of
        numbers, of the form ``(atime, mtime)``, which is used to set the
        access and modified times, respectively.  This bizarre API is mimicked
        from Python for the sake of consistency -- I apologize.
        :param tuple times:
            ``None`` or a tuple of (access time, modified time) in standard
            internet epoch time (seconds since 01 January 1970 GMT)
        """
        if times is None:
            times = (time.time(), time.time())
        self.sftp._log(
            DEBUG, "utime({}, {!r})".format(hexlify(self.handle), times)
        )
        attr = SFTPAttributes()
        attr.st_atime, attr.st_mtime = times
        self.sftp._request(CMD_FSETSTAT, self.handle, attr)

    def truncate(self, size):
        """
        Change the size of this file.  This usually extends
        or shrinks the size of the file, just like the ``truncate()`` method on
        Python file objects.
        :param size: the new size of the file
        """
        self.sftp._log(
            DEBUG, "truncate({}, {!r})".format(hexlify(self.handle), size)
        )
        attr = SFTPAttributes()
        attr.st_size = size
        self.sftp._request(CMD_FSETSTAT, self.handle, attr)

    def check(self, hash_algorithm, offset=0, length=0, block_size=0):
        """
        Ask the server for a hash of a section of this file.  This can be used
        to verify a successful upload or download, or for various rsync-like
        operations.
        The file is hashed from ``offset``, for ``length`` bytes.
        If ``length`` is 0, the remainder of the file is hashed.  Thus, if both
        ``offset`` and ``length`` are zero, the entire file is hashed.
        Normally, ``block_size`` will be 0 (the default), and this method will
        return a byte string representing the requested hash (for example, a
        string of length 16 for MD5, or 20 for SHA-1).  If a non-zero
        ``block_size`` is given, each chunk of the file (from ``offset`` to
        ``offset + length``) of ``block_size`` bytes is computed as a separate
        hash.  The hash results are all concatenated and returned as a single
        string.
        For example, ``check('sha1', 0, 1024, 512)`` will return a string of
        length 40.  The first 20 bytes will be the SHA-1 of the first 512 bytes
        of the file, and the last 20 bytes will be the SHA-1 of the next 512
        bytes.
        :param str hash_algorithm:
            the name of the hash algorithm to use (normally ``"sha1"`` or
            ``"md5"``)
        :param offset:
            offset into the file to begin hashing (0 means to start from the
            beginning)
        :param length:
            number of bytes to hash (0 means continue to the end of the file)
        :param int block_size:
            number of bytes to hash per result (must not be less than 256; 0
            means to compute only one hash of the entire segment)
        :return:
            `str` of bytes representing the hash of each block, concatenated
            together
        :raises:
            ``IOError`` -- if the server doesn't support the "check-file"
            extension, or possibly doesn't support the hash algorithm requested
        .. note:: Many (most?) servers don't support this extension yet.
        .. versionadded:: 1.4
        """
        t, msg = self.sftp._request(
            CMD_EXTENDED,
            "check-file",
            self.handle,
            hash_algorithm,
            int64(offset),
            int64(length),
            block_size,
        )
        msg.get_text()  # ext
        msg.get_text()  # alg
        data = msg.get_remainder()
        return data

    def set_pipelined(self, pipelined=True):
        """
        Turn on/off the pipelining of write operations to this file.  When
        pipelining is on, paramiko won't wait for the server response after
        each write operation.  Instead, they're collected as they come in. At
        the first non-write operation (including `.close`), all remaining
        server responses are collected.  This means that if there was an error
        with one of your later writes, an exception might be thrown from within
        `.close` instead of `.write`.
        By default, files are not pipelined.
        :param bool pipelined:
            ``True`` if pipelining should be turned on for this file; ``False``
            otherwise
        .. versionadded:: 1.5
        """
        self.pipelined = pipelined

    def prefetch(self, file_size=None, max_concurrent_requests=None):
        """
        Pre-fetch the remaining contents of this file in anticipation of future
        `.read` calls.  If reading the entire file, pre-fetching can
        dramatically improve the download speed by avoiding roundtrip latency.
        The file's contents are incrementally buffered in a background thread.
        The prefetched data is stored in a buffer until read via the `.read`
        method.  Once data has been read, it's removed from the buffer.  The
        data may be read in a random order (using `.seek`); chunks of the
        buffer that haven't been read will continue to be buffered.
        :param int file_size:
            When this is ``None`` (the default), this method calls `stat` to
            determine the remote file size.  In some situations, doing so can
            cause exceptions or hangs (see `#562
            <https://github.com/paramiko/paramiko/pull/562>`_); as a
            workaround, one may call `stat` explicitly and pass its value in
            via this parameter.
        :param int max_concurrent_requests:
            The maximum number of concurrent read requests to prefetch.  See
            `.SFTPClient.get` (its ``max_concurrent_prefetch_requests`` param)
            for details.
        .. versionadded:: 1.5.1
        .. versionchanged:: 1.16.0
            The ``file_size`` parameter was added (with no default value).
        .. versionchanged:: 1.16.1
            The ``file_size`` parameter was made optional for backwards
            compatibility.
        .. versionchanged:: 3.3
            Added ``max_concurrent_requests``.
        """
        if file_size is None:
            file_size = self.stat().st_size
        # queue up async reads for the rest of the file
        chunks = []
        n = self._realpos
        while n < file_size:
            chunk = min(self.MAX_REQUEST_SIZE, file_size - n)
            chunks.append((n, chunk))
            n += chunk
        if len(chunks) > 0:
            self._start_prefetch(chunks, max_concurrent_requests)

    def readv(self, chunks, max_concurrent_prefetch_requests=None):
        """
        Read a set of blocks from the file by (offset, length).  This is more
        efficient than doing a series of `.seek` and `.read` calls, since the
        prefetch machinery is used to retrieve all the requested blocks at
        once.
        :param chunks:
            a list of ``(offset, length)`` tuples indicating which sections of
            the file to read
        :param int max_concurrent_prefetch_requests:
            The maximum number of concurrent read requests to prefetch.  See
            `.SFTPClient.get` (its ``max_concurrent_prefetch_requests`` param)
            for details.
        :return: a list of blocks read, in the same order as in ``chunks``
        .. versionadded:: 1.5.4
        .. versionchanged:: 3.3
            Added ``max_concurrent_prefetch_requests``.
        """
        self.sftp._log(
            DEBUG, "readv({}, {!r})".format(hexlify(self.handle), chunks)
        )
        read_chunks = []
        for offset, size in chunks:
            # don't fetch data that's already in the prefetch buffer
            if self._data_in_prefetch_buffers(
                offset
            ) or self._data_in_prefetch_requests(offset, size):
                continue
            # break up anything larger than the max read size
            while size > 0:
                chunk_size = min(size, self.MAX_REQUEST_SIZE)
                read_chunks.append((offset, chunk_size))
                offset += chunk_size
                size -= chunk_size
        self._start_prefetch(read_chunks, max_concurrent_prefetch_requests)
        # now we can just devolve to a bunch of read()s :)
        for x in chunks:
            self.seek(x[0])
            yield self.read(x[1])

    # ...internals...

    def _get_size(self):
        # Fallback size probe used by seek(SEEK_END); any failure (e.g. a
        # dropped connection) is reported as size 0.  Catch Exception rather
        # than using a bare ``except:`` so KeyboardInterrupt/SystemExit still
        # propagate.
        try:
            return self.stat().st_size
        except Exception:
            return 0

    def _start_prefetch(self, chunks, max_concurrent_requests=None):
        self._prefetching = True
        self._prefetch_done = False
        t = threading.Thread(
            target=self._prefetch_thread,
            args=(chunks, max_concurrent_requests),
        )
        t.daemon = True
        t.start()

    def _prefetch_thread(self, chunks, max_concurrent_requests):
        # do these read requests in a temporary thread because there may be
        # a lot of them, so it may block.
        for offset, length in chunks:
            # Limit the number of concurrent requests in a busy-loop
            if max_concurrent_requests is not None:
                while True:
                    with self._prefetch_lock:
                        pf_len = len(self._prefetch_extents)
                        if pf_len < max_concurrent_requests:
                            break
                    time.sleep(io_sleep)
            num = self.sftp._async_request(
                self, CMD_READ, self.handle, int64(offset), int(length)
            )
            with self._prefetch_lock:
                self._prefetch_extents[num] = (offset, length)

    def _async_response(self, t, msg, num):
        # Callback for async CMD_READ replies issued by the prefetch thread.
        if t == CMD_STATUS:
            # save exception and re-raise it on next file operation
            try:
                self.sftp._convert_status(msg)
            except Exception as e:
                self._saved_exception = e
            return
        if t != CMD_DATA:
            raise SFTPError("Expected data")
        data = msg.get_string()
        while True:
            with self._prefetch_lock:
                # spin if in race with _prefetch_thread
                if num in self._prefetch_extents:
                    offset, length = self._prefetch_extents[num]
                    self._prefetch_data[offset] = data
                    del self._prefetch_extents[num]
                    if len(self._prefetch_extents) == 0:
                        self._prefetch_done = True
                    break

    def _check_exception(self):
        """if there's a saved exception, raise & clear it"""
        if self._saved_exception is not None:
            x = self._saved_exception
            self._saved_exception = None
            raise x
| 21,820 | Python | .py | 523 | 31.774379 | 79 | 0.595166 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
621 | sftp_server.py | paramiko_paramiko/paramiko/sftp_server.py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Server-mode SFTP support.
"""
import os
import errno
import sys
from hashlib import md5, sha1
from paramiko import util
from paramiko.sftp import (
BaseSFTP,
Message,
SFTP_FAILURE,
SFTP_PERMISSION_DENIED,
SFTP_NO_SUCH_FILE,
int64,
)
from paramiko.sftp_si import SFTPServerInterface
from paramiko.sftp_attr import SFTPAttributes
from paramiko.common import DEBUG
from paramiko.server import SubsystemHandler
from paramiko.util import b
# known hash algorithms for the "check-file" extension
from paramiko.sftp import (
CMD_HANDLE,
SFTP_DESC,
CMD_STATUS,
SFTP_EOF,
CMD_NAME,
SFTP_BAD_MESSAGE,
CMD_EXTENDED_REPLY,
SFTP_FLAG_READ,
SFTP_FLAG_WRITE,
SFTP_FLAG_APPEND,
SFTP_FLAG_CREATE,
SFTP_FLAG_TRUNC,
SFTP_FLAG_EXCL,
CMD_NAMES,
CMD_OPEN,
CMD_CLOSE,
SFTP_OK,
CMD_READ,
CMD_DATA,
CMD_WRITE,
CMD_REMOVE,
CMD_RENAME,
CMD_MKDIR,
CMD_RMDIR,
CMD_OPENDIR,
CMD_READDIR,
CMD_STAT,
CMD_ATTRS,
CMD_LSTAT,
CMD_FSTAT,
CMD_SETSTAT,
CMD_FSETSTAT,
CMD_READLINK,
CMD_SYMLINK,
CMD_REALPATH,
CMD_EXTENDED,
SFTP_OP_UNSUPPORTED,
)
# Maps "check-file" algorithm names to their hashlib constructors.
_hash_class = {"sha1": sha1, "md5": md5}
class SFTPServer(BaseSFTP, SubsystemHandler):
    """
    Server-side SFTP subsystem support.  Since this is a `.SubsystemHandler`,
    it can be (and is meant to be) set as the handler for ``"sftp"`` requests.
    Use `.Transport.set_subsystem_handler` to activate this class.
    """

    def __init__(
        self,
        channel,
        name,
        server,
        sftp_si=SFTPServerInterface,
        *args,
        **kwargs
    ):
        """
        The constructor for SFTPServer is meant to be called from within the
        `.Transport` as a subsystem handler.  ``server`` and any additional
        parameters or keyword parameters are passed from the original call to
        `.Transport.set_subsystem_handler`.
        :param .Channel channel: channel passed from the `.Transport`.
        :param str name: name of the requested subsystem.
        :param .ServerInterface server:
            the server object associated with this channel and subsystem
        :param sftp_si:
            a subclass of `.SFTPServerInterface` to use for handling individual
            requests.
        """
        BaseSFTP.__init__(self)
        SubsystemHandler.__init__(self, channel, name, server)
        transport = channel.get_transport()
        self.logger = util.get_logger(transport.get_log_channel() + ".sftp")
        self.ultra_debug = transport.get_hexdump()
        self.next_handle = 1
        # map of handle-string to SFTPHandle for files & folders:
        self.file_table = {}
        self.folder_table = {}
        self.server = sftp_si(server, *args, **kwargs)

    def _log(self, level, msg):
        # Prefix every log line with the channel name; accept a list of lines.
        if issubclass(type(msg), list):
            for m in msg:
                super()._log(level, "[chan " + self.sock.get_name() + "] " + m)
        else:
            super()._log(level, "[chan " + self.sock.get_name() + "] " + msg)

    def start_subsystem(self, name, transport, channel):
        self.sock = channel
        self._log(DEBUG, "Started sftp server on channel {!r}".format(channel))
        self._send_server_version()
        self.server.session_started()
        while True:
            try:
                t, data = self._read_packet()
            except EOFError:
                self._log(DEBUG, "EOF -- end of session")
                return
            except Exception as e:
                self._log(DEBUG, "Exception on channel: " + str(e))
                self._log(DEBUG, util.tb_strings())
                return
            msg = Message(data)
            request_number = msg.get_int()
            try:
                self._process(t, request_number, msg)
            except Exception as e:
                self._log(DEBUG, "Exception in server processing: " + str(e))
                self._log(DEBUG, util.tb_strings())
                # send some kind of failure message, at least
                try:
                    self._send_status(request_number, SFTP_FAILURE)
                except Exception:
                    # Best effort only; the channel may already be dead.
                    # (Was a bare ``except:``, which would also swallow
                    # SystemExit/KeyboardInterrupt.)
                    pass

    def finish_subsystem(self):
        self.server.session_ended()
        super().finish_subsystem()
        # close any file handles that were left open
        # (so we can return them to the OS quickly)
        for f in self.file_table.values():
            f.close()
        for f in self.folder_table.values():
            f.close()
        self.file_table = {}
        self.folder_table = {}

    @staticmethod
    def convert_errno(e):
        """
        Convert an errno value (as from an ``OSError`` or ``IOError``) into a
        standard SFTP result code.  This is a convenience function for trapping
        exceptions in server code and returning an appropriate result.
        :param int e: an errno code, as from ``OSError.errno``.
        :return: an `int` SFTP error code like ``SFTP_NO_SUCH_FILE``.
        """
        if e == errno.EACCES:
            # permission denied
            return SFTP_PERMISSION_DENIED
        elif (e == errno.ENOENT) or (e == errno.ENOTDIR):
            # no such file
            return SFTP_NO_SUCH_FILE
        else:
            return SFTP_FAILURE

    @staticmethod
    def set_file_attr(filename, attr):
        """
        Change a file's attributes on the local filesystem.  The contents of
        ``attr`` are used to change the permissions, owner, group ownership,
        and/or modification & access time of the file, depending on which
        attributes are present in ``attr``.
        This is meant to be a handy helper function for translating SFTP file
        requests into local file operations.
        :param str filename:
            name of the file to alter (should usually be an absolute path).
        :param .SFTPAttributes attr: attributes to change.
        """
        if sys.platform != "win32":
            # mode operations are meaningless on win32
            if attr._flags & attr.FLAG_PERMISSIONS:
                os.chmod(filename, attr.st_mode)
            if attr._flags & attr.FLAG_UIDGID:
                os.chown(filename, attr.st_uid, attr.st_gid)
        if attr._flags & attr.FLAG_AMTIME:
            os.utime(filename, (attr.st_atime, attr.st_mtime))
        if attr._flags & attr.FLAG_SIZE:
            with open(filename, "w+") as f:
                f.truncate(attr.st_size)

    # ...internals...

    def _response(self, request_number, t, *args):
        # Build and send a packet of type ``t``; each arg is serialized by
        # runtime type (int64 before int, since int64 subclasses int).
        msg = Message()
        msg.add_int(request_number)
        for item in args:
            # NOTE: this is a very silly tiny class used for SFTPFile mostly
            if isinstance(item, int64):
                msg.add_int64(item)
            elif isinstance(item, int):
                msg.add_int(item)
            elif isinstance(item, (str, bytes)):
                msg.add_string(item)
            elif type(item) is SFTPAttributes:
                item._pack(msg)
            else:
                raise Exception(
                    "unknown type for {!r} type {!r}".format(item, type(item))
                )
        self._send_packet(t, msg)

    def _send_handle_response(self, request_number, handle, folder=False):
        if not issubclass(type(handle), SFTPHandle):
            # must be error code
            self._send_status(request_number, handle)
            return
        handle._set_name(b("hx{:d}".format(self.next_handle)))
        self.next_handle += 1
        if folder:
            self.folder_table[handle._get_name()] = handle
        else:
            self.file_table[handle._get_name()] = handle
        self._response(request_number, CMD_HANDLE, handle._get_name())

    def _send_status(self, request_number, code, desc=None):
        if desc is None:
            try:
                desc = SFTP_DESC[code]
            except IndexError:
                desc = "Unknown"
        # some clients expect a "language" tag at the end
        # (but don't mind it being blank)
        self._response(request_number, CMD_STATUS, code, desc, "")

    def _open_folder(self, request_number, path):
        resp = self.server.list_folder(path)
        if issubclass(type(resp), list):
            # got an actual list of filenames in the folder
            folder = SFTPHandle()
            folder._set_files(resp)
            self._send_handle_response(request_number, folder, True)
            return
        # must be an error code
        self._send_status(request_number, resp)

    def _read_folder(self, request_number, folder):
        flist = folder._get_next_files()
        if len(flist) == 0:
            self._send_status(request_number, SFTP_EOF)
            return
        msg = Message()
        msg.add_int(request_number)
        msg.add_int(len(flist))
        for attr in flist:
            msg.add_string(attr.filename)
            # The "longname" field must be the string rendering of the
            # attributes (ls -l style); packing the SFTPAttributes object
            # itself fails when the Message serializes the string.
            msg.add_string(str(attr))
            attr._pack(msg)
        self._send_packet(CMD_NAME, msg)

    def _check_file(self, request_number, msg):
        # this extension actually comes from v6 protocol, but since it's an
        # extension, i feel like we can reasonably support it backported.
        # it's very useful for verifying uploaded files or checking for
        # rsync-like differences between local and remote files.
        handle = msg.get_binary()
        alg_list = msg.get_list()
        start = msg.get_int64()
        length = msg.get_int64()
        block_size = msg.get_int()
        if handle not in self.file_table:
            self._send_status(
                request_number, SFTP_BAD_MESSAGE, "Invalid handle"
            )
            return
        f = self.file_table[handle]
        for x in alg_list:
            if x in _hash_class:
                algname = x
                alg = _hash_class[x]
                break
        else:
            self._send_status(
                request_number, SFTP_FAILURE, "No supported hash types found"
            )
            return
        if length == 0:
            st = f.stat()
            if not issubclass(type(st), SFTPAttributes):
                self._send_status(request_number, st, "Unable to stat file")
                return
            length = st.st_size - start
        if block_size == 0:
            block_size = length
        if block_size < 256:
            self._send_status(
                request_number, SFTP_FAILURE, "Block size too small"
            )
            return
        sum_out = bytes()
        offset = start
        while offset < start + length:
            blocklen = min(block_size, start + length - offset)
            # don't try to read more than about 64KB at a time
            chunklen = min(blocklen, 65536)
            count = 0
            hash_obj = alg()
            while count < blocklen:
                data = f.read(offset, chunklen)
                if not isinstance(data, bytes):
                    self._send_status(
                        request_number, data, "Unable to hash file"
                    )
                    return
                hash_obj.update(data)
                count += len(data)
                offset += count
            sum_out += hash_obj.digest()
        msg = Message()
        msg.add_int(request_number)
        msg.add_string("check-file")
        msg.add_string(algname)
        msg.add_bytes(sum_out)
        self._send_packet(CMD_EXTENDED_REPLY, msg)

    def _convert_pflags(self, pflags):
        """convert SFTP-style open() flags to Python's os.open() flags"""
        if (pflags & SFTP_FLAG_READ) and (pflags & SFTP_FLAG_WRITE):
            flags = os.O_RDWR
        elif pflags & SFTP_FLAG_WRITE:
            flags = os.O_WRONLY
        else:
            flags = os.O_RDONLY
        if pflags & SFTP_FLAG_APPEND:
            flags |= os.O_APPEND
        if pflags & SFTP_FLAG_CREATE:
            flags |= os.O_CREAT
        if pflags & SFTP_FLAG_TRUNC:
            flags |= os.O_TRUNC
        if pflags & SFTP_FLAG_EXCL:
            flags |= os.O_EXCL
        return flags

    def _process(self, t, request_number, msg):
        # Dispatch one incoming SFTP request packet to the matching handler.
        self._log(DEBUG, "Request: {}".format(CMD_NAMES[t]))
        if t == CMD_OPEN:
            path = msg.get_text()
            flags = self._convert_pflags(msg.get_int())
            attr = SFTPAttributes._from_msg(msg)
            self._send_handle_response(
                request_number, self.server.open(path, flags, attr)
            )
        elif t == CMD_CLOSE:
            handle = msg.get_binary()
            if handle in self.folder_table:
                del self.folder_table[handle]
                self._send_status(request_number, SFTP_OK)
                return
            if handle in self.file_table:
                self.file_table[handle].close()
                del self.file_table[handle]
                self._send_status(request_number, SFTP_OK)
                return
            self._send_status(
                request_number, SFTP_BAD_MESSAGE, "Invalid handle"
            )
        elif t == CMD_READ:
            handle = msg.get_binary()
            offset = msg.get_int64()
            length = msg.get_int()
            if handle not in self.file_table:
                self._send_status(
                    request_number, SFTP_BAD_MESSAGE, "Invalid handle"
                )
                return
            data = self.file_table[handle].read(offset, length)
            if isinstance(data, (bytes, str)):
                if len(data) == 0:
                    self._send_status(request_number, SFTP_EOF)
                else:
                    self._response(request_number, CMD_DATA, data)
            else:
                self._send_status(request_number, data)
        elif t == CMD_WRITE:
            handle = msg.get_binary()
            offset = msg.get_int64()
            data = msg.get_binary()
            if handle not in self.file_table:
                self._send_status(
                    request_number, SFTP_BAD_MESSAGE, "Invalid handle"
                )
                return
            self._send_status(
                request_number, self.file_table[handle].write(offset, data)
            )
        elif t == CMD_REMOVE:
            path = msg.get_text()
            self._send_status(request_number, self.server.remove(path))
        elif t == CMD_RENAME:
            oldpath = msg.get_text()
            newpath = msg.get_text()
            self._send_status(
                request_number, self.server.rename(oldpath, newpath)
            )
        elif t == CMD_MKDIR:
            path = msg.get_text()
            attr = SFTPAttributes._from_msg(msg)
            self._send_status(request_number, self.server.mkdir(path, attr))
        elif t == CMD_RMDIR:
            path = msg.get_text()
            self._send_status(request_number, self.server.rmdir(path))
        elif t == CMD_OPENDIR:
            path = msg.get_text()
            self._open_folder(request_number, path)
            return
        elif t == CMD_READDIR:
            handle = msg.get_binary()
            if handle not in self.folder_table:
                self._send_status(
                    request_number, SFTP_BAD_MESSAGE, "Invalid handle"
                )
                return
            folder = self.folder_table[handle]
            self._read_folder(request_number, folder)
        elif t == CMD_STAT:
            path = msg.get_text()
            resp = self.server.stat(path)
            if issubclass(type(resp), SFTPAttributes):
                self._response(request_number, CMD_ATTRS, resp)
            else:
                self._send_status(request_number, resp)
        elif t == CMD_LSTAT:
            path = msg.get_text()
            resp = self.server.lstat(path)
            if issubclass(type(resp), SFTPAttributes):
                self._response(request_number, CMD_ATTRS, resp)
            else:
                self._send_status(request_number, resp)
        elif t == CMD_FSTAT:
            handle = msg.get_binary()
            if handle not in self.file_table:
                self._send_status(
                    request_number, SFTP_BAD_MESSAGE, "Invalid handle"
                )
                return
            resp = self.file_table[handle].stat()
            if issubclass(type(resp), SFTPAttributes):
                self._response(request_number, CMD_ATTRS, resp)
            else:
                self._send_status(request_number, resp)
        elif t == CMD_SETSTAT:
            path = msg.get_text()
            attr = SFTPAttributes._from_msg(msg)
            self._send_status(request_number, self.server.chattr(path, attr))
        elif t == CMD_FSETSTAT:
            handle = msg.get_binary()
            attr = SFTPAttributes._from_msg(msg)
            if handle not in self.file_table:
                # Use _send_status like every other invalid-handle path;
                # calling _response() here misused the SFTP_BAD_MESSAGE status
                # code as a packet type, producing a malformed reply.
                self._send_status(
                    request_number, SFTP_BAD_MESSAGE, "Invalid handle"
                )
                return
            self._send_status(
                request_number, self.file_table[handle].chattr(attr)
            )
        elif t == CMD_READLINK:
            path = msg.get_text()
            resp = self.server.readlink(path)
            if isinstance(resp, (bytes, str)):
                self._response(
                    request_number, CMD_NAME, 1, resp, "", SFTPAttributes()
                )
            else:
                self._send_status(request_number, resp)
        elif t == CMD_SYMLINK:
            # the sftp 2 draft is incorrect here!
            # path always follows target_path
            target_path = msg.get_text()
            path = msg.get_text()
            self._send_status(
                request_number, self.server.symlink(target_path, path)
            )
        elif t == CMD_REALPATH:
            path = msg.get_text()
            rpath = self.server.canonicalize(path)
            self._response(
                request_number, CMD_NAME, 1, rpath, "", SFTPAttributes()
            )
        elif t == CMD_EXTENDED:
            tag = msg.get_text()
            if tag == "check-file":
                self._check_file(request_number, msg)
            elif tag == "[email protected]":
                oldpath = msg.get_text()
                newpath = msg.get_text()
                self._send_status(
                    request_number, self.server.posix_rename(oldpath, newpath)
                )
            else:
                self._send_status(request_number, SFTP_OP_UNSUPPORTED)
        else:
            self._send_status(request_number, SFTP_OP_UNSUPPORTED)
from paramiko.sftp_handle import SFTPHandle
| 19,492 | Python | .py | 506 | 27.626482 | 79 | 0.562121 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
622 | kex_group1.py | paramiko_paramiko/paramiko/kex_group1.py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Standard SSH key exchange ("kex" if you wanna sound cool). Diffie-Hellman of
1024 bit key halves, using a known "p" prime and "g" generator.
"""
import os
from hashlib import sha1
from paramiko import util
from paramiko.common import max_byte, zero_byte, byte_chr, byte_mask
from paramiko.message import Message
from paramiko.ssh_exception import SSHException
# SSH message numbers for the Diffie-Hellman group1 exchange
# (KEXDH_INIT = 30, KEXDH_REPLY = 31; see RFC 4253).
_MSG_KEXDH_INIT, _MSG_KEXDH_REPLY = range(30, 32)
# The same message numbers pre-encoded as single bytes, ready to be passed
# to Message.add_byte when building outgoing packets.
c_MSG_KEXDH_INIT, c_MSG_KEXDH_REPLY = [byte_chr(c) for c in range(30, 32)]
# 8-byte sentinel prefixes used by KexGroup1._generate_x to reject candidate
# "x" values whose leading 64 bits are all ones or all zeros.
b7fffffffffffffff = byte_chr(0x7F) + max_byte * 7
b0000000000000000 = zero_byte * 8
class KexGroup1:
    """
    Diffie-Hellman key exchange over the fixed 1024-bit "group1" prime
    (``diffie-hellman-group1-sha1``).  Handles both client and server sides
    of the exchange, driven by the owning transport via `start_kex` and
    `parse_next`.
    """

    # draft-ietf-secsh-transport-09.txt, page 17
    P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF  # noqa
    G = 2

    name = "diffie-hellman-group1-sha1"
    hash_algo = sha1

    def __init__(self, transport):
        # x = our private exponent; e = client's public value g^x mod p;
        # f = server's public value.  All start at 0 until negotiated.
        self.transport = transport
        self.x = 0
        self.e = 0
        self.f = 0

    def start_kex(self):
        """
        Begin the exchange: generate a private exponent, then either wait
        for the client's KEXDH_INIT (server mode) or send our own public
        value and wait for KEXDH_REPLY (client mode).
        """
        self._generate_x()
        if self.transport.server_mode:
            # compute f = g^x mod p, but don't send it yet
            self.f = pow(self.G, self.x, self.P)
            self.transport._expect_packet(_MSG_KEXDH_INIT)
            return
        # compute e = g^x mod p (where g=2), and send it
        self.e = pow(self.G, self.x, self.P)
        m = Message()
        m.add_byte(c_MSG_KEXDH_INIT)
        m.add_mpint(self.e)
        self.transport._send_message(m)
        self.transport._expect_packet(_MSG_KEXDH_REPLY)

    def parse_next(self, ptype, m):
        """
        Dispatch an incoming kex packet to the side-appropriate handler.

        :param int ptype: SSH message number of the received packet.
        :param .Message m: the packet payload.
        :raises:
            `.SSHException` -- if the packet type is not valid for the
            current mode (client vs. server).
        """
        if self.transport.server_mode and (ptype == _MSG_KEXDH_INIT):
            return self._parse_kexdh_init(m)
        elif not self.transport.server_mode and (ptype == _MSG_KEXDH_REPLY):
            return self._parse_kexdh_reply(m)
        msg = "KexGroup1 asked to handle packet type {:d}"
        raise SSHException(msg.format(ptype))

    # ...internals...

    def _generate_x(self):
        # generate an "x" (1 < x < q), where q is (p-1)/2.
        # p is a 128-byte (1024-bit) number, where the first 64 bits are 1.
        # therefore q can be approximated as a 2^1023. we drop the subset of
        # potential x where the first 63 bits are 1, because some of those
        # will be larger than q (but this is a tiny tiny subset of
        # potential x).
        while 1:
            x_bytes = os.urandom(128)
            # clear the top bit so the candidate stays below 2^1023
            x_bytes = byte_mask(x_bytes[0], 0x7F) + x_bytes[1:]
            if (
                x_bytes[:8] != b7fffffffffffffff
                and x_bytes[:8] != b0000000000000000
            ):
                break
        self.x = util.inflate_long(x_bytes)

    def _parse_kexdh_reply(self, m):
        # client mode
        host_key = m.get_string()
        self.f = m.get_mpint()
        if (self.f < 1) or (self.f > self.P - 1):
            raise SSHException('Server kex "f" is out of range')
        sig = m.get_binary()
        # shared secret K = f^x mod p
        K = pow(self.f, self.x, self.P)
        # okay, build up the hash H of
        # (V_C || V_S || I_C || I_S || K_S || e || f || K)
        hm = Message()
        hm.add(
            self.transport.local_version,
            self.transport.remote_version,
            self.transport.local_kex_init,
            self.transport.remote_kex_init,
        )
        hm.add_string(host_key)
        hm.add_mpint(self.e)
        hm.add_mpint(self.f)
        hm.add_mpint(K)
        self.transport._set_K_H(K, self.hash_algo(hm.asbytes()).digest())
        # verify the server's signature over H before activating new keys
        self.transport._verify_key(host_key, sig)
        self.transport._activate_outbound()

    def _parse_kexdh_init(self, m):
        # server mode
        self.e = m.get_mpint()
        if (self.e < 1) or (self.e > self.P - 1):
            raise SSHException('Client kex "e" is out of range')
        # shared secret K = e^x mod p
        K = pow(self.e, self.x, self.P)
        key = self.transport.get_server_key().asbytes()
        # okay, build up the hash H of
        # (V_C || V_S || I_C || I_S || K_S || e || f || K)
        hm = Message()
        hm.add(
            self.transport.remote_version,
            self.transport.local_version,
            self.transport.remote_kex_init,
            self.transport.local_kex_init,
        )
        hm.add_string(key)
        hm.add_mpint(self.e)
        hm.add_mpint(self.f)
        hm.add_mpint(K)
        H = self.hash_algo(hm.asbytes()).digest()
        self.transport._set_K_H(K, H)
        # sign it
        sig = self.transport.get_server_key().sign_ssh_data(
            H, self.transport.host_key_type
        )
        # send reply
        m = Message()
        m.add_byte(c_MSG_KEXDH_REPLY)
        m.add_string(key)
        m.add_mpint(self.f)
        m.add_string(sig)
        self.transport._send_message(m)
        self.transport._activate_outbound()
| 5,740 | Python | .py | 138 | 33.891304 | 274 | 0.625604 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
623 | sftp_si.py | paramiko_paramiko/paramiko/sftp_si.py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
An interface to override for SFTP server support.
"""
import os
import sys
from paramiko.sftp import SFTP_OP_UNSUPPORTED
class SFTPServerInterface:
    """
    Interface for customizing the behavior of paramiko's `.SFTPServer`
    subsystem.

    Every callback runs on the SFTP session's own thread, so implementations
    may block as long as they like without affecting other sessions.  Raising
    an exception, however, usually tears down the whole SFTP session, so
    errors should be caught and converted to an appropriate ``SFTP_*`` status
    code instead.

    Paths are passed as ``str`` rather than unicode objects because not all
    SFTP clients and servers honor the UTF-8 path encoding the protocol
    requires.
    """

    def __init__(self, server, *args, **kwargs):
        """
        Base constructor; a no-op hook meant to be overridden by subclasses.

        :param .ServerInterface server:
            the server object associated with this channel and SFTP subsystem
        """
        super().__init__(*args, **kwargs)

    def session_started(self):
        """
        Called when the SFTP server session begins.  Override to perform any
        setup needed before SFTP operation callbacks start arriving.
        """
        pass

    def session_ended(self):
        """
        Called when the SFTP server session ends, whether cleanly or via an
        exception.  Override to perform any cleanup before this
        `.SFTPServerInterface` object is destroyed.
        """
        pass

    def open(self, path, flags, attr):
        """
        Open (or create) a file and return a handle for later operations on
        it.  On success, return an object subclassed from `.SFTPHandle`; on
        failure, return an error code such as ``SFTP_PERMISSION_DENIED``.

        ``flags`` is a bitset of open-mode flags from the ``os`` module:
        exactly one of ``os.O_RDONLY``, ``os.O_WRONLY``, or ``os.O_RDWR`` is
        always set, optionally combined with ``os.O_APPEND``, ``os.O_CREAT``,
        ``os.O_TRUNC``, and/or ``os.O_EXCL``.

        ``attr`` holds the attributes requested for the file if it must be
        created; some or all fields may be absent if the client omitted them.

        .. note:: The SFTP protocol defines all files to be in "binary" mode;
            there is no equivalent to Python's "text" mode.

        :param str path:
            the requested path (relative or absolute) of the file to open.
        :param int flags:
            or'd ``os`` flags giving the requested open mode.
        :param .SFTPAttributes attr:
            requested attributes of the file if it is newly created.
        :return: a new `.SFTPHandle` or error code.
        """
        return SFTP_OP_UNSUPPORTED

    def list_folder(self, path):
        """
        Return the contents of a folder as a list of `.SFTPAttributes`
        objects (similar in structure to ``os.stat`` results, but with the
        ``filename`` field filled in -- `.SFTPAttributes.from_stat` usually
        does what you want).  ``path`` uses posix notation (``"/"``
        separators) and may be relative or absolute.

        On error, return one of the ``SFTP_*`` codes, such as
        ``SFTP_PERMISSION_DENIED``.

        :param str path: the requested path (relative or absolute) to list.
        :return:
            a list of `.SFTPAttributes` objects for the folder's contents.

        .. note::
            Normalize ``path`` first (see `os.path`) and verify permissions
            before returning anything.  Malicious clients may use relative
            paths to escape restricted folders if you translate SFTP paths
            directly to your local filesystem.
        """
        return SFTP_OP_UNSUPPORTED

    def stat(self, path):
        """
        Return an `.SFTPAttributes` object for a path on the server, or an
        error code.  If your server supports symbolic links (aliases), follow
        them here; `lstat` is the variant that does not.

        :param str path:
            the requested path (relative or absolute) to fetch statistics for.
        :return:
            an `.SFTPAttributes` object for the file, or an SFTP error code
            (like ``SFTP_PERMISSION_DENIED``).
        """
        return SFTP_OP_UNSUPPORTED

    def lstat(self, path):
        """
        Return an `.SFTPAttributes` object for a path on the server, or an
        error code.  Unlike `stat`, symbolic links (aliases) must NOT be
        followed -- return data about the link itself.

        :param str path:
            the requested path (relative or absolute) to fetch statistics for.
        :type path: str
        :return:
            an `.SFTPAttributes` object for the file, or an SFTP error code
            (like ``SFTP_PERMISSION_DENIED``).
        """
        return SFTP_OP_UNSUPPORTED

    def remove(self, path):
        """
        Delete a file, if possible.

        :param str path:
            the requested path (relative or absolute) of the file to delete.
        :return: an SFTP error code `int` like ``SFTP_OK``.
        """
        return SFTP_OP_UNSUPPORTED

    def rename(self, oldpath, newpath):
        """
        Rename (or move) a file.  The SFTP specification implies this method
        may move files between folders, and since SFTP offers no other easy
        way to move files, implementing "move" here -- even across disk
        partitions where possible -- is a good idea.

        .. note:: Return an error if a file named ``newpath`` already exists;
            the rename operation should be non-destructive.

        .. note::
            This implements 'standard' SFTP ``RENAME`` behavior; for the
            OpenSSH "POSIX rename" extension, see `posix_rename`.

        :param str oldpath:
            the requested path (relative or absolute) of the existing file.
        :param str newpath: the requested new path of the file.
        :return: an SFTP error code `int` like ``SFTP_OK``.
        """
        return SFTP_OP_UNSUPPORTED

    def posix_rename(self, oldpath, newpath):
        """
        Rename (or move) a file following posix conventions: if ``newpath``
        already exists, it is overwritten.

        :param str oldpath:
            the requested path (relative or absolute) of the existing file.
        :param str newpath: the requested new path of the file.
        :return: an SFTP error code `int` like ``SFTP_OK``.

        :versionadded: 2.2
        """
        return SFTP_OP_UNSUPPORTED

    def mkdir(self, path, attr):
        """
        Create a new directory with the given attributes.  ``attr`` may be
        treated as a hint and ignored; it contains only the fields the client
        supplied (possibly none), so check for presence with ``hasattr``
        before using them.

        :param str path:
            requested path (relative or absolute) of the new folder.
        :param .SFTPAttributes attr: requested attributes of the new folder.
        :return: an SFTP error code `int` like ``SFTP_OK``.
        """
        return SFTP_OP_UNSUPPORTED

    def rmdir(self, path):
        """
        Remove a directory.  ``path`` should refer to an existing, empty
        folder -- otherwise return an error.

        :param str path:
            requested path (relative or absolute) of the folder to remove.
        :return: an SFTP error code `int` like ``SFTP_OK``.
        """
        return SFTP_OP_UNSUPPORTED

    def chattr(self, path, attr):
        """
        Change the attributes of a file.  ``attr`` contains only the fields
        the client supplied, so check for their presence before use.

        :param str path:
            requested path (relative or absolute) of the file to change.
        :param attr:
            requested attributes to change on the file (an `.SFTPAttributes`
            object)
        :return: an error code `int` like ``SFTP_OK``.
        """
        return SFTP_OP_UNSUPPORTED

    def canonicalize(self, path):
        """
        Return the canonical form of a path on the server.  For example, if
        the server's home folder is ``/home/foo``, ``"../betty"`` would
        canonicalize to ``"/home/betty"``.  Mind the security implications:
        when serving files from a restricted folder, avoid revealing path
        names outside it.

        ``os.path.normpath`` and ``os.path.realpath`` are useful here.  This
        default implementation returns ``os.path.normpath('/' + path)``.
        """
        candidate = path if os.path.isabs(path) else "/" + path
        result = os.path.normpath(candidate)
        if sys.platform == "win32":
            # present windows paths in sftp/posix form
            result = result.replace("\\", "/")
        return result

    def readlink(self, path):
        """
        Return the target of a symbolic link (or shortcut) on the server.
        If ``path`` does not refer to a symbolic link, return an error.

        :param str path: path (relative or absolute) of the symbolic link.
        :return:
            the target `str` path of the symbolic link, or an error code like
            ``SFTP_NO_SUCH_FILE``.
        """
        return SFTP_OP_UNSUPPORTED

    def symlink(self, target_path, path):
        """
        Create a symbolic link on the server, as new pathname ``path``, with
        ``target_path`` as the target of the link.

        :param str target_path:
            path (relative or absolute) of the target for this new symbolic
            link.
        :param str path:
            path (relative or absolute) of the symbolic link to create.
        :return: an error code `int` like ``SFTP_OK``.
        """
        return SFTP_OP_UNSUPPORTED
| 12,544 | Python | .py | 267 | 38.142322 | 79 | 0.64017 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
624 | packet.py | paramiko_paramiko/paramiko/packet.py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Packet handling
"""
import errno
import os
import socket
import struct
import threading
import time
from hmac import HMAC
from paramiko import util
from paramiko.common import (
linefeed_byte,
cr_byte_value,
MSG_NAMES,
DEBUG,
xffffffff,
zero_byte,
byte_ord,
)
from paramiko.util import u
from paramiko.ssh_exception import SSHException, ProxyCommandFailure
from paramiko.message import Message
def compute_hmac(key, message, digest_class):
    """Return the raw HMAC digest of ``message`` keyed with ``key``."""
    mac = HMAC(key, message, digest_class)
    return mac.digest()
class NeedRekeyException(Exception):
    """
    Raised (internally) when the transport must renegotiate its session
    keys before more data can be read.
    """
def first_arg(e):
    """
    Return the first element of an exception's ``args`` tuple, or ``None``.

    Used below to pull errno-style codes (e.g. ``errno.EAGAIN``) out of
    socket errors without assuming ``args`` is populated.

    :param Exception e: the exception to inspect.
    :return: ``e.args[0]`` if present, else ``None``.
    """
    # isinstance + truthiness instead of the unidiomatic
    # ``type(e.args) is tuple and len(e.args) > 0``
    if isinstance(e.args, tuple) and e.args:
        return e.args[0]
    return None
class Packetizer:
"""
Implementation of the base SSH packet protocol.
"""
# READ the secsh RFC's before raising these values. if anything,
# they should probably be lower.
REKEY_PACKETS = pow(2, 29)
REKEY_BYTES = pow(2, 29)
# Allow receiving this many packets after a re-key request before
# terminating
REKEY_PACKETS_OVERFLOW_MAX = pow(2, 29)
# Allow receiving this many bytes after a re-key request before terminating
REKEY_BYTES_OVERFLOW_MAX = pow(2, 29)
def __init__(self, socket):
self.__socket = socket
self.__logger = None
self.__closed = False
self.__dump_packets = False
self.__need_rekey = False
self.__init_count = 0
self.__remainder = bytes()
self._initial_kex_done = False
# used for noticing when to re-key:
self.__sent_bytes = 0
self.__sent_packets = 0
self.__received_bytes = 0
self.__received_packets = 0
self.__received_bytes_overflow = 0
self.__received_packets_overflow = 0
# current inbound/outbound ciphering:
self.__block_size_out = 8
self.__block_size_in = 8
self.__mac_size_out = 0
self.__mac_size_in = 0
self.__block_engine_out = None
self.__block_engine_in = None
self.__sdctr_out = False
self.__mac_engine_out = None
self.__mac_engine_in = None
self.__mac_key_out = bytes()
self.__mac_key_in = bytes()
self.__compress_engine_out = None
self.__compress_engine_in = None
self.__sequence_number_out = 0
self.__sequence_number_in = 0
self.__etm_out = False
self.__etm_in = False
# AEAD (eg aes128-gcm/aes256-gcm) cipher use
self.__aead_out = False
self.__aead_in = False
self.__iv_out = None
self.__iv_in = None
# lock around outbound writes (packet computation)
self.__write_lock = threading.RLock()
# keepalives:
self.__keepalive_interval = 0
self.__keepalive_last = time.time()
self.__keepalive_callback = None
self.__timer = None
self.__handshake_complete = False
self.__timer_expired = False
@property
def closed(self):
return self.__closed
def reset_seqno_out(self):
self.__sequence_number_out = 0
def reset_seqno_in(self):
self.__sequence_number_in = 0
def set_log(self, log):
"""
Set the Python log object to use for logging.
"""
self.__logger = log
def set_outbound_cipher(
self,
block_engine,
block_size,
mac_engine,
mac_size,
mac_key,
sdctr=False,
etm=False,
aead=False,
iv_out=None,
):
"""
Switch outbound data cipher.
:param etm: Set encrypt-then-mac from OpenSSH
"""
self.__block_engine_out = block_engine
self.__sdctr_out = sdctr
self.__block_size_out = block_size
self.__mac_engine_out = mac_engine
self.__mac_size_out = mac_size
self.__mac_key_out = mac_key
self.__sent_bytes = 0
self.__sent_packets = 0
self.__etm_out = etm
self.__aead_out = aead
self.__iv_out = iv_out
# wait until the reset happens in both directions before clearing
# rekey flag
self.__init_count |= 1
if self.__init_count == 3:
self.__init_count = 0
self.__need_rekey = False
def set_inbound_cipher(
self,
block_engine,
block_size,
mac_engine,
mac_size,
mac_key,
etm=False,
aead=False,
iv_in=None,
):
"""
Switch inbound data cipher.
:param etm: Set encrypt-then-mac from OpenSSH
"""
self.__block_engine_in = block_engine
self.__block_size_in = block_size
self.__mac_engine_in = mac_engine
self.__mac_size_in = mac_size
self.__mac_key_in = mac_key
self.__received_bytes = 0
self.__received_packets = 0
self.__received_bytes_overflow = 0
self.__received_packets_overflow = 0
self.__etm_in = etm
self.__aead_in = aead
self.__iv_in = iv_in
# wait until the reset happens in both directions before clearing
# rekey flag
self.__init_count |= 2
if self.__init_count == 3:
self.__init_count = 0
self.__need_rekey = False
def set_outbound_compressor(self, compressor):
self.__compress_engine_out = compressor
def set_inbound_compressor(self, compressor):
self.__compress_engine_in = compressor
def close(self):
self.__closed = True
self.__socket.close()
def set_hexdump(self, hexdump):
self.__dump_packets = hexdump
def get_hexdump(self):
return self.__dump_packets
def get_mac_size_in(self):
return self.__mac_size_in
def get_mac_size_out(self):
return self.__mac_size_out
def need_rekey(self):
"""
Returns ``True`` if a new set of keys needs to be negotiated. This
will be triggered during a packet read or write, so it should be
checked after every read or write, or at least after every few.
"""
return self.__need_rekey
def set_keepalive(self, interval, callback):
"""
Turn on/off the callback keepalive. If ``interval`` seconds pass with
no data read from or written to the socket, the callback will be
executed and the timer will be reset.
"""
self.__keepalive_interval = interval
self.__keepalive_callback = callback
self.__keepalive_last = time.time()
def read_timer(self):
self.__timer_expired = True
def start_handshake(self, timeout):
"""
Tells `Packetizer` that the handshake process started.
Starts a book keeping timer that can signal a timeout in the
handshake process.
:param float timeout: amount of seconds to wait before timing out
"""
if not self.__timer:
self.__timer = threading.Timer(float(timeout), self.read_timer)
self.__timer.start()
def handshake_timed_out(self):
"""
Checks if the handshake has timed out.
If `start_handshake` wasn't called before the call to this function,
the return value will always be `False`. If the handshake completed
before a timeout was reached, the return value will be `False`
:return: handshake time out status, as a `bool`
"""
if not self.__timer:
return False
if self.__handshake_complete:
return False
return self.__timer_expired
def complete_handshake(self):
"""
Tells `Packetizer` that the handshake has completed.
"""
if self.__timer:
self.__timer.cancel()
self.__timer_expired = False
self.__handshake_complete = True
def read_all(self, n, check_rekey=False):
"""
Read as close to N bytes as possible, blocking as long as necessary.
:param int n: number of bytes to read
:return: the data read, as a `str`
:raises:
``EOFError`` -- if the socket was closed before all the bytes could
be read
"""
out = bytes()
# handle over-reading from reading the banner line
if len(self.__remainder) > 0:
out = self.__remainder[:n]
self.__remainder = self.__remainder[n:]
n -= len(out)
while n > 0:
got_timeout = False
if self.handshake_timed_out():
raise EOFError()
try:
x = self.__socket.recv(n)
if len(x) == 0:
raise EOFError()
out += x
n -= len(x)
except socket.timeout:
got_timeout = True
except socket.error as e:
# on Linux, sometimes instead of socket.timeout, we get
# EAGAIN. this is a bug in recent (> 2.6.9) kernels but
# we need to work around it.
arg = first_arg(e)
if arg == errno.EAGAIN:
got_timeout = True
elif self.__closed:
raise EOFError()
else:
raise
if got_timeout:
if self.__closed:
raise EOFError()
if check_rekey and (len(out) == 0) and self.__need_rekey:
raise NeedRekeyException()
self._check_keepalive()
return out
def write_all(self, out):
self.__keepalive_last = time.time()
iteration_with_zero_as_return_value = 0
while len(out) > 0:
retry_write = False
try:
n = self.__socket.send(out)
except socket.timeout:
retry_write = True
except socket.error as e:
arg = first_arg(e)
if arg == errno.EAGAIN:
retry_write = True
else:
n = -1
except ProxyCommandFailure:
raise # so it doesn't get swallowed by the below catchall
except Exception:
# could be: (32, 'Broken pipe')
n = -1
if retry_write:
n = 0
if self.__closed:
n = -1
else:
if n == 0 and iteration_with_zero_as_return_value > 10:
# We shouldn't retry the write, but we didn't
# manage to send anything over the socket. This might be an
# indication that we have lost contact with the remote
# side, but are yet to receive an EOFError or other socket
# errors. Let's give it some iteration to try and catch up.
n = -1
iteration_with_zero_as_return_value += 1
if n < 0:
raise EOFError()
if n == len(out):
break
out = out[n:]
return
def readline(self, timeout):
"""
Read a line from the socket. We assume no data is pending after the
line, so it's okay to attempt large reads.
"""
buf = self.__remainder
while linefeed_byte not in buf:
buf += self._read_timeout(timeout)
n = buf.index(linefeed_byte)
self.__remainder = buf[n + 1 :]
buf = buf[:n]
if (len(buf) > 0) and (buf[-1] == cr_byte_value):
buf = buf[:-1]
return u(buf)
def _inc_iv_counter(self, iv):
# Per https://www.rfc-editor.org/rfc/rfc5647.html#section-7.1 ,
# we increment the last 8 bytes of the 12-byte IV...
iv_counter_b = iv[4:]
iv_counter = int.from_bytes(iv_counter_b, "big")
inc_iv_counter = iv_counter + 1
inc_iv_counter_b = inc_iv_counter.to_bytes(8, "big")
# ...then re-concatenate it with the static first 4 bytes
new_iv = iv[0:4] + inc_iv_counter_b
return new_iv
def send_message(self, data):
"""
Write a block of data using the current cipher, as an SSH block.
"""
# encrypt this sucka
data = data.asbytes()
cmd = byte_ord(data[0])
if cmd in MSG_NAMES:
cmd_name = MSG_NAMES[cmd]
else:
cmd_name = "${:x}".format(cmd)
orig_len = len(data)
self.__write_lock.acquire()
try:
if self.__compress_engine_out is not None:
data = self.__compress_engine_out(data)
packet = self._build_packet(data)
if self.__dump_packets:
self._log(
DEBUG,
"Write packet <{}>, length {}".format(cmd_name, orig_len),
)
self._log(DEBUG, util.format_binary(packet, "OUT: "))
if self.__block_engine_out is not None:
if self.__etm_out:
# packet length is not encrypted in EtM
out = packet[0:4] + self.__block_engine_out.update(
packet[4:]
)
elif self.__aead_out:
# Packet-length field is used as the 'associated data'
# under AES-GCM, so like EtM, it's not encrypted. See
# https://www.rfc-editor.org/rfc/rfc5647#section-7.3
out = packet[0:4] + self.__block_engine_out.encrypt(
self.__iv_out, packet[4:], packet[0:4]
)
self.__iv_out = self._inc_iv_counter(self.__iv_out)
else:
out = self.__block_engine_out.update(packet)
else:
out = packet
# Append an MAC when needed (eg, not under AES-GCM)
if self.__block_engine_out is not None and not self.__aead_out:
packed = struct.pack(">I", self.__sequence_number_out)
payload = packed + (out if self.__etm_out else packet)
out += compute_hmac(
self.__mac_key_out, payload, self.__mac_engine_out
)[: self.__mac_size_out]
next_seq = (self.__sequence_number_out + 1) & xffffffff
if next_seq == 0 and not self._initial_kex_done:
raise SSHException(
"Sequence number rolled over during initial kex!"
)
self.__sequence_number_out = next_seq
self.write_all(out)
self.__sent_bytes += len(out)
self.__sent_packets += 1
sent_too_much = (
self.__sent_packets >= self.REKEY_PACKETS
or self.__sent_bytes >= self.REKEY_BYTES
)
if sent_too_much and not self.__need_rekey:
# only ask once for rekeying
msg = "Rekeying (hit {} packets, {} bytes sent)"
self._log(
DEBUG, msg.format(self.__sent_packets, self.__sent_bytes)
)
self.__received_bytes_overflow = 0
self.__received_packets_overflow = 0
self._trigger_rekey()
finally:
self.__write_lock.release()
def read_message(self):
"""
Only one thread should ever be in this function (no other locking is
done).
:raises: `.SSHException` -- if the packet is mangled
:raises: `.NeedRekeyException` -- if the transport should rekey
"""
header = self.read_all(self.__block_size_in, check_rekey=True)
if self.__etm_in:
packet_size = struct.unpack(">I", header[:4])[0]
remaining = packet_size - self.__block_size_in + 4
packet = header[4:] + self.read_all(remaining, check_rekey=False)
mac = self.read_all(self.__mac_size_in, check_rekey=False)
mac_payload = (
struct.pack(">II", self.__sequence_number_in, packet_size)
+ packet
)
my_mac = compute_hmac(
self.__mac_key_in, mac_payload, self.__mac_engine_in
)[: self.__mac_size_in]
if not util.constant_time_bytes_eq(my_mac, mac):
raise SSHException("Mismatched MAC")
header = packet
if self.__aead_in:
# Grab unencrypted (considered 'additional data' under GCM) packet
# length.
packet_size = struct.unpack(">I", header[:4])[0]
aad = header[:4]
remaining = (
packet_size - self.__block_size_in + 4 + self.__mac_size_in
)
packet = header[4:] + self.read_all(remaining, check_rekey=False)
header = self.__block_engine_in.decrypt(self.__iv_in, packet, aad)
self.__iv_in = self._inc_iv_counter(self.__iv_in)
if self.__block_engine_in is not None and not self.__aead_in:
header = self.__block_engine_in.update(header)
if self.__dump_packets:
self._log(DEBUG, util.format_binary(header, "IN: "))
# When ETM or AEAD (GCM) are in use, we've already read the packet size
# & decrypted everything, so just set the packet back to the header we
# obtained.
if self.__etm_in or self.__aead_in:
packet = header
# Otherwise, use the older non-ETM logic
else:
packet_size = struct.unpack(">I", header[:4])[0]
# leftover contains decrypted bytes from the first block (after the
# length field)
leftover = header[4:]
if (packet_size - len(leftover)) % self.__block_size_in != 0:
raise SSHException("Invalid packet blocking")
buf = self.read_all(
packet_size + self.__mac_size_in - len(leftover)
)
packet = buf[: packet_size - len(leftover)]
post_packet = buf[packet_size - len(leftover) :]
if self.__block_engine_in is not None:
packet = self.__block_engine_in.update(packet)
packet = leftover + packet
if self.__dump_packets:
self._log(DEBUG, util.format_binary(packet, "IN: "))
if self.__mac_size_in > 0 and not self.__etm_in and not self.__aead_in:
mac = post_packet[: self.__mac_size_in]
mac_payload = (
struct.pack(">II", self.__sequence_number_in, packet_size)
+ packet
)
my_mac = compute_hmac(
self.__mac_key_in, mac_payload, self.__mac_engine_in
)[: self.__mac_size_in]
if not util.constant_time_bytes_eq(my_mac, mac):
raise SSHException("Mismatched MAC")
padding = byte_ord(packet[0])
payload = packet[1 : packet_size - padding]
if self.__dump_packets:
self._log(
DEBUG,
"Got payload ({} bytes, {} padding)".format(
packet_size, padding
),
)
if self.__compress_engine_in is not None:
payload = self.__compress_engine_in(payload)
msg = Message(payload[1:])
msg.seqno = self.__sequence_number_in
next_seq = (self.__sequence_number_in + 1) & xffffffff
if next_seq == 0 and not self._initial_kex_done:
raise SSHException(
"Sequence number rolled over during initial kex!"
)
self.__sequence_number_in = next_seq
# check for rekey
raw_packet_size = packet_size + self.__mac_size_in + 4
self.__received_bytes += raw_packet_size
self.__received_packets += 1
if self.__need_rekey:
# we've asked to rekey -- give them some packets to comply before
# dropping the connection
self.__received_bytes_overflow += raw_packet_size
self.__received_packets_overflow += 1
if (
self.__received_packets_overflow
>= self.REKEY_PACKETS_OVERFLOW_MAX
) or (
self.__received_bytes_overflow >= self.REKEY_BYTES_OVERFLOW_MAX
):
raise SSHException(
"Remote transport is ignoring rekey requests"
)
elif (self.__received_packets >= self.REKEY_PACKETS) or (
self.__received_bytes >= self.REKEY_BYTES
):
# only ask once for rekeying
err = "Rekeying (hit {} packets, {} bytes received)"
self._log(
DEBUG,
err.format(self.__received_packets, self.__received_bytes),
)
self.__received_bytes_overflow = 0
self.__received_packets_overflow = 0
self._trigger_rekey()
cmd = byte_ord(payload[0])
if cmd in MSG_NAMES:
cmd_name = MSG_NAMES[cmd]
else:
cmd_name = "${:x}".format(cmd)
if self.__dump_packets:
self._log(
DEBUG,
"Read packet <{}>, length {}".format(cmd_name, len(payload)),
)
return cmd, msg
# ...protected...
def _log(self, level, msg):
if self.__logger is None:
return
if issubclass(type(msg), list):
for m in msg:
self.__logger.log(level, m)
else:
self.__logger.log(level, msg)
def _check_keepalive(self):
if (
not self.__keepalive_interval
or not self.__block_engine_out
or self.__need_rekey
):
# wait till we're encrypting, and not in the middle of rekeying
return
now = time.time()
if now > self.__keepalive_last + self.__keepalive_interval:
self.__keepalive_callback()
self.__keepalive_last = now
def _read_timeout(self, timeout):
start = time.time()
while True:
try:
x = self.__socket.recv(128)
if len(x) == 0:
raise EOFError()
break
except socket.timeout:
pass
if self.__closed:
raise EOFError()
now = time.time()
if now - start >= timeout:
raise socket.timeout()
return x
def _build_packet(self, payload):
# pad up at least 4 bytes, to nearest block-size (usually 8)
bsize = self.__block_size_out
# do not include payload length in computations for padding in EtM mode
# (payload length won't be encrypted)
addlen = 4 if self.__etm_out or self.__aead_out else 8
padding = 3 + bsize - ((len(payload) + addlen) % bsize)
packet = struct.pack(">IB", len(payload) + padding + 1, padding)
packet += payload
if self.__sdctr_out or self.__block_engine_out is None:
# cute trick i caught openssh doing: if we're not encrypting or
# SDCTR mode (RFC4344),
# don't waste random bytes for the padding
packet += zero_byte * padding
else:
packet += os.urandom(padding)
return packet
    def _trigger_rekey(self):
        """Mark that a key re-exchange is wanted."""
        # outside code should check for this flag
        self.__need_rekey = True
| 24,314 | Python | .py | 624 | 28.060897 | 79 | 0.548438 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
625 | ecdsakey.py | paramiko_paramiko/paramiko/ecdsakey.py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
ECDSA keys
"""
from cryptography.exceptions import InvalidSignature, UnsupportedAlgorithm
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives.asymmetric.utils import (
decode_dss_signature,
encode_dss_signature,
)
from paramiko.common import four_byte
from paramiko.message import Message
from paramiko.pkey import PKey
from paramiko.ssh_exception import SSHException
from paramiko.util import deflate_long
class _ECDSACurve:
    """
    Represents a specific ECDSA Curve (nistp256, nistp384, etc).

    Carries the SSH key format identifier, the hash function mandated for
    the curve's key size, and the backing ``cryptography`` curve class.
    """

    def __init__(self, curve_class, nist_name):
        self.nist_name = nist_name
        self.curve_class = curve_class
        self.key_length = curve_class.key_size
        # Key format identifier as defined in RFC 5656 6.2
        self.key_format_identifier = "ecdsa-sha2-" + nist_name
        # Hash selection per RFC 5656 6.2.1: SHA-256 for keys up to 256
        # bits, SHA-384 up to 384 bits, SHA-512 for anything bigger.
        for bound, algo in ((256, hashes.SHA256), (384, hashes.SHA384)):
            if self.key_length <= bound:
                self.hash_object = algo
                break
        else:
            self.hash_object = hashes.SHA512
class _ECDSACurveSet:
    """
    A collection of the supported ECDSA curves, queryable by curve class,
    by key format identifier, or by key length - the lookups `ECDSAKey`
    needs to perform.  All ``get_by_*`` methods return ``None`` on a miss.
    """

    def __init__(self, ecdsa_curves):
        self.ecdsa_curves = ecdsa_curves

    def _find(self, predicate):
        # shared linear scan; None when nothing matches
        return next(filter(predicate, self.ecdsa_curves), None)

    def get_key_format_identifier_list(self):
        """Return the key format identifiers of every known curve."""
        return [c.key_format_identifier for c in self.ecdsa_curves]

    def get_by_curve_class(self, curve_class):
        """Return the curve backed by *curve_class*, or None."""
        return self._find(lambda c: c.curve_class == curve_class)

    def get_by_key_format_identifier(self, key_format_identifier):
        """Return the curve whose identifier matches, or None."""
        return self._find(
            lambda c: c.key_format_identifier == key_format_identifier
        )

    def get_by_key_length(self, key_length):
        """Return the curve whose key length equals *key_length*, or None."""
        return self._find(lambda c: c.key_length == key_length)
class ECDSAKey(PKey):
    """
    Representation of an ECDSA key which can be used to sign and verify SSH2
    data.
    """

    # The supported curves and, through them, the key format identifiers.
    _ECDSA_CURVES = _ECDSACurveSet(
        [
            _ECDSACurve(ec.SECP256R1, "nistp256"),
            _ECDSACurve(ec.SECP384R1, "nistp384"),
            _ECDSACurve(ec.SECP521R1, "nistp521"),
        ]
    )

    def __init__(
        self,
        msg=None,
        data=None,
        filename=None,
        password=None,
        vals=None,
        file_obj=None,
        # TODO 4.0: remove; it does nothing since porting to cryptography.io
        validate_point=True,
    ):
        """
        Create a key from an SSH `.Message`, a raw ``data`` blob, a private
        key file (``filename``/``file_obj``, optionally password-protected),
        or an existing ``(signing_key, verifying_key)`` pair in ``vals``.

        :raises SSHException: if the key data is malformed or uses an
            unsupported curve.
        """
        self.verifying_key = None
        self.signing_key = None
        self.public_blob = None
        if file_obj is not None:
            self._from_private_key(file_obj, password)
            return
        if filename is not None:
            self._from_private_key_file(filename, password)
            return
        if (msg is None) and (data is not None):
            msg = Message(data)
        if vals is not None:
            self.signing_key, self.verifying_key = vals
            c_class = self.signing_key.curve.__class__
            self.ecdsa_curve = self._ECDSA_CURVES.get_by_curve_class(c_class)
        else:
            # Must set ecdsa_curve first; subroutines called herein may need to
            # spit out our get_name(), which relies on this.
            key_type = msg.get_text()
            # But this also means we need to hand it a real key/curve
            # identifier, so strip out any cert business. (NOTE: could push
            # that into _ECDSACurveSet.get_by_key_format_identifier(), but it
            # feels more correct to do it here?)
            # NOTE: the cert suffix follows OpenSSH PROTOCOL.certkeys; a
            # previous revision carried a mangled "[email protected]" string here.
            suffix = "[email protected]"
            if key_type.endswith(suffix):
                key_type = key_type[: -len(suffix)]
            self.ecdsa_curve = self._ECDSA_CURVES.get_by_key_format_identifier(
                key_type
            )
            key_types = self._ECDSA_CURVES.get_key_format_identifier_list()
            cert_types = [
                "{}[email protected]".format(x) for x in key_types
            ]
            self._check_type_and_load_cert(
                msg=msg, key_type=key_types, cert_type=cert_types
            )
            curvename = msg.get_text()
            if curvename != self.ecdsa_curve.nist_name:
                raise SSHException(
                    "Can't handle curve of type {}".format(curvename)
                )
            pointinfo = msg.get_binary()
            try:
                key = ec.EllipticCurvePublicKey.from_encoded_point(
                    self.ecdsa_curve.curve_class(), pointinfo
                )
                self.verifying_key = key
            except ValueError:
                raise SSHException("Invalid public key")

    @classmethod
    def identifiers(cls):
        """Return the list of supported SSH key format identifiers."""
        return cls._ECDSA_CURVES.get_key_format_identifier_list()

    # TODO 4.0: deprecate/remove
    @classmethod
    def supported_key_format_identifiers(cls):
        """Legacy alias for `identifiers`."""
        return cls.identifiers()

    def asbytes(self):
        """Serialize the public key into the SSH wire-format blob."""
        key = self.verifying_key
        m = Message()
        m.add_string(self.ecdsa_curve.key_format_identifier)
        m.add_string(self.ecdsa_curve.nist_name)
        numbers = key.public_numbers()
        key_size_bytes = (key.curve.key_size + 7) // 8
        # SEC1 uncompressed point: 0x04 || left-padded x || left-padded y
        x_bytes = deflate_long(numbers.x, add_sign_padding=False)
        x_bytes = b"\x00" * (key_size_bytes - len(x_bytes)) + x_bytes
        y_bytes = deflate_long(numbers.y, add_sign_padding=False)
        y_bytes = b"\x00" * (key_size_bytes - len(y_bytes)) + y_bytes
        point_str = four_byte + x_bytes + y_bytes
        m.add_string(point_str)
        return m.asbytes()

    def __str__(self):
        # NOTE(review): returns bytes, not str; kept as-is because kex code
        # calls __str__() directly and feeds the result to Message.add_string.
        return self.asbytes()

    @property
    def _fields(self):
        # identity tuple; presumably consumed by PKey comparison/hash
        # machinery -- see pkey.py
        return (
            self.get_name(),
            self.verifying_key.public_numbers().x,
            self.verifying_key.public_numbers().y,
        )

    def get_name(self):
        """Return the key format identifier, e.g. ``ecdsa-sha2-nistp256``."""
        return self.ecdsa_curve.key_format_identifier

    def get_bits(self):
        """Return the key size in bits."""
        return self.ecdsa_curve.key_length

    def can_sign(self):
        """Return True if the private half is present and signing works."""
        return self.signing_key is not None

    def sign_ssh_data(self, data, algorithm=None):
        """Sign ``data`` and return the signature wrapped in a `.Message`."""
        ecdsa = ec.ECDSA(self.ecdsa_curve.hash_object())
        sig = self.signing_key.sign(data, ecdsa)
        r, s = decode_dss_signature(sig)
        m = Message()
        m.add_string(self.ecdsa_curve.key_format_identifier)
        m.add_string(self._sigencode(r, s))
        return m

    def verify_ssh_sig(self, data, msg):
        """Return True if ``msg`` holds a valid signature over ``data``."""
        if msg.get_text() != self.ecdsa_curve.key_format_identifier:
            return False
        sig = msg.get_binary()
        sigR, sigS = self._sigdecode(sig)
        signature = encode_dss_signature(sigR, sigS)
        try:
            self.verifying_key.verify(
                signature, data, ec.ECDSA(self.ecdsa_curve.hash_object())
            )
        except InvalidSignature:
            return False
        else:
            return True

    def write_private_key_file(self, filename, password=None):
        """Write the private key to ``filename`` in PEM (TraditionalOpenSSL)
        format, optionally encrypted with ``password``."""
        self._write_private_key_file(
            filename,
            self.signing_key,
            serialization.PrivateFormat.TraditionalOpenSSL,
            password=password,
        )

    def write_private_key(self, file_obj, password=None):
        """Write the private key to the file-like ``file_obj``; see
        `write_private_key_file`."""
        self._write_private_key(
            file_obj,
            self.signing_key,
            serialization.PrivateFormat.TraditionalOpenSSL,
            password=password,
        )

    @classmethod
    def generate(cls, curve=ec.SECP256R1(), progress_func=None, bits=None):
        """
        Generate a new private ECDSA key.  This factory function can be used to
        generate a new host key or authentication key.

        :param progress_func: Not used for this type of key.
        :param bits: if given, selects the curve by key length instead of
            ``curve``.
        :returns: A new private key (`.ECDSAKey`) object
        :raises ValueError: if ``bits`` matches no supported curve.
        """
        if bits is not None:
            curve = cls._ECDSA_CURVES.get_by_key_length(bits)
            if curve is None:
                raise ValueError("Unsupported key length: {:d}".format(bits))
            curve = curve.curve_class()
        private_key = ec.generate_private_key(curve, backend=default_backend())
        return ECDSAKey(vals=(private_key, private_key.public_key()))

    # ...internals...

    def _from_private_key_file(self, filename, password):
        data = self._read_private_key_file("EC", filename, password)
        self._decode_key(data)

    def _from_private_key(self, file_obj, password):
        data = self._read_private_key("EC", file_obj, password)
        self._decode_key(data)

    def _decode_key(self, data):
        """
        Populate signing/verifying keys from a ``(format, bytes)`` pair as
        produced by PKey's private-key readers.

        :raises SSHException: on any parse/decode failure.
        """
        pkformat, data = data
        if pkformat == self._PRIVATE_KEY_FORMAT_ORIGINAL:
            # classic DER-encoded private key
            try:
                key = serialization.load_der_private_key(
                    data, password=None, backend=default_backend()
                )
            except (
                ValueError,
                AssertionError,
                TypeError,
                UnsupportedAlgorithm,
            ) as e:
                raise SSHException(str(e))
        elif pkformat == self._PRIVATE_KEY_FORMAT_OPENSSH:
            # new-style OpenSSH container: curve name, public point, scalar
            try:
                msg = Message(data)
                curve_name = msg.get_text()
                verkey = msg.get_binary()  # noqa: F841
                sigkey = msg.get_mpint()
                name = "ecdsa-sha2-" + curve_name
                curve = self._ECDSA_CURVES.get_by_key_format_identifier(name)
                if not curve:
                    raise SSHException("Invalid key curve identifier")
                key = ec.derive_private_key(
                    sigkey, curve.curve_class(), default_backend()
                )
            except Exception as e:
                # PKey._read_private_key_openssh() should check or return
                # keytype - parsing could fail for any reason due to wrong type
                raise SSHException(str(e))
        else:
            self._got_bad_key_format_id(pkformat)
        self.signing_key = key
        self.verifying_key = key.public_key()
        curve_class = key.curve.__class__
        self.ecdsa_curve = self._ECDSA_CURVES.get_by_curve_class(curve_class)

    def _sigencode(self, r, s):
        # pack (r, s) as two SSH mpints
        msg = Message()
        msg.add_mpint(r)
        msg.add_mpint(s)
        return msg.asbytes()

    def _sigdecode(self, sig):
        # unpack two SSH mpints into (r, s)
        msg = Message(sig)
        r = msg.get_mpint()
        s = msg.get_mpint()
        return r, s
| 11,653 | Python | .py | 288 | 30.909722 | 79 | 0.611455 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
626 | kex_gss.py | paramiko_paramiko/paramiko/kex_gss.py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
# Copyright (C) 2013-2014 science + computing ag
# Author: Sebastian Deiss <[email protected]>
#
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
This module provides GSS-API / SSPI Key Exchange as defined in :rfc:`4462`.
.. note:: Credential delegation is not supported in server mode.
.. note::
`RFC 4462 Section 2.2
<https://tools.ietf.org/html/rfc4462.html#section-2.2>`_ says we are not
required to implement GSS-API error messages. Thus, in many methods within
this module, if an error occurs an exception will be thrown and the
connection will be terminated.
.. seealso:: :doc:`/api/ssh_gss`
.. versionadded:: 1.15
"""
import os
from hashlib import sha1
from paramiko.common import (
DEBUG,
max_byte,
zero_byte,
byte_chr,
byte_mask,
byte_ord,
)
from paramiko import util
from paramiko.message import Message
from paramiko.ssh_exception import SSHException
# GSS-API kex message numbers, per RFC 4462 section 2.1.
(
    MSG_KEXGSS_INIT,
    MSG_KEXGSS_CONTINUE,
    MSG_KEXGSS_COMPLETE,
    MSG_KEXGSS_HOSTKEY,
    MSG_KEXGSS_ERROR,
) = range(30, 35)
# Group-exchange (gex) message numbers, also from RFC 4462 section 2.1.
(MSG_KEXGSS_GROUPREQ, MSG_KEXGSS_GROUP) = range(40, 42)
# Single-byte wire encodings of the same message numbers.
(
    c_MSG_KEXGSS_INIT,
    c_MSG_KEXGSS_CONTINUE,
    c_MSG_KEXGSS_COMPLETE,
    c_MSG_KEXGSS_HOSTKEY,
    c_MSG_KEXGSS_ERROR,
) = [byte_chr(c) for c in range(30, 35)]
(c_MSG_KEXGSS_GROUPREQ, c_MSG_KEXGSS_GROUP) = [
    byte_chr(c) for c in range(40, 42)
]
class KexGSSGroup1:
    """
    GSS-API / SSPI Authenticated Diffie-Hellman Key Exchange as defined in `RFC
    4462 Section 2 <https://tools.ietf.org/html/rfc4462.html#section-2>`_
    """

    # draft-ietf-secsh-transport-09.txt, page 17
    P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF  # noqa
    G = 2
    # prefixes used to reject weak random "x" candidates in _generate_x()
    b7fffffffffffffff = byte_chr(0x7F) + max_byte * 7  # noqa
    b0000000000000000 = zero_byte * 8  # noqa
    NAME = "gss-group1-sha1-toWM5Slw5Ew8Mqkay+al2g=="

    def __init__(self, transport):
        self.transport = transport
        self.kexgss = self.transport.kexgss_ctxt
        self.gss_host = None
        self.x = 0
        self.e = 0
        self.f = 0

    def start_kex(self):
        """
        Start the GSS-API / SSPI Authenticated Diffie-Hellman Key Exchange.
        """
        self._generate_x()
        if self.transport.server_mode:
            # compute f = g^x mod p, but don't send it yet
            self.f = pow(self.G, self.x, self.P)
            self.transport._expect_packet(MSG_KEXGSS_INIT)
            return
        # compute e = g^x mod p (where g=2), and send it
        self.e = pow(self.G, self.x, self.P)
        # Initialize GSS-API Key Exchange
        self.gss_host = self.transport.gss_host
        m = Message()
        m.add_byte(c_MSG_KEXGSS_INIT)
        m.add_string(self.kexgss.ssh_init_sec_context(target=self.gss_host))
        m.add_mpint(self.e)
        self.transport._send_message(m)
        self.transport._expect_packet(
            MSG_KEXGSS_HOSTKEY,
            MSG_KEXGSS_CONTINUE,
            MSG_KEXGSS_COMPLETE,
            MSG_KEXGSS_ERROR,
        )

    def parse_next(self, ptype, m):
        """
        Parse the next packet.

        :param ptype: The (string) type of the incoming packet
        :param `.Message` m: The packet content
        """
        if self.transport.server_mode and (ptype == MSG_KEXGSS_INIT):
            return self._parse_kexgss_init(m)
        elif not self.transport.server_mode and (ptype == MSG_KEXGSS_HOSTKEY):
            return self._parse_kexgss_hostkey(m)
        elif self.transport.server_mode and (ptype == MSG_KEXGSS_CONTINUE):
            return self._parse_kexgss_continue(m)
        elif not self.transport.server_mode and (ptype == MSG_KEXGSS_COMPLETE):
            return self._parse_kexgss_complete(m)
        elif ptype == MSG_KEXGSS_ERROR:
            return self._parse_kexgss_error(m)
        msg = "GSS KexGroup1 asked to handle packet type {:d}"
        raise SSHException(msg.format(ptype))

    # ## internals...

    def _generate_x(self):
        """
        generate an "x" (1 < x < q), where q is (p-1)/2.

        p is a 128-byte (1024-bit) number, where the first 64 bits are 1.
        therefore q can be approximated as a 2^1023. we drop the subset of
        potential x where the first 63 bits are 1, because some of those will
        be larger than q (but this is a tiny tiny subset of potential x).
        """
        while 1:
            x_bytes = os.urandom(128)
            x_bytes = byte_mask(x_bytes[0], 0x7F) + x_bytes[1:]
            first = x_bytes[:8]
            if first not in (self.b7fffffffffffffff, self.b0000000000000000):
                break
        self.x = util.inflate_long(x_bytes)

    def _parse_kexgss_hostkey(self, m):
        """
        Parse the SSH2_MSG_KEXGSS_HOSTKEY message (client mode).

        :param `.Message` m: The content of the SSH2_MSG_KEXGSS_HOSTKEY message
        """
        # client mode
        host_key = m.get_string()
        self.transport.host_key = host_key
        sig = m.get_string()
        self.transport._verify_key(host_key, sig)
        self.transport._expect_packet(MSG_KEXGSS_CONTINUE, MSG_KEXGSS_COMPLETE)

    def _parse_kexgss_continue(self, m):
        """
        Parse the SSH2_MSG_KEXGSS_CONTINUE message.

        :param `.Message` m: The content of the SSH2_MSG_KEXGSS_CONTINUE
            message
        """
        if not self.transport.server_mode:
            srv_token = m.get_string()
            m = Message()
            m.add_byte(c_MSG_KEXGSS_CONTINUE)
            m.add_string(
                self.kexgss.ssh_init_sec_context(
                    target=self.gss_host, recv_token=srv_token
                )
            )
            # use the transport's internal _send_message(), as every other
            # call site does (there is no public send_message())
            self.transport._send_message(m)
            self.transport._expect_packet(
                MSG_KEXGSS_CONTINUE, MSG_KEXGSS_COMPLETE, MSG_KEXGSS_ERROR
            )
        else:
            pass

    def _parse_kexgss_complete(self, m):
        """
        Parse the SSH2_MSG_KEXGSS_COMPLETE message (client mode).

        :param `.Message` m: The content of the
            SSH2_MSG_KEXGSS_COMPLETE message
        """
        # client mode
        if self.transport.host_key is None:
            self.transport.host_key = NullHostKey()
        self.f = m.get_mpint()
        if (self.f < 1) or (self.f > self.P - 1):
            raise SSHException('Server kex "f" is out of range')
        mic_token = m.get_string()
        # This must be TRUE, if there is a GSS-API token in this message.
        token_present = m.get_boolean()  # renamed: don't shadow builtin bool
        srv_token = None
        if token_present:
            srv_token = m.get_string()
        K = pow(self.f, self.x, self.P)
        # okay, build up the hash H of
        # (V_C || V_S || I_C || I_S || K_S || e || f || K)
        hm = Message()
        hm.add(
            self.transport.local_version,
            self.transport.remote_version,
            self.transport.local_kex_init,
            self.transport.remote_kex_init,
        )
        # NOTE(review): if a KEXGSS_HOSTKEY was received, host_key is bytes
        # and __str__() yields its repr on py3 -- verify against servers that
        # actually send a host key (most send none, giving NullHostKey "").
        hm.add_string(self.transport.host_key.__str__())
        hm.add_mpint(self.e)
        hm.add_mpint(self.f)
        hm.add_mpint(K)
        # hash the wire encoding; sha1(str(hm)) raised TypeError on py3 and
        # the server side (_parse_kexgss_init) already uses hm.asbytes()
        H = sha1(hm.asbytes()).digest()
        self.transport._set_K_H(K, H)
        if srv_token is not None:
            self.kexgss.ssh_init_sec_context(
                target=self.gss_host, recv_token=srv_token
            )
            self.kexgss.ssh_check_mic(mic_token, H)
        else:
            self.kexgss.ssh_check_mic(mic_token, H)
        self.transport.gss_kex_used = True
        self.transport._activate_outbound()

    def _parse_kexgss_init(self, m):
        """
        Parse the SSH2_MSG_KEXGSS_INIT message (server mode).

        :param `.Message` m: The content of the SSH2_MSG_KEXGSS_INIT message
        """
        # server mode
        client_token = m.get_string()
        self.e = m.get_mpint()
        if (self.e < 1) or (self.e > self.P - 1):
            raise SSHException('Client kex "e" is out of range')
        K = pow(self.e, self.x, self.P)
        self.transport.host_key = NullHostKey()
        key = self.transport.host_key.__str__()
        # okay, build up the hash H of
        # (V_C || V_S || I_C || I_S || K_S || e || f || K)
        hm = Message()
        hm.add(
            self.transport.remote_version,
            self.transport.local_version,
            self.transport.remote_kex_init,
            self.transport.local_kex_init,
        )
        hm.add_string(key)
        hm.add_mpint(self.e)
        hm.add_mpint(self.f)
        hm.add_mpint(K)
        H = sha1(hm.asbytes()).digest()
        self.transport._set_K_H(K, H)
        srv_token = self.kexgss.ssh_accept_sec_context(
            self.gss_host, client_token
        )
        m = Message()
        if self.kexgss._gss_srv_ctxt_status:
            mic_token = self.kexgss.ssh_get_mic(
                self.transport.session_id, gss_kex=True
            )
            m.add_byte(c_MSG_KEXGSS_COMPLETE)
            m.add_mpint(self.f)
            m.add_string(mic_token)
            if srv_token is not None:
                m.add_boolean(True)
                m.add_string(srv_token)
            else:
                m.add_boolean(False)
            self.transport._send_message(m)
            self.transport.gss_kex_used = True
            self.transport._activate_outbound()
        else:
            m.add_byte(c_MSG_KEXGSS_CONTINUE)
            m.add_string(srv_token)
            self.transport._send_message(m)
            self.transport._expect_packet(
                MSG_KEXGSS_CONTINUE, MSG_KEXGSS_COMPLETE, MSG_KEXGSS_ERROR
            )

    def _parse_kexgss_error(self, m):
        """
        Parse the SSH2_MSG_KEXGSS_ERROR message (client mode).

        The server may send a GSS-API error message. if it does, we display
        the error by throwing an exception (client mode).

        :param `.Message` m: The content of the SSH2_MSG_KEXGSS_ERROR message
        :raise SSHException: Contains GSS-API major and minor status as well as
                             the error message and the language tag of the
                             message
        """
        maj_status = m.get_int()
        min_status = m.get_int()
        err_msg = m.get_string()
        m.get_string()  # we don't care about the language!
        raise SSHException(
            """GSS-API Error:
Major Status: {}
Minor Status: {}
Error Message: {}
""".format(
                maj_status, min_status, err_msg
            )
        )
class KexGSSGroup14(KexGSSGroup1):
    """
    GSS-API / SSPI Authenticated Diffie-Hellman Group14 Key Exchange as defined
    in `RFC 4462 Section 2
    <https://tools.ietf.org/html/rfc4462.html#section-2>`_
    """
    # group14 (2048-bit MODP) prime with generator 2; all exchange logic is
    # inherited from KexGSSGroup1.
    P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AACAA68FFFFFFFFFFFFFFFF  # noqa
    G = 2
    NAME = "gss-group14-sha1-toWM5Slw5Ew8Mqkay+al2g=="
class KexGSSGex:
    """
    GSS-API / SSPI Authenticated Diffie-Hellman Group Exchange as defined in
    `RFC 4462 Section 2 <https://tools.ietf.org/html/rfc4462.html#section-2>`_
    """

    NAME = "gss-gex-sha1-toWM5Slw5Ew8Mqkay+al2g=="
    min_bits = 1024
    max_bits = 8192
    preferred_bits = 2048

    def __init__(self, transport):
        self.transport = transport
        self.kexgss = self.transport.kexgss_ctxt
        self.gss_host = None
        self.p = None
        self.q = None
        self.g = None
        self.x = None
        self.e = None
        self.f = None
        self.old_style = False

    def start_kex(self):
        """
        Start the GSS-API / SSPI Authenticated Diffie-Hellman Group Exchange
        """
        if self.transport.server_mode:
            self.transport._expect_packet(MSG_KEXGSS_GROUPREQ)
            return
        # request a bit range: we accept (min_bits) to (max_bits), but prefer
        # (preferred_bits). according to the spec, we shouldn't pull the
        # minimum up above 1024.
        self.gss_host = self.transport.gss_host
        m = Message()
        m.add_byte(c_MSG_KEXGSS_GROUPREQ)
        m.add_int(self.min_bits)
        m.add_int(self.preferred_bits)
        m.add_int(self.max_bits)
        self.transport._send_message(m)
        self.transport._expect_packet(MSG_KEXGSS_GROUP)

    def parse_next(self, ptype, m):
        """
        Parse the next packet.

        :param ptype: The (string) type of the incoming packet
        :param `.Message` m: The packet content
        """
        if ptype == MSG_KEXGSS_GROUPREQ:
            return self._parse_kexgss_groupreq(m)
        elif ptype == MSG_KEXGSS_GROUP:
            return self._parse_kexgss_group(m)
        elif ptype == MSG_KEXGSS_INIT:
            return self._parse_kexgss_gex_init(m)
        elif ptype == MSG_KEXGSS_HOSTKEY:
            return self._parse_kexgss_hostkey(m)
        elif ptype == MSG_KEXGSS_CONTINUE:
            return self._parse_kexgss_continue(m)
        elif ptype == MSG_KEXGSS_COMPLETE:
            return self._parse_kexgss_complete(m)
        elif ptype == MSG_KEXGSS_ERROR:
            return self._parse_kexgss_error(m)
        msg = "KexGex asked to handle packet type {:d}"
        raise SSHException(msg.format(ptype))

    # ## internals...

    def _generate_x(self):
        # generate an "x" (1 < x < (p-1)/2).
        q = (self.p - 1) // 2
        qnorm = util.deflate_long(q, 0)
        qhbyte = byte_ord(qnorm[0])
        byte_count = len(qnorm)
        qmask = 0xFF
        while not (qhbyte & 0x80):
            qhbyte <<= 1
            qmask >>= 1
        while True:
            x_bytes = os.urandom(byte_count)
            x_bytes = byte_mask(x_bytes[0], qmask) + x_bytes[1:]
            x = util.inflate_long(x_bytes, 1)
            if (x > 1) and (x < q):
                break
        self.x = x

    def _parse_kexgss_groupreq(self, m):
        """
        Parse the SSH2_MSG_KEXGSS_GROUPREQ message (server mode).

        :param `.Message` m: The content of the
            SSH2_MSG_KEXGSS_GROUPREQ message
        """
        minbits = m.get_int()
        preferredbits = m.get_int()
        maxbits = m.get_int()
        # smoosh the user's preferred size into our own limits
        if preferredbits > self.max_bits:
            preferredbits = self.max_bits
        if preferredbits < self.min_bits:
            preferredbits = self.min_bits
        # fix min/max if they're inconsistent. technically, we could just pout
        # and hang up, but there's no harm in giving them the benefit of the
        # doubt and just picking a bitsize for them.
        if minbits > preferredbits:
            minbits = preferredbits
        if maxbits < preferredbits:
            maxbits = preferredbits
        # now save a copy
        self.min_bits = minbits
        self.preferred_bits = preferredbits
        self.max_bits = maxbits
        # generate prime
        pack = self.transport._get_modulus_pack()
        if pack is None:
            raise SSHException("Can't do server-side gex with no modulus pack")
        self.transport._log(
            DEBUG,  # noqa
            "Picking p ({} <= {} <= {} bits)".format(
                minbits, preferredbits, maxbits
            ),
        )
        self.g, self.p = pack.get_modulus(minbits, preferredbits, maxbits)
        m = Message()
        m.add_byte(c_MSG_KEXGSS_GROUP)
        m.add_mpint(self.p)
        m.add_mpint(self.g)
        self.transport._send_message(m)
        self.transport._expect_packet(MSG_KEXGSS_INIT)

    def _parse_kexgss_group(self, m):
        """
        Parse the SSH2_MSG_KEXGSS_GROUP message (client mode).

        :param `Message` m: The content of the SSH2_MSG_KEXGSS_GROUP message
        """
        self.p = m.get_mpint()
        self.g = m.get_mpint()
        # reject if p's bit length < 1024 or > 8192
        bitlen = util.bit_length(self.p)
        if (bitlen < 1024) or (bitlen > 8192):
            raise SSHException(
                "Server-generated gex p (don't ask) is out of range "
                "({} bits)".format(bitlen)
            )
        self.transport._log(
            DEBUG, "Got server p ({} bits)".format(bitlen)
        )  # noqa
        self._generate_x()
        # now compute e = g^x mod p
        self.e = pow(self.g, self.x, self.p)
        m = Message()
        m.add_byte(c_MSG_KEXGSS_INIT)
        m.add_string(self.kexgss.ssh_init_sec_context(target=self.gss_host))
        m.add_mpint(self.e)
        self.transport._send_message(m)
        self.transport._expect_packet(
            MSG_KEXGSS_HOSTKEY,
            MSG_KEXGSS_CONTINUE,
            MSG_KEXGSS_COMPLETE,
            MSG_KEXGSS_ERROR,
        )

    def _parse_kexgss_gex_init(self, m):
        """
        Parse the SSH2_MSG_KEXGSS_INIT message (server mode).

        :param `Message` m: The content of the SSH2_MSG_KEXGSS_INIT message
        """
        client_token = m.get_string()
        self.e = m.get_mpint()
        if (self.e < 1) or (self.e > self.p - 1):
            raise SSHException('Client kex "e" is out of range')
        self._generate_x()
        self.f = pow(self.g, self.x, self.p)
        K = pow(self.e, self.x, self.p)
        self.transport.host_key = NullHostKey()
        key = self.transport.host_key.__str__()
        # okay, build up the hash H of
        # (V_C || V_S || I_C || I_S || K_S || min || n || max || p || g || e || f || K) # noqa
        hm = Message()
        hm.add(
            self.transport.remote_version,
            self.transport.local_version,
            self.transport.remote_kex_init,
            self.transport.local_kex_init,
            key,
        )
        hm.add_int(self.min_bits)
        hm.add_int(self.preferred_bits)
        hm.add_int(self.max_bits)
        hm.add_mpint(self.p)
        hm.add_mpint(self.g)
        hm.add_mpint(self.e)
        hm.add_mpint(self.f)
        hm.add_mpint(K)
        H = sha1(hm.asbytes()).digest()
        self.transport._set_K_H(K, H)
        srv_token = self.kexgss.ssh_accept_sec_context(
            self.gss_host, client_token
        )
        m = Message()
        if self.kexgss._gss_srv_ctxt_status:
            mic_token = self.kexgss.ssh_get_mic(
                self.transport.session_id, gss_kex=True
            )
            m.add_byte(c_MSG_KEXGSS_COMPLETE)
            m.add_mpint(self.f)
            m.add_string(mic_token)
            if srv_token is not None:
                m.add_boolean(True)
                m.add_string(srv_token)
            else:
                m.add_boolean(False)
            self.transport._send_message(m)
            self.transport.gss_kex_used = True
            self.transport._activate_outbound()
        else:
            m.add_byte(c_MSG_KEXGSS_CONTINUE)
            m.add_string(srv_token)
            self.transport._send_message(m)
            self.transport._expect_packet(
                MSG_KEXGSS_CONTINUE, MSG_KEXGSS_COMPLETE, MSG_KEXGSS_ERROR
            )

    def _parse_kexgss_hostkey(self, m):
        """
        Parse the SSH2_MSG_KEXGSS_HOSTKEY message (client mode).

        :param `Message` m: The content of the SSH2_MSG_KEXGSS_HOSTKEY message
        """
        # client mode
        host_key = m.get_string()
        self.transport.host_key = host_key
        sig = m.get_string()
        self.transport._verify_key(host_key, sig)
        self.transport._expect_packet(MSG_KEXGSS_CONTINUE, MSG_KEXGSS_COMPLETE)

    def _parse_kexgss_continue(self, m):
        """
        Parse the SSH2_MSG_KEXGSS_CONTINUE message.

        :param `Message` m: The content of the SSH2_MSG_KEXGSS_CONTINUE message
        """
        if not self.transport.server_mode:
            srv_token = m.get_string()
            m = Message()
            m.add_byte(c_MSG_KEXGSS_CONTINUE)
            m.add_string(
                self.kexgss.ssh_init_sec_context(
                    target=self.gss_host, recv_token=srv_token
                )
            )
            # use the transport's internal _send_message(), as every other
            # call site does (there is no public send_message())
            self.transport._send_message(m)
            self.transport._expect_packet(
                MSG_KEXGSS_CONTINUE, MSG_KEXGSS_COMPLETE, MSG_KEXGSS_ERROR
            )
        else:
            pass

    def _parse_kexgss_complete(self, m):
        """
        Parse the SSH2_MSG_KEXGSS_COMPLETE message (client mode).

        :param `Message` m: The content of the SSH2_MSG_KEXGSS_COMPLETE message
        """
        if self.transport.host_key is None:
            self.transport.host_key = NullHostKey()
        self.f = m.get_mpint()
        mic_token = m.get_string()
        # This must be TRUE, if there is a GSS-API token in this message.
        token_present = m.get_boolean()  # renamed: don't shadow builtin bool
        srv_token = None
        if token_present:
            srv_token = m.get_string()
        if (self.f < 1) or (self.f > self.p - 1):
            raise SSHException('Server kex "f" is out of range')
        K = pow(self.f, self.x, self.p)
        # okay, build up the hash H of
        # (V_C || V_S || I_C || I_S || K_S || min || n || max || p || g || e || f || K) # noqa
        hm = Message()
        hm.add(
            self.transport.local_version,
            self.transport.remote_version,
            self.transport.local_kex_init,
            self.transport.remote_kex_init,
            self.transport.host_key.__str__(),
        )
        if not self.old_style:
            hm.add_int(self.min_bits)
        hm.add_int(self.preferred_bits)
        if not self.old_style:
            hm.add_int(self.max_bits)
        hm.add_mpint(self.p)
        hm.add_mpint(self.g)
        hm.add_mpint(self.e)
        hm.add_mpint(self.f)
        hm.add_mpint(K)
        H = sha1(hm.asbytes()).digest()
        self.transport._set_K_H(K, H)
        if srv_token is not None:
            self.kexgss.ssh_init_sec_context(
                target=self.gss_host, recv_token=srv_token
            )
            self.kexgss.ssh_check_mic(mic_token, H)
        else:
            self.kexgss.ssh_check_mic(mic_token, H)
        self.transport.gss_kex_used = True
        self.transport._activate_outbound()

    def _parse_kexgss_error(self, m):
        """
        Parse the SSH2_MSG_KEXGSS_ERROR message (client mode).

        The server may send a GSS-API error message. if it does, we display
        the error by throwing an exception (client mode).

        :param `Message` m: The content of the SSH2_MSG_KEXGSS_ERROR message
        :raise SSHException: Contains GSS-API major and minor status as well as
                             the error message and the language tag of the
                             message
        """
        maj_status = m.get_int()
        min_status = m.get_int()
        err_msg = m.get_string()
        m.get_string()  # we don't care about the language (lang_tag)!
        raise SSHException(
            """GSS-API Error:
Major Status: {}
Minor Status: {}
Error Message: {}
""".format(
                maj_status, min_status, err_msg
            )
        )
class NullHostKey:
    """
    This class represents the Null Host Key for GSS-API Key Exchange as defined
    in `RFC 4462 Section 5
    <https://tools.ietf.org/html/rfc4462.html#section-5>`_

    Host authentication comes from GSS-API, so the key is an empty string.
    """

    def __init__(self):
        # both str() and get_name() yield this empty string
        self.key = ""

    def __str__(self):
        return self.key

    # the null host key's "name" is likewise the empty key string
    get_name = __str__
| 24,562 | Python | .py | 626 | 29.929712 | 530 | 0.594027 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
627 | kex_ecdh_nist.py | paramiko_paramiko/paramiko/kex_ecdh_nist.py | """
Ephemeral Elliptic Curve Diffie-Hellman (ECDH) key exchange
RFC 5656, Section 4
"""
from hashlib import sha256, sha384, sha512
from paramiko.common import byte_chr
from paramiko.message import Message
from paramiko.ssh_exception import SSHException
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives import serialization
from binascii import hexlify
# ECDH kex message numbers (RFC 5656 section 7.1) and their single-byte
# wire encodings.
_MSG_KEXECDH_INIT, _MSG_KEXECDH_REPLY = range(30, 32)
c_MSG_KEXECDH_INIT, c_MSG_KEXECDH_REPLY = [byte_chr(c) for c in range(30, 32)]
class KexNistp256:
name = "ecdh-sha2-nistp256"
hash_algo = sha256
curve = ec.SECP256R1()
    def __init__(self, transport):
        """Bind this key exchange to *transport*."""
        self.transport = transport
        # private key, client public and server public keys
        # P: our ephemeral EC private key (0 until start_kex generates it)
        self.P = 0
        # Q_C / Q_S: client / server ephemeral public keys
        self.Q_C = None
        self.Q_S = None
def start_kex(self):
self._generate_key_pair()
if self.transport.server_mode:
self.transport._expect_packet(_MSG_KEXECDH_INIT)
return
m = Message()
m.add_byte(c_MSG_KEXECDH_INIT)
# SEC1: V2.0 2.3.3 Elliptic-Curve-Point-to-Octet-String Conversion
m.add_string(
self.Q_C.public_bytes(
serialization.Encoding.X962,
serialization.PublicFormat.UncompressedPoint,
)
)
self.transport._send_message(m)
self.transport._expect_packet(_MSG_KEXECDH_REPLY)
def parse_next(self, ptype, m):
if self.transport.server_mode and (ptype == _MSG_KEXECDH_INIT):
return self._parse_kexecdh_init(m)
elif not self.transport.server_mode and (ptype == _MSG_KEXECDH_REPLY):
return self._parse_kexecdh_reply(m)
raise SSHException(
"KexECDH asked to handle packet type {:d}".format(ptype)
)
def _generate_key_pair(self):
self.P = ec.generate_private_key(self.curve, default_backend())
if self.transport.server_mode:
self.Q_S = self.P.public_key()
return
self.Q_C = self.P.public_key()
def _parse_kexecdh_init(self, m):
Q_C_bytes = m.get_string()
self.Q_C = ec.EllipticCurvePublicKey.from_encoded_point(
self.curve, Q_C_bytes
)
K_S = self.transport.get_server_key().asbytes()
K = self.P.exchange(ec.ECDH(), self.Q_C)
K = int(hexlify(K), 16)
# compute exchange hash
hm = Message()
hm.add(
self.transport.remote_version,
self.transport.local_version,
self.transport.remote_kex_init,
self.transport.local_kex_init,
)
hm.add_string(K_S)
hm.add_string(Q_C_bytes)
# SEC1: V2.0 2.3.3 Elliptic-Curve-Point-to-Octet-String Conversion
hm.add_string(
self.Q_S.public_bytes(
serialization.Encoding.X962,
serialization.PublicFormat.UncompressedPoint,
)
)
hm.add_mpint(int(K))
H = self.hash_algo(hm.asbytes()).digest()
self.transport._set_K_H(K, H)
sig = self.transport.get_server_key().sign_ssh_data(
H, self.transport.host_key_type
)
# construct reply
m = Message()
m.add_byte(c_MSG_KEXECDH_REPLY)
m.add_string(K_S)
m.add_string(
self.Q_S.public_bytes(
serialization.Encoding.X962,
serialization.PublicFormat.UncompressedPoint,
)
)
m.add_string(sig)
self.transport._send_message(m)
self.transport._activate_outbound()
def _parse_kexecdh_reply(self, m):
K_S = m.get_string()
Q_S_bytes = m.get_string()
self.Q_S = ec.EllipticCurvePublicKey.from_encoded_point(
self.curve, Q_S_bytes
)
sig = m.get_binary()
K = self.P.exchange(ec.ECDH(), self.Q_S)
K = int(hexlify(K), 16)
# compute exchange hash and verify signature
hm = Message()
hm.add(
self.transport.local_version,
self.transport.remote_version,
self.transport.local_kex_init,
self.transport.remote_kex_init,
)
hm.add_string(K_S)
# SEC1: V2.0 2.3.3 Elliptic-Curve-Point-to-Octet-String Conversion
hm.add_string(
self.Q_C.public_bytes(
serialization.Encoding.X962,
serialization.PublicFormat.UncompressedPoint,
)
)
hm.add_string(Q_S_bytes)
hm.add_mpint(K)
self.transport._set_K_H(K, self.hash_algo(hm.asbytes()).digest())
self.transport._verify_key(K_S, sig)
self.transport._activate_outbound()
class KexNistp384(KexNistp256):
    # Same handshake as KexNistp256, on the NIST P-384 curve with SHA-384.
    name = "ecdh-sha2-nistp384"
    hash_algo = sha384
    curve = ec.SECP384R1()
class KexNistp521(KexNistp256):
    # Same handshake as KexNistp256, on the NIST P-521 curve with SHA-512.
    name = "ecdh-sha2-nistp521"
    hash_algo = sha512
    curve = ec.SECP521R1()
| 5,012 | Python | .py | 136 | 27.948529 | 78 | 0.610985 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
628 | ber.py | paramiko_paramiko/paramiko/ber.py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from paramiko.common import max_byte, zero_byte, byte_ord, byte_chr
import paramiko.util as util
from paramiko.util import b
from paramiko.sftp import int64
class BERException(Exception):
    """Raised when BER content cannot be decoded, or a value cannot be encoded."""
class BER:
    """
    Robey's tiny little attempt at a BER decoder.

    Implements just enough of ITU-T X.690 BER to round-trip the constructs
    paramiko needs: booleans, integers, octet strings and sequences.
    Decoding consumes ``self.content`` starting at the cursor ``self.idx``.
    """

    def __init__(self, content=bytes()):
        self.content = b(content)  # normalize str/bytes input to bytes
        self.idx = 0  # read cursor used by decode_next()

    def asbytes(self):
        """Return the raw encoded content as `bytes`."""
        return self.content

    def __str__(self):
        # Map each byte 1:1 onto a character via latin-1 so the conversion is
        # lossless.  (Previously this returned the raw bytes, which makes
        # str(ber) raise TypeError on Python 3: __str__ must return str.)
        return self.content.decode("latin-1")

    def __repr__(self):
        return "BER('" + repr(self.content) + "')"

    def decode(self):
        """Decode and return the next value (alias for `decode_next`)."""
        return self.decode_next()

    def decode_next(self):
        """
        Decode and return the next value in the buffer, advancing ``idx``
        past it.  Returns `None` when the buffer is exhausted or the
        remaining bytes are truncated.

        :raises BERException: on an identifier type this decoder can't handle.
        """
        if self.idx >= len(self.content):
            return None
        ident = byte_ord(self.content[self.idx])
        self.idx += 1
        if (ident & 31) == 31:
            # identifier > 30: high-tag-number form — base-128, big-endian,
            # high bit of each octet means "more octets follow"
            ident = 0
            while self.idx < len(self.content):
                t = byte_ord(self.content[self.idx])
                self.idx += 1
                ident = (ident << 7) | (t & 0x7F)
                if not (t & 0x80):
                    break
        if self.idx >= len(self.content):
            return None
        # now fetch length
        size = byte_ord(self.content[self.idx])
        self.idx += 1
        if size & 0x80:
            # long form: low 7 bits give the number of length octets
            # FIXME: theoretically should handle indefinite-length (0x80)
            t = size & 0x7F
            if self.idx + t > len(self.content):
                return None
            size = util.inflate_long(
                self.content[self.idx : self.idx + t], True
            )
            self.idx += t
        if self.idx + size > len(self.content):
            # value runs past the end of the buffer: can't fit
            return None
        data = self.content[self.idx : self.idx + size]
        self.idx += size
        # now switch on id
        if ident == 0x30:
            # sequence
            return self.decode_sequence(data)
        elif ident == 2:
            # int
            return util.inflate_long(data)
        else:
            # 1: boolean (00 false, otherwise true)
            msg = "Unknown ber encoding type {:d} (robey is lazy)"
            raise BERException(msg.format(ident))

    @staticmethod
    def decode_sequence(data):
        """Decode every value in ``data`` and return them as a `list`."""
        out = []
        ber = BER(data)
        while True:
            x = ber.decode_next()
            if x is None:
                break
            out.append(x)
        return out

    def encode_tlv(self, ident, val):
        """
        Append one type-length-value triple to the buffer.  ``val`` must be
        `bytes`.  Lengths above 0x7f use the BER long form (a
        length-of-length byte followed by the length itself).
        """
        # no need to support ident > 31 here
        self.content += byte_chr(ident)
        if len(val) > 0x7F:
            lenstr = util.deflate_long(len(val))
            self.content += byte_chr(0x80 + len(lenstr)) + lenstr
        else:
            self.content += byte_chr(len(val))
        self.content += val

    def encode(self, x):
        """
        Append the BER encoding of ``x`` (bool, int, str, list or tuple) to
        the buffer.

        :raises BERException: if ``x`` is of an unsupported type.
        """
        if type(x) is bool:
            if x:
                self.encode_tlv(1, max_byte)
            else:
                self.encode_tlv(1, zero_byte)
        elif (type(x) is int) or (type(x) is int64):
            self.encode_tlv(2, util.deflate_long(x))
        elif type(x) is str:
            # Encode the text as bytes first; on Python 3, appending a str
            # directly to the bytes buffer would raise TypeError.
            self.encode_tlv(4, b(x))
        elif (type(x) is list) or (type(x) is tuple):
            self.encode_tlv(0x30, self.encode_sequence(x))
        else:
            raise BERException(
                "Unknown type for encoding: {!r}".format(type(x))
            )

    @staticmethod
    def encode_sequence(data):
        """Encode every item of ``data`` into one buffer and return the bytes."""
        ber = BER()
        for item in data:
            ber.encode(item)
        return ber.asbytes()
| 4,369 | Python | .py | 124 | 26.241935 | 79 | 0.571395 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
629 | win_openssh.py | paramiko_paramiko/paramiko/win_openssh.py | # Copyright (C) 2021 Lew Gordon <[email protected]>
# Copyright (C) 2022 Patrick Spendrin <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import os.path
import time
# Well-known named pipe exposed by the Win32 OpenSSH ssh-agent service.
PIPE_NAME = r"\\.\pipe\openssh-ssh-agent"
def can_talk_to_agent():
    """
    Report whether the Win32 OpenSSH agent pipe appears to exist.

    Deliberately uses os.listdir() rather than os.path.exists(): the latter
    goes through CreateFileW(), and the pipe cannot be reopened until the
    server calls DisconnectNamedPipe().
    """
    pipe_dir, pipe_name = os.path.split(PIPE_NAME)
    wanted = pipe_name.lower()
    for entry in os.listdir(pipe_dir):
        if entry.lower() == wanted:
            return True
    return False
class OpenSSHAgentConnection:
    """
    Raw connection to the Win32 OpenSSH agent over its named pipe, exposing
    the same send/recv/close surface as a socket-like agent transport.
    """

    def __init__(self):
        # Keep retrying while the server reports errno 22, which means it
        # has not called DisconnectNamedPipe() for the previous client yet.
        while True:
            try:
                self._pipe = os.open(PIPE_NAME, os.O_RDWR | os.O_BINARY)
                break
            except OSError as exc:
                if exc.errno != 22:
                    raise
            time.sleep(0.1)

    def send(self, data):
        """Write ``data`` to the pipe; returns the number of bytes written."""
        return os.write(self._pipe, data)

    def recv(self, n):
        """Read up to ``n`` bytes from the pipe."""
        return os.read(self._pipe, n)

    def close(self):
        """Close the pipe file descriptor."""
        return os.close(self._pipe)
| 1,918 | Python | .py | 47 | 34.93617 | 79 | 0.671858 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
630 | sftp_handle.py | paramiko_paramiko/paramiko/sftp_handle.py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Abstraction of an SFTP file handle (for server mode).
"""
import os
from paramiko.sftp import SFTP_OP_UNSUPPORTED, SFTP_OK
from paramiko.util import ClosingContextManager
class SFTPHandle(ClosingContextManager):
    """
    Abstract object representing a handle to an open file (or folder) in an
    SFTP server implementation.  Each handle has a string representation used
    by the client to refer to the underlying file.

    Server implementations can (and should) subclass SFTPHandle to implement
    features of a file handle, like `stat` or `chattr`.

    Instances of this class may be used as context managers.
    """

    def __init__(self, flags=0):
        """
        Create a new file handle representing a local file being served over
        SFTP.  If ``flags`` is passed in, it's used to determine if the file
        is open in append mode.

        :param int flags: optional flags as passed to
            `.SFTPServerInterface.open`
        """
        self.__flags = flags
        self.__name = None
        # only for handles to folders:
        self.__files = {}
        # cached file position, so sequential reads/writes skip the seek()
        self.__tell = None

    def close(self):
        """
        Called on the handle when a client closes a file.  Normally you would
        use this to close the underlying OS-level file object(s).

        The default implementation closes ``self.readfile`` and/or
        ``self.writefile`` if they exist, so if you rely on the default
        `read` and `write` implementations, this default should be fine too.
        """
        for attrname in ("readfile", "writefile"):
            fileobj = getattr(self, attrname, None)
            if fileobj is not None:
                fileobj.close()

    def read(self, offset, length):
        """
        Read up to ``length`` bytes from this file, starting at position
        ``offset``.  The offset may be a Python long, since SFTP allows it
        to be 64 bits.

        At end-of-file this may return an empty string to signify EOF, or it
        may also return ``SFTP_EOF``.

        The default implementation performs the read on an attribute named
        ``readfile`` (a Python file-like object), if one exists -- a time
        saver for the common wrapping case.

        :param offset: position in the file to start reading from.
        :param int length: number of bytes to attempt to read.
        :return: the `bytes` read, or an error code `int`.
        """
        fileobj = getattr(self, "readfile", None)
        if fileobj is None:
            return SFTP_OP_UNSUPPORTED
        try:
            if self.__tell is None:
                self.__tell = fileobj.tell()
            # only seek when the client jumps away from our cached position
            if offset != self.__tell:
                fileobj.seek(offset)
                self.__tell = offset
            data = fileobj.read(length)
        except IOError as e:
            # position is now unknown; force a fresh tell() next time
            self.__tell = None
            return SFTPServer.convert_errno(e.errno)
        self.__tell += len(data)
        return data

    def write(self, offset, data):
        """
        Write ``data`` into this file at position ``offset``.  Extending the
        file past its original end is expected.  Unlike Python's normal
        ``write()`` methods, this cannot do a partial write: it must write
        all of ``data`` or return an error.

        The default implementation writes through an attribute named
        ``writefile`` (a Python file-like object), if one exists.  The name
        differs from ``readfile`` to make read-only (or write-only) files
        easy, but if both attributes exist they should refer to the same
        file.

        :param offset: position in the file to start reading from.
        :param bytes data: data to write into the file.
        :return: an SFTP error code like ``SFTP_OK``.
        """
        fileobj = getattr(self, "writefile", None)
        if fileobj is None:
            return SFTP_OP_UNSUPPORTED
        try:
            # in append mode, don't care about seeking
            if (self.__flags & os.O_APPEND) == 0:
                if self.__tell is None:
                    self.__tell = fileobj.tell()
                if offset != self.__tell:
                    fileobj.seek(offset)
                    self.__tell = offset
            fileobj.write(data)
            fileobj.flush()
        except IOError as e:
            self.__tell = None
            return SFTPServer.convert_errno(e.errno)
        if self.__tell is not None:
            self.__tell += len(data)
        return SFTP_OK

    def stat(self):
        """
        Return an `.SFTPAttributes` object referring to this open file, or an
        error code.  Equivalent to `.SFTPServerInterface.stat`, except it's
        called on an open file instead of a path.

        :return:
            an attributes object for the given file, or an SFTP error code
            (like ``SFTP_PERMISSION_DENIED``).
        :rtype: `.SFTPAttributes` or error code
        """
        return SFTP_OP_UNSUPPORTED

    def chattr(self, attr):
        """
        Change the attributes of this file.  The ``attr`` object will contain
        only those fields provided by the client in its request, so you
        should check for the presence of fields before using them.

        :param .SFTPAttributes attr: the attributes to change on this file.
        :return: an `int` error code like ``SFTP_OK``.
        """
        return SFTP_OP_UNSUPPORTED

    # ...internals...

    def _set_files(self, files):
        """
        Used by the SFTP server code to cache a directory listing.  (In the
        SFTP protocol, listing a directory is a multi-stage process requiring
        a temporary handle.)
        """
        self.__files = files

    def _get_next_files(self):
        """
        Used by the SFTP server code to pop the next batch (up to 16 entries)
        of a cached directory listing.
        """
        batch, self.__files = self.__files[:16], self.__files[16:]
        return batch

    def _get_name(self):
        return self.__name

    def _set_name(self, name):
        self.__name = name
from paramiko.sftp_server import SFTPServer
| 7,424 | Python | .py | 168 | 35.761905 | 79 | 0.637521 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
631 | sftp_attr.py | paramiko_paramiko/paramiko/sftp_attr.py | # Copyright (C) 2003-2006 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import stat
import time
from paramiko.common import x80000000, o700, o70, xffffffff
class SFTPAttributes:
    """
    Representation of the attributes of a file (or proxied file) for SFTP in
    client or server mode.  It attempts to mirror the object returned by
    `os.stat` as closely as possible, so it may have the following fields,
    with the same meanings as those returned by an `os.stat` object:

        - ``st_size``
        - ``st_uid``
        - ``st_gid``
        - ``st_mode``
        - ``st_atime``
        - ``st_mtime``

    Because SFTP allows flags to have other arbitrary named attributes, these
    are stored in a dict named ``attr``.  Occasionally, the filename is also
    stored, in ``filename``.
    """

    # Wire-format bit flags marking which attribute groups are present in a
    # packed ATTRS structure (see _pack / _unpack).
    FLAG_SIZE = 1
    FLAG_UIDGID = 2
    FLAG_PERMISSIONS = 4
    FLAG_AMTIME = 8
    FLAG_EXTENDED = x80000000

    def __init__(self):
        """
        Create a new (empty) SFTPAttributes object.  All fields will be empty.
        """
        self._flags = 0
        self.st_size = None
        self.st_uid = None
        self.st_gid = None
        self.st_mode = None
        self.st_atime = None
        self.st_mtime = None
        # arbitrary extended (name, value) attributes from the server
        self.attr = {}

    @classmethod
    def from_stat(cls, obj, filename=None):
        """
        Create an `.SFTPAttributes` object from an existing ``stat`` object
        (an object returned by `os.stat`).

        :param object obj: an object returned by `os.stat` (or equivalent).
        :param str filename: the filename associated with this file.
        :return: new `.SFTPAttributes` object with the same attribute fields.
        """
        attr = cls()
        attr.st_size = obj.st_size
        attr.st_uid = obj.st_uid
        attr.st_gid = obj.st_gid
        attr.st_mode = obj.st_mode
        attr.st_atime = obj.st_atime
        attr.st_mtime = obj.st_mtime
        if filename is not None:
            attr.filename = filename
        return attr

    def __repr__(self):
        return "<SFTPAttributes: {}>".format(self._debug_str())

    # ...internals...
    @classmethod
    def _from_msg(cls, msg, filename=None, longname=None):
        # Build an instance from an incoming SFTP message; filename/longname
        # come from directory-listing replies, when present.
        attr = cls()
        attr._unpack(msg)
        if filename is not None:
            attr.filename = filename
        if longname is not None:
            attr.longname = longname
        return attr

    def _unpack(self, msg):
        # Read a packed ATTRS structure: a flags word, then only the field
        # groups whose flag bits are set, in this fixed order.
        self._flags = msg.get_int()
        if self._flags & self.FLAG_SIZE:
            self.st_size = msg.get_int64()
        if self._flags & self.FLAG_UIDGID:
            self.st_uid = msg.get_int()
            self.st_gid = msg.get_int()
        if self._flags & self.FLAG_PERMISSIONS:
            self.st_mode = msg.get_int()
        if self._flags & self.FLAG_AMTIME:
            self.st_atime = msg.get_int()
            self.st_mtime = msg.get_int()
        if self._flags & self.FLAG_EXTENDED:
            count = msg.get_int()
            for i in range(count):
                self.attr[msg.get_string()] = msg.get_string()

    def _pack(self, msg):
        # Write the ATTRS structure: derive the flags word from which fields
        # are set, then emit the corresponding groups in the same order as
        # _unpack reads them.
        self._flags = 0
        if self.st_size is not None:
            self._flags |= self.FLAG_SIZE
        if (self.st_uid is not None) and (self.st_gid is not None):
            self._flags |= self.FLAG_UIDGID
        if self.st_mode is not None:
            self._flags |= self.FLAG_PERMISSIONS
        if (self.st_atime is not None) and (self.st_mtime is not None):
            self._flags |= self.FLAG_AMTIME
        if len(self.attr) > 0:
            self._flags |= self.FLAG_EXTENDED
        msg.add_int(self._flags)
        if self._flags & self.FLAG_SIZE:
            msg.add_int64(self.st_size)
        if self._flags & self.FLAG_UIDGID:
            msg.add_int(self.st_uid)
            msg.add_int(self.st_gid)
        if self._flags & self.FLAG_PERMISSIONS:
            msg.add_int(self.st_mode)
        if self._flags & self.FLAG_AMTIME:
            # throw away any fractional seconds
            msg.add_int(int(self.st_atime))
            msg.add_int(int(self.st_mtime))
        if self._flags & self.FLAG_EXTENDED:
            msg.add_int(len(self.attr))
            for key, val in self.attr.items():
                msg.add_string(key)
                msg.add_string(val)
        return

    def _debug_str(self):
        # Compact human-readable summary of whichever fields are set.
        out = "[ "
        if self.st_size is not None:
            out += "size={} ".format(self.st_size)
        if (self.st_uid is not None) and (self.st_gid is not None):
            out += "uid={} gid={} ".format(self.st_uid, self.st_gid)
        if self.st_mode is not None:
            out += "mode=" + oct(self.st_mode) + " "
        if (self.st_atime is not None) and (self.st_mtime is not None):
            out += "atime={} mtime={} ".format(self.st_atime, self.st_mtime)
        for k, v in self.attr.items():
            out += '"{}"={!r} '.format(str(k), v)
        out += "]"
        return out

    @staticmethod
    def _rwx(n, suid, sticky=False):
        # Render one permission triplet (e.g. "rwx") for __str__.  ``n`` is
        # the 3-bit rwx value; a truthy ``suid`` selects the s/S (or t/T for
        # ``sticky``) form in the execute slot.
        if suid:
            suid = 2
        # string-indexing trick: pick the letter if the bit is set, else "-"
        out = "-r"[n >> 2] + "-w"[(n >> 1) & 1]
        if sticky:
            out += "-xTt"[suid + (n & 1)]
        else:
            out += "-xSs"[suid + (n & 1)]
        return out

    def __str__(self):
        """create a unix-style long description of the file (like ls -l)"""
        if self.st_mode is not None:
            kind = stat.S_IFMT(self.st_mode)
            if kind == stat.S_IFIFO:
                ks = "p"
            elif kind == stat.S_IFCHR:
                ks = "c"
            elif kind == stat.S_IFDIR:
                ks = "d"
            elif kind == stat.S_IFBLK:
                ks = "b"
            elif kind == stat.S_IFREG:
                ks = "-"
            elif kind == stat.S_IFLNK:
                ks = "l"
            elif kind == stat.S_IFSOCK:
                ks = "s"
            else:
                ks = "?"
            ks += self._rwx(
                (self.st_mode & o700) >> 6, self.st_mode & stat.S_ISUID
            )
            ks += self._rwx(
                (self.st_mode & o70) >> 3, self.st_mode & stat.S_ISGID
            )
            ks += self._rwx(
                self.st_mode & 7, self.st_mode & stat.S_ISVTX, True
            )
        else:
            ks = "?---------"
        # compute display date
        if (self.st_mtime is None) or (self.st_mtime == xffffffff):
            # shouldn't really happen
            datestr = "(unknown date)"
        else:
            time_tuple = time.localtime(self.st_mtime)
            # like ls: show the year instead of the time for old files
            if abs(time.time() - self.st_mtime) > 15_552_000:
                # (15,552,000s = 6 months)
                datestr = time.strftime("%d %b %Y", time_tuple)
            else:
                datestr = time.strftime("%d %b %H:%M", time_tuple)
        filename = getattr(self, "filename", "?")

        # not all servers support uid/gid
        uid = self.st_uid
        gid = self.st_gid
        size = self.st_size
        if uid is None:
            uid = 0
        if gid is None:
            gid = 0
        if size is None:
            size = 0

        # TODO: not sure this actually worked as expected beforehand, leaving
        # it untouched for the time being, re: .format() upgrade, until someone
        # has time to doublecheck
        return "%s 1 %-8d %-8d %8d %-12s %s" % (
            ks,
            uid,
            gid,
            size,
            datestr,
            filename,
        )

    def asbytes(self):
        """Return the `__str__` long listing, encoded as `bytes`."""
        return str(self).encode()
| 8,258 | Python | .py | 220 | 27.959091 | 79 | 0.547824 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
632 | common.py | paramiko_paramiko/paramiko/common.py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Common constants and global variables.
"""
import logging
import struct
#
# Formerly of py3compat.py. May be fully delete'able with a deeper look?
#
def byte_chr(c):
    """Pack the integer ``c`` (0-255) into a single-byte `bytes` value."""
    assert isinstance(c, int)
    packed = struct.pack("B", c)
    return packed
def byte_mask(c, mask):
    """Pack ``c & mask`` into a single-byte `bytes` value."""
    assert isinstance(c, int)
    masked = c & mask
    return struct.pack("B", masked)
def byte_ord(c):
    """
    Return the integer value of ``c``, which may already be an int (e.g. an
    element of a `bytes` object) or a length-1 string.
    """
    if isinstance(c, int):
        return c
    return ord(c)
# SSH message numbers: transport layer (RFC 4253 section 12), kex, user
# authentication (RFC 4252), connection protocol (RFC 4254), plus the
# EXT_INFO extension (RFC 8308).
(
    MSG_DISCONNECT,
    MSG_IGNORE,
    MSG_UNIMPLEMENTED,
    MSG_DEBUG,
    MSG_SERVICE_REQUEST,
    MSG_SERVICE_ACCEPT,
    MSG_EXT_INFO,
) = range(1, 8)
(MSG_KEXINIT, MSG_NEWKEYS) = range(20, 22)
(
    MSG_USERAUTH_REQUEST,
    MSG_USERAUTH_FAILURE,
    MSG_USERAUTH_SUCCESS,
    MSG_USERAUTH_BANNER,
) = range(50, 54)
# Message numbers 60-61 are method-specific: their meaning depends on which
# auth method is in progress (publickey, keyboard-interactive, or GSSAPI).
MSG_USERAUTH_PK_OK = 60
(MSG_USERAUTH_INFO_REQUEST, MSG_USERAUTH_INFO_RESPONSE) = range(60, 62)
(MSG_USERAUTH_GSSAPI_RESPONSE, MSG_USERAUTH_GSSAPI_TOKEN) = range(60, 62)
(
    MSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE,
    MSG_USERAUTH_GSSAPI_ERROR,
    MSG_USERAUTH_GSSAPI_ERRTOK,
    MSG_USERAUTH_GSSAPI_MIC,
) = range(63, 67)
HIGHEST_USERAUTH_MESSAGE_ID = 79
(MSG_GLOBAL_REQUEST, MSG_REQUEST_SUCCESS, MSG_REQUEST_FAILURE) = range(80, 83)
(
    MSG_CHANNEL_OPEN,
    MSG_CHANNEL_OPEN_SUCCESS,
    MSG_CHANNEL_OPEN_FAILURE,
    MSG_CHANNEL_WINDOW_ADJUST,
    MSG_CHANNEL_DATA,
    MSG_CHANNEL_EXTENDED_DATA,
    MSG_CHANNEL_EOF,
    MSG_CHANNEL_CLOSE,
    MSG_CHANNEL_REQUEST,
    MSG_CHANNEL_SUCCESS,
    MSG_CHANNEL_FAILURE,
) = range(90, 101)


# Packed single-byte forms of the message numbers above, ready to be written
# as the first byte of an outgoing packet.
cMSG_DISCONNECT = byte_chr(MSG_DISCONNECT)
cMSG_IGNORE = byte_chr(MSG_IGNORE)
cMSG_UNIMPLEMENTED = byte_chr(MSG_UNIMPLEMENTED)
cMSG_DEBUG = byte_chr(MSG_DEBUG)
cMSG_SERVICE_REQUEST = byte_chr(MSG_SERVICE_REQUEST)
cMSG_SERVICE_ACCEPT = byte_chr(MSG_SERVICE_ACCEPT)
cMSG_EXT_INFO = byte_chr(MSG_EXT_INFO)
cMSG_KEXINIT = byte_chr(MSG_KEXINIT)
cMSG_NEWKEYS = byte_chr(MSG_NEWKEYS)
cMSG_USERAUTH_REQUEST = byte_chr(MSG_USERAUTH_REQUEST)
cMSG_USERAUTH_FAILURE = byte_chr(MSG_USERAUTH_FAILURE)
cMSG_USERAUTH_SUCCESS = byte_chr(MSG_USERAUTH_SUCCESS)
cMSG_USERAUTH_BANNER = byte_chr(MSG_USERAUTH_BANNER)
cMSG_USERAUTH_PK_OK = byte_chr(MSG_USERAUTH_PK_OK)
cMSG_USERAUTH_INFO_REQUEST = byte_chr(MSG_USERAUTH_INFO_REQUEST)
cMSG_USERAUTH_INFO_RESPONSE = byte_chr(MSG_USERAUTH_INFO_RESPONSE)
cMSG_USERAUTH_GSSAPI_RESPONSE = byte_chr(MSG_USERAUTH_GSSAPI_RESPONSE)
cMSG_USERAUTH_GSSAPI_TOKEN = byte_chr(MSG_USERAUTH_GSSAPI_TOKEN)
cMSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE = byte_chr(
    MSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE
)
cMSG_USERAUTH_GSSAPI_ERROR = byte_chr(MSG_USERAUTH_GSSAPI_ERROR)
cMSG_USERAUTH_GSSAPI_ERRTOK = byte_chr(MSG_USERAUTH_GSSAPI_ERRTOK)
cMSG_USERAUTH_GSSAPI_MIC = byte_chr(MSG_USERAUTH_GSSAPI_MIC)
cMSG_GLOBAL_REQUEST = byte_chr(MSG_GLOBAL_REQUEST)
cMSG_REQUEST_SUCCESS = byte_chr(MSG_REQUEST_SUCCESS)
cMSG_REQUEST_FAILURE = byte_chr(MSG_REQUEST_FAILURE)
cMSG_CHANNEL_OPEN = byte_chr(MSG_CHANNEL_OPEN)
cMSG_CHANNEL_OPEN_SUCCESS = byte_chr(MSG_CHANNEL_OPEN_SUCCESS)
cMSG_CHANNEL_OPEN_FAILURE = byte_chr(MSG_CHANNEL_OPEN_FAILURE)
cMSG_CHANNEL_WINDOW_ADJUST = byte_chr(MSG_CHANNEL_WINDOW_ADJUST)
cMSG_CHANNEL_DATA = byte_chr(MSG_CHANNEL_DATA)
cMSG_CHANNEL_EXTENDED_DATA = byte_chr(MSG_CHANNEL_EXTENDED_DATA)
cMSG_CHANNEL_EOF = byte_chr(MSG_CHANNEL_EOF)
cMSG_CHANNEL_CLOSE = byte_chr(MSG_CHANNEL_CLOSE)
cMSG_CHANNEL_REQUEST = byte_chr(MSG_CHANNEL_REQUEST)
cMSG_CHANNEL_SUCCESS = byte_chr(MSG_CHANNEL_SUCCESS)
cMSG_CHANNEL_FAILURE = byte_chr(MSG_CHANNEL_FAILURE)

# for debugging:
MSG_NAMES = {
    MSG_DISCONNECT: "disconnect",
    MSG_IGNORE: "ignore",
    MSG_UNIMPLEMENTED: "unimplemented",
    MSG_DEBUG: "debug",
    MSG_SERVICE_REQUEST: "service-request",
    MSG_SERVICE_ACCEPT: "service-accept",
    MSG_KEXINIT: "kexinit",
    MSG_EXT_INFO: "ext-info",
    MSG_NEWKEYS: "newkeys",
    30: "kex30",
    31: "kex31",
    32: "kex32",
    33: "kex33",
    34: "kex34",
    40: "kex40",
    41: "kex41",
    MSG_USERAUTH_REQUEST: "userauth-request",
    MSG_USERAUTH_FAILURE: "userauth-failure",
    MSG_USERAUTH_SUCCESS: "userauth-success",
    # NOTE(review): doubled hyphen looks like a typo, but this string only
    # appears in debug logs, so it is left as-is.
    MSG_USERAUTH_BANNER: "userauth--banner",
    MSG_USERAUTH_PK_OK: "userauth-60(pk-ok/info-request)",
    MSG_USERAUTH_INFO_RESPONSE: "userauth-info-response",
    MSG_GLOBAL_REQUEST: "global-request",
    MSG_REQUEST_SUCCESS: "request-success",
    MSG_REQUEST_FAILURE: "request-failure",
    MSG_CHANNEL_OPEN: "channel-open",
    MSG_CHANNEL_OPEN_SUCCESS: "channel-open-success",
    MSG_CHANNEL_OPEN_FAILURE: "channel-open-failure",
    MSG_CHANNEL_WINDOW_ADJUST: "channel-window-adjust",
    MSG_CHANNEL_DATA: "channel-data",
    MSG_CHANNEL_EXTENDED_DATA: "channel-extended-data",
    MSG_CHANNEL_EOF: "channel-eof",
    MSG_CHANNEL_CLOSE: "channel-close",
    MSG_CHANNEL_REQUEST: "channel-request",
    MSG_CHANNEL_SUCCESS: "channel-success",
    MSG_CHANNEL_FAILURE: "channel-failure",
    MSG_USERAUTH_GSSAPI_RESPONSE: "userauth-gssapi-response",
    MSG_USERAUTH_GSSAPI_TOKEN: "userauth-gssapi-token",
    MSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE: "userauth-gssapi-exchange-complete",
    MSG_USERAUTH_GSSAPI_ERROR: "userauth-gssapi-error",
    MSG_USERAUTH_GSSAPI_ERRTOK: "userauth-gssapi-error-token",
    MSG_USERAUTH_GSSAPI_MIC: "userauth-gssapi-mic",
}


# authentication request return codes:
AUTH_SUCCESSFUL, AUTH_PARTIALLY_SUCCESSFUL, AUTH_FAILED = range(3)


# channel request failed reasons:
(
    OPEN_SUCCEEDED,
    OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED,
    OPEN_FAILED_CONNECT_FAILED,
    OPEN_FAILED_UNKNOWN_CHANNEL_TYPE,
    OPEN_FAILED_RESOURCE_SHORTAGE,
) = range(0, 5)


# human-readable descriptions of the channel-open failure codes above
CONNECTION_FAILED_CODE = {
    1: "Administratively prohibited",
    2: "Connect failed",
    3: "Unknown channel type",
    4: "Resource shortage",
}


# disconnect reason codes used by paramiko (RFC 4253 section 11.1)
(
    DISCONNECT_SERVICE_NOT_AVAILABLE,
    DISCONNECT_AUTH_CANCELLED_BY_USER,
    DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE,
) = (7, 13, 14)

# frequently-used single bytes and their integer values
zero_byte = byte_chr(0)
one_byte = byte_chr(1)
four_byte = byte_chr(4)
max_byte = byte_chr(0xFF)
cr_byte = byte_chr(13)
linefeed_byte = byte_chr(10)
crlf = cr_byte + linefeed_byte
cr_byte_value = 13
linefeed_byte_value = 10


# masks/sentinels for 32-bit wire integers
xffffffff = 0xFFFFFFFF
x80000000 = 0x80000000
# octal file-mode constants spelled out in decimal (o666 == 0o666, etc.)
o666 = 438
o660 = 432
o644 = 420
o600 = 384
o777 = 511
o700 = 448
o70 = 56

# logging levels re-exported so callers can use paramiko.common.DEBUG etc.
DEBUG = logging.DEBUG
INFO = logging.INFO
WARNING = logging.WARNING
ERROR = logging.ERROR
CRITICAL = logging.CRITICAL

# Common IO/select/etc sleep period, in seconds
io_sleep = 0.01

# defaults advertised for our side of a channel
DEFAULT_WINDOW_SIZE = 64 * 2**15
DEFAULT_MAX_PACKET_SIZE = 2**15

# lower bound on the max packet size we'll accept from the remote host
# Minimum packet size is 32768 bytes according to
# http://www.ietf.org/rfc/rfc4254.txt
MIN_WINDOW_SIZE = 2**15

# However, according to http://www.ietf.org/rfc/rfc4253.txt it is perfectly
# legal to accept a size much smaller, as OpenSSH client does as size 16384.
MIN_PACKET_SIZE = 2**12

# Max windows size according to http://www.ietf.org/rfc/rfc4254.txt
MAX_WINDOW_SIZE = 2**32 - 1
| 7,756 | Python | .py | 216 | 33.106481 | 79 | 0.745174 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
633 | server.py | paramiko_paramiko/paramiko/server.py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
`.ServerInterface` is an interface to override for server support.
"""
import threading
from paramiko import util
from paramiko.common import (
DEBUG,
ERROR,
OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED,
AUTH_FAILED,
AUTH_SUCCESSFUL,
)
class ServerInterface:
    """
    This class defines an interface for controlling the behavior of Paramiko
    in server mode.

    Methods on this class are called from Paramiko's primary thread, so you
    shouldn't do too much work in them.  (Certainly nothing that blocks or
    sleeps.)
    """

    def check_channel_request(self, kind, chanid):
        """
        Determine if a channel request of a given type will be granted, and
        return ``OPEN_SUCCEEDED`` or an error code.  This method is
        called in server mode when the client requests a channel, after
        authentication is complete.

        If you allow channel requests (and an ssh server that didn't would be
        useless), you should also override some of the channel request methods
        below, which are used to determine which services will be allowed on
        a given channel:

            - `check_channel_pty_request`
            - `check_channel_shell_request`
            - `check_channel_subsystem_request`
            - `check_channel_window_change_request`
            - `check_channel_x11_request`
            - `check_channel_forward_agent_request`

        The ``chanid`` parameter is a small number that uniquely identifies the
        channel within a `.Transport`.  A `.Channel` object is not created
        unless this method returns ``OPEN_SUCCEEDED`` -- once a
        `.Channel` object is created, you can call `.Channel.get_id` to
        retrieve the channel ID.

        The return value should either be ``OPEN_SUCCEEDED`` (or
        ``0``) to allow the channel request, or one of the following error
        codes to reject it:

            - ``OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED``
            - ``OPEN_FAILED_CONNECT_FAILED``
            - ``OPEN_FAILED_UNKNOWN_CHANNEL_TYPE``
            - ``OPEN_FAILED_RESOURCE_SHORTAGE``

        The default implementation always returns
        ``OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED``.

        :param str kind:
            the kind of channel the client would like to open (usually
            ``"session"``).
        :param int chanid: ID of the channel
        :return: an `int` success or failure code (listed above)
        """
        return OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED

    def get_allowed_auths(self, username):
        """
        Return a list of authentication methods supported by the server.
        This list is sent to clients attempting to authenticate, to inform
        them of authentication methods that might be successful.

        The "list" is actually a string of comma-separated names of types of
        authentication.  Possible values are ``"password"``, ``"publickey"``,
        and ``"none"``.

        The default implementation always returns ``"password"``.

        :param str username: the username requesting authentication.
        :return: a comma-separated `str` of authentication types
        """
        return "password"

    def check_auth_none(self, username):
        """
        Determine if a client may open channels with no (further)
        authentication.

        Return ``AUTH_FAILED`` if the client must authenticate, or
        ``AUTH_SUCCESSFUL`` if it's okay for the client to not
        authenticate.

        The default implementation always returns ``AUTH_FAILED``.

        :param str username: the username of the client.
        :return:
            ``AUTH_FAILED`` if the authentication fails; ``AUTH_SUCCESSFUL`` if
            it succeeds.
        :rtype: int
        """
        return AUTH_FAILED

    def check_auth_password(self, username, password):
        """
        Determine if a given username and password supplied by the client is
        acceptable for use in authentication.

        Return ``AUTH_FAILED`` if the password is not accepted,
        ``AUTH_SUCCESSFUL`` if the password is accepted and completes
        the authentication, or ``AUTH_PARTIALLY_SUCCESSFUL`` if your
        authentication is stateful, and this password is accepted for
        authentication, but more authentication is required.  (In this latter
        case, `get_allowed_auths` will be called to report to the client what
        options it has for continuing the authentication.)

        The default implementation always returns ``AUTH_FAILED``.

        :param str username: the username of the authenticating client.
        :param str password: the password given by the client.
        :return:
            ``AUTH_FAILED`` if the authentication fails; ``AUTH_SUCCESSFUL`` if
            it succeeds; ``AUTH_PARTIALLY_SUCCESSFUL`` if the password auth is
            successful, but authentication must continue.
        :rtype: int
        """
        return AUTH_FAILED

    def check_auth_publickey(self, username, key):
        """
        Determine if a given key supplied by the client is acceptable for use
        in authentication.  You should override this method in server mode to
        check the username and key and decide if you would accept a signature
        made using this key.

        Return ``AUTH_FAILED`` if the key is not accepted,
        ``AUTH_SUCCESSFUL`` if the key is accepted and completes the
        authentication, or ``AUTH_PARTIALLY_SUCCESSFUL`` if your
        authentication is stateful, and this key is accepted for
        authentication, but more authentication is required.  (In this latter
        case, `get_allowed_auths` will be called to report to the client what
        options it has for continuing the authentication.)

        Note that you don't have to actually verify any key signature here.
        If you're willing to accept the key, Paramiko will do the work of
        verifying the client's signature.

        The default implementation always returns ``AUTH_FAILED``.

        :param str username: the username of the authenticating client
        :param .PKey key: the key object provided by the client
        :return:
            ``AUTH_FAILED`` if the client can't authenticate with this key;
            ``AUTH_SUCCESSFUL`` if it can; ``AUTH_PARTIALLY_SUCCESSFUL`` if it
            can authenticate with this key but must continue with
            authentication
        :rtype: int
        """
        return AUTH_FAILED

    def check_auth_interactive(self, username, submethods):
        """
        Begin an interactive authentication challenge, if supported.  You
        should override this method in server mode if you want to support the
        ``"keyboard-interactive"`` auth type, which requires you to send a
        series of questions for the client to answer.

        Return ``AUTH_FAILED`` if this auth method isn't supported.  Otherwise,
        you should return an `.InteractiveQuery` object containing the prompts
        and instructions for the user.  The response will be sent via a call
        to `check_auth_interactive_response`.

        The default implementation always returns ``AUTH_FAILED``.

        :param str username: the username of the authenticating client
        :param str submethods:
            a comma-separated list of methods preferred by the client (usually
            empty)
        :return:
            ``AUTH_FAILED`` if this auth method isn't supported; otherwise an
            object containing queries for the user
        :rtype: int or `.InteractiveQuery`
        """
        return AUTH_FAILED

    def check_auth_interactive_response(self, responses):
        """
        Continue or finish an interactive authentication challenge, if
        supported.  You should override this method in server mode if you want
        to support the ``"keyboard-interactive"`` auth type.

        Return ``AUTH_FAILED`` if the responses are not accepted,
        ``AUTH_SUCCESSFUL`` if the responses are accepted and complete
        the authentication, or ``AUTH_PARTIALLY_SUCCESSFUL`` if your
        authentication is stateful, and this set of responses is accepted for
        authentication, but more authentication is required.  (In this latter
        case, `get_allowed_auths` will be called to report to the client what
        options it has for continuing the authentication.)

        If you wish to continue interactive authentication with more questions,
        you may return an `.InteractiveQuery` object, which should cause the
        client to respond with more answers, calling this method again.  This
        cycle can continue indefinitely.

        The default implementation always returns ``AUTH_FAILED``.

        :param responses: list of `str` responses from the client
        :return:
            ``AUTH_FAILED`` if the authentication fails; ``AUTH_SUCCESSFUL`` if
            it succeeds; ``AUTH_PARTIALLY_SUCCESSFUL`` if the interactive auth
            is successful, but authentication must continue; otherwise an
            object containing queries for the user
        :rtype: int or `.InteractiveQuery`
        """
        return AUTH_FAILED

    def check_auth_gssapi_with_mic(
        self, username, gss_authenticated=AUTH_FAILED, cc_file=None
    ):
        """
        Authenticate the given user to the server if he is a valid krb5
        principal.

        :param str username: The username of the authenticating client
        :param int gss_authenticated: The result of the krb5 authentication
        :param str cc_file: The krb5 client credentials cache filename
        :return: ``AUTH_FAILED`` if the user is not authenticated otherwise
                 ``AUTH_SUCCESSFUL``
        :rtype: int
        :note: Kerberos credential delegation is not supported.
        :see: `.ssh_gss`
        :note: We are just checking in L{AuthHandler} that the given user is
               a valid krb5 principal!
               We don't check if the krb5 principal is allowed to log in on
               the server, because there is no way to do that in python. So
               if you develop your own SSH server with paramiko for a certain
               platform like Linux, you should call C{krb5_kuserok()} in
               your local kerberos library to make sure that the
               krb5_principal has an account on the server and is allowed to
               log in as a user.
        :see: http://www.unix.com/man-page/all/3/krb5_kuserok/
        """
        if gss_authenticated == AUTH_SUCCESSFUL:
            return AUTH_SUCCESSFUL
        return AUTH_FAILED

    def check_auth_gssapi_keyex(
        self, username, gss_authenticated=AUTH_FAILED, cc_file=None
    ):
        """
        Authenticate the given user to the server if he is a valid krb5
        principal and GSS-API Key Exchange was performed.
        If GSS-API Key Exchange was not performed, this authentication method
        won't be available.

        :param str username: The username of the authenticating client
        :param int gss_authenticated: The result of the krb5 authentication
        :param str cc_file: The krb5 client credentials cache filename
        :return: ``AUTH_FAILED`` if the user is not authenticated otherwise
                 ``AUTH_SUCCESSFUL``
        :rtype: int
        :note: Kerberos credential delegation is not supported.
        :see: `.ssh_gss` `.kex_gss`
        :note: We are just checking in L{AuthHandler} that the given user is
               a valid krb5 principal!
               We don't check if the krb5 principal is allowed to log in on
               the server, because there is no way to do that in python. So
               if you develop your own SSH server with paramiko for a certain
               platform like Linux, you should call C{krb5_kuserok()} in
               your local kerberos library to make sure that the
               krb5_principal has an account on the server and is allowed
               to log in as a user.
        :see: http://www.unix.com/man-page/all/3/krb5_kuserok/
        """
        if gss_authenticated == AUTH_SUCCESSFUL:
            return AUTH_SUCCESSFUL
        return AUTH_FAILED

    def enable_auth_gssapi(self):
        """
        Overwrite this function in your SSH server to enable GSSAPI
        authentication.
        The default implementation always returns false.

        :returns bool: Whether GSSAPI authentication is enabled.
        :see: `.ssh_gss`
        """
        return False

    def check_port_forward_request(self, address, port):
        """
        Handle a request for port forwarding.  The client is asking that
        connections to the given address and port be forwarded back across
        this ssh connection.  An address of ``"0.0.0.0"`` indicates a global
        address (any address associated with this server) and a port of ``0``
        indicates that no specific port is requested (usually the OS will pick
        a port).

        The default implementation always returns ``False``, rejecting the
        port forwarding request.  If the request is accepted, you should return
        the port opened for listening.

        :param str address: the requested address
        :param int port: the requested port
        :return:
            the port number (`int`) that was opened for listening, or ``False``
            to reject
        """
        return False

    def cancel_port_forward_request(self, address, port):
        """
        The client would like to cancel a previous port-forwarding request.
        If the given address and port is being forwarded across this ssh
        connection, the port should be closed.

        :param str address: the forwarded address
        :param int port: the forwarded port
        """
        pass

    def check_global_request(self, kind, msg):
        """
        Handle a global request of the given ``kind``.  This method is called
        in server mode and client mode, whenever the remote host makes a global
        request.  If there are any arguments to the request, they will be in
        ``msg``.

        There aren't any useful global requests defined, aside from port
        forwarding, so usually this type of request is an extension to the
        protocol.

        If the request was successful and you would like to return contextual
        data to the remote host, return a tuple.  Items in the tuple will be
        sent back with the successful result.  (Note that the items in the
        tuple can only be strings, ints, or bools.)

        The default implementation always returns ``False``, indicating that it
        does not support any global requests.

        .. note:: Port forwarding requests are handled separately, in
            `check_port_forward_request`.

        :param str kind: the kind of global request being made.
        :param .Message msg: any extra arguments to the request.
        :return:
            ``True`` or a `tuple` of data if the request was granted; ``False``
            otherwise.
        """
        return False

    # ...Channel requests...

    def check_channel_pty_request(
        self, channel, term, width, height, pixelwidth, pixelheight, modes
    ):
        """
        Determine if a pseudo-terminal of the given dimensions (usually
        requested for shell access) can be provided on the given channel.

        The default implementation always returns ``False``.

        :param .Channel channel: the `.Channel` the pty request arrived on.
        :param str term: type of terminal requested (for example, ``"vt100"``).
        :param int width: width of screen in characters.
        :param int height: height of screen in characters.
        :param int pixelwidth:
            width of screen in pixels, if known (may be ``0`` if unknown).
        :param int pixelheight:
            height of screen in pixels, if known (may be ``0`` if unknown).
        :param modes:
            encoded terminal modes requested by the client (not interpreted
            by the default implementation).
        :return:
            ``True`` if the pseudo-terminal has been allocated; ``False``
            otherwise.
        """
        return False

    def check_channel_shell_request(self, channel):
        """
        Determine if a shell will be provided to the client on the given
        channel.  If this method returns ``True``, the channel should be
        connected to the stdin/stdout of a shell (or something that acts like
        a shell).

        The default implementation always returns ``False``.

        :param .Channel channel: the `.Channel` the request arrived on.
        :return:
            ``True`` if this channel is now hooked up to a shell; ``False`` if
            a shell can't or won't be provided.
        """
        return False

    def check_channel_exec_request(self, channel, command):
        """
        Determine if a shell command will be executed for the client.  If this
        method returns ``True``, the channel should be connected to the stdin,
        stdout, and stderr of the shell command.

        The default implementation always returns ``False``.

        :param .Channel channel: the `.Channel` the request arrived on.
        :param str command: the command to execute.
        :return:
            ``True`` if this channel is now hooked up to the stdin, stdout, and
            stderr of the executing command; ``False`` if the command will not
            be executed.

        .. versionadded:: 1.1
        """
        return False

    def check_channel_subsystem_request(self, channel, name):
        """
        Determine if a requested subsystem will be provided to the client on
        the given channel.  If this method returns ``True``, all future I/O
        through this channel will be assumed to be connected to the requested
        subsystem.  An example of a subsystem is ``sftp``.

        The default implementation checks for a subsystem handler assigned via
        `.Transport.set_subsystem_handler`.
        If one has been set, the handler is invoked and this method returns
        ``True``.  Otherwise it returns ``False``.

        .. note:: Because the default implementation uses the `.Transport` to
            identify valid subsystems, you probably won't need to override this
            method.

        :param .Channel channel: the `.Channel` the pty request arrived on.
        :param str name: name of the requested subsystem.
        :return:
            ``True`` if this channel is now hooked up to the requested
            subsystem; ``False`` if that subsystem can't or won't be provided.
        """
        transport = channel.get_transport()
        handler_class, args, kwargs = transport._get_subsystem_handler(name)
        if handler_class is None:
            return False
        handler = handler_class(channel, name, self, *args, **kwargs)
        handler.start()
        return True

    def check_channel_window_change_request(
        self, channel, width, height, pixelwidth, pixelheight
    ):
        """
        Determine if the pseudo-terminal on the given channel can be resized.
        This only makes sense if a pty was previously allocated on it.

        The default implementation always returns ``False``.

        :param .Channel channel: the `.Channel` the pty request arrived on.
        :param int width: width of screen in characters.
        :param int height: height of screen in characters.
        :param int pixelwidth:
            width of screen in pixels, if known (may be ``0`` if unknown).
        :param int pixelheight:
            height of screen in pixels, if known (may be ``0`` if unknown).
        :return: ``True`` if the terminal was resized; ``False`` if not.
        """
        return False

    def check_channel_x11_request(
        self,
        channel,
        single_connection,
        auth_protocol,
        auth_cookie,
        screen_number,
    ):
        """
        Determine if the client will be provided with an X11 session.  If this
        method returns ``True``, X11 applications should be routed through new
        SSH channels, using `.Transport.open_x11_channel`.

        The default implementation always returns ``False``.

        :param .Channel channel: the `.Channel` the X11 request arrived on
        :param bool single_connection:
            ``True`` if only a single X11 channel should be opened, else
            ``False``.
        :param str auth_protocol: the protocol used for X11 authentication
        :param str auth_cookie: the cookie used to authenticate to X11
        :param int screen_number: the number of the X11 screen to connect to
        :return: ``True`` if the X11 session was opened; ``False`` if not
        """
        return False

    def check_channel_forward_agent_request(self, channel):
        """
        Determine if the client will be provided with a forward agent session.
        If this method returns ``True``, the server will allow SSH Agent
        forwarding.

        The default implementation always returns ``False``.

        :param .Channel channel: the `.Channel` the request arrived on
        :return: ``True`` if the AgentForward was loaded; ``False`` if not

        If ``True`` is returned, the server should create an
        :class:`AgentServerProxy` to access the agent.
        """
        return False

    def check_channel_direct_tcpip_request(self, chanid, origin, destination):
        """
        Determine if a local port forwarding channel will be granted, and
        return ``OPEN_SUCCEEDED`` or an error code.  This method is
        called in server mode when the client requests a channel, after
        authentication is complete.

        The ``chanid`` parameter is a small number that uniquely identifies the
        channel within a `.Transport`.  A `.Channel` object is not created
        unless this method returns ``OPEN_SUCCEEDED`` -- once a
        `.Channel` object is created, you can call `.Channel.get_id` to
        retrieve the channel ID.

        The origin and destination parameters are (ip_address, port) tuples
        that correspond to both ends of the TCP connection in the forwarding
        tunnel.

        The return value should either be ``OPEN_SUCCEEDED`` (or
        ``0``) to allow the channel request, or one of the following error
        codes to reject it:

            - ``OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED``
            - ``OPEN_FAILED_CONNECT_FAILED``
            - ``OPEN_FAILED_UNKNOWN_CHANNEL_TYPE``
            - ``OPEN_FAILED_RESOURCE_SHORTAGE``

        The default implementation always returns
        ``OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED``.

        :param int chanid: ID of the channel
        :param tuple origin:
            2-tuple containing the IP address and port of the originator
            (client side)
        :param tuple destination:
            2-tuple containing the IP address and port of the destination
            (server side)
        :return: an `int` success or failure code (listed above)
        """
        return OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED

    def check_channel_env_request(self, channel, name, value):
        """
        Check whether a given environment variable can be specified for the
        given channel.  This method should return ``True`` if the server
        is willing to set the specified environment variable.  Note that
        some environment variables (e.g., PATH) can be exceedingly
        dangerous, so blindly allowing the client to set the environment
        is almost certainly not a good idea.

        The default implementation always returns ``False``.

        :param channel: the `.Channel` the env request arrived on
        :param str name: name
        :param str value: Channel value
        :returns: A boolean
        """
        return False

    def get_banner(self):
        """
        A pre-login banner to display to the user.  The message may span
        multiple lines separated by crlf pairs.  The language should be in
        rfc3066 style, for example: en-US

        The default implementation always returns ``(None, None)``.

        :returns: A tuple containing the banner and language code.

        .. versionadded:: 2.3
        """
        return (None, None)
class InteractiveQuery:
    """
    A set of prompts presented to a user during interactive
    (keyboard-interactive) authentication.
    """

    def __init__(self, name="", instructions="", *prompts):
        """
        Create a new interactive query to send to the client.  The name and
        instructions are optional, but are generally displayed to the end
        user.  A list of prompts may be included, or they may be added via
        the `add_prompt` method.

        :param str name: name of this query
        :param str instructions:
            user instructions (usually short) about this query
        :param str prompts: one or more authentication prompts
        """
        self.name = name
        self.instructions = instructions
        self.prompts = []
        for entry in prompts:
            if isinstance(entry, str):
                # Bare string: echo defaults to True.
                self.add_prompt(entry)
            else:
                # Otherwise a (prompt, echo) pair.
                self.add_prompt(entry[0], entry[1])

    def add_prompt(self, prompt, echo=True):
        """
        Add a prompt to this query.  The prompt should be a (reasonably short)
        string.  Multiple prompts can be added to the same query.

        :param str prompt: the user prompt
        :param bool echo:
            ``True`` (default) if the user's response should be echoed;
            ``False`` if not (for a password or similar)
        """
        self.prompts.append((prompt, echo))
class SubsystemHandler(threading.Thread):
    """
    Handler for a subsystem in server mode.  If you create a subclass of this
    class and pass it to `.Transport.set_subsystem_handler`, an object of this
    class will be created for each request for this subsystem.  Each new object
    will be executed within its own new thread by calling `start_subsystem`.
    When that method completes, the channel is closed.

    For example, if you made a subclass ``MP3Handler`` and registered it as the
    handler for subsystem ``"mp3"``, then whenever a client has successfully
    authenticated and requests subsystem ``"mp3"``, an object of class
    ``MP3Handler`` will be created, and `start_subsystem` will be called on
    it from a new thread.
    """

    def __init__(self, channel, name, server):
        """
        Create a new handler for a channel.  This is used by `.ServerInterface`
        to start up a new handler when a channel requests this subsystem.  You
        don't need to override this method, but if you do, be sure to pass the
        ``channel`` and ``name`` parameters through to the original
        ``__init__`` method here.

        :param .Channel channel: the channel associated with this
            subsystem request.
        :param str name: name of the requested subsystem.
        :param .ServerInterface server:
            the server object for the session that started this subsystem
        """
        threading.Thread.__init__(self, target=self._run)
        self.__channel = channel
        self.__transport = channel.get_transport()
        self.__name = name
        self.__server = server

    def get_server(self):
        """
        Return the `.ServerInterface` object associated with this channel and
        subsystem.
        """
        return self.__server

    def _run(self):
        # Thread body: run the subsystem, log any failure, then always try
        # to clean up.
        try:
            self.__transport._log(
                DEBUG, "Starting handler for subsystem {}".format(self.__name)
            )
            self.start_subsystem(self.__name, self.__transport, self.__channel)
        except Exception as e:
            self.__transport._log(
                ERROR,
                'Exception in subsystem handler for "{}": {}'.format(
                    self.__name, e
                ),
            )
            self.__transport._log(ERROR, util.tb_strings())
        try:
            self.finish_subsystem()
        except Exception:
            # Cleanup is best-effort; never let it kill this thread.  (Was a
            # bare ``except:`` which also swallowed SystemExit and
            # KeyboardInterrupt.)
            pass

    def start_subsystem(self, name, transport, channel):
        """
        Process an ssh subsystem in server mode.  This method is called on a
        new object (and in a new thread) for each subsystem request.  It is
        assumed that all subsystem logic will take place here, and when the
        subsystem is finished, this method will return.  After this method
        returns, the channel is closed.

        The combination of ``transport`` and ``channel`` are unique; this
        handler corresponds to exactly one `.Channel` on one `.Transport`.

        .. note::
            It is the responsibility of this method to exit if the underlying
            `.Transport` is closed.  This can be done by checking
            `.Transport.is_active` or noticing an EOF on the `.Channel`.  If
            this method loops forever without checking for this case, your
            Python interpreter may refuse to exit because this thread will
            still be running.

        :param str name: name of the requested subsystem.
        :param .Transport transport: the server-mode `.Transport`.
        :param .Channel channel: the channel associated with this subsystem
            request.
        """
        pass

    def finish_subsystem(self):
        """
        Perform any cleanup at the end of a subsystem.  The default
        implementation just closes the channel.

        .. versionadded:: 1.1
        """
        self.__channel.close()
| 30,457 | Python | .py | 618 | 39.953074 | 79 | 0.653322 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
634 | agent.py | paramiko_paramiko/paramiko/agent.py | # Copyright (C) 2003-2007 John Rochester <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
SSH Agent interface
"""
import os
import socket
import struct
import sys
import threading
import time
import tempfile
import stat
from logging import DEBUG
from select import select
from paramiko.common import io_sleep, byte_chr
from paramiko.ssh_exception import SSHException, AuthenticationException
from paramiko.message import Message
from paramiko.pkey import PKey, UnknownKeyType
from paramiko.util import asbytes, get_logger
# Agent protocol message numbers; the ``c``-prefixed names are pre-encoded
# as single bytes, ready to be written straight onto the wire.
cSSH2_AGENTC_REQUEST_IDENTITIES = byte_chr(11)
SSH2_AGENT_IDENTITIES_ANSWER = 12
cSSH2_AGENTC_SIGN_REQUEST = byte_chr(13)
SSH2_AGENT_SIGN_RESPONSE = 14
# Flag bits sent with a sign request to select an RSA-SHA2 signature.
SSH_AGENT_RSA_SHA2_256 = 2
SSH_AGENT_RSA_SHA2_512 = 4
# NOTE: RFC mildly confusing; while these flags are OR'd together, OpenSSH at
# least really treats them like "AND"s, in the sense that if it finds the
# SHA256 flag set it won't continue looking at the SHA512 one; it
# short-circuits right away.
# Thus, we never want to eg submit 6 to say "either's good".
ALGORITHM_FLAG_MAP = {
    "rsa-sha2-256": SSH_AGENT_RSA_SHA2_256,
    "rsa-sha2-512": SSH_AGENT_RSA_SHA2_512,
}
# The certificate variants of these algorithms use the same flag values.
for key, value in list(ALGORITHM_FLAG_MAP.items()):
    ALGORITHM_FLAG_MAP[f"{key}[email protected]"] = value
# TODO 4.0: rename all these - including making some of their methods public?
class AgentSSH:
    """
    Shared plumbing for talking to an SSH agent over a socket-like
    connection: cached key listing plus the low-level, length-framed
    message exchange.
    """

    def __init__(self):
        self._conn = None
        self._keys = ()

    def get_keys(self):
        """
        Return the list of keys available through the SSH agent, if any.  If
        no SSH agent was running (or it couldn't be contacted), an empty list
        will be returned.

        This method performs no IO, just returns the list of keys retrieved
        when the connection was made.

        :return:
            a tuple of `.AgentKey` objects representing keys available on the
            SSH agent
        """
        return self._keys

    def _connect(self, conn):
        # Ask the agent for its identities and cache them as AgentKey objects.
        self._conn = conn
        ptype, result = self._send_message(cSSH2_AGENTC_REQUEST_IDENTITIES)
        if ptype != SSH2_AGENT_IDENTITIES_ANSWER:
            raise SSHException("could not get keys from ssh-agent")
        count = result.get_int()
        self._keys = tuple(
            AgentKey(
                agent=self,
                blob=result.get_binary(),
                comment=result.get_text(),
            )
            for _ in range(count)
        )

    def _close(self):
        if self._conn is not None:
            self._conn.close()
        self._conn = None
        self._keys = ()

    def _send_message(self, msg):
        # Frame ``msg`` with a big-endian 4-byte length, send it, then read
        # one framed reply; return (message type byte, remaining payload).
        payload = asbytes(msg)
        self._conn.send(struct.pack(">I", len(payload)) + payload)
        header = self._read_all(4)
        (length,) = struct.unpack(">I", header)
        reply = Message(self._read_all(length))
        return ord(reply.get_byte()), reply

    def _read_all(self, wanted):
        # Read exactly ``wanted`` bytes; an empty recv means the agent died.
        data = self._conn.recv(wanted)
        while len(data) < wanted:
            if not data:
                raise SSHException("lost ssh-agent")
            more = self._conn.recv(wanted - len(data))
            if not more:
                raise SSHException("lost ssh-agent")
            data += more
        return data
class AgentProxyThread(threading.Thread):
    """
    Class in charge of communication between two channels.

    Abstract base: subclasses must provide ``get_connection()`` returning
    the local endpoint to bridge with the agent's connection.
    """
    def __init__(self, agent):
        # ``agent`` provides ``connect()`` and (after connecting) a ``_conn``
        # socket-like attribute used as the far endpoint of the relay.
        threading.Thread.__init__(self, target=self.run)
        self._agent = agent
        self._exit = False  # set True (by _close) to stop the relay loop
    def run(self):
        try:
            (r, addr) = self.get_connection()
            # Found that r should be either
            # a socket from the socket library or None
            self.__inr = r
            # The address should be an IP address as a string? or None
            self.__addr = addr
            self._agent.connect()
            # NOTE(review): the isinstance(int) test implies ``_agent`` may
            # sometimes be a raw file descriptor -- confirm before relying
            # on that here.
            if not isinstance(self._agent, int) and (
                self._agent._conn is None
                or not hasattr(self._agent._conn, "fileno")
            ):
                raise AuthenticationException("Unable to connect to SSH agent")
            self._communicate()
        except:
            # XXX Not sure what to do here ... raise or pass ?
            raise
    def _communicate(self):
        # Relay bytes between the two endpoints until one side closes.
        # fcntl is imported lazily: it only exists on POSIX platforms.
        import fcntl
        # Make the local endpoint non-blocking so recv never stalls the loop.
        oldflags = fcntl.fcntl(self.__inr, fcntl.F_GETFL)
        fcntl.fcntl(self.__inr, fcntl.F_SETFL, oldflags | os.O_NONBLOCK)
        while not self._exit:
            # Wait (up to 0.5s) for either side to become readable.
            events = select([self._agent._conn, self.__inr], [], [], 0.5)
            for fd in events[0]:
                if self._agent._conn == fd:
                    data = self._agent._conn.recv(512)
                    if len(data) != 0:
                        self.__inr.send(data)
                    else:
                        # Empty read: agent side closed; tear down the relay.
                        self._close()
                        break
                elif self.__inr == fd:
                    data = self.__inr.recv(512)
                    if len(data) != 0:
                        self._agent._conn.send(data)
                    else:
                        # Empty read: local side closed; tear down the relay.
                        self._close()
                        break
            time.sleep(io_sleep)
    def _close(self):
        # Stop the loop and shut both endpoints.
        self._exit = True
        self.__inr.close()
        self._agent._conn.close()
class AgentLocalProxy(AgentProxyThread):
    """
    Class to be used when wanting to ask a local SSH Agent being
    asked from a remote fake agent (so use a unix socket for ex.)
    """

    def __init__(self, agent):
        AgentProxyThread.__init__(self, agent)

    def get_connection(self):
        """
        Return a pair of socket object and string address.

        May block!
        """
        conn = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        try:
            conn.bind(self._agent._get_filename())
            conn.listen(1)
            (r, addr) = conn.accept()
            return r, addr
        except BaseException:
            # Previously a bare ``try/except: raise`` that added nothing and
            # leaked the listening socket on failure; close it before
            # propagating the error.
            conn.close()
            raise
class AgentRemoteProxy(AgentProxyThread):
    """
    Proxy thread used when querying a remote SSH Agent.
    """

    def __init__(self, agent, chan):
        AgentProxyThread.__init__(self, agent)
        self.__chan = chan

    def get_connection(self):
        # The channel is already open, so there is no address to report.
        return self.__chan, None
def get_agent_connection():
    """
    Returns some SSH agent object, or None if none were found/supported.

    On POSIX, the agent is located via the ``SSH_AUTH_SOCK`` environment
    variable; on Windows, Pageant and the native OpenSSH agent are tried in
    that order.

    .. versionadded:: 2.10
    """
    if ("SSH_AUTH_SOCK" in os.environ) and (sys.platform != "win32"):
        conn = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        try:
            conn.connect(os.environ["SSH_AUTH_SOCK"])
        except OSError:
            # probably a dangling env var: the ssh agent is gone.  (Was a
            # bare ``except:``; all socket/filesystem connect failures are
            # OSError subclasses.)  Close the socket we just created so it
            # isn't leaked.
            conn.close()
            return None
        return conn
    elif sys.platform == "win32":
        from . import win_pageant, win_openssh

        conn = None
        if win_pageant.can_talk_to_agent():
            conn = win_pageant.PageantConnection()
        elif win_openssh.can_talk_to_agent():
            conn = win_openssh.OpenSSHAgentConnection()
        return conn
    else:
        # no agent support
        return None
class AgentClientProxy:
    """
    Class proxying request as a client:

    #. client ask for a request_forward_agent()
    #. server creates a proxy and a fake SSH Agent
    #. server ask for establishing a connection when needed,
       calling the forward_agent_handler at client side.
    #. the forward_agent_handler launch a thread for connecting
       the remote fake agent and the local agent
    #. Communication occurs ...
    """

    def __init__(self, chanRemote):
        self._conn = None
        self.__chanR = chanRemote
        # Spin up the relay thread bridging the remote channel and the
        # local agent.
        self.thread = AgentRemoteProxy(self, chanRemote)
        self.thread.start()

    def __del__(self):
        self.close()

    def connect(self):
        """
        Method automatically called by ``AgentProxyThread.run``.
        """
        conn = get_agent_connection()
        if conn:
            self._conn = conn

    def close(self):
        """
        Close the current connection and terminate the agent

        Should be called manually
        """
        if hasattr(self, "thread"):
            self.thread._exit = True
            self.thread.join(1000)
        if self._conn is not None:
            self._conn.close()
class AgentServerProxy(AgentSSH):
    """
    Allows an SSH server to access a forwarded agent.

    This also creates a unix domain socket on the system to allow external
    programs to also access the agent. For this reason, you probably only want
    to create one of these.

    :meth:`connect` must be called before it is usable. This will also load the
    list of keys the agent contains. You must also call :meth:`close` in
    order to clean up the unix socket and the thread that maintains it.
    (:class:`contextlib.closing` might be helpful to you.)

    :param .Transport t: Transport used for SSH Agent communication forwarding

    :raises: `.SSHException` -- mostly if we lost the agent
    """
    def __init__(self, t):
        AgentSSH.__init__(self)
        self.__t = t
        # Private scratch directory for the unix socket; owner-only perms.
        self._dir = tempfile.mkdtemp("sshproxy")
        os.chmod(self._dir, stat.S_IRWXU)
        # Path of the unix socket external programs connect to.
        self._file = self._dir + "/sshproxy.ssh"
        # Background thread that listens on the socket and relays traffic.
        self.thread = AgentLocalProxy(self)
        self.thread.start()
    def __del__(self):
        self.close()
    def connect(self):
        # Open the forwarded-agent channel on the transport and fetch keys.
        conn_sock = self.__t.open_forward_agent_channel()
        if conn_sock is None:
            raise SSHException("lost ssh-agent")
        conn_sock.set_name("auth-agent")
        self._connect(conn_sock)
    def close(self):
        """
        Terminate the agent, clean the files, close connections
        Should be called manually
        """
        # NOTE(review): os.remove raises FileNotFoundError if the proxy
        # thread never bound the socket file -- confirm callers only close
        # after a connection was accepted.
        os.remove(self._file)
        os.rmdir(self._dir)
        self.thread._exit = True
        self.thread.join(1000)
        self._close()
    def get_env(self):
        """
        Helper for the environment under unix

        :return:
            a dict containing the ``SSH_AUTH_SOCK`` environment variables
        """
        return {"SSH_AUTH_SOCK": self._get_filename()}
    def _get_filename(self):
        # Path used by AgentLocalProxy.get_connection to bind the socket.
        return self._file
class AgentRequestHandler:
    """
    Primary/default implementation of SSH agent forwarding functionality.

    Simply instantiate this class, handing it a live command-executing session
    object, and it will handle forwarding any local SSH agent processes it
    finds.

    For example::

        # Connect
        client = SSHClient()
        client.connect(host, port, username)
        # Obtain session
        session = client.get_transport().open_session()
        # Forward local agent
        AgentRequestHandler(session)
        # Commands executed after this point will see the forwarded agent on
        # the remote end.
        session.exec_command("git clone https://my.git.repository/")
    """

    def __init__(self, chanClient):
        self._conn = None
        self.__chanC = chanClient
        # Register ourselves so each agent channel the server opens gets a
        # client proxy.
        chanClient.request_forward_agent(self._forward_agent_handler)
        self.__clientProxys = []

    def _forward_agent_handler(self, chanRemote):
        self.__clientProxys.append(AgentClientProxy(chanRemote))

    def __del__(self):
        self.close()

    def close(self):
        for proxy in self.__clientProxys:
            proxy.close()
class Agent(AgentSSH):
    """
    Client interface for using private keys from an SSH agent running on the
    local machine. If an SSH agent is running, this class can be used to
    connect to it and retrieve `.PKey` objects which can be used when
    attempting to authenticate to remote SSH servers.

    Upon initialization, a session with the local machine's SSH agent is
    opened, if one is running. If no agent is running, initialization will
    succeed, but `get_keys` will return an empty tuple.

    :raises: `.SSHException` --
        if an SSH agent is found, but speaks an incompatible protocol

    .. versionchanged:: 2.10
        Added support for native openssh agent on windows (extending previous
        putty pageant support)
    """

    def __init__(self):
        """
        Connect to the local SSH agent, if one is running.

        If no agent connection can be obtained, the instance is still valid
        but holds no keys.
        """
        AgentSSH.__init__(self)
        conn = get_agent_connection()
        if not conn:
            # No agent available; leave the instance unconnected (empty).
            return
        self._connect(conn)

    def close(self):
        """
        Close the SSH agent connection.
        """
        self._close()
class AgentKey(PKey):
    """
    Private key held in a local SSH agent. This type of key can be used for
    authenticating to a remote server (signing). Most other key operations
    work as expected.

    .. versionchanged:: 3.2
        Added the ``comment`` kwarg and attribute.

    .. versionchanged:: 3.2
        Added the ``.inner_key`` attribute holding a reference to the 'real'
        key instance this key is a proxy for, if one was obtainable, else None.
    """

    def __init__(self, agent, blob, comment=""):
        """
        :param agent: the `.AgentSSH` connection this key belongs to
        :param bytes blob: the agent-supplied public key blob
        :param str comment: optional key comment reported by the agent
        """
        self.agent = agent
        self.blob = blob
        self.comment = comment
        # The blob starts with the key type string (e.g. b"ssh-rsa").
        msg = Message(blob)
        self.name = msg.get_text()
        self._logger = get_logger(__file__)
        self.inner_key = None
        try:
            self.inner_key = PKey.from_type_string(
                key_type=self.name, key_bytes=blob
            )
        except UnknownKeyType:
            # Log, but don't explode, since inner_key is a best-effort thing.
            err = "Unable to derive inner_key for agent key of type {!r}"
            self.log(DEBUG, err.format(self.name))

    def log(self, *args, **kwargs):
        # Thin passthrough to our logger instance.
        return self._logger.log(*args, **kwargs)

    def asbytes(self):
        # Prefer inner_key.asbytes, since that will differ for eg RSA-CERT
        return self.inner_key.asbytes() if self.inner_key else self.blob

    def get_name(self):
        # Key type string parsed from the blob in __init__.
        return self.name

    def get_bits(self):
        # Have to work around PKey's default get_bits being crap
        if self.inner_key is not None:
            return self.inner_key.get_bits()
        return super().get_bits()

    def __getattr__(self, name):
        """
        Proxy any un-implemented methods/properties to the inner_key.
        """
        if self.inner_key is None:  # nothing to proxy to
            raise AttributeError(name)
        return getattr(self.inner_key, name)

    @property
    def _fields(self):
        # Used for equality/hash comparisons; fall back to the raw blob when
        # no inner key could be derived.
        fallback = [self.get_name(), self.blob]
        return self.inner_key._fields if self.inner_key else fallback

    def sign_ssh_data(self, data, algorithm=None):
        """
        Ask the agent to sign ``data`` with this key.

        :param bytes data: the data to sign
        :param str algorithm: optional signature algorithm name, mapped to an
            agent flag via ``ALGORITHM_FLAG_MAP`` (0 if unknown/None)
        :raises: `.SSHException` -- if the agent refuses to sign
        :return: the raw signature bytes
        """
        msg = Message()
        msg.add_byte(cSSH2_AGENTC_SIGN_REQUEST)
        # NOTE: this used to be just self.blob, which is not entirely right for
        # RSA-CERT 'keys' - those end up always degrading to ssh-rsa type
        # signatures, for reasons probably internal to OpenSSH's agent code,
        # even if everything else wants SHA2 (including our flag map).
        msg.add_string(self.asbytes())
        msg.add_string(data)
        msg.add_int(ALGORITHM_FLAG_MAP.get(algorithm, 0))
        ptype, result = self.agent._send_message(msg)
        if ptype != SSH2_AGENT_SIGN_RESPONSE:
            raise SSHException("key cannot be used for signing")
        return result.get_binary()
| 15,877 | Python | .py | 414 | 30.130435 | 79 | 0.620156 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
635 | buffered_pipe.py | paramiko_paramiko/paramiko/buffered_pipe.py | # Copyright (C) 2006-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Attempt to generalize the "feeder" part of a `.Channel`: an object which can be
read from and closed, but is reading from a buffer fed by another thread. The
read operations are blocking and can have a timeout set.
"""
import array
import threading
import time
from paramiko.util import b
class PipeTimeout(IOError):
    """
    Indicates that a timeout was reached on a read from a `.BufferedPipe`.

    Subclasses `IOError` so callers that already catch I/O errors keep
    working without special-casing this exception.
    """

    pass
class BufferedPipe:
    """
    A buffer that obeys normal read (with timeout) & close semantics for a
    file or socket, but is fed data from another thread. This is used by
    `.Channel`.

    All public methods take the internal lock, so instances are safe to use
    from multiple threads (typically one feeder and one reader).
    """

    def __init__(self):
        self._lock = threading.Lock()
        # Condition shares self._lock: waiters in read() are woken by feed()
        # and close() via notify_all().
        self._cv = threading.Condition(self._lock)
        # Optional external threading.Event mirrored to "data ready or closed".
        self._event = None
        self._buffer = array.array("B")
        self._closed = False

    def _buffer_frombytes(self, data):
        # Append raw bytes to the internal byte array (caller holds the lock).
        self._buffer.frombytes(data)

    def _buffer_tobytes(self, limit=None):
        # Snapshot up to `limit` buffered bytes as a bytes object.
        return self._buffer[:limit].tobytes()

    def set_event(self, event):
        """
        Set an event on this buffer. When data is ready to be read (or the
        buffer has been closed), the event will be set. When no data is
        ready, the event will be cleared.

        :param threading.Event event: the event to set/clear
        """
        self._lock.acquire()
        try:
            self._event = event
            # Make sure the event starts in `set` state if we appear to already
            # be closed; otherwise, if we start in `clear` state & are closed,
            # nothing will ever call `.feed` and the event (& OS pipe, if we're
            # wrapping one - see `Channel.fileno`) will permanently stay in
            # `clear`, causing deadlock if e.g. `select`ed upon.
            if self._closed or len(self._buffer) > 0:
                event.set()
            else:
                event.clear()
        finally:
            self._lock.release()

    def feed(self, data):
        """
        Feed new data into this pipe. This method is assumed to be called
        from a separate thread, so synchronization is done.

        :param data: the data to add, as a ``str`` or ``bytes``
        """
        self._lock.acquire()
        try:
            if self._event is not None:
                self._event.set()
            self._buffer_frombytes(b(data))
            # Wake any reader blocked in read().
            self._cv.notify_all()
        finally:
            self._lock.release()

    def read_ready(self):
        """
        Returns true if data is buffered and ready to be read from this
        feeder. A ``False`` result does not mean that the feeder has closed;
        it means you may need to wait before more data arrives.

        :return:
            ``True`` if a `read` call would immediately return at least one
            byte; ``False`` otherwise.
        """
        self._lock.acquire()
        try:
            if len(self._buffer) == 0:
                return False
            return True
        finally:
            self._lock.release()

    def read(self, nbytes, timeout=None):
        """
        Read data from the pipe. The return value is a string representing
        the data received. The maximum amount of data to be received at once
        is specified by ``nbytes``. If a string of length zero is returned,
        the pipe has been closed.

        The optional ``timeout`` argument can be a nonnegative float expressing
        seconds, or ``None`` for no timeout. If a float is given, a
        `.PipeTimeout` will be raised if the timeout period value has elapsed
        before any data arrives.

        :param int nbytes: maximum number of bytes to read
        :param float timeout:
            maximum seconds to wait (or ``None``, the default, to wait forever)
        :return: the read data, as a ``str`` or ``bytes``

        :raises:
            `.PipeTimeout` -- if a timeout was specified and no data was ready
            before that timeout
        """
        out = bytes()
        self._lock.acquire()
        try:
            if len(self._buffer) == 0:
                if self._closed:
                    return out
                # should we block?
                if timeout == 0.0:
                    raise PipeTimeout()
                # loop here in case we get woken up but a different thread has
                # grabbed everything in the buffer.
                while (len(self._buffer) == 0) and not self._closed:
                    then = time.time()
                    self._cv.wait(timeout)
                    # Shrink the remaining timeout by however long we actually
                    # waited, so spurious wakeups don't extend the deadline.
                    if timeout is not None:
                        timeout -= time.time() - then
                        if timeout <= 0.0:
                            raise PipeTimeout()
            # something's in the buffer and we have the lock!
            if len(self._buffer) <= nbytes:
                # Taking everything: drain the buffer and clear the "ready"
                # event (unless closed, which keeps it permanently set).
                out = self._buffer_tobytes()
                del self._buffer[:]
                if (self._event is not None) and not self._closed:
                    self._event.clear()
            else:
                # Taking a prefix: leave the remainder (and the event) alone.
                out = self._buffer_tobytes(nbytes)
                del self._buffer[:nbytes]
        finally:
            self._lock.release()
        return out

    def empty(self):
        """
        Clear out the buffer and return all data that was in it.

        :return:
            any data that was in the buffer prior to clearing it out, as a
            `str`
        """
        self._lock.acquire()
        try:
            out = self._buffer_tobytes()
            del self._buffer[:]
            if (self._event is not None) and not self._closed:
                self._event.clear()
            return out
        finally:
            self._lock.release()

    def close(self):
        """
        Close this pipe object. Future calls to `read` after the buffer
        has been emptied will return immediately with an empty string.
        """
        self._lock.acquire()
        try:
            self._closed = True
            # Wake blocked readers and leave the event permanently set so
            # pollers notice the EOF condition.
            self._cv.notify_all()
            if self._event is not None:
                self._event.set()
        finally:
            self._lock.release()

    def __len__(self):
        """
        Return the number of bytes buffered.

        :return: number (`int`) of bytes buffered
        """
        self._lock.acquire()
        try:
            return len(self._buffer)
        finally:
            self._lock.release()
| 7,225 | Python | .py | 185 | 29.281081 | 79 | 0.581206 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
636 | contact.rst | paramiko_paramiko/sites/www/contact.rst | =======
Contact
=======
You can get in touch with the developer & user community in any of the
following ways:
* Submit contributions on Github - see the :doc:`contributing` page.
* Follow ``@bitprophet`` on Twitter, though it's not a dedicated account and
mostly just retweets funny pictures.
* Subscribe to the ``paramiko`` category on the developer's blog:
http://bitprophet.org/categories/paramiko/
| 409 | Python | .tac | 10 | 39.3 | 76 | 0.753149 | paramiko/paramiko | 9,017 | 1,999 | 1,074 | LGPL-2.1 | 9/5/2024, 5:07:36 PM (Europe/Amsterdam) |
637 | conftest.py | Knio_dominate/conftest.py | # used by pytest to test against the local copy of dominate
# (this file is important even though it is empty)
| 111 | Python | .py | 2 | 54.5 | 59 | 0.779817 | Knio/dominate | 1,689 | 109 | 18 | LGPL-3.0 | 9/5/2024, 5:07:54 PM (Europe/Amsterdam) |
638 | setup.py | Knio_dominate/setup/setup.py | __license__ = '''
This file is part of Dominate.
Dominate is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as
published by the Free Software Foundation, either version 3 of
the License, or (at your option) any later version.
Dominate is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General
Public License along with dominate. If not, see
<http://www.gnu.org/licenses/>.
'''
# pylint: disable=bad-whitespace
from setuptools import setup
import imp
_version = imp.load_source("dominate._version", "dominate/_version.py")
long_description = open('README.md').read()
setup(
name = 'dominate',
version = _version.__version__,
author = 'Tom Flanagan and Jake Wharton',
author_email = '[email protected]',
license = 'LGPLv3',
url = 'https://github.com/Knio/dominate/',
description = 'Dominate is a Python library for creating and manipulating HTML documents using an elegant DOM API.',
long_description = long_description,
long_description_content_type='text/markdown',
keywords = 'framework templating template html xhtml python html5',
python_requires='>=2.7, <3',
classifiers = [
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Text Processing :: Markup :: HTML',
],
packages = ['dominate'],
include_package_data = True,
)
| 2,371 | Python | .py | 54 | 40.851852 | 123 | 0.709705 | Knio/dominate | 1,689 | 109 | 18 | LGPL-3.0 | 9/5/2024, 5:07:54 PM (Europe/Amsterdam) |
639 | svg.py | Knio_dominate/dominate/svg.py | '''
This module consists of classes specific to HTML5-SVG Elements. In general this module does not include
- Elements that are not specific to SVG (eg. <a>)
- Elements that are deprecated
'''
from dominate.tags import html_tag
from dominate.dom_tag import dom_tag
import numbers
__license__ = '''
This file is part of Dominate.
Dominate is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as
published by the Free Software Foundation, either version 3 of
the License, or (at your option) any later version.
Dominate is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General
Public License along with Dominate. If not, see
<http://www.gnu.org/licenses/>.
'''
# Tag attributes
_ATTR_GLOBAL = set([
'accesskey', 'class', 'class', 'contenteditable', 'contextmenu', 'dir',
'draggable', 'id', 'item', 'hidden', 'lang', 'itemprop', 'spellcheck',
'style', 'subject', 'tabindex', 'title'
])
# https://developer.mozilla.org/en-US/docs/Web/SVG/Attribute/Events#Attributes
_ATTR_EVENTS = set([
'onbegin', 'onend', 'onrepeat',
'onabort', 'onerror', 'onresize', 'onscroll', 'onunload',
'oncopy', 'oncut', 'onpaste',
'oncancel', 'oncanplay', 'oncanplaythrough', 'onchange', 'onclick', 'onclose', 'oncuechange', 'ondblclick',
'ondrag', 'ondragend', 'ondragenter', 'ondragexit', 'ondragleave', 'ondragover', 'ondragstart', 'ondrop',
'ondurationchange', 'onemptied', 'onended', 'onerror', 'onfocus', 'oninput', 'oninvalid', 'onkeydown', 'onkeypress',
'onkeyup', 'onload', 'onloadeddata', 'onloadedmetadata','onloadstart', 'onmousedown', 'onmouseenter',
'onmouseleave', 'onmousemove', 'onmouseout', 'onmouseover', 'onmouseup', 'onmousewheel', 'onpause', 'onplay',
'onplaying', 'onprogress', 'onratechange', 'onreset', 'onresize', 'onscroll', 'onseeked', 'onseeking', 'onselect',
'onshow', 'onstalled', 'onsubmit', 'onsuspend', 'ontimeupdate', 'ontoggle', 'onvolumechange', 'onwaiting'
])
DASHED_ATTRIBUTES = set([
'accent', 'alignment', 'arabic', 'baseline', 'cap', 'clip', 'color', 'dominant', 'enable', 'fill', 'flood',
'font', 'glyph', 'horiz', 'image', 'letter', 'lighting', 'marker', 'overline', 'paint', 'panose', 'pointer',
'rendering', 'shape', 'stop', 'strikethrough', 'stroke', 'text', 'underline', 'unicode', 'units', 'v', 'vector',
'vert', 'word', 'writing', 'x'
])
# https://developer.mozilla.org/en-US/docs/Web/SVG/Element/svg
class svg_tag(html_tag):
  @staticmethod
  def clean_attribute(attribute):
    '''
    Normalize an attribute name via html_tag, then restore dashes for SVG
    attributes whose canonical form is hyphenated (e.g. ``stroke-width``).
    '''
    attribute = html_tag.clean_attribute(attribute)
    prefix = attribute.split('_')[0]
    if prefix in DASHED_ATTRIBUTES:
      return attribute.replace('_', '-')
    return attribute
class svg(svg_tag):
  '''
  The <svg> element is the root container defining a new SVG document
  fragment and coordinate system.
  '''
  pass
class animate(svg_tag):
  '''
  The animate SVG element is used to animate an attribute or property of an element over time.
  It's normally inserted inside the element or referenced by the href attribute of the target element.
  '''
  pass
class animateMotion(svg_tag):
  '''
  The <animateMotion> element causes a referenced element to move along a motion path.
  '''
  pass
class animateTransform(svg_tag):
  '''
  The animateTransform element animates a transformation attribute on its target element, thereby allowing
  animations to control translation, scaling, rotation, and/or skewing.
  '''
  # Rendered as a self-closing/void tag (no children, no closing tag).
  is_single = True
class circle(svg_tag):
  '''
  The <circle> SVG element is an SVG basic shape, used to draw circles based on a center point and a radius.
  '''
  pass
class clipPath(svg_tag):
  '''
  The <clipPath> SVG element defines a clipping path, to be used by the clip-path property.
  '''
  pass
class defs(svg_tag):
  '''
  The <defs> element is used to store graphical objects that will be used at a later time. Objects created inside a
  <defs> element are not rendered directly. To display them you have to reference them
  (with a <use> element for example).
  '''
  pass
class desc(svg_tag):
  '''
  The <desc> element provides an accessible, long-text description of any SVG container element or graphics element.
  '''
  pass
class ellipse(svg_tag):
  '''
  The <ellipse> SVG element is an SVG basic shape, used to create ellipses
  based on a center coordinate and both their x and y radius.
  '''
  pass
# (Note, filters are at the bottom of this file)
class g(svg_tag):
  '''
  The <g> SVG element is a container used to group other SVG elements.
  '''
  pass
class image(svg_tag):
  '''
  The <image> SVG element includes images inside SVG documents. It can display raster image files or other SVG files.
  '''
  pass
class line(svg_tag):
  '''
  The <line> element is an SVG basic shape used to create a line connecting two points.
  '''
  pass
class linearGradient(svg_tag):
  '''
  The <linearGradient> element lets authors define linear gradients that can be applied to fill or
  stroke of graphical elements.
  '''
  pass
class marker(svg_tag):
  '''
  The <marker> element defines the graphic that is to be used for drawing arrowheads or polymarkers on a given <path>, <line>, <polyline> or <polygon> element.
  '''
  pass
class mask(svg_tag):
  '''
  The <mask> element defines an alpha mask for compositing the current object into the background.
  A mask is used/referenced using the mask property.
  '''
  pass
class mpath(svg_tag):
  '''
  The <mpath> sub-element for the <animateMotion> element provides the ability to reference an
  external <path> element as the definition of a motion path.
  '''
  pass
class pattern(svg_tag):
  '''
  The <pattern> element defines a graphics object which can be redrawn at repeated x and y-coordinate
  intervals ("tiled") to cover an area.
  '''
  pass
class polygon(svg_tag):
  '''
  The <polygon> SVG element is a closed shape defined by a set of connected
  straight line segments (the last point connects back to the first).
  '''
  pass
class polyline(svg_tag):
  '''
  The <polyline> SVG element creates straight line segments connecting a
  series of points, without closing the shape.
  '''
  pass
class radialGradient(svg_tag):
  '''
  The <radialGradient> element lets authors define radial gradients that can be applied to fill
  or stroke of graphical elements.
  '''
  pass
class path(svg_tag):
  '''
  The <path> SVG element is the generic shape element, defined by a sequence
  of path commands in its ``d`` attribute.
  '''
  pass
class rect(svg_tag):
  '''
  The <rect> SVG element is a basic shape that draws rectangles, defined by
  their position, width, and height.
  '''
  pass
class stop(svg_tag):
  '''
  The SVG <stop> element defines a color and its position to use on a gradient.
  This element is always a child of a <linearGradient> or <radialGradient> element.
  '''
  pass
class switch(svg_tag):
  '''
  The <switch> SVG element evaluates any requiredFeatures, requiredExtensions and systemLanguage attributes
  on its direct child elements in order, and then renders the first child where these attributes evaluate to true.
  Other direct children will be bypassed and therefore not rendered. If a child element is a container element,
  like <g>, then its subtree is also processed/rendered or bypassed/not rendered.
  '''
  pass
class symbol(svg_tag):
  '''
  The use of symbol elements for graphics that are used multiple times in the same document adds structure and
  semantics. Documents that are rich in structure may be rendered graphically, as speech, or as Braille,
  and thus promote accessibility.
  '''
  pass
class text(svg_tag):
  '''
  The SVG <text> element draws a graphics element consisting of text. It's possible to apply a gradient,
  pattern, clipping path, mask, or filter to <text>, like any other SVG graphics element.
  '''
  pass
class textPath(svg_tag):
  '''
  To render text along the shape of a <path>, enclose the text in a <textPath> element that has an href
  attribute with a reference to the <path> element.
  '''
  pass
class title(svg_tag):
  '''
  The <title> element provides an accessible, short-text description of any SVG container
  element or graphics element.
  '''
  pass
class tspan(svg_tag):
  '''
  The SVG <tspan> element define a subtext within a <text> element or another <tspan> element.
  It allows to adjust the style and/or position of that subtext as needed.
  '''
  pass
class use(svg_tag):
  '''
  The <use> element takes nodes from within the SVG document, and duplicates them somewhere else.
  '''
  pass
class view(svg_tag):
  '''
  A view is a defined way to view the image, like a zoom level or a detail view.
  '''
  pass
# FILTERS
# Each class below maps to the SVG filter element of the same name; see
# https://developer.mozilla.org/en-US/docs/Web/SVG/Element#filter_primitive_elements
class filter(svg_tag):
  '''
  The <filter> SVG element defines a custom filter effect, referenced by the
  ``filter`` property of the element it applies to.
  NOTE: within this module, this class shadows the ``filter`` builtin.
  '''
  pass
class feBlend(svg_tag):
  '''The <feBlend> SVG filter primitive.'''
  pass
class feColorMatrix(svg_tag):
  '''The <feColorMatrix> SVG filter primitive.'''
  pass
class feComponentTransfer(svg_tag):
  '''The <feComponentTransfer> SVG filter primitive.'''
  pass
class feComposite(svg_tag):
  '''The <feComposite> SVG filter primitive.'''
  pass
class feConvolveMatrix(svg_tag):
  '''The <feConvolveMatrix> SVG filter primitive.'''
  pass
class feDiffuseLighting(svg_tag):
  '''The <feDiffuseLighting> SVG filter primitive.'''
  pass
class feDisplacementMap(svg_tag):
  '''The <feDisplacementMap> SVG filter primitive.'''
  pass
class feFlood(svg_tag):
  '''The <feFlood> SVG filter primitive.'''
  pass
class feGaussianBlur(svg_tag):
  '''The <feGaussianBlur> SVG filter primitive.'''
  pass
class feImage(svg_tag):
  '''The <feImage> SVG filter primitive.'''
  pass
class feMerge(svg_tag):
  '''The <feMerge> SVG filter primitive.'''
  pass
class feMorphology(svg_tag):
  '''The <feMorphology> SVG filter primitive.'''
  pass
class feOffset(svg_tag):
  '''The <feOffset> SVG filter primitive.'''
  pass
class feSpecularLighting(svg_tag):
  '''The <feSpecularLighting> SVG filter primitive.'''
  pass
class feTile(svg_tag):
  '''The <feTile> SVG filter primitive.'''
  pass
class feTurbulence(svg_tag):
  '''The <feTurbulence> SVG filter primitive.'''
  pass
class feDistantLight(svg_tag):
  '''The <feDistantLight> SVG light source element.'''
  pass
class fePointLight(svg_tag):
  '''The <fePointLight> SVG light source element.'''
  pass
class feSpotLight(svg_tag):
  '''The <feSpotLight> SVG light source element.'''
  pass
640 | util.py | Knio_dominate/dominate/util.py | '''
Utility classes for creating dynamic html documents
'''
__license__ = '''
This file is part of Dominate.
Dominate is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as
published by the Free Software Foundation, either version 3 of
the License, or (at your option) any later version.
Dominate is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General
Public License along with Dominate. If not, see
<http://www.gnu.org/licenses/>.
'''
import re
from .dom_tag import dom_tag
# Python 2/3 compatibility shim: Python 3 has no `basestring` or `unichr`
# builtins, so alias them to their `str`-based equivalents there.
try:
  basestring = basestring
except NameError:
  basestring = str
  unichr = chr
def include(f):
  '''
  includes the contents of a file on disk.
  takes a filename
  '''
  # Context manager guarantees the handle is closed even if read() raises
  # (the previous open/read/close sequence leaked the handle on error).
  with open(f, 'r') as fl:
    data = fl.read()
  return raw(data)
def system(cmd, data=None):
  '''
  pipes the output of a program

  NOTE: ``cmd`` is run through the shell; never pass untrusted input.
  '''
  import subprocess
  proc = subprocess.Popen(
      cmd, shell=True, stdout=subprocess.PIPE, stdin=subprocess.PIPE)
  stdout_data, _ = proc.communicate(data)
  return stdout_data.decode('utf8')
def escape(data, quote=True):  # stolen from std lib cgi
  '''
  Escapes special characters into their html entities
  Replace special characters "&", "<" and ">" to HTML-safe sequences.
  If the optional flag quote is true, the quotation mark character (")
  is also translated.

  This is used to escape content that appears in the body of an HTML document
  '''
  # Ampersand is replaced first so already-produced entities are not
  # double-escaped by the later substitutions.
  replacements = [('&', '&amp;'), ('<', '&lt;'), ('>', '&gt;')]
  if quote:
    replacements.append(('"', '&quot;'))
  for char, entity in replacements:
    data = data.replace(char, entity)
  return data
# Map of named HTML entities (without the & and ;) to their code points,
# used by unescape(). Deliberately incomplete; unknown names decode to '?'.
_unescape = {
  'quot': 34,
  'amp': 38,
  'lt': 60,
  'gt': 62,
  'nbsp': 32,
  # more here
  # http://www.w3.org/TR/html4/sgml/entities.html
  'yuml': 255,
}

# Alias kept so methods named `escape` (e.g. on classes) can still reach
# the module-level escape function.
str_escape = escape
def unescape(data):
  '''
  unescapes html entities. the opposite of escape.
  '''
  entity_re = re.compile(r'&(?:(?:#(\d+))|([^;]+));')

  def _decode(match):
    numeric, named = match.groups()
    if numeric:
      # Numeric character reference, e.g. &#65;
      return unichr(int(numeric))
    # Named entity, e.g. &amp; -- unknown names decode to '?'.
    return unichr(_unescape.get(named, ord('?')))

  return entity_re.sub(_decode, data)
# Characters percent-encoded by url_escape, and their escape sequences.
_reserved = ";/?:@&=+$, "
_replace_map = {c: '%%%2X' % ord(c) for c in _reserved}


def url_escape(data):
  # Percent-encode each reserved character; pass everything else through.
  return ''.join(_replace_map.get(char, char) for char in data)


def url_unescape(data):
  # Decode %XX escape sequences back to the characters they encode.
  return re.sub(
      '%([0-9a-fA-F]{2})',
      lambda match: unichr(int(match.group(1), 16)),
      data)
class container(dom_tag):
  '''
  Contains multiple elements, but does not add a level
  '''
  is_inline = True
  def _render(self, sb, indent_level, indent_str, pretty, xhtml):
    # Render children straight into `sb`; the container itself emits no tag.
    # `_render_children` returns whether all children rendered inline.
    inline = self._render_children(sb, indent_level, indent_str, pretty, xhtml)
    if pretty and not inline:
      # Re-align the following sibling with the parent's indentation.
      sb.append('\n')
      sb.append(indent_str * (indent_level - 1))
    return sb
class lazy(dom_tag):
  '''
  delays function execution until rendered
  '''
  def __new__(_cls, *args, **kwargs):
    '''
    Need to reset this special method or else
    dom_tag will think it's being used as a dectorator.

    This means lazy() can't be used as a dectorator, but
    thinking about when you might want that just confuses me.
    '''
    return object.__new__(_cls)

  def __init__(self, func, *args, **kwargs):
    # Stash the callable and its arguments; nothing runs until _render.
    super(lazy, self).__init__()
    self.func = func
    self.args = args
    self.kwargs = kwargs

  def _render(self, sb, *a, **kw):
    # Invoke the deferred callable now and emit its str() form.
    r = self.func(*self.args, **self.kwargs)
    sb.append(str(r))
class text(dom_tag):
  '''
  Just a string. Useful for inside context managers
  '''
  # Plain text nodes never participate in pretty-printing and always
  # render inline.
  is_pretty = False
  is_inline = True

  def __init__(self, _text, escape=True):
    super(text, self).__init__()
    self.escape = escape
    # HTML-escape up front (by default) so rendering is a plain append.
    self.text = str_escape(_text) if escape else _text

  def _render(self, sb, *a, **kw):
    sb.append(self.text)
    return sb
def raw(s):
  '''
  Inserts a raw string into the DOM. Unsafe. Alias for text(x, escape=False)

  :param s: markup to emit verbatim -- the caller is responsible for making
    sure it is trusted/escaped, since no HTML escaping is applied.
  '''
  return text(s, escape=False)
| 4,311 | Python | .py | 145 | 26.310345 | 86 | 0.664727 | Knio/dominate | 1,689 | 109 | 18 | LGPL-3.0 | 9/5/2024, 5:07:54 PM (Europe/Amsterdam) |
641 | tags.py | Knio_dominate/dominate/tags.py | '''
HTML tag classes.
'''
__license__ = '''
This file is part of Dominate.
Dominate is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as
published by the Free Software Foundation, either version 3 of
the License, or (at your option) any later version.
Dominate is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General
Public License along with Dominate. If not, see
<http://www.gnu.org/licenses/>.
'''
from .dom_tag import dom_tag, attr, get_current
from .dom1core import dom1core
# Python 2/3 compatibility shim: alias the missing builtins on Python 3.
try:
  basestring = basestring
except NameError: # py3
  basestring = str
  unicode = str

# Tag names that clash with Python keywords/builtins; their classes are
# declared elsewhere with a trailing underscore (e.g. del_, input_).
underscored_classes = set(['del', 'input', 'map', 'object'])
# Tag attributes
_ATTR_GLOBAL = set([
'accesskey', 'class', 'class', 'contenteditable', 'contextmenu', 'dir',
'draggable', 'id', 'item', 'hidden', 'lang', 'itemprop', 'spellcheck',
'style', 'subject', 'tabindex', 'title'
])
_ATTR_EVENTS = set([
'onabort', 'onblur', 'oncanplay', 'oncanplaythrough', 'onchange', 'onclick',
'oncontextmenu', 'ondblclick', 'ondrag', 'ondragend', 'ondragenter',
'ondragleave', 'ondragover', 'ondragstart', 'ondrop', 'ondurationchange',
'onemptied', 'onended', 'onerror', 'onfocus', 'onformchange', 'onforminput',
'oninput', 'oninvalid', 'onkeydown', 'onkeypress', 'onkeyup', 'onload',
'onloadeddata', 'onloadedmetadata', 'onloadstart', 'onmousedown',
'onmousemove', 'onmouseout', 'onmouseover', 'onmouseup', 'onmousewheel',
'onpause', 'onplay', 'onplaying', 'onprogress', 'onratechange',
'onreadystatechange', 'onscroll', 'onseeked', 'onseeking', 'onselect',
'onshow', 'onstalled', 'onsubmit', 'onsuspend', 'ontimeupdate',
'onvolumechange', 'onwaiting'
])
ERR_ATTRIBUTE = 'attributes'
ERR_CONTEXT = 'context'
ERR_CONTENT = 'content'
class html_tag(dom_tag, dom1core):
  '''
  Base class for every HTML tag class in this module; combines dom_tag's
  rendering machinery with dom1core's DOM-level-1 helpers.
  '''
  def __init__(self, *args, **kwargs):
    '''
    Creates a new html tag instance.
    '''
    super(html_tag, self).__init__(*args, **kwargs)

  # The validation hooks below are intentionally disabled scaffolding,
  # kept for reference.
  # def validate(self):
  #   '''
  #   Validate the tag. This will check the attributes, context, and contents and
  #   emit tuples in the form of: element, message.
  #   '''
  #   errors = []

  #   errors.extend(self.validate_attributes())
  #   errors.extend(self.validate_context())
  #   errors.extend(self.validate_content())

  #   return errors

  # def validate_attributes(self):
  #   '''
  #   Validate the tag attributes.
  #   '''
  #   return []

  # def validate_context(self):
  #   '''
  #   Validate the tag context.
  #   '''
  #   return []

  # def validate_content(self):
  #   '''
  #   Validate the content of the tag.
  #   '''
  #   return []

  # def _check_attributes(self, *attrs):
  #   valid = set([])
  #   for attr in attrs:
  #     if hasattr(attr, '__iter__'):
  #       valid |= set(attr)
  #     else:
  #       valid.add(attr)
  #   return set(list(self.attributes.iterkeys())) - valid
################################################################################
############################### Html Tag Classes ###############################
################################################################################
# Root element
class html(html_tag):
  '''
  The html element represents the root of an HTML document.
  '''
  pass
  # Disabled validation scaffolding, kept for reference.
  # def validate_attributes(self):
  #   errors = []
  #   for invalid in self._check_attributes(_ATTR_GLOBAL, 'manifest'):
  #     errors.append( (self, ERR_ATTRIBUTE, 'Invalid attribute: "%s"' % invalid) )
  #   return errors

  # def validate_context(self):
  #   if self.parent is not None and not isinstance(self.parent, iframe):
  #     return [(self, ERR_CONTEXT, 'Must be root element or child of an <iframe>')]
  #   return []

  # def validate_content(self):
  #   if len(self) != 2 or not isinstance(self[0], head) or not isinstance(self[1], body):
  #     return [(self, ERR_CONTENT, 'Children must be <head> and then <body>.')]
  #   return []
# Document metadata
class head(html_tag):
  '''
  The head element represents a collection of metadata for the document.
  '''
  pass


class title(html_tag):
  '''
  The title element represents the document's title or name. Authors should use
  titles that identify their documents even when they are used out of context,
  for example in a user's history or bookmarks, or in search results. The
  document's title is often different from its first heading, since the first
  heading does not have to stand alone when taken out of context.
  '''
  def _get_text(self):
    # Concatenate all direct string children into the title text.
    return u''.join(self.get(basestring))

  def _set_text(self, text):
    # Replace any existing children with the new title string.
    self.clear()
    self.add(text)

  # Read/write access to the title's string content.
  text = property(_get_text, _set_text)
class base(html_tag):
  '''
  The base element allows authors to specify the document base URL for the
  purposes of resolving relative URLs, and the name of the default browsing
  context for the purposes of following hyperlinks. The element does not
  represent any content beyond this information.
  '''
  is_single = True  # void element: renders without a closing tag


class link(html_tag):
  '''
  The link element allows authors to link their document to other resources.
  '''
  is_single = True  # void element: renders without a closing tag


class meta(html_tag):
  '''
  The meta element represents various kinds of metadata that cannot be
  expressed using the title, base, link, style, and script elements.
  '''
  is_single = True  # void element: renders without a closing tag


class style(html_tag):
  '''
  The style element allows authors to embed style information in their
  documents. The style element is one of several inputs to the styling
  processing model. The element does not represent content for the user.
  '''
  is_pretty = False  # keep embedded CSS verbatim; no pretty-print reindenting


# Scripting
class script(html_tag):
  '''
  The script element allows authors to include dynamic script and data blocks
  in their documents. The element does not represent content for the user.
  '''
  is_pretty = False  # keep embedded code verbatim; no pretty-print reindenting


class noscript(html_tag):
  '''
  The noscript element represents nothing if scripting is enabled, and
  represents its children if scripting is disabled. It is used to present
  different markup to user agents that support scripting and those that don't
  support scripting, by affecting how the document is parsed.
  '''
  pass
# Sections
class body(html_tag):
  '''
  The body element represents the main content of the document.
  '''
  pass


class main(html_tag):
  '''
  The main content area of a document includes content that is unique to that
  document and excludes content that is repeated across a set of documents such
  as site navigation links, copyright information, site logos and banners and
  search forms (unless the document or application's main function is that of a
  search form).
  '''


class section(html_tag):
  '''
  The section element represents a generic section of a document or
  application. A section, in this context, is a thematic grouping of content,
  typically with a heading.
  '''
  pass


class nav(html_tag):
  '''
  The nav element represents a section of a page that links to other pages or
  to parts within the page: a section with navigation links.
  '''
  pass


class article(html_tag):
  '''
  The article element represents a self-contained composition in a document,
  page, application, or site and that is, in principle, independently
  distributable or reusable, e.g. in syndication. This could be a forum post, a
  magazine or newspaper article, a blog entry, a user-submitted comment, an
  interactive widget or gadget, or any other independent item of content.
  '''
  pass


class aside(html_tag):
  '''
  The aside element represents a section of a page that consists of content
  that is tangentially related to the content around the aside element, and
  which could be considered separate from that content. Such sections are
  often represented as sidebars in printed typography.
  '''
  pass


# Headings h1 (highest rank) through h6 (lowest rank).
class h1(html_tag):
  '''
  Represents the highest ranking heading.
  '''
  pass


class h2(html_tag):
  '''
  Represents the second-highest ranking heading.
  '''
  pass


class h3(html_tag):
  '''
  Represents the third-highest ranking heading.
  '''
  pass


class h4(html_tag):
  '''
  Represents the fourth-highest ranking heading.
  '''
  pass


class h5(html_tag):
  '''
  Represents the fifth-highest ranking heading.
  '''
  pass


class h6(html_tag):
  '''
  Represents the sixth-highest ranking heading.
  '''
  pass


class hgroup(html_tag):
  '''
  The hgroup element represents the heading of a section. The element is used
  to group a set of h1-h6 elements when the heading has multiple levels, such
  as subheadings, alternative titles, or taglines.
  '''
  pass


class header(html_tag):
  '''
  The header element represents a group of introductory or navigational aids.
  '''
  pass
class footer(html_tag):
'''
The footer element represents a footer for its nearest ancestor sectioning
content or sectioning root element. A footer typically contains information
about its section such as who wrote it, links to related documents,
copyright data, and the like.
'''
pass
class address(html_tag):
'''
The address element represents the contact information for its nearest
article or body element ancestor. If that is the body element, then the
contact information applies to the document as a whole.
'''
pass
# Grouping content
class p(html_tag):
  '''A paragraph.'''

class hr(html_tag):
  '''Paragraph-level thematic break, e.g. a scene change or a transition to
  another topic. Void element.'''
  is_single = True

class pre(html_tag):
  '''Block of preformatted text whose structure is expressed by typographic
  conventions rather than elements; emitted verbatim.'''
  is_pretty = False

class blockquote(html_tag):
  '''Section quoted from another source.'''

class ol(html_tag):
  '''Ordered list: changing the order of items would change the meaning.'''

class ul(html_tag):
  '''Unordered list: the order of items is not material to the meaning.'''

class li(html_tag):
  '''List item; an item of its parent's list when that parent is an ol, ul
  or menu element.'''

class dl(html_tag):
  '''Description list of name-value groups: one or more dt names followed
  by one or more dd values.'''

class dt(html_tag):
  '''Term (name) part of a term-description group in a dl.'''

class dd(html_tag):
  '''Description (value) part of a term-description group in a dl.'''

class figure(html_tag):
  '''Self-contained flow content, optionally with a caption, typically
  referenced as a single unit from the main flow of the document.'''

class figcaption(html_tag):
  '''Caption or legend for its parent figure element.'''

class div(html_tag):
  '''Generic container with no special meaning of its own; useful with the
  class, lang and title attributes to mark up group semantics.'''
# Text semantics
class a(html_tag):
  '''A hyperlink (when an href attribute is present), or a placeholder for
  where a link might otherwise have been placed.'''

class em(html_tag):
  '''Stress emphasis of its contents.'''

class strong(html_tag):
  '''Strong importance for its contents.'''

class small(html_tag):
  '''Side comments such as small print.'''

class s(html_tag):
  '''Contents that are no longer accurate or no longer relevant.'''

class cite(html_tag):
  '''Title of a work (a book, paper, film, song, legal case report, etc.),
  whether cited in detail or merely mentioned in passing.'''

class q(html_tag):
  '''Phrasing content quoted from another source.'''

class dfn(html_tag):
  '''Defining instance of a term; the nearest enclosing paragraph,
  description-list group or section must contain the definition.'''

class abbr(html_tag):
  '''Abbreviation or acronym; the title attribute, if present, must give
  its expansion and nothing else.'''

class time_(html_tag):
  '''A time on a 24 hour clock, or a precise date in the proleptic
  Gregorian calendar, optionally with a time and a time-zone offset.'''
_time = time_

class code(html_tag):
  '''Fragment of computer code: an XML element name, a filename, a program,
  or any other string a computer would recognize.'''

class var(html_tag):
  '''A variable: mathematical, programming, a constant's identifier, a
  function parameter, or a placeholder in prose.'''

class samp(html_tag):
  '''Sample output from a program or computing system.'''

class kbd(html_tag):
  '''User input, typically keyboard input, though it may also represent
  other input such as voice commands.'''

class sub(html_tag):
  '''A subscript.'''
class sup(html_tag):
  '''
  The sup element represents a superscript.
  '''
  # The docstring must be the first statement in the class body to be picked
  # up as __doc__; previously `is_inline = True` preceded it, so the string
  # was a discarded expression and sup.__doc__ was None.
  is_inline = True
class i(html_tag):
  '''
  The i element represents a span of text in an alternate voice or mood, or
  otherwise offset from the normal prose in a manner indicating a different
  quality of text, such as a taxonomic designation, a technical term, an
  idiomatic phrase from another language, a thought, or a ship name in Western
  texts.
  '''
  # Docstring moved above `is_inline` so it is registered as __doc__;
  # previously the assignment came first, leaving i.__doc__ unset.
  is_inline = True
class b(html_tag):
  '''Span of text drawing attention for utilitarian purposes, without extra
  importance and with no implication of an alternate voice or mood: key
  words, product names, actionable words, or an article lede.'''

class u(html_tag):
  '''Span of text with an unarticulated, though explicitly rendered,
  non-textual annotation, such as a Chinese proper name mark or a
  misspelling label.'''

class mark(html_tag):
  '''Run of text marked or highlighted for reference purposes, due to its
  relevance in another context or to the user's current activity.'''

class ruby(html_tag):
  '''One or more spans of phrasing content marked with ruby annotations:
  short runs of text presented alongside base text, used primarily in East
  Asian typography (furigana in Japanese).'''

class rt(html_tag):
  '''Ruby text component of a ruby annotation.'''

class rp(html_tag):
  '''Fallback parentheses around a ruby text component, shown by user
  agents that do not support ruby annotations.'''

class bdi(html_tag):
  '''Span of text isolated from its surroundings for the purposes of
  bidirectional text formatting.'''

class bdo(html_tag):
  '''Explicit text-directionality override for its children, bypassing the
  Unicode bidirectional algorithm.'''

class span(html_tag):
  '''Generic phrasing container with no meaning of its own; useful together
  with global attributes such as class, lang or dir.'''

class br(html_tag):
  '''A line break. Void, inline element.'''
  is_single = True
  is_inline = True

class wbr(html_tag):
  '''A line break opportunity. Void, inline element.'''
  is_single = True
  is_inline = True
# Edits
class ins(html_tag):
  '''An addition to the document.'''

class del_(html_tag):
  '''A removal from the document.'''
_del = del_
# Embedded content
class img(html_tag):
  '''An image. Void element.'''
  is_single = True

class iframe(html_tag):
  '''A nested browsing context.'''

class embed(html_tag):
  '''Integration point for an external (typically non-HTML) application or
  interactive content. Void element.'''
  is_single = True

class object_(html_tag):
  '''External resource treated, depending on its type, as an image, a
  nested browsing context, or content processed by a plugin.'''
_object = object_

class param(html_tag):
  '''Parameter for plugins invoked by object elements; represents nothing
  on its own. Void element.'''
  is_single = True

class video(html_tag):
  '''Plays videos or movies, and audio files with captions.'''

class audio(html_tag):
  '''A sound or audio stream.'''

class source(html_tag):
  '''One of multiple alternative media resources for a media element;
  represents nothing on its own. Void element.'''
  is_single = True

class track(html_tag):
  '''Explicit external timed text track for a media element; represents
  nothing on its own. Void element.'''
  is_single = True

class canvas(html_tag):
  '''Resolution-dependent bitmap canvas for rendering graphs, game
  graphics, or other visual images on the fly.'''

class map_(html_tag):
  '''Image map, defined in conjunction with any area element descendants;
  represents its children.'''
_map = map_

class area(html_tag):
  '''Either a hyperlink with some text and a corresponding area on an image
  map, or a dead area on an image map. Void element.'''
  is_single = True
# Tabular data
class table(html_tag):
  '''Data with more than one dimension, in the form of a table.'''

class caption(html_tag):
  '''Title of its parent table.'''

class colgroup(html_tag):
  '''Group of one or more columns in its parent table.'''

class col(html_tag):
  '''One or more columns in the column group of its parent colgroup (which
  itself must belong to a table). Void element.'''
  is_single = True

class tbody(html_tag):
  '''Block of rows forming the body of data of its parent table.'''

class thead(html_tag):
  '''Block of rows holding the column labels (headers) of its parent
  table.'''

class tfoot(html_tag):
  '''Block of rows holding the column summaries (footers) of its parent
  table.'''

class tr(html_tag):
  '''A row of cells in a table.'''

class td(html_tag):
  '''A data cell in a table.'''

class th(html_tag):
  '''A header cell in a table.'''
# Forms
class form(html_tag):
  '''Collection of form-associated elements, some of which can represent
  editable values submittable to a server for processing.'''

class fieldset(html_tag):
  '''Set of form controls, optionally grouped under a common name.'''

class legend(html_tag):
  '''Caption for the rest of the contents of its parent fieldset.'''

class label(html_tag):
  '''Caption in a user interface, associable with a specific form control
  either via the for attribute or by nesting the control inside it.'''

class input_(html_tag):
  '''Typed data field, usually with a form control allowing the user to
  edit the data. Void element.'''
  is_single = True
_input = input_

class button(html_tag):
  '''A button; activatable by the user unless disabled.'''

class select(html_tag):
  '''Control for selecting amongst a set of options.'''

class datalist(html_tag):
  '''Set of option elements representing predefined options for other
  controls. Hidden in the rendering; its non-option contents serve as
  fallback for legacy user agents.'''

class optgroup(html_tag):
  '''Group of option elements with a common label.'''

class option(html_tag):
  '''An option in a select element, or part of a datalist's list of
  suggestions.'''

class textarea(html_tag):
  '''Multiline plain-text edit control; its contents represent the
  control's default value.'''

class keygen(html_tag):
  '''Key pair generator control: on form submission the private key is
  stored in the local keystore and the public key is sent to the server.
  Void element.'''
  is_single = True

class output(html_tag):
  '''Result of a calculation.'''

class progress(html_tag):
  '''Completion progress of a task: either indeterminate, or a number in
  the range zero to a maximum giving the fraction of work completed.'''

class meter(html_tag):
  '''Scalar measurement within a known range, or a fractional value: disk
  usage, query-result relevance, vote fraction, and the like.'''
# Interactive elements
class details(html_tag):
  '''Disclosure widget from which the user can obtain additional
  information or controls.'''

class summary(html_tag):
  '''Summary, caption, or legend for its parent details element.'''

class command(html_tag):
  '''A command that the user can invoke. Void element.'''
  is_single = True

class menu(html_tag):
  '''A list of commands.'''

class font(html_tag):
  '''Legacy presentational element for font styling in an HTML document.'''
# Additional markup
class comment(html_tag):
  '''
  Renders an HTML comment instead of a normal tag.

  Normal, one-line comment:
    >>> print(comment("Hello, comments!"))
    <!--Hello, comments!-->

  For IE's "if" statement comments:
    >>> print(comment(p("Upgrade your browser."), condition='lt IE6'))
    <!--[if lt IE6]><p>Upgrade your browser.</p><![endif]-->

  Downlevel conditional comments:
    >>> print(comment(p("You are using a ", em("downlevel"), " browser."),
            condition='false', downlevel='revealed'))
    <![if false]><p>You are using a <em>downlevel</em> browser.</p><![endif]>

  For more on conditional comments see:
    http://msdn.microsoft.com/en-us/library/ms537512(VS.85).aspx
  '''

  # Attribute holding an IE conditional expression, e.g. 'lt IE6'.
  ATTRIBUTE_CONDITION = 'condition'

  # Valid values are 'hidden', 'downlevel' or 'revealed'
  ATTRIBUTE_DOWNLEVEL = 'downlevel'

  def _render(self, sb, indent_level=1, indent_str='  ', pretty=True, xhtml=False):
    # Overrides dom_tag._render: emits <!-- ... --> (or <![if ...]> ... for
    # "revealed" downlevel comments) instead of an open/close tag pair.
    has_condition = comment.ATTRIBUTE_CONDITION in self.attributes
    is_revealed = comment.ATTRIBUTE_DOWNLEVEL in self.attributes and \
        self.attributes[comment.ATTRIBUTE_DOWNLEVEL] == 'revealed'

    sb.append('<!')
    if not is_revealed:
      sb.append('--')
    if has_condition:
      sb.append('[if %s]>' % self.attributes[comment.ATTRIBUTE_CONDITION])

    pretty = self._render_children(sb, indent_level - 1, indent_str, pretty, xhtml)

    # Close on a fresh line only when there are tag children (text-only
    # comments stay on one line).
    # if len(self.children) > 1:
    if any(isinstance(child, dom_tag) for child in self):
      sb.append('\n')
      sb.append(indent_str * (indent_level - 1))

    if has_condition:
      sb.append('<![endif]')
    if not is_revealed:
      sb.append('--')
    sb.append('>')
    return sb
| 28,662 | Python | .py | 879 | 29.583618 | 90 | 0.725935 | Knio/dominate | 1,689 | 109 | 18 | LGPL-3.0 | 9/5/2024, 5:07:54 PM (Europe/Amsterdam) |
642 | dom_tag.py | Knio_dominate/dominate/dom_tag.py | __license__ = '''
This file is part of Dominate.
Dominate is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as
published by the Free Software Foundation, either version 3 of
the License, or (at your option) any later version.
Dominate is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General
Public License along with Dominate. If not, see
<http://www.gnu.org/licenses/>.
'''
# pylint: disable=bad-indentation, bad-whitespace, missing-docstring
import copy
import numbers
from collections import defaultdict, namedtuple
from functools import wraps
import threading
from asyncio import get_event_loop
from uuid import uuid4
from contextvars import ContextVar
try:
# Python 3
from collections.abc import Callable
except ImportError: # pragma: no cover
# Python 2.7
from collections import Callable
try:
basestring = basestring
except NameError: # py3 # pragma: no cover
basestring = str
unicode = str
try:
import greenlet
except ImportError:
greenlet = None
# We want dominate to work in async contexts - however, the problem is
# when we bind a tag using "with", we set what is essentially a global variable.
# If we are processing multiple documents at the same time, one context
# can "overwrite" the "bound tag" of another - this can cause documents to
# sort of bleed into one another...
# The solution is to use a ContextVar - which provides async context local storage.
# We use this to store a unique ID for each async context. We then use thie ID to
# form the key (in _get_thread_context) that is used to index the _with_context defaultdict.
# The presense of this key ensures that each async context has its own stack and doesn't conflict.
async_context_id = ContextVar('async_context_id', default = None)
def _get_async_context_id():
  '''Return the unique id of the current async context, creating and
  storing one on first access.'''
  current = async_context_id.get()
  if current is None:
    current = uuid4().hex
    async_context_id.set(current)
  return current
def _get_thread_context():
  '''Build a hashable key identifying the current execution context
  (thread, plus greenlet and/or async context when applicable), used to
  give each context its own `with` stack.'''
  parts = [threading.current_thread()]
  # Each extra entry is name-tagged so that, e.g., a greenlet id can never
  # collide with an async context id of the same value.
  if greenlet:
    parts.append(("greenlet", greenlet.getcurrent()))
  try:
    # Only include async information when an event loop is actually running;
    # get_event_loop() raises RuntimeError when there is none.
    if get_event_loop().is_running():
      parts.append(("async", _get_async_context_id()))
  except RuntimeError:
    pass
  return tuple(parts)
class dom_tag(object):
  # Rendering flags; subclasses override these class attributes.
  is_single = False  # Tag does not require matching end tag (ex. <hr/>)
  is_pretty = True   # Text inside the tag should be left as-is (ex. <pre>)
                     # otherwise, text will be escaped() and whitespace may be
                     # modified
  is_inline = False

  def __new__(_cls, *args, **kwargs):
    '''
    Check if bare tag is being used as a decorator
    (called with a single function arg).
    decorate the function and return
    '''
    if len(args) == 1 and isinstance(args[0], Callable) \
        and not isinstance(args[0], dom_tag) and not kwargs:
      wrapped = args[0]

      @wraps(wrapped)
      def f(*args, **kwargs):
        with _cls() as _tag:
          return wrapped(*args, **kwargs) or _tag
      return f
    return object.__new__(_cls)

  def __init__(self, *args, **kwargs):
    '''
    Creates a new tag. Child tags should be passed as arguments and attributes
    should be passed as keyword arguments.

    There is a non-rendering attribute which controls how the tag renders:

    * `__inline` - Boolean value. If True renders all children tags on the same
                   line.
    '''
    self.attributes = {}
    self.children = []
    self.parent = None

    # Does not insert newlines on all children if True (recursive attribute)
    self.is_inline = kwargs.pop('__inline', self.is_inline)
    self.is_pretty = kwargs.pop('__pretty', self.is_pretty)

    # Add child elements
    if args:
      self.add(*args)

    # Remaining keyword arguments become (normalized) HTML attributes.
    for attr, value in kwargs.items():
      self.set_attribute(*type(self).clean_pair(attr, value))

    self._ctx = None
    self._add_to_ctx()

  # context manager
  frame = namedtuple('frame', ['tag', 'items', 'used'])
  # stack of frames, keyed by execution context (thread/greenlet/async)
  _with_contexts = defaultdict(list)

  def _add_to_ctx(self):
    # Tags created inside a `with` block register with the innermost frame,
    # so the context tag can adopt them on __exit__.
    stack = dom_tag._with_contexts.get(_get_thread_context())
    if stack:
      self._ctx = stack[-1]
      stack[-1].items.append(self)

  def __enter__(self):
    # Push a new frame: tags created inside the `with` body collect here.
    stack = dom_tag._with_contexts[_get_thread_context()]
    stack.append(dom_tag.frame(self, [], set()))
    return self

  def __exit__(self, type, value, traceback):
    thread_id = _get_thread_context()
    stack = dom_tag._with_contexts[thread_id]
    frame = stack.pop()
    for item in frame.items:
      # Skip items already consumed (e.g. explicitly add()-ed elsewhere).
      if item in frame.used: continue
      self.add(item)
    if not stack:
      # Last frame for this context: drop the entry so contexts don't leak.
      del dom_tag._with_contexts[thread_id]

  def __call__(self, func):
    '''
    tag instance is being used as a decorator.
    wrap func to make a copy of this tag
    '''
    # remove decorator from its context so it doesn't
    # get added in where it was defined
    if self._ctx:
      self._ctx.used.add(self)

    @wraps(func)
    def f(*args, **kwargs):
      # Each call works on a fresh deep copy of the decorating tag.
      tag = copy.deepcopy(self)
      tag._add_to_ctx()
      with tag:
        return func(*args, **kwargs) or tag
    return f

  def set_attribute(self, key, value):
    '''
    Add or update the value of an attribute (string key) or replace a child
    by position (integer key).
    '''
    if isinstance(key, int):
      self.children[key] = value
    elif isinstance(key, basestring):
      self.attributes[key] = value
    else:
      raise TypeError('Only integer and string types are valid for assigning '
          'child tags and attributes, respectively.')
  __setitem__ = set_attribute

  def delete_attribute(self, key):
    '''
    Remove a child (integer key) or an attribute (string key).
    '''
    if isinstance(key, int):
      # Slice-delete so an out-of-range index is silently ignored.
      del self.children[key:key+1]
    else:
      del self.attributes[key]
  __delitem__ = delete_attribute

  def add(self, *args):
    '''
    Add new child tags.
    '''
    for obj in args:
      if isinstance(obj, numbers.Number):
        # Convert to string so we fall into next if block
        obj = str(obj)

      if isinstance(obj, basestring):
        # Plain strings are escaped before being stored as children.
        obj = util.escape(obj)
        self.children.append(obj)

      elif isinstance(obj, dom_tag):
        # Mark the tag as used in every active `with` frame so __exit__
        # does not adopt it a second time.
        stack = dom_tag._with_contexts.get(_get_thread_context(), [])
        for s in stack:
          s.used.add(obj)
        self.children.append(obj)
        obj.parent = self

      elif isinstance(obj, dict):
        # Dicts are interpreted as attribute mappings.
        for attr, value in obj.items():
          self.set_attribute(*dom_tag.clean_pair(attr, value))

      elif hasattr(obj, '__iter__'):
        # Flatten arbitrary iterables of children.
        for subobj in obj:
          self.add(subobj)

      else:  # not a tag, string, number, dict or iterable
        raise ValueError('%r not a tag or string.' % obj)

    if len(args) == 1:
      return args[0]

    return args

  def add_raw_string(self, s):
    # Append a string child WITHOUT escaping it.
    self.children.append(s)

  def remove(self, obj):
    # Remove a direct child.
    self.children.remove(obj)

  def clear(self):
    # Detach all children, unlinking parent pointers that point at us.
    for i in self.children:
      if isinstance(i, dom_tag) and i.parent is self:
        i.parent = None
    self.children = []

  def get(self, tag=None, **kwargs):
    '''
    Recursively searches children for tags of a certain
    type with matching attributes. `tag` may be a class or a tag-name
    string; with no arguments, all descendant tags are returned.
    '''
    # Stupid workaround since we can not use dom_tag in the method declaration
    if tag is None: tag = dom_tag
    attrs = [(dom_tag.clean_attribute(attr), value)
        for attr, value in kwargs.items()]

    results = []
    for child in self.children:
      if (isinstance(tag, basestring) and type(child).__name__ == tag) or \
          (not isinstance(tag, basestring) and isinstance(child, tag)):
        if all(child.attributes.get(attribute) == value
            for attribute, value in attrs):
          # If the child is of correct type and has all attributes and values
          # in kwargs add as a result
          results.append(child)
      if isinstance(child, dom_tag):
        # If the child is a dom_tag extend the search down through its children
        results.extend(child.get(tag, **kwargs))
    return results

  def __getitem__(self, key):
    '''
    Returns the stored value of the specified attribute or child
    (if it exists).
    '''
    if isinstance(key, int):
      # Children are accessed using integers
      try:
        return object.__getattribute__(self, 'children')[key]
      except IndexError:
        raise IndexError('Child with index "%s" does not exist.' % key)
    elif isinstance(key, basestring):
      # Attributes are accessed using strings
      try:
        return object.__getattribute__(self, 'attributes')[key]
      except KeyError:
        raise AttributeError('Attribute "%s" does not exist.' % key)
    else:
      raise TypeError('Only integer and string types are valid for accessing '
          'child tags and attributes, respectively.')
  __getattr__ = __getitem__

  def __len__(self):
    '''
    Number of child elements.
    '''
    return len(self.children)

  def __bool__(self):
    '''
    Hack for "if x" and __len__: a childless tag is still truthy.
    '''
    return True
  __nonzero__ = __bool__

  def __iter__(self):
    '''
    Iterates over child elements.
    '''
    return self.children.__iter__()

  def __contains__(self, item):
    '''
    Checks recursively if item is in children tree.
    Accepts both a string and a class.
    '''
    return bool(self.get(item))

  def __iadd__(self, obj):
    '''
    Reflexive binary addition simply adds tag as a child.
    '''
    self.add(obj)
    return self

  # String and unicode representations are the same as render()
  def __unicode__(self):
    return self.render()
  __str__ = __unicode__

  def render(self, indent='  ', pretty=True, xhtml=False):
    # Render this tag and all of its children to a single string.
    data = self._render([], 0, indent, pretty, xhtml)
    return u''.join(data)

  def _render(self, sb, indent_level, indent_str, pretty, xhtml):
    # `sb` is a list used as a string builder; it is returned for chaining.
    pretty = pretty and self.is_pretty

    name = getattr(self, 'tagname', type(self).__name__)

    # Workaround for python keywords and standard classes/methods
    # (del, object, input)
    if name[-1] == '_':
      name = name[:-1]

    # open tag
    sb.append('<')
    sb.append(name)

    for attribute, value in sorted(self.attributes.items()):
      # False/None-valued attributes are omitted entirely.
      if value in (False, None):
        continue
      val = unicode(value) if isinstance(value, util.text) and not value.escape else util.escape(unicode(value), True)
      sb.append(' %s="%s"' % (attribute, val))

    sb.append(' />' if self.is_single and xhtml else '>')

    if self.is_single:
      return sb

    inline = self._render_children(sb, indent_level + 1, indent_str, pretty, xhtml)

    if pretty and not inline:
      sb.append('\n')
      sb.append(indent_str * indent_level)

    # close tag
    sb.append('</')
    sb.append(name)
    sb.append('>')

    return sb

  def _render_children(self, sb, indent_level, indent_str, pretty, xhtml):
    # Returns True when every child rendered inline (no newlines emitted).
    inline = True
    for child in self.children:
      if isinstance(child, dom_tag):
        if pretty and not child.is_inline:
          inline = False
          sb.append('\n')
          sb.append(indent_str * indent_level)
        child._render(sb, indent_level, indent_str, pretty, xhtml)
      else:
        sb.append(unicode(child))
    return inline

  def __repr__(self):
    name = '%s.%s' % (self.__module__, type(self).__name__)

    attributes_len = len(self.attributes)
    attributes = '%s attribute' % attributes_len
    if attributes_len != 1: attributes += 's'

    children_len = len(self.children)
    children = '%s child' % children_len
    if children_len != 1: children += 'ren'

    return '<%s at %x: %s, %s>' % (name, id(self), attributes, children)
@staticmethod
def clean_attribute(attribute):
'''
Normalize attribute names for shorthand and work arounds for limitations
in Python's syntax
'''
# Shorthand
attribute = {
'cls': 'class',
'className': 'class',
'class_name': 'class',
'klass': 'class',
'fr': 'for',
'html_for': 'for',
'htmlFor': 'for',
'phor': 'for',
}.get(attribute, attribute)
# Workaround for Python's reserved words
if attribute[0] == '_':
attribute = attribute[1:]
# Workaround for dash
special_prefix = any([attribute.startswith(x) for x in ('data_', 'aria_')])
if attribute in set(['http_equiv']) or special_prefix:
attribute = attribute.replace('_', '-').lower()
# Workaround for colon
if attribute.split('_')[0] in ('xlink', 'xml', 'xmlns'):
attribute = attribute.replace('_', ':', 1).lower()
return attribute
@classmethod
def clean_pair(cls, attribute, value):
'''
This will call `clean_attribute` on the attribute and also allows for the
creation of boolean attributes.
Ex. input(selected=True) is equivalent to input(selected="selected")
'''
attribute = cls.clean_attribute(attribute)
# Check for boolean attributes
# (i.e. selected=True becomes selected="selected")
if value is True:
value = attribute
# Ignore `if value is False`: this is filtered out in render()
return (attribute, value)
# Sentinel distinguishing "no default supplied" from an explicit default=None.
_get_current_none = object()
def get_current(default=_get_current_none):
  '''
  get the current tag being used as a with context or decorated function.
  if no context is active, raises ValueError, or returns the default, if provided
  '''
  stack = dom_tag._with_contexts.get(_get_thread_context(), None)
  if stack:
    # The innermost (most recently entered) context wins.
    return stack[-1].tag
  if default is not _get_current_none:
    return default
  raise ValueError('no current context')
def attr(*args, **kwargs):
  '''
  Set attributes on the current active tag context
  '''
  current = get_current()
  # Positional dicts first, keyword arguments last (later values win).
  for mapping in args + (kwargs,):
    for key, value in mapping.items():
      current.set_attribute(*dom_tag.clean_pair(key, value))
from . import util
| 14,131 | Python | .py | 393 | 30.603053 | 118 | 0.658814 | Knio/dominate | 1,689 | 109 | 18 | LGPL-3.0 | 9/5/2024, 5:07:54 PM (Europe/Amsterdam) |
643 | __init__.py | Knio_dominate/dominate/__init__.py | from ._version import __version__
version = __version__
from .document import document
| 88 | Python | .py | 3 | 28 | 33 | 0.761905 | Knio/dominate | 1,689 | 109 | 18 | LGPL-3.0 | 9/5/2024, 5:07:54 PM (Europe/Amsterdam) |
644 | document.py | Knio_dominate/dominate/document.py | __license__ = '''
This file is part of Dominate.
Dominate is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as
published by the Free Software Foundation, either version 3 of
the License, or (at your option) any later version.
Dominate is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General
Public License along with Dominate. If not, see
<http://www.gnu.org/licenses/>.
'''
from . import tags
from . import util
# Python 2/3 compatibility: `basestring` and `unicode` do not exist on
# Python 3, so alias both to `str` there.
try:
  basestring = basestring
except NameError: # py3
  basestring = str
  unicode = str
class document(tags.html):
  '''An <html> root tag with a pre-built <head>/<body> skeleton, a `title`
  property, and invisible header/main/footer containers inside the body.'''
  tagname = 'html'
  def __init__(self, title='Dominate', doctype='<!DOCTYPE html>', *a, **kw):
    '''
    Creates a new document instance. Accepts `title` and `doctype`
    '''
    super(document, self).__init__(*a, **kw)
    self.doctype = doctype
    # Build head/body through tags.html.add directly, bypassing the
    # overridden document.add (which redirects children into the body).
    self.head = super(document, self).add(tags.head())
    self.body = super(document, self).add(tags.body())
    if title is not None:
      self.title_node = self.head.add(tags.title(title))
    with self.body:
      # Invisible containers partition the body; their creation order here
      # fixes the header/main/footer render order regardless of when
      # content is later added to each.
      self.header = util.container()
      self.main = util.container()
      self.footer = util.container()
    # Children added via document.add (or `with document:`) land in `main`.
    self._entry = self.main
  def get_title(self):
    # NOTE(review): raises AttributeError when the document was constructed
    # with title=None, since title_node is never created in that case.
    return self.title_node.text
  def set_title(self, title):
    if isinstance(title, basestring):
      self.title_node.text = title
    else:
      # A tags.title instance replaces the existing title node wholesale.
      self.head.remove(self.title_node)
      self.head.add(title)
      self.title_node = title
  title = property(get_title, set_title)
  def add(self, *args):
    '''
    Adding tags to a document appends them to the <body>.
    '''
    return self._entry.add(*args)
  def _render(self, sb, *args, **kwargs):
    '''
    Renders the DOCTYPE and tag tree.
    '''
    # adds the doctype if one was set
    if self.doctype:
      sb.append(self.doctype)
      sb.append('\n')
    return super(document, self)._render(sb, *args, **kwargs)
  def __repr__(self):
    return '<dominate.document "%s">' % self.title
| 2,243 | Python | .py | 64 | 31.078125 | 76 | 0.686664 | Knio/dominate | 1,689 | 109 | 18 | LGPL-3.0 | 9/5/2024, 5:07:54 PM (Europe/Amsterdam) |
645 | dom1core.py | Knio_dominate/dominate/dom1core.py | __license__ = '''
This file is part of Dominate.
Dominate is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as
published by the Free Software Foundation, either version 3 of
the License, or (at your option) any later version.
Dominate is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General
Public License along with Dominate. If not, see
<http://www.gnu.org/licenses/>.
'''
# Python 2/3 compatibility: `basestring` and `unicode` do not exist on
# Python 3, so alias both to `str` there.
try:
  basestring = basestring
except NameError: # py3
  basestring = str
  unicode = str
class dom1core(object):
  '''
  Mixin implementing a small subset of the Document Object Model (Core)
  Level 1 on top of the tag tree's `get`/`add`/`parent` primitives.
  http://www.w3.org/TR/1998/REC-DOM-Level-1-19981001/
  http://www.w3.org/TR/1998/REC-DOM-Level-1-19981001/level-one-core.html
  '''
  @property
  def parentNode(self):
    '''
    DOM API: Returns the parent tag of the current element.
    '''
    return self.parent
  def getElementById(self, id):
    '''
    DOM API: Returns single element with matching id value.
    '''
    matches = self.get(id=id)
    if len(matches) > 1:
      # ids are supposed to be unique; refuse to pick one arbitrarily.
      raise ValueError('Multiple tags with id "%s".' % id)
    if matches:
      return matches[0]
    return None
  def getElementsByTagName(self, name):
    '''
    DOM API: Returns all tags that match name.
    '''
    if not isinstance(name, basestring):
      return None
    # Tag names are stored lowercase, so matching is case-insensitive.
    return self.get(name.lower())
  def appendChild(self, obj):
    '''
    DOM API: Add an item to the end of the children list.
    '''
    self.add(obj)
    return self
| 1,734 | Python | .py | 54 | 28.555556 | 72 | 0.719162 | Knio/dominate | 1,689 | 109 | 18 | LGPL-3.0 | 9/5/2024, 5:07:54 PM (Europe/Amsterdam) |
646 | test_document.py | Knio_dominate/tests/test_document.py | from dominate import document
from dominate.tags import *
def test_doc():
  # A fresh document renders the doctype plus the html/head(title)/body skeleton.
  d = document()
  assert d.render() == \
'''<!DOCTYPE html>
<html>
  <head>
    <title>Dominate</title>
  </head>
  <body></body>
</html>'''
def test_decorator():
  # Calling a @document()-decorated function returns a document whose body
  # holds the tags created inside the function.
  @document()
  def foo():
    p('Hello World')
  f = foo()
  assert f.render() == \
'''<!DOCTYPE html>
<html>
  <head>
    <title>Dominate</title>
  </head>
  <body>
    <p>Hello World</p>
  </body>
</html>'''
def test_bare_decorator():
  # The decorator also works without parentheses (@document).
  @document
  def foo():
    p('Hello World')
  assert foo().render() == \
'''<!DOCTYPE html>
<html>
  <head>
    <title>Dominate</title>
  </head>
  <body>
    <p>Hello World</p>
  </body>
</html>'''
def test_title():
  # The `title` property accepts both plain strings and title() tag instances.
  d = document()
  assert d.title == 'Dominate'
  d = document(title='foobar')
  assert d.title == 'foobar'
  d.title = 'baz'
  assert d.title == 'baz'
  d.title = title('bar')
  assert d.title == 'bar'
  assert d.render() == \
'''<!DOCTYPE html>
<html>
  <head>
    <title>bar</title>
  </head>
  <body></body>
</html>'''
def test_containers():
  # header/main/footer are invisible containers: content added through them
  # (or through the document itself, which targets `main`) renders in the
  # fixed header/main/footer order regardless of insertion order.
  d = document()
  with d.footer:
    div('footer')
  with d:
    div('main1')
  with d.main:
    div('main2')
  print(d.header)
  print(d)
  print(d.body.children)
  with d.header:
    div('header1')
    div('header2')
  assert d.render() == \
'''<!DOCTYPE html>
<html>
  <head>
    <title>Dominate</title>
  </head>
  <body>
    <div>header1</div>
    <div>header2</div>
''''''
    <div>main1</div>
    <div>main2</div>
''''''
    <div>footer</div>
  </body>
</html>'''
def test_attributes():
  # Extra keyword arguments become attributes on the <html> root; title=None
  # suppresses the <title> node and doctype=None suppresses the doctype line.
  d = document(title=None, doctype=None, lang='en')
  assert d.render() == \
'''<html lang="en">
  <head></head>
  <body></body>
</html>'''
def test_repr():
  # The repr shows the document's title.
  d = document(title='foo')
  assert d.__repr__() == '<dominate.document "foo">'
# Allow running this file directly for a quick manual check.
if __name__ == '__main__':
  # test_doc()
  test_decorator()
| 1,857 | Python | .py | 101 | 15.366337 | 52 | 0.590334 | Knio/dominate | 1,689 | 109 | 18 | LGPL-3.0 | 9/5/2024, 5:07:54 PM (Europe/Amsterdam) |
647 | test_dom1core.py | Knio_dominate/tests/test_dom1core.py | import pytest
from dominate.tags import *
def test_dom():
  # DOM Level 1 lookups (getElementById / getElementsByTagName / appendChild)
  # search the whole subtree; tag-name matching is case-insensitive, and a
  # non-string tag name yields None.
  container = div()
  with container.add(div(id='base')) as dom:
    s1 = span('Hello', id='span1')
    s2 = span('World', id='span2')
  s3 = span('foobar', id='span3')
  dom.appendChild(s3)
  assert container.getElementById('base') is dom
  assert container.getElementById('span1') is s1
  assert container.getElementById('span3') is s3
  assert container.getElementById('foo') is None
  assert container.getElementsByTagName('span') == [s1, s2, s3]
  assert container.getElementsByTagName('SPAN') == [s1, s2, s3]
  assert container.getElementsByTagName(1234) is None
def test_element():
  # Duplicate ids make getElementById ambiguous, which raises ValueError.
  d = div(
    span(id='a'),
    span(id='a'),
  )
  with pytest.raises(ValueError):
    d.getElementById('a')
def test_parent_node():
  # parentNode mirrors the internal `parent` link that add() sets.
  outer = div(id='parent')
  inner = div(id='child')
  outer.add(inner)
  assert inner.parentNode is outer
| 898 | Python | .py | 28 | 28.857143 | 63 | 0.706019 | Knio/dominate | 1,689 | 109 | 18 | LGPL-3.0 | 9/5/2024, 5:07:54 PM (Europe/Amsterdam) |
648 | test_dom_tag_async.py | Knio_dominate/tests/test_dom_tag_async.py | from asyncio import gather, run, Semaphore
from dominate.dom_tag import async_context_id
from textwrap import dedent
from dominate import tags
# To simulate sleeping without making the tests take a very long time to
# complete, let's use a pair of semaphores to explicitly control when our
# coroutines run. The order of execution is marked with numbered comments below:
def test_async_bleed():
  # Each async task must get its own tag-context id; without one, `with`
  # blocks in concurrently running coroutines interleave ("bleed") into each
  # other's trees. The two semaphores force a deterministic interleaving.
  async def tag_routine_1(sem_1, sem_2):
    root = tags.div(id = 1) # [1]
    with root: # [2]
      sem_2.release() # [3]
      await sem_1.acquire() # [4]
      tags.div(id = 2) # [11]
    return str(root) # [12]
  async def tag_routine_2(sem_1, sem_2):
    await sem_2.acquire() # [5]
    root = tags.div(id = 3) # [6]
    with root: # [7]
      tags.div(id = 4) # [8]
    sem_1.release() # [9]
    return str(root) # [10]
  async def merge():
    sem_1 = Semaphore(0)
    sem_2 = Semaphore(0)
    return await gather(
      tag_routine_1(sem_1, sem_2),
      tag_routine_2(sem_1, sem_2)
    )
  # Set this test up for failure - pre-set the context to a non-None value.
  # As it is already set, _get_async_context_id will not set it to a new, unique value
  # and thus we won't be able to differentiate between the two contexts. This essentially simulates
  # the behavior before our async fix was implemented (the bleed):
  async_context_id.set(0)
  tag_1, tag_2 = run(merge())
  # This looks wrong - but its what we would expect if we don't
  # properly handle async...
  assert tag_1 == dedent("""\
    <div id="1">
      <div id="3">
        <div id="4"></div>
      </div>
      <div id="2"></div>
    </div>
  """).strip()
  assert tag_2 == dedent("""\
    <div id="3">
      <div id="4"></div>
    </div>
  """).strip()
  # Okay, now lets do it right - lets clear the context. Now when each async function
  # calls _get_async_context_id, it will get a unique value and we can differentiate.
  async_context_id.set(None)
  tag_1, tag_2 = run(merge())
  # Ah, much better...
  assert tag_1 == dedent("""\
    <div id="1">
      <div id="2"></div>
    </div>
  """).strip()
  assert tag_2 == dedent("""\
    <div id="3">
      <div id="4"></div>
    </div>
  """).strip()
| 2,374 | Python | .py | 65 | 29.138462 | 101 | 0.571242 | Knio/dominate | 1,689 | 109 | 18 | LGPL-3.0 | 9/5/2024, 5:07:54 PM (Europe/Amsterdam) |
649 | test_utils.py | Knio_dominate/tests/test_utils.py | import dominate
from dominate.tags import *
from dominate import util
def test_context():
  # The thread-context key must be stable within a single thread.
  key_a = dominate.dom_tag._get_thread_context()
  key_b = dominate.dom_tag._get_thread_context()
  assert key_a == key_b
def test_include():
  # util.include inlines a file's contents as a text child; the temp file is
  # always removed, even when the assertion fails.
  import os
  try:
    f = open('_test_include.deleteme', 'w')
    f.write('Hello World')
    f.close()
    d = div()
    d += util.include('_test_include.deleteme')
    assert d.render() == '<div>Hello World</div>'
  finally:
    try:
      os.remove('_test_include.deleteme')
    except:
      pass
def test_system():
  # util.system embeds a shell command's stdout; normalize Windows newlines
  # so the test passes on every platform.
  d = div()
  d += util.system('echo Hello World')
  assert d.render().replace('\r\n', '\n') == '<div>Hello World\n</div>'
def test_unescape():
  # util.unescape decodes HTML character references back to literal
  # characters. The input must be the *escaped* form; an already-unescaped
  # input would make this assertion a tautology.
  assert util.unescape('&amp;&lt;&gt;&#32;') == '&<> '
def test_url():
  # Percent-encoding round-trip; url_unescape accepts lowercase hex digits.
  assert util.url_escape('hi there?') == 'hi%20there%3F'
  assert util.url_unescape('hi%20there%3f') == 'hi there?'
def test_container():
  # An empty util.container renders nothing; a non-empty one renders only
  # its children (no wrapper element of its own).
  d = div()
  with d:
    with util.container():
      pass
  assert d.render() == '<div></div>'
  d = div()
  with d:
    with util.container():
      h1('a')
  assert d.render() == \
'''<div>
  <h1>a</h1>
</div>'''
| 1,129 | Python | .py | 44 | 22 | 71 | 0.617757 | Knio/dominate | 1,689 | 109 | 18 | LGPL-3.0 | 9/5/2024, 5:07:54 PM (Europe/Amsterdam) |
650 | test_html.py | Knio_dominate/tests/test_html.py | import dominate
from dominate.tags import *
import pytest
from dominate.util import raw
# Python 2/3 compatibility: alias xrange to range on Python 3.
try:
  xrange = xrange
except NameError:
  xrange = range
def test_arguments():
  # Positional children nest in order.
  assert html(body(h1('Hello, pyy!'))).render() == \
'''<html>
  <body>
    <h1>Hello, pyy!</h1>
  </body>
</html>'''
def test_kwargs():
  # Keyword arguments become attributes: booleans expand (checked="checked"),
  # shorthands map (cls -> class), and underscores become dashes (data_name).
  assert div(
    id=4,
    checked=True,
    cls="mydiv",
    data_name='foo',
    onclick='alert(1);').render() == \
'''<div checked="checked" class="mydiv" data-name="foo" id="4" onclick="alert(1);"></div>'''
def test_repr():
  # repr reports attribute and child counts with correct pluralization.
  import re
  d = div()
  assert repr(d).startswith('<dominate.tags.div at ')
  assert repr(d).endswith(' 0 attributes, 0 children>')
  d += [1, {'id':'foo'}]
  assert repr(d).startswith('<dominate.tags.div at ')
  assert repr(d).endswith(' 1 attribute, 1 child>')
def test_add():
  # += accepts numbers, iterables and attribute dicts; None is rejected and
  # children support indexing/deletion with int keys.
  d = div()
  with pytest.raises(ValueError):
    d += None
  d += 1
  d += xrange(2,3)
  d += {'id': 'foo'}
  assert d.render() == '<div id="foo">12</div>'
  assert len(d) == 2
  assert d
  with pytest.raises(IndexError):
    d[2]
  with pytest.raises(TypeError):
    d[None]
  del d[0]
  assert len(d) == 1
def test_iadd():
  # Build a list by repeated +=.
  list = ul()
  for item in range(4):
    list += li('Item #', item)
  # 2 children so doesn't render inline
  assert list.render() == \
'''<ul>
  <li>Item #0</li>
  <li>Item #1</li>
  <li>Item #2</li>
  <li>Item #3</li>
</ul>'''
def test_context_manager():
  # Tags created inside `with h:` attach to h unless explicitly added to
  # another tag first.
  other = div()
  h = ul()
  with h:
    li('One')
    li('Two')
    li('Three')
    # added to other, so not added to h
    other += li('Four')
  assert h.render() == \
'''<ul>
  <li>One</li>
  <li>Two</li>
  <li>Three</li>
</ul>'''
def test_decorator():
  # A tag class, a tag instance, or a tag called with attributes can all
  # decorate a function; calling it returns a freshly populated tag.
  @div
  def f():
    p('Hello')
  assert f().render() == \
'''<div>
  <p>Hello</p>
</div>'''
  d = div()
  @d
  def f2():
    p('Hello')
  assert f2().render() == \
'''<div>
  <p>Hello</p>
</div>'''
  @div(cls='three')
  def f3():
    p('Hello')
  assert f3().render() == \
'''<div class="three">
  <p>Hello</p>
</div>'''
def test_nested_decorator():
  # A decorated tag-function called inside another tag's context attaches
  # its subtree to that context, with and without decorator parentheses.
  @div
  def f1():
    p('hello')
  d = div()
  with d:
    f1()
  assert d.render() == \
'''<div>
  <div>
    <p>hello</p>
  </div>
</div>'''
  @div()
  def f2():
    p('hello')
  d = div()
  with d:
    f2()
  assert d.render() == \
'''<div>
  <div>
    <p>hello</p>
  </div>
</div>'''
def test_text():
  # util.text escapes by default; escape=False emits the raw characters.
  from dominate.util import text
  d = div()
  with d:
    text('Hello World')
  assert d.render() == \
'''<div>Hello World</div>'''
  assert div(text('<>', escape=False)).render() == \
'''<div><></div>'''
  assert div(text('<>')).render() == \
'''<div>&lt;&gt;</div>'''
def test_raw():
  # util.raw is never escaped.
  from dominate.util import raw
  d = div()
  with d:
    raw('Hello World<br>')
  assert d.render() == '''<div>Hello World<br></div>'''
def test_escape():
  # Text children are HTML-escaped on render.
  assert pre('<>').render() == '''\
<pre>&lt;&gt;</pre>'''
def test_get_context():
  # get_current raises outside any context unless a default is given; inside
  # nested contexts it returns the innermost active tag.
  with pytest.raises(ValueError):
    d = get_current()
  d = get_current(None)
  assert d is None
  with div() as d1:
    d2 = span()
    with d2:
      d2p = get_current()
    d1p = get_current()
  assert d1 is d1p
  assert d2 is d2p
def test_attributes():
  # Item access/deletion on attributes, plus the attr() helper inside a
  # tag context (and its ValueError outside one).
  d = div()
  d['id'] = 'foo'
  assert d['id'] == 'foo'
  del d['id']
  with pytest.raises(KeyError):
    del d['id']
  with pytest.raises(AttributeError):
    x = d['id']
  with div() as d:
    attr(data_test=False)
  assert d['data-test'] is False
  with div() as d:
    attr(data_test=True)
  assert d['data-test']
  with pytest.raises(ValueError):
    # not in a tag context
    attr(id='moo')
def test_attribute_none():
  # None-valued attributes are dropped from the output entirely.
  d = div(foo=1, bar=None)
  assert d.render() == '<div foo="1"></div>'
def test_attribute_dashes():
  # fix issue #118 (https://github.com/Knio/dominate/issues/118)
  expected = '<div aria-foo="bar" data-a-b-c="foo" data-page-size="123"></div>'
  assert div(data_a_b_c='foo', data_page_size='123', aria_foo='bar').render() == expected
def test_lazy():
  # util.lazy defers evaluation of its callable until render time.
  from dominate import util
  executed = [False]
  def _lazy():
    executed[0] = True
    return span('Hi')
  d = div()
  s = util.lazy(_lazy)
  d += s
  assert executed[0] == False
  assert d.render() == '<div>\n  <span>Hi</span>\n</div>'
  assert executed[0] == True
def test_keyword_attributes():
  # Every supported spelling of the reserved-word attributes maps to
  # class/for.
  expected = '<div class="foo" for="bar"></div>'
  assert div(cls='foo', fr='bar').render() == expected
  assert div(_class='foo', _for='bar').render() == expected
  assert div(className='foo', htmlFor='bar').render() == expected
  assert div(class_name='foo', html_for='bar').render() == expected
def test_namespaced_attributes():
  # Only xlink/xml/xmlns prefixes get a colon; other names keep underscores.
  assert div(foo_bar='one').render() == '<div foo_bar="one"></div>'
  assert div(xlink_href='one').render() == '<div xlink:href="one"></div>'
def test_comment():
  # Plain comments, conditional ("downlevel-hidden") comments, and revealed
  # conditional comments.
  d = comment('Hi there')
  assert d.render() == '<!--Hi there-->'
  assert div(d).render() == '<div>\n  <!--Hi there-->\n</div>'
  d = comment('Hi ie user', condition='IE 6')
  assert d.render() == '<!--[if IE 6]>Hi ie user<![endif]-->'
  d = comment(div('Hi non-ie user'), condition='!IE', downlevel='revealed')
  assert d.render() == '''<![if !IE]>
<div>Hi non-ie user</div>
<![endif]>'''
def test_boolean_attributes():
  # True renders name="name"; False omits the attribute entirely.
  assert input_(type="checkbox", checked=True).render() == \
    '<input checked="checked" type="checkbox">'
  assert input_(type="checkbox", checked=False).render() == \
    '<input type="checkbox">'
def test_nested_decorator_2():
  # Decorated tag-functions can nest and be invoked repeatedly, producing a
  # fresh subtree per call.
  @span
  def foo():
    @div(cls='bar')
    def bar(x):
      p(x)
    bar('a')
    bar('b')
  assert foo().render() == '''<span>
  <div class="bar">
    <p>a</p>
  </div>
  <div class="bar">
    <p>b</p>
  </div>
</span>'''
def test_pretty():
  # Pretty-printing rules: script/style/pre keep their content verbatim,
  # __pretty/__inline override the defaults, and inline tags (i, sup, br)
  # stay on one line; `indent` customizes the indent string.
  assert head(script('foo'), style('bar')).render() == '''<head>
  <script>foo</script>
  <style>bar</style>
</head>'''
  assert div(pre('test')).render() == '''<div>
  <pre>test</pre>
</div>'''
  assert div(pre('test')).render(indent='\t') == '''<div>
\t<pre>test</pre>
</div>'''
  assert div(pre('test')).render(pretty=False) == \
    '''<div><pre>test</pre></div>'''
  assert div(span('hi'), span('there'), __pretty=False).render() == \
    '''<div><span>hi</span><span>there</span></div>'''
  assert span('hi', br(), 'there').render() == \
    '''<span>hi<br>there</span>'''
  assert span('hi', br(__inline=False), 'there').render() == \
    '''<span>hi\n  <br>there\n</span>'''
  assert p('goodbye ', i('cruel'), ' world').render() == \
    '''<p>goodbye <i>cruel</i> world</p>'''
  assert p('my 1', sup('st'), ' PR').render() == \
    '''<p>my 1<sup>st</sup> PR</p>'''
def test_xhtml():
  # xhtml=True self-closes void tags (<br />); HTML mode leaves <br>.
  assert head(script('foo'), style('bar')).render(xhtml=True) == '''<head>
  <script>foo</script>
  <style>bar</style>
</head>'''
  assert span('hi', br(), 'there').render(xhtml=True) == \
    '''<span>hi<br />there</span>'''
  assert span('hi', br(), 'there').render() == \
    '''<span>hi<br>there</span>'''
  assert span('hi', br(), 'there').render(xhtml=False) == \
    '''<span>hi<br>there</span>'''
def test_verbatim_attributes():
  # Attribute values are escaped unless wrapped in raw().
  assert div(attr = '{<div></div>}').render() == \
    '''<div attr="{&lt;div&gt;&lt;/div&gt;}"></div>'''
  assert div(attr = raw('{<div></div>}')).render() == \
    '''<div attr="{<div></div>}"></div>'''
| 7,237 | Python | .py | 270 | 23.3 | 94 | 0.576912 | Knio/dominate | 1,689 | 109 | 18 | LGPL-3.0 | 9/5/2024, 5:07:54 PM (Europe/Amsterdam) |
651 | test_dom_tag.py | Knio_dominate/tests/test_dom_tag.py | import pytest
# Prefer the standalone `mock` backport; fall back to stdlib unittest.mock.
try:
  import mock
except ImportError:
  import unittest.mock as mock
from dominate.tags import *
def test___get_thread_context(monkeypatch):
  # The context key combines greenlet and thread identity; mock both and
  # check a key is still produced.
  from dominate import dom_tag as sut
  greenlet = mock.Mock()
  greenlet.getcurrent.return_value = 100
  monkeypatch.setattr(sut, 'greenlet', greenlet)
  threading = mock.Mock()
  threading.current_thread.return_value = 200
  monkeypatch.setattr(sut, 'threading', threading)
  assert sut._get_thread_context() is not None
def test_add_raw_string():
  # add_raw_string appends unescaped text as a plain-string child.
  container = div()
  container.add_raw_string('foo')
  assert container.children == ['foo']
def test_clear():
  # clear() detaches all children and resets their parent links.
  container = div()
  child = div()
  container.add(child)
  assert container.children == [child]
  assert child.parent == container
  container.clear()
  assert container.children == []
  assert child.parent is None
def test_set_attribute():
  # Integer keys address children: index 0 replaces the raw-string child.
  container = div()
  container.add_raw_string('foo')
  container.set_attribute(0, 'bar')
  assert container.children == ['bar']
def test_set_attribute_error():
  # Keys that are neither int nor str are rejected with a TypeError.
  container = div()
  with pytest.raises(TypeError, match=(
    'Only integer and string types are valid for assigning '
    'child tags and attributes, respectively.'
  )):
    container.set_attribute(1.0, 'foo')
def test___get_item___child_index_error():
  # Out-of-range child indexing raises IndexError with a helpful message.
  d = div()
  with pytest.raises(IndexError, match='Child with index "10" does not exist.'):
    d[10]
def test___contains__():
  # The `in` operator matches by child tag name.
  container = div()
  container.add(div())
  assert 'div' in container
def test_nested_context():
  # A helper that opens its own context from within an outer context must
  # nest its subtree correctly.
  def sub(*args):
    with div('B') as B:
      B.add(*args)
  with div('A') as A:
    sub(div('C'))
  assert str(A) == \
'''<div>A
  <div>B
    <div>C</div>
  </div>
</div>'''
| 1,813 | Python | .py | 60 | 25.183333 | 82 | 0.655908 | Knio/dominate | 1,689 | 109 | 18 | LGPL-3.0 | 9/5/2024, 5:07:54 PM (Europe/Amsterdam) |
652 | test_svg.py | Knio_dominate/tests/test_svg.py | import dominate.svg
from dominate.tags import *
from dominate.svg import *
import pytest
def base():
  # Shared <svg> root used by every decorated test case.
  return svg(
    width=120, height=120, viewBox="0 0 120 120", version="1.1",
    xmlns="http://www.w3.org/2000/svg",
    xmlns_xlink="http://www.w3.org/1999/xlink"
  )
def get_expected(func):
  # A test's expected SVG markup lives in its docstring; strip the
  # docstring's two-space indentation and surrounding blank lines.
  return func.__doc__.replace('\n  ', '\n').strip()
def output_test(func):
  # Decorator: run the test body inside a fresh base() svg, then compare the
  # rendered tree against the docstring.
  def wrapper():
    with base() as result:
      func()
    assert result.render() == get_expected(func)
  return wrapper
# Note, all tests produce working examples. The expected results can be pasted into https://jsfiddle.net/api/mdn/
@output_test
def test_animate():
  '''
  <svg height="120" version="1.1" viewBox="0 0 120 120" width="120" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
    <rect height="100" width="100" x="10" y="10">
      <animate attributeName="x" attributeType="XML" dur="10s" from="-100" repeatCount="indefinite" to="120"></animate>
    </rect>
  </svg>
  '''
  # `_from` avoids the Python keyword `from`; it cleans to the `from` attribute.
  with rect(x="10", y="10", width="100", height="100"):
    animate(attributeType="XML", attributeName="x", _from="-100", to="120", dur="10s", repeatCount="indefinite")
@output_test
def test_animate_motion():
  '''
  <svg height="120" version="1.1" viewBox="0 0 120 120" width="120" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
    <path d="M10,110 A120,120 -45 0,1 110 10 A120,120 -45 0,1 10,110" fill="none" id="theMotionPath" stroke="lightgrey" stroke-width="2"></path>
    <circle cx="10" cy="110" fill="lightgrey" r="3"></circle>
    <circle cx="110" cy="10" fill="lightgrey" r="3"></circle>
    <circle cx="" cy="" fill="red" r="5">
      <animateMotion dur="6s" repeatCount="indefinite">
        <mpath xlink:href="#theMotionPath"></mpath>
      </animateMotion>
    </circle>
  </svg>
  '''
  # Reference path plus end markers; the red circle follows it via <mpath>.
  path(d="M10,110 A120,120 -45 0,1 110 10 A120,120 -45 0,1 10,110", stroke="lightgrey", stroke_width="2",
       fill="none", id="theMotionPath")
  circle(cx=10, cy=110, r=3, fill="lightgrey")
  circle(cx=110, cy=10, r=3, fill="lightgrey")
  with circle(cx="", cy="", r="5", fill="red"):
    with animateMotion(dur="6s", repeatCount="indefinite"):
      mpath(xlink_href="#theMotionPath")
@output_test
def test_animate_transform():
  '''
  <svg height="120" version="1.1" viewBox="0 0 120 120" width="120" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
    <polygon points="60,30 90,90, 30,90">
      <animateTransform attributeName="transform" attributeType="XML" dur="10s" from="0 60 70" repeatCount="indefinite" to="360 60 70" type="rotate">
    </polygon>
  </svg>
  '''
  # NOTE(review): the docstring leaves <animateTransform> unclosed; it is
  # the expected output asserted verbatim by output_test.
  with polygon(points="60,30 90,90, 30,90"):
    animateTransform(attributeName="transform", attributeType="XML", type="rotate", _from="0 60 70",
                     to="360 60 70", dur="10s", repeatCount="indefinite")
@output_test
def test_circle():
  '''
  <svg height="120" version="1.1" viewBox="0 0 120 120" width="120" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
    <circle cx="50" cy="60" fill="black" r="30">
      <desc>I am a circle</desc>
    </circle>
  </svg>
  '''
  # A <desc> child provides an accessible description.
  with circle(cx=50, cy=60, r=30, fill="black"):
    desc("I am a circle")
@output_test
def test_clip_path():
  '''
  <svg height="120" version="1.1" viewBox="0 0 120 120" width="120" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
    <clipPath id="MyClip">
      <circle cx="20" cy="30" r="40"></circle>
    </clipPath>
    <path d="M10,30 A20,20,0,0,1,50,30 A20,20,0,0,1,90,30 Q90,60,50,90 Q10,60,10,30 Z" id="heart"></path>
    <use clip-path="url(#MyClip)" fill="red" xlink:href="#heart"></use>
  </svg>
  '''
  # The heart path is clipped by the circle referenced via clip-path.
  with clipPath(id="MyClip"):
    circle(cx="20", cy="30", r="40")
  path(id="heart", d="M10,30 A20,20,0,0,1,50,30 A20,20,0,0,1,90,30 Q90,60,50,90 Q10,60,10,30 Z")
  use(clip_path="url(#MyClip)", xlink_href="#heart", fill="red")
@output_test
def test_defs():
  '''
  <svg height="120" version="1.1" viewBox="0 0 120 120" width="120" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
    <defs>
      <circle cx="50" cy="60" fill="black" r="30"></circle>
    </defs>
    <use x="5" xlink:href="#myCircle" y="5"></use>
  </svg>
  '''
  # Definitions render but do not display; <use> instantiates them.
  with defs():
    circle(cx=50, cy=60, r=30, fill="black")
  use(x="5", y="5", xlink_href="#myCircle")
@output_test
def test_ellipse():
  '''
  <svg height="120" version="1.1" viewBox="0 0 120 120" width="120" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
    <ellipse cx="100" cy="50" rx="100" ry="50"></ellipse>
  </svg>
  '''
  # A bare ellipse element.
  ellipse(cx="100", cy="50", rx="100", ry="50" )
# Every SVG filter-primitive tag exported by dominate.svg.
filter_names = ['feBlend', 'feColorMatrix', 'feComponentTransfer', 'feComposite', 'feConvolveMatrix', 'feDiffuseLighting',
                'feDisplacementMap', 'feFlood', 'feGaussianBlur', 'feImage', 'feMerge', 'feMorphology', 'feOffset',
                'feSpecularLighting', 'feTile', 'feTurbulence', 'feDistantLight', 'fePointLight', 'feSpotLight']
def test_filters():
  # Each primitive renders as an empty element nested inside <filter>.
  for name in filter_names:
    attr = getattr(dominate.svg, name)
    with filter() as f:
      attr()
    expected = '''<filter>\n  <{0}></{0}>\n</filter>'''.format(name)
    assert f.render() == expected
@output_test
def test_g():
  '''
  <svg height="120" version="1.1" viewBox="0 0 120 120" width="120" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
    <g fill="white" stroke="green" stroke-width="5">
      <circle r="25" rx="40" ry="40"></circle>
      <circle r="25" rx="60" ry="60"></circle>
    </g>
  </svg>
  '''
  # Group-level presentation attributes apply to both circles.
  with g(fill="white", stroke="green", stroke_width="5"):
    circle(rx=40, ry=40, r=25)
    circle(rx=60, ry=60, r=25)
@output_test
def test_line():
  '''
  <svg height="120" version="1.1" viewBox="0 0 120 120" width="120" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
    <line stroke="red" x0="0" x1="50" y0="0" y1="50"></line>
  </svg>
  '''
  # A simple red line segment.
  line(x0='0', x1='50', y0='0', y1='50', stroke='red')
@output_test
def test_linear_gradient():
  '''
  <svg height="120" version="1.1" viewBox="0 0 120 120" width="120" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
    <defs>
      <linearGradient gradientTransform="rotate(90)" id="myGradient">
        <stop offset="5%" stop-color="gold"></stop>
        <stop offset="95%" stop-color="red"></stop>
      </linearGradient>
    </defs>
    <circle fill="url('#myGradient')" r="40" rx="50" ry="50"></circle>
  </svg>
  '''
  # Two-stop vertical gradient referenced by the circle's fill.
  with defs():
    with linearGradient(id="myGradient", gradientTransform="rotate(90)"):
      stop(offset="5%", stop_color="gold")
      stop(offset="95%", stop_color="red")
  circle(rx=50, ry=50, r=40, fill="url('#myGradient')")
@output_test
def test_marker():
  '''
  <svg height="120" version="1.1" viewBox="0 0 120 120" width="120" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
    <defs>
      <marker id="arrow" markerHeight="6" markerWidth="6" orient="auto-start-reverse" refX="5" refY="5" viewBox="0 0 10 10">
        <path d="M 0 0 L 10 5 L 0 10 z"></path>
      </marker>
    </defs>
    <polyline fill="none" marker-end="url(#arrow)" marker-start="url(#arrow)" points="10,10 10,90 90,90" stroke="black"></polyline>
  </svg>
  '''
  # Arrowheads attached to both ends of the polyline.
  with defs():
    with marker(id="arrow", viewBox="0 0 10 10", refX="5", refY="5",
                markerWidth="6", markerHeight="6", orient="auto-start-reverse"):
      path(d="M 0 0 L 10 5 L 0 10 z")
  polyline(points="10,10 10,90 90,90", fill="none", stroke="black", marker_start="url(#arrow)",
           marker_end="url(#arrow)")
@output_test
def test_mask():
  '''
  <svg height="120" version="1.1" viewBox="0 0 120 120" width="120" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
    <mask id="myMask">
      <rect fill="white" height="100" width="100" x="0" y="0"></rect>
      <path d="M10,35 A20,20,0,0,1,50,35 A20,20,0,0,1,90,35 Q90,65,50,95 Q10,65,10,35 Z" fill="black"></path>
    </mask>
    <polygon fill="orange" points="-10,110 110,110 110,-10"></polygon>
    <circle cx="50" cy="50" mask="url(#myMask)" r="50"></circle>
  </svg>
  '''
  # White reveals, black hides: the heart shape is cut out of the circle.
  with mask(id="myMask"):
    rect(x="0", y="0", width="100", height="100", fill="white")
    path(d="M10,35 A20,20,0,0,1,50,35 A20,20,0,0,1,90,35 Q90,65,50,95 Q10,65,10,35 Z", fill="black" )
  polygon(points="-10,110 110,110 110,-10", fill="orange")
  circle(cx=50, cy=50, r=50, mask="url(#myMask)")
def test_pattern():
  '''
  <svg viewBox="0 0 230 100">
    <defs>
      <pattern height="10%" id="star" viewBox="0 0 10 10" width="10%">
        <polygon points="0,0 2,5 0,10 5,8 10,10 8,5 10,0 5,2"></polygon>
      </pattern>
    </defs>
    <circle cx="50" cy="50" fill="url(#star)" r="50"></circle>
    <circle cx="180" cy="50" fill="none" r="50" stroke="url(#star)" stroke-width="20"></circle>
  </svg>
  '''
  # Not decorated: needs its own svg root with a non-default viewBox.
  with svg(viewBox="0 0 230 100") as result:
    with defs():
      with pattern(id="star", viewBox="0 0 10 10", width="10%", height="10%"):
        polygon(points="0,0 2,5 0,10 5,8 10,10 8,5 10,0 5,2")
    circle(cx=50, cy=50, r=50, fill="url(#star)")
    circle(cx=180, cy=50, r=50, fill="none", stroke_width=20, stroke="url(#star)")
  assert result.render() == get_expected(test_pattern)
@output_test
def test_radial_gradient():
  '''
  <svg height="120" version="1.1" viewBox="0 0 120 120" width="120" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
    <defs>
      <radialGradient id="myGradient">
        <stop offset="10%" stop-color="gold"></stop>
        <stop offset="95%" stop-color="red"></stop>
      </radialGradient>
    </defs>
    <circle cx="50" cy="5" fill="url(#myGradient)" r="50"></circle>
  </svg>
  '''
  # Two-stop radial gradient referenced by the circle's fill.
  with defs():
    with radialGradient(id="myGradient"):
      stop(offset="10%", stop_color="gold")
      stop(offset="95%", stop_color="red")
  circle(cx=50, cy=5, r=50, fill="url(#myGradient)")
| 10,000 | Python | .py | 231 | 38.87013 | 149 | 0.622366 | Knio/dominate | 1,689 | 109 | 18 | LGPL-3.0 | 9/5/2024, 5:07:54 PM (Europe/Amsterdam) |
653 | test_dominate.py | Knio_dominate/tests/test_dominate.py |
def test_version():
  # The package exposes its version as both `version` and `__version__`.
  import dominate
  expected = '2.9.1'
  assert dominate.version == expected
  assert dominate.__version__ == expected
| 137 | Python | .py | 5 | 24.6 | 40 | 0.717557 | Knio/dominate | 1,689 | 109 | 18 | LGPL-3.0 | 9/5/2024, 5:07:54 PM (Europe/Amsterdam) |
654 | setup.py | ranger_ranger/setup.py | #!/usr/bin/env python
# This file is part of ranger, the console file manager.
# License: GNU GPL version 3, see the file "AUTHORS" for details.
from __future__ import (absolute_import, division, print_function)
from hashlib import sha512
import os
import shutil
from setuptools import setup
from setuptools.command.install_lib import install_lib
import ranger
# Staging directory for copies of the executable scripts (see scripts_hack).
SCRIPTS_PATH = 'build_scripts'
# Path suffixes (relative to the installed lib dir) of data files that must
# be marked executable after installation (see InstallLib).
EXECUTABLES_PATHS = ['/ranger/data/scope.sh']
def findall(directory):
    '''Return the paths of all regular files directly inside *directory*.'''
    candidates = (os.path.join(directory, name) for name in os.listdir(directory))
    return [entry for entry in candidates if os.path.isfile(entry)]
def hash_file(path):
    '''Return the SHA-512 digest (raw bytes) of the file at *path*.'''
    with open(path, 'rb') as file_handle:
        contents = file_handle.read()
    return sha512(contents).digest()
def scripts_hack(*scripts):
    ''' Hack around `pip install` temporary directories '''
    # Copy each (source, basename) script into a stable staging directory so
    # setuptools references paths that outlive pip's temporary build dir.
    if not os.path.exists(SCRIPTS_PATH):
        os.makedirs(SCRIPTS_PATH)
    staged_paths = []
    for src_path, basename in scripts:
        dest_path = os.path.join(SCRIPTS_PATH, basename)
        # Refresh the copy when it is missing or its contents have changed.
        needs_copy = (
            not os.path.exists(dest_path)
            or (os.path.exists(src_path)
                and hash_file(src_path) != hash_file(dest_path)))
        if needs_copy:
            shutil.copy(src_path, dest_path)
        staged_paths.append(dest_path)
    return staged_paths
class InstallLib(install_lib):
    '''install_lib variant that chmods the files listed in EXECUTABLES_PATHS.'''
    def run(self):
        install_lib.run(self)
        # Make executables executable
        for installed in self.get_outputs():
            for suffix in EXECUTABLES_PATHS:
                if not installed.endswith(suffix):
                    continue
                # Add r-x bits for everyone while keeping existing bits.
                mode = ((os.stat(installed).st_mode) | 0o555) & 0o7777
                print('changing mode of %s to %o' % (installed, mode))
                os.chmod(installed, mode)
def main():
    """Configure and run the setuptools-based installation of ranger.

    Package metadata (version, author, license, ...) is read from the
    ranger package itself so it is maintained in a single place.
    """
    setup(
        name='ranger-fm',
        description='Vim-like file manager',
        long_description=ranger.__doc__,
        version=ranger.__version__,
        author=ranger.__author__,
        author_email=ranger.__email__,
        license=ranger.__license__,
        url='https://ranger.github.io',
        keywords='file-manager vim console file-launcher file-preview',
        classifiers=[
            'Environment :: Console',
            'Environment :: Console :: Curses',
            'Intended Audience :: Developers',
            'Intended Audience :: End Users/Desktop',
            'Intended Audience :: System Administrators',
            'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
            'Operating System :: MacOS',
            'Operating System :: POSIX',
            'Operating System :: Unix',
            'Programming Language :: Python :: 2.6',
            'Programming Language :: Python :: 2.7',
            'Programming Language :: Python :: 3.1',
            'Programming Language :: Python :: 3.2',
            'Programming Language :: Python :: 3.3',
            'Programming Language :: Python :: 3.4',
            'Programming Language :: Python :: 3.5',
            'Programming Language :: Python :: 3.6',
            'Topic :: Desktop Environment :: File Managers',
            'Topic :: Utilities',
        ],
        # Custom install_lib restores the executable bits (see InstallLib).
        cmdclass={'install_lib': InstallLib},
        # Copy entry-point scripts to stable names before installation.
        scripts=scripts_hack(
            ('ranger.py', 'ranger'),
            ('ranger/ext/rifle.py', 'rifle'),
        ),
        data_files=[
            ('share/applications', [
                'doc/ranger.desktop',
            ]),
            ('share/man/man1', [
                'doc/ranger.1',
                'doc/rifle.1',
            ]),
            ('share/doc/ranger', [
                'doc/colorschemes.md',
                'CHANGELOG.md',
                'HACKING.md',
                'README.md',
            ]),
            ('share/doc/ranger/config', findall('doc/config')),
            ('share/doc/ranger/config/colorschemes', findall('doc/config/colorschemes')),
            ('share/doc/ranger/examples', findall('examples')),
            ('share/doc/ranger/tools', findall('doc/tools')),
        ],
        package_data={
            'ranger': [
                'data/*',
                'config/rc.conf',
                'config/rifle.conf',
            ],
        },
        packages=(
            'ranger',
            'ranger.api',
            'ranger.colorschemes',
            'ranger.config',
            'ranger.container',
            'ranger.core',
            'ranger.ext',
            'ranger.ext.vcs',
            'ranger.gui',
            'ranger.gui.widgets',
        ),
    )
if __name__ == '__main__':
    main()
| 4,454 | Python | .py | 118 | 27.483051 | 91 | 0.549479 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
655 | .pylintrc | ranger_ranger/.pylintrc | [MASTER]
init-hook='import sys; sys.path.append("tests/pylint")'
load-plugins=py2_compat,python3
[BASIC]
good-names=i,j,k,n,x,y,ex,Run,_,fm,ui,fg,bg
bad-names=foo,baz,toto,tutu,tata
[DESIGN]
max-args=6
max-branches=16
[FORMAT]
max-line-length = 99
enable=no-absolute-import,old-division
disable=consider-using-f-string,cyclic-import,duplicate-code,fixme,import-outside-toplevel,locally-disabled,missing-docstring,no-else-break,no-else-continue,no-else-raise,no-else-return,raise-missing-from,stop-iteration-return,super-with-arguments,superfluous-parens,useless-object-inheritance
[TYPECHECK]
ignored-classes=ranger.core.actions.Actions,tests.ranger.container.test_fsobject.MockFM
| 685 | Python | .py | 15 | 44.4 | 293 | 0.825826 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
656 | ranger.py | ranger_ranger/ranger.py | #!/usr/bin/python -O
# This file is part of ranger, the console file manager. (coding: utf-8)
# License: GNU GPL version 3, see the file "AUTHORS" for details.
# =====================
# This embedded bash script can be executed by sourcing this file.
# It will cd to ranger's last location after you exit it.
# The first argument specifies the command to run ranger, the
# default is simply "ranger". (Not this file itself!)
# The other arguments are passed to ranger.
"""":
temp_file="$(mktemp -t "ranger_cd.XXXXXXXXXX")"
ranger="${1:-ranger}"
if [ -n "$1" ]; then
shift
fi
"$ranger" --choosedir="$temp_file" -- "${@:-$PWD}"
return_value="$?"
if chosen_dir="$(cat -- "$temp_file")" && [ -n "$chosen_dir" ] && [ "$chosen_dir" != "$PWD" ]; then
cd -- "$chosen_dir"
fi
rm -f -- "$temp_file"
return "$return_value"
"""
from __future__ import (absolute_import, division, print_function)
import sys
# Need to find out whether or not the flag --clean was used ASAP,
# because --clean is supposed to disable bytecode compilation
# Arguments after a literal "--" are file names, not flags, so they are
# excluded from the scan for --clean.
ARGV = sys.argv[1:sys.argv.index('--')] if '--' in sys.argv else sys.argv[1:]
# Must be set before the `ranger` package is imported below, since
# importing is what would write the bytecode files.
sys.dont_write_bytecode = '-c' in ARGV or '--clean' in ARGV
# Start ranger
import ranger  # NOQA pylint: disable=import-self,wrong-import-position
sys.exit(ranger.main())  # pylint: disable=no-member
| 1,314 | Python | .py | 32 | 39.65625 | 99 | 0.681284 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
657 | plugin_new_macro.py | ranger_ranger/examples/plugin_new_macro.py | # Compatible with ranger 1.6.0 through 1.7.*
#
# This plugin adds the new macro %date which is substituted with the current
# date in commands that allow macros. You can test it with the command
# ":shell echo %date; read"
from __future__ import (absolute_import, division, print_function)
import time
import ranger.core.actions
# Save the original macro function
GET_MACROS_OLD = ranger.core.actions.Actions.get_macros
# Define a new macro function
def get_macros_with_date(self):
    """Extend the original macro dict with a %date macro (current date)."""
    result = GET_MACROS_OLD(self)
    result['date'] = time.strftime('%m/%d/%Y')
    return result
# Overwrite the old one
ranger.core.actions.Actions.get_macros = get_macros_with_date
| 676 | Python | .py | 17 | 37.588235 | 76 | 0.761905 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
658 | plugin_hello_world.py | ranger_ranger/examples/plugin_hello_world.py | # Compatible with ranger 1.6.0 through 1.7.*
#
# This is a sample plugin that displays "Hello World" in ranger's console after
# it started.
from __future__ import (absolute_import, division, print_function)
# We are going to extend the hook "ranger.api.hook_ready", so first we need
# to import ranger.api:
import ranger.api
# Save the previously existing hook, because maybe another module already
# extended that hook and we don't want to lose it:
HOOK_READY_OLD = ranger.api.hook_ready
# Create a replacement for the hook that...
def hook_ready(fm):
    """Show a greeting once ranger has started, then chain the old hook."""
    fm.notify("Hello World")
    # Delegate to whatever hook_ready was installed before this plugin,
    # returning its value so the hook chain stays intact.
    return HOOK_READY_OLD(fm)
# Finally, "monkey patch" the existing hook_ready function with our replacement:
ranger.api.hook_ready = hook_ready
| 921 | Python | .py | 20 | 43.65 | 80 | 0.74804 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
659 | plugin_new_sorting_method.py | ranger_ranger/examples/plugin_new_sorting_method.py | # Compatible with ranger 1.6.0 through 1.7.*
#
# This plugin adds the sorting algorithm called 'random'. To enable it, type
# ":set sort=random" or create a key binding with ":map oz set sort=random"
from __future__ import (absolute_import, division, print_function)
from random import random
from ranger.container.directory import Directory
Directory.sort_dict['random'] = lambda path: random()
| 401 | Python | .py | 8 | 48.625 | 77 | 0.771208 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
660 | plugin_fasd_add.py | ranger_ranger/examples/plugin_fasd_add.py | # This plugin adds opened files to `fasd`
from __future__ import (absolute_import, division, print_function)
import subprocess
import ranger.api
from ranger.ext.spawn import check_output
HOOK_INIT_OLD = ranger.api.hook_init
def hook_init(fm):
    """Register a handler that records files with fasd before they open."""
    def record_selection():
        # Feed every selected file to `fasd --add`, one call per file.
        for fobj in fm.thistab.get_selection():
            try:
                check_output(['fasd', '--add', fobj.path])
            except subprocess.CalledProcessError:
                # A failing fasd call must not prevent opening the file.
                pass
    fm.signal_bind('execute.before', record_selection)
    return HOOK_INIT_OLD(fm)
ranger.api.hook_init = hook_init
| 576 | Python | .py | 16 | 29.6875 | 66 | 0.675136 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
661 | plugin_pmount_dynamic.py | ranger_ranger/examples/plugin_pmount_dynamic.py | # Tested with ranger 1.7.2
#
# This plugin creates a bunch of keybindings used to mount and unmount
# the devices using pmount(1).
#
# (multiple partitions): alt+m <letter> <digit> : mount /dev/sd<letter><digit>
# (one partition): alt+m <letter> : mount /dev/sd<letter>1
# (no partitions): alt+m <letter> : mount /dev/sd<letter>
#
# (multiple partitions): alt+M <letter> <digit> : unmount /dev/sd<letter><digit>
# (one partition): alt+M <letter> : unmount /dev/sd<letter>1
# (no partitions): alt+M <letter> : unmount /dev/sd<letter>
#
# alt+n : list the devices
from __future__ import (absolute_import, division, print_function)
import subprocess
import ranger.api
MOUNT_KEY = '<alt>m'
UMOUNT_KEY = '<alt>M'
LIST_MOUNTS_KEY = '<alt>n'
HOOK_INIT_OLD = ranger.api.hook_init
def hook_init(fm):
    # Keybinding that lists all block devices.
    fm.execute_console("map {key} shell -p lsblk".format(key=LIST_MOUNTS_KEY))
    # Collect the letters of whole disks (lines without a partition digit),
    # stripping the "sd" prefix so only the letters remain.
    diskcmd = "lsblk -lno NAME | awk '!/[1-9]/ {sub(/sd/, \"\"); print}'"
    disks = subprocess.check_output(
        diskcmd, shell=True).decode('utf-8').replace('\r', '').replace('\n', '')
    for disk in disks:
        # Take the number of the last partition as the partition count
        # (assumes partitions are numbered contiguously from 1).
        partcmd = "lsblk -lno NAME /dev/sd{0} | sed 's/sd{0}//' | tail -n 1".format(disk)
        try:
            numparts = int(subprocess.check_output(
                partcmd, shell=True).decode('utf-8').replace('\r', '').replace('\n', ''))
        except ValueError:
            numparts = 0
        if numparts == 0:
            # no partition, mount the whole device
            fm.execute_console("map {key}{0} chain shell pmount sd{0}; cd /media/sd{0}".format(
                disk, key=MOUNT_KEY))
            fm.execute_console("map {key}{0} chain cd; chain shell pumount sd{0}".format(
                disk, key=UMOUNT_KEY))
        elif numparts == 1:
            # only one partition, mount the partition
            fm.execute_console(
                "map {key}{0} chain shell pmount sd{0}1; cd /media/sd{0}1".format(
                    disk, key=MOUNT_KEY))
            fm.execute_console("map {key}{0} chain cd; shell pumount sd{0}1".format(
                disk, key=UMOUNT_KEY))
        else:
            # use range start 1, /dev/sd{device}0 doesn't exist
            for part in range(1, numparts + 1):
                fm.execute_console(
                    "map {key}{0}{1} chain shell pmount sd{0}{1}; cd /media/sd{0}{1}".format(
                        disk, part, key=MOUNT_KEY))
                fm.execute_console("map {key}{0}{1} chain cd; shell pumount sd{0}{1}".format(
                    disk, part, key=UMOUNT_KEY))
    return HOOK_INIT_OLD(fm)
ranger.api.hook_init = hook_init
| 2,674 | Python | .py | 56 | 39.5 | 95 | 0.578725 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
662 | plugin_chmod_keybindings.py | ranger_ranger/examples/plugin_chmod_keybindings.py | # Compatible with ranger 1.6.0 through ranger 1.7.*
#
# This plugin serves as an example for adding key bindings through a plugin.
# It could replace the ten lines in the rc.conf that create the key bindings
# for the "chmod" command.
from __future__ import (absolute_import, division, print_function)
import ranger.api
HOOK_INIT_OLD = ranger.api.hook_init
def hook_init(fm):
    """Bind chmod keybindings: e.g. "-ur" runs "chmod u-r" on the selection."""
    HOOK_INIT_OLD(fm)
    # Keybinding "<sign><mode><perm>" maps to "chmod <mode><sign><perm> %s".
    template = "map {0}{1}{2} shell -d chmod {1}{0}{2} %s"
    for mode in list('ugoa') + ['']:
        for perm in "rwxXst":
            for sign in ('-', '+'):
                fm.execute_console(template.format(sign, mode, perm))
ranger.api.hook_init = hook_init
| 743 | Python | .py | 17 | 39.352941 | 76 | 0.687587 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
663 | plugin_ipc.py | ranger_ranger/examples/plugin_ipc.py | # Tested with ranger 1.7.0 through ranger 1.7.*
#
# This plugin creates a FIFO in /tmp/ranger-ipc.<PID> to which any
# other program may write. Lines written to this file are evaluated by
# ranger as the ranger :commands.
#
# Example:
# $ echo tab_new ~/images > /tmp/ranger-ipc.1234
from __future__ import (absolute_import, division, print_function)
import ranger.api
HOOK_INIT_OLD = ranger.api.hook_init
def hook_init(fm):
    # Set up a named pipe (/tmp/ranger-ipc.<PID>) whose lines are executed
    # as ranger :commands, then chain the previously installed hook.
    try:
        # Create a FIFO.
        import os
        ipc_fifo = "/tmp/ranger-ipc." + str(os.getpid())
        os.mkfifo(ipc_fifo)
        # Start the reader thread.
        try:
            import thread
        except ImportError:
            # Python 3 renamed the low-level module to _thread.
            import _thread as thread
        def ipc_reader(filepath):
            while True:
                # The IPC encoding depends on the system locale so we can't
                # guess here.
                # pylint: disable=unspecified-encoding
                with open(filepath, 'r') as fifo:
                    # Blocks until a writer has opened and closed the FIFO.
                    line = fifo.read()
                fm.execute_console(line.strip())
        thread.start_new_thread(ipc_reader, (ipc_fifo,))
        # Remove the FIFO on ranger exit.
        def ipc_cleanup(filepath):
            try:
                os.unlink(filepath)
            except IOError:
                pass
        import atexit
        atexit.register(ipc_cleanup, ipc_fifo)
    except IOError:
        # IPC support disabled
        pass
    finally:
        HOOK_INIT_OLD(fm)
| 1,516 | Python | .py | 45 | 24.977778 | 75 | 0.593151 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
664 | plugin_linemode.py | ranger_ranger/examples/plugin_linemode.py | # Compatible since ranger 1.7.0
#
# This sample plugin adds a new linemode displaying the filename in rot13.
# Load this plugin by copying it to ~/.config/ranger/plugins/ and activate
# the linemode by typing ":linemode rot13" in ranger. Type Mf to restore
# the default linemode.
from __future__ import (absolute_import, division, print_function)
import codecs
import ranger.api
from ranger.core.linemode import LinemodeBase
@ranger.api.register_linemode
class MyLinemode(LinemodeBase):
    # Linemode that displays each filename ROT13-encoded; activate it in
    # ranger with ":linemode rot13".
    name = "rot13"
    def filetitle(self, fobj, metadata):
        # "rot_13" is a text-transform codec from the standard library.
        return codecs.encode(fobj.relative_path, "rot_13")
    def infostring(self, fobj, metadata):
        # This linemode provides no extra info column.
        raise NotImplementedError
| 691 | Python | .py | 17 | 37.588235 | 74 | 0.769115 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
665 | plugin_pmount.py | ranger_ranger/examples/plugin_pmount.py | # Tested with ranger 1.7.2
#
# This plugin creates a bunch of keybindings used to mount and unmount
# the devices using pmount(1).
#
# alt+m <letter> <digit>: mount /dev/sd<letter><digit>
# alt+m <uppercase letter> : mount /dev/sd<letter>
# alt+shift+m <letter> <digit>: unmount /dev/sd<letter><digit>
# alt+shift+m <uppercase letter> : unmount /dev/sd<letter>
# alt+shift+n : list the devices
from __future__ import (absolute_import, division, print_function)
import ranger.api
MOUNT_KEY = '<alt>m'
UMOUNT_KEY = '<alt>M'
LIST_MOUNTS_KEY = '<alt>N'
HOOK_INIT_OLD = ranger.api.hook_init
def hook_init(fm):
    # Keybinding that lists the available block devices.
    fm.execute_console("map {key} shell -p lsblk".format(key=LIST_MOUNTS_KEY))
    for disk in "abcdefgh":
        # Uppercase letter: (un)mount the whole device /dev/sd<letter>.
        fm.execute_console("map {key}{0} chain shell pmount sd{1}; cd /media/sd{1}".format(
            disk.upper(), disk, key=MOUNT_KEY))
        fm.execute_console("map {key}{0} chain cd; chain shell pumount sd{1}".format(
            disk.upper(), disk, key=UMOUNT_KEY))
        for part in "123456789":
            # Lowercase letter + digit: (un)mount /dev/sd<letter><digit>.
            fm.execute_console(
                "map {key}{0}{1} chain shell pmount sd{0}{1}; cd /media/sd{0}{1}".format(
                    disk, part, key=MOUNT_KEY)
            )
            fm.execute_console("map {key}{0}{1} chain cd; shell pumount sd{0}{1}".format(
                disk, part, key=UMOUNT_KEY))
    return HOOK_INIT_OLD(fm)
ranger.api.hook_init = hook_init
| 1,487 | Python | .py | 32 | 40.53125 | 91 | 0.606228 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
666 | plugin_file_filter.py | ranger_ranger/examples/plugin_file_filter.py | # Compatible since ranger 1.7.0 (git commit c82a8a76989c)
#
# This plugin hides the directories "/boot", "/sbin", "/proc" and "/sys" unless
# the "show_hidden" option is activated.
# Save the original filter function
from __future__ import (absolute_import, division, print_function)
import ranger.container.directory
ACCEPT_FILE_OLD = ranger.container.directory.accept_file
HIDE_FILES = ("/boot", "/sbin", "/proc", "/sys")
# Define a new one
def custom_accept_file(fobj, filters):
if not fobj.fm.settings.show_hidden and fobj.path in HIDE_FILES:
return False
return ACCEPT_FILE_OLD(fobj, filters)
# Overwrite the old function
ranger.container.directory.accept_file = custom_accept_file
| 713 | Python | .py | 16 | 41.9375 | 79 | 0.752547 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
667 | plugin_avfs.py | ranger_ranger/examples/plugin_avfs.py | # Tested with ranger 1.9.1
#
# A very simple and possibly buggy support for AVFS
# (http://avf.sourceforge.net/), that allows ranger to handle
# archives.
#
# Run `:avfs' to browse the selected archive.
from __future__ import (absolute_import, division, print_function)
import os
import os.path
from ranger.api.commands import Command
class avfs(Command):  # pylint: disable=invalid-name
    # Browse the selected archive through the ~/.avfs mirror tree.
    # (Comment instead of docstring: ranger shows command docstrings as help.)
    avfs_root = os.path.join(os.environ["HOME"], ".avfs")
    avfs_suffix = "#"

    def execute(self):
        # Guard clause: without a mounted ~/.avfs there is nothing to do.
        if not os.path.isdir(self.avfs_root):
            self.fm.notify("Install `avfs' and run `mountavfs' first.", bad=True)
            return
        archive_directory = "".join([
            self.avfs_root,
            self.fm.thisfile.path,
            self.avfs_suffix,
        ])
        if os.path.isdir(archive_directory):
            self.fm.cd(archive_directory)
        else:
            self.fm.notify("This file cannot be handled by avfs.", bad=True)
| 991 | Python | .py | 27 | 29.259259 | 81 | 0.624217 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
668 | pypy.yml | ranger_ranger/.github/workflows/pypy.yml | name: Pypy tests
on:
push:
paths:
- '.github/workflows/pypy.yml'
- '**.py'
- 'requirements.txt'
jobs:
test_pypy:
runs-on: ubuntu-latest
strategy:
max-parallel: 4
matrix:
python-version: [pypy2.7, pypy3.9]
env:
TERM: dumb
steps:
- uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
- name: Pypy lints and tests
run: |
make test_flake8 test_pylint test_pytest test_doctest test_other
| 735 | Python | .py | 29 | 19.448276 | 72 | 0.613636 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
669 | manpage_completion_test.py | ranger_ranger/tests/manpage_completion_test.py | #!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
import os.path
import re
import sys
# Add relevant ranger module to PATH... there surely is a better way to do this...
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
def report(boolean, errormessage):
    """Write a test-failure message to stderr when *boolean* is falsy."""
    if boolean:
        return
    sys.stderr.write('TEST FAILURE: ')
    sys.stderr.write(errormessage)
    sys.stderr.write('\n')
    sys.stderr.flush()
def get_path_of_man_page():
    """Return the path of doc/ranger.pod relative to this test file."""
    tests_dir = os.path.dirname(__file__)
    return os.path.join(tests_dir, '..', 'doc', 'ranger.pod')
def read_manpage():
    """Return the full text of the ranger man page source (POD format)."""
    # pylint: disable=unspecified-encoding
    with open(get_path_of_man_page(), 'r') as man_page:
        return man_page.read()
def get_sections():
    """Parse the man page into a dict of =head1 section name -> body text.

    The text is split on "=head1 "; for each part the first line is the
    section name and the rest is its content.  Parts without a newline
    (a heading with no body at all) are skipped.
    """
    manpage = read_manpage()
    sections = {}
    for part in manpage.split('=head1 '):
        # Replaces the original dead `else: pass` branch with a guard.
        if '\n' not in part:
            continue
        section_name, section_content = part.split('\n', 1)
        sections[section_name] = section_content
    return sections
def find_undocumented_settings():
    """Report every setting from ALLOWED_SETTINGS missing from the man page.

    For each setting, searches the SETTINGS section for a matching
    "=item ..." entry and prints a failure to stderr (via report()) when
    none is found.
    """
    from ranger.container.settings import ALLOWED_SETTINGS
    sections = get_sections()
    setting_section = sections['SETTINGS']
    # Several comma-separated setting names may share one =item line, hence
    # the permissive "[\w\d_, ]*" prefix before the setting name.
    matcher_pattern = r'^=item [\w\d_, ]*{setting}'
    for setting in ALLOWED_SETTINGS:
        matcher = re.compile(matcher_pattern.format(setting=setting), re.M)
        report(matcher.search(setting_section),
               ('Setting %s is not documented in the man page!' % setting))
if __name__ == '__main__':
    find_undocumented_settings()
| 1,640 | Python | .py | 43 | 32.325581 | 82 | 0.655281 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
670 | python3.py | ranger_ranger/tests/pylint/python3.py | # Copyright (c) 2014-2020 Claudiu Popa <[email protected]>
# Copyright (c) 2014-2015 Brett Cannon <[email protected]>
# Copyright (c) 2015 Simu Toni <[email protected]>
# Copyright (c) 2015 Pavel Roskin <[email protected]>
# Copyright (c) 2015 Ionel Cristian Maries <[email protected]>
# Copyright (c) 2015 Cosmin Poieana <[email protected]>
# Copyright (c) 2015 Viorel Stirbu <[email protected]>
# Copyright (c) 2016, 2018 Jakub Wilk <[email protected]>
# Copyright (c) 2016-2017 Roy Williams <[email protected]>
# Copyright (c) 2016 Roy Williams <[email protected]>
# Copyright (c) 2016 Łukasz Rogalski <[email protected]>
# Copyright (c) 2016 Erik <[email protected]>
# Copyright (c) 2017, 2020 hippo91 <[email protected]>
# Copyright (c) 2017-2018 Ville Skyttä <[email protected]>
# Copyright (c) 2017 Daniel Miller <[email protected]>
# Copyright (c) 2017 ahirnish <[email protected]>
# Copyright (c) 2018-2020 Anthony Sottile <[email protected]>
# Copyright (c) 2018 sbagan <[email protected]>
# Copyright (c) 2018 Lucas Cimon <[email protected]>
# Copyright (c) 2018 Aivar Annamaa <[email protected]>
# Copyright (c) 2018 ssolanki <[email protected]>
# Copyright (c) 2018 Sushobhit <[email protected]>
# Copyright (c) 2018 Ashley Whetter <[email protected]>
# Copyright (c) 2018 gaurikholkar <[email protected]>
# Copyright (c) 2019-2021 Pierre Sassoulas <[email protected]>
# Copyright (c) 2019 Nick Drozd <[email protected]>
# Copyright (c) 2019 Hugues Bruant <[email protected]>
# Copyright (c) 2019 Gabriel R Sezefredo <[email protected]>
# Copyright (c) 2019 Hugo van Kemenade <[email protected]>
# Copyright (c) 2019 bluesheeptoken <[email protected]>
# Copyright (c) 2020 Peter Kolbus <[email protected]>
# Copyright (c) 2020 谭九鼎 <[email protected]>
# Copyright (c) 2020 Federico Bond <[email protected]>
# Copyright (c) 2020 Athos Ribeiro <[email protected]>
# Copyright (c) 2021 Marc Mueller <[email protected]>
# Copyright (c) 2021 bot <[email protected]>
# Copyright (c) 2021 Tiago Honorato <[email protected]>
# Copyright (c) 2021 tiagohonorato <[email protected]>
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
# For details: https://github.com/PyCQA/pylint/blob/main/LICENSE
"""Check Python 2 code for Python 2/3 source-compatible issues."""
import itertools
import re
import tokenize
from collections import namedtuple
import astroid
from astroid import nodes
from pylint import checkers, interfaces
from pylint.checkers import utils
from pylint.checkers.utils import find_try_except_wrapper_node, node_ignores_exception
from pylint.constants import WarningScope
from pylint.interfaces import INFERENCE, INFERENCE_FAILURE
_ZERO = re.compile("^0+$")
def _is_old_octal(literal):
if _ZERO.match(literal):
return False
if re.match(r"0\d+", literal):
try:
int(literal, 8)
except ValueError:
return False
return True
return None
def _inferred_value_is_dict(value):
    """Return True when the inferred *value* represents a dict."""
    return isinstance(value, nodes.Dict) or (
        isinstance(value, astroid.Instance) and "dict" in value.basenames
    )
def _infer_if_relevant_attr(node, relevant_attrs):
return node.expr.infer() if node.attrname in relevant_attrs else []
def _is_builtin(node):
return getattr(node, "name", None) in ("__builtin__", "builtins")
# Builtins (by bare name) that accept any iterable argument, so passing
# them a Python 3 iterator counts as an iterating context.
_ACCEPTS_ITERATOR = {
    "iter",
    "list",
    "tuple",
    "sorted",
    "set",
    "sum",
    "any",
    "all",
    "enumerate",
    "dict",
    "filter",
    "reversed",
    "max",
    "min",
    "frozenset",
    "OrderedDict",
    "zip",
    "map",
}
# Attribute (method) names that likewise accept an arbitrary iterable.
ATTRIBUTES_ACCEPTS_ITERATOR = {"join", "from_iterable"}
# Fully qualified builtin methods that accept an iterator argument.
_BUILTIN_METHOD_ACCEPTS_ITERATOR = {
    "builtins.list.extend",
    "builtins.dict.update",
    "builtins.set.update",
}
# The dict view methods whose return type differs between Python 2 and 3.
DICT_METHODS = {"items", "keys", "values"}
def _in_iterating_context(node):
    """Check if the node is being used as an iterator.
    Definition is taken from lib2to3.fixer_util.in_special_context().
    """
    parent = node.parent
    # Since a call can't be the loop variant we only need to know if the node's
    # parent is a 'for' loop to know it's being used as the iterator for the
    # loop.
    if isinstance(parent, nodes.For):
        return True
    # Need to make sure the use of the node is in the iterator part of the
    # comprehension.
    if isinstance(parent, nodes.Comprehension):
        if parent.iter == node:
            return True
    # Various built-ins can take in an iterable or list and lead to the same
    # value.
    elif isinstance(parent, nodes.Call):
        if isinstance(parent.func, nodes.Name):
            if parent.func.name in _ACCEPTS_ITERATOR:
                return True
        elif isinstance(parent.func, nodes.Attribute):
            if parent.func.attrname in ATTRIBUTES_ACCEPTS_ITERATOR:
                return True
            # Fall back to inference for qualified methods (list.extend,
            # itertools.*) that accept iterators.
            inferred = utils.safe_infer(parent.func)
            if inferred:
                if inferred.qname() in _BUILTIN_METHOD_ACCEPTS_ITERATOR:
                    return True
                root = inferred.root()
                if root and root.name == "itertools":
                    return True
    # If the call is in an unpacking, there's no need to warn,
    # since it can be considered iterating.
    elif isinstance(parent, nodes.Assign) and isinstance(
        parent.targets[0], (nodes.List, nodes.Tuple)
    ):
        if len(parent.targets[0].elts) > 1:
            return True
    # If the call is in a containment check, we consider that to
    # be an iterating context
    elif (
        isinstance(parent, nodes.Compare)
        and len(parent.ops) == 1
        and parent.ops[0][0] in ["in", "not in"]
    ):
        return True
    # Also if it's an `yield from`, that's fair
    elif isinstance(parent, nodes.YieldFrom):
        return True
    # A starred expression (*node) unpacks, i.e. iterates, its operand.
    if isinstance(parent, nodes.Starred):
        return True
    return False
def _is_conditional_import(node):
    """Checks if an import node is in the context of a conditional."""
    conditional_node_types = (
        nodes.TryExcept,
        nodes.ExceptHandler,
        nodes.If,
        nodes.IfExp,
    )
    return isinstance(node.parent, conditional_node_types)
# A code branch plus a flag recording whether (per the checker's analysis)
# it can only ever execute under Python 2.
Branch = namedtuple("Branch", ["node", "is_py2_only"])
class Python3Checker(checkers.BaseChecker):
__implements__ = interfaces.IAstroidChecker
enabled = False
name = "python3"
msgs = {
# Errors for what will syntactically break in Python 3, warnings for
# everything else.
"E1601": (
"print statement used",
"print-statement",
"Used when a print statement is used "
"(`print` is a function in Python 3)",
),
"E1602": (
"Parameter unpacking specified",
"parameter-unpacking",
"Used when parameter unpacking is specified for a function"
"(Python 3 doesn't allow it)",
),
"E1603": (
"Implicit unpacking of exceptions is not supported in Python 3",
"unpacking-in-except",
"Python3 will not allow implicit unpacking of "
"exceptions in except clauses. "
"See https://www.python.org/dev/peps/pep-3110/",
{"old_names": [("W0712", "old-unpacking-in-except")]},
),
"E1604": (
"Use raise ErrorClass(args) instead of raise ErrorClass, args.",
"old-raise-syntax",
"Used when the alternate raise syntax "
"'raise foo, bar' is used "
"instead of 'raise foo(bar)'.",
{"old_names": [("W0121", "old-old-raise-syntax")]},
),
"E1605": (
"Use of the `` operator",
"backtick",
'Used when the deprecated "``" (backtick) operator is used '
"instead of the str() function.",
{"scope": WarningScope.NODE, "old_names": [("W0333", "old-backtick")]},
),
"E1609": (
"Import * only allowed at module level",
"import-star-module-level",
"Used when the import star syntax is used somewhere "
"else than the module level.",
{"maxversion": (3, 0)},
),
"W1601": (
"apply built-in referenced",
"apply-builtin",
"Used when the apply built-in function is referenced "
"(missing from Python 3)",
),
"W1602": (
"basestring built-in referenced",
"basestring-builtin",
"Used when the basestring built-in function is referenced "
"(missing from Python 3)",
),
"W1603": (
"buffer built-in referenced",
"buffer-builtin",
"Used when the buffer built-in function is referenced "
"(missing from Python 3)",
),
"W1604": (
"cmp built-in referenced",
"cmp-builtin",
"Used when the cmp built-in function is referenced "
"(missing from Python 3)",
),
"W1605": (
"coerce built-in referenced",
"coerce-builtin",
"Used when the coerce built-in function is referenced "
"(missing from Python 3)",
),
"W1606": (
"execfile built-in referenced",
"execfile-builtin",
"Used when the execfile built-in function is referenced "
"(missing from Python 3)",
),
"W1607": (
"file built-in referenced",
"file-builtin",
"Used when the file built-in function is referenced "
"(missing from Python 3)",
),
"W1608": (
"long built-in referenced",
"long-builtin",
"Used when the long built-in function is referenced "
"(missing from Python 3)",
),
"W1609": (
"raw_input built-in referenced",
"raw_input-builtin",
"Used when the raw_input built-in function is referenced "
"(missing from Python 3)",
),
"W1610": (
"reduce built-in referenced",
"reduce-builtin",
"Used when the reduce built-in function is referenced "
"(missing from Python 3)",
),
"W1611": (
"StandardError built-in referenced",
"standarderror-builtin",
"Used when the StandardError built-in function is referenced "
"(missing from Python 3)",
),
"W1612": (
"unicode built-in referenced",
"unicode-builtin",
"Used when the unicode built-in function is referenced "
"(missing from Python 3)",
),
"W1613": (
"xrange built-in referenced",
"xrange-builtin",
"Used when the xrange built-in function is referenced "
"(missing from Python 3)",
),
"W1614": (
"__coerce__ method defined",
"coerce-method",
"Used when a __coerce__ method is defined "
"(method is not used by Python 3)",
),
"W1615": (
"__delslice__ method defined",
"delslice-method",
"Used when a __delslice__ method is defined "
"(method is not used by Python 3)",
),
"W1616": (
"__getslice__ method defined",
"getslice-method",
"Used when a __getslice__ method is defined "
"(method is not used by Python 3)",
),
"W1617": (
"__setslice__ method defined",
"setslice-method",
"Used when a __setslice__ method is defined "
"(method is not used by Python 3)",
),
"W1618": (
"import missing `from __future__ import absolute_import`",
"no-absolute-import",
"Used when an import is not accompanied by "
"``from __future__ import absolute_import`` "
"(default behaviour in Python 3)",
),
"W1619": (
"division w/o __future__ statement",
"old-division",
"Used for non-floor division w/o a float literal or "
"``from __future__ import division`` "
"(Python 3 returns a float for int division unconditionally)",
),
"W1620": (
"Calling a dict.iter*() method",
"dict-iter-method",
"Used for calls to dict.iterkeys(), itervalues() or iteritems() "
"(Python 3 lacks these methods)",
),
"W1621": (
"Calling a dict.view*() method",
"dict-view-method",
"Used for calls to dict.viewkeys(), viewvalues() or viewitems() "
"(Python 3 lacks these methods)",
),
"W1622": (
"Called a next() method on an object",
"next-method-called",
"Used when an object's next() method is called "
"(Python 3 uses the next() built-in function)",
),
"W1623": (
"Assigning to a class's __metaclass__ attribute",
"metaclass-assignment",
"Used when a metaclass is specified by assigning to __metaclass__ "
"(Python 3 specifies the metaclass as a class statement argument)",
),
"W1624": (
"Indexing exceptions will not work on Python 3",
"indexing-exception",
"Indexing exceptions will not work on Python 3. Use "
"`exception.args[index]` instead.",
{"old_names": [("W0713", "old-indexing-exception")]},
),
"W1625": (
"Raising a string exception",
"raising-string",
"Used when a string exception is raised. This will not "
"work on Python 3.",
{"old_names": [("W0701", "old-raising-string")]},
),
"W1626": (
"reload built-in referenced",
"reload-builtin",
"Used when the reload built-in function is referenced "
"(missing from Python 3). You can use instead imp.reload "
"or importlib.reload.",
),
"W1627": (
"__oct__ method defined",
"oct-method",
"Used when an __oct__ method is defined "
"(method is not used by Python 3)",
),
"W1628": (
"__hex__ method defined",
"hex-method",
"Used when a __hex__ method is defined (method is not used by Python 3)",
),
"W1629": (
"__nonzero__ method defined",
"nonzero-method",
"Used when a __nonzero__ method is defined "
"(method is not used by Python 3)",
),
"W1630": (
"__cmp__ method defined",
"cmp-method",
"Used when a __cmp__ method is defined (method is not used by Python 3)",
),
# 'W1631': replaced by W1636
"W1632": (
"input built-in referenced",
"input-builtin",
"Used when the input built-in is referenced "
"(backwards-incompatible semantics in Python 3)",
),
"W1633": (
"round built-in referenced",
"round-builtin",
"Used when the round built-in is referenced "
"(backwards-incompatible semantics in Python 3)",
),
"W1634": (
"intern built-in referenced",
"intern-builtin",
"Used when the intern built-in is referenced "
"(Moved to sys.intern in Python 3)",
),
"W1635": (
"unichr built-in referenced",
"unichr-builtin",
"Used when the unichr built-in is referenced (Use chr in Python 3)",
),
"W1636": (
"map built-in referenced when not iterating",
"map-builtin-not-iterating",
"Used when the map built-in is referenced in a non-iterating "
"context (returns an iterator in Python 3)",
{"old_names": [("W1631", "implicit-map-evaluation")]},
),
"W1637": (
"zip built-in referenced when not iterating",
"zip-builtin-not-iterating",
"Used when the zip built-in is referenced in a non-iterating "
"context (returns an iterator in Python 3)",
),
"W1638": (
"range built-in referenced when not iterating",
"range-builtin-not-iterating",
"Used when the range built-in is referenced in a non-iterating "
"context (returns a range in Python 3)",
),
"W1639": (
"filter built-in referenced when not iterating",
"filter-builtin-not-iterating",
"Used when the filter built-in is referenced in a non-iterating "
"context (returns an iterator in Python 3)",
),
"W1640": (
"Using the cmp argument for list.sort / sorted",
"using-cmp-argument",
"Using the cmp argument for list.sort or the sorted "
"builtin should be avoided, since it was removed in "
"Python 3. Using either `key` or `functools.cmp_to_key` "
"should be preferred.",
),
"W1641": (
"Implementing __eq__ without also implementing __hash__",
"eq-without-hash",
"Used when a class implements __eq__ but not __hash__. In Python 2, objects "
"get object.__hash__ as the default implementation, in Python 3 objects get "
"None as their default __hash__ implementation if they also implement __eq__.",
),
"W1642": (
"__div__ method defined",
"div-method",
"Used when a __div__ method is defined. Using `__truediv__` and setting"
"__div__ = __truediv__ should be preferred."
"(method is not used by Python 3)",
),
"W1643": (
"__idiv__ method defined",
"idiv-method",
"Used when an __idiv__ method is defined. Using `__itruediv__` and setting"
"__idiv__ = __itruediv__ should be preferred."
"(method is not used by Python 3)",
),
"W1644": (
"__rdiv__ method defined",
"rdiv-method",
"Used when a __rdiv__ method is defined. Using `__rtruediv__` and setting"
"__rdiv__ = __rtruediv__ should be preferred."
"(method is not used by Python 3)",
),
"W1645": (
"Exception.message removed in Python 3",
"exception-message-attribute",
"Used when the message attribute is accessed on an Exception. Use "
"str(exception) instead.",
),
"W1646": (
"non-text encoding used in str.decode",
"invalid-str-codec",
"Used when using str.encode or str.decode with a non-text encoding. Use "
"codecs module to handle arbitrary codecs.",
),
"W1647": (
"sys.maxint removed in Python 3",
"sys-max-int",
"Used when accessing sys.maxint. Use sys.maxsize instead.",
),
"W1648": (
"Module moved in Python 3",
"bad-python3-import",
"Used when importing a module that no longer exists in Python 3.",
),
"W1649": (
"Accessing a deprecated function on the string module",
"deprecated-string-function",
"Used when accessing a string function that has been deprecated in Python 3.",
),
"W1650": (
"Using str.translate with deprecated deletechars parameters",
"deprecated-str-translate-call",
"Used when using the deprecated deletechars parameters from str.translate. Use "
"re.sub to remove the desired characters ",
),
"W1651": (
"Accessing a deprecated function on the itertools module",
"deprecated-itertools-function",
"Used when accessing a function on itertools that has been removed in Python 3.",
),
"W1652": (
"Accessing a deprecated fields on the types module",
"deprecated-types-field",
"Used when accessing a field on types that has been removed in Python 3.",
),
"W1653": (
"next method defined",
"next-method-defined",
"Used when a next method is defined that would be an iterator in Python 2 but "
"is treated as a normal function in Python 3.",
),
"W1654": (
"dict.items referenced when not iterating",
"dict-items-not-iterating",
"Used when dict.items is referenced in a non-iterating "
"context (returns an iterator in Python 3)",
),
"W1655": (
"dict.keys referenced when not iterating",
"dict-keys-not-iterating",
"Used when dict.keys is referenced in a non-iterating "
"context (returns an iterator in Python 3)",
),
"W1656": (
"dict.values referenced when not iterating",
"dict-values-not-iterating",
"Used when dict.values is referenced in a non-iterating "
"context (returns an iterator in Python 3)",
),
"W1657": (
"Accessing a removed attribute on the operator module",
"deprecated-operator-function",
"Used when accessing a field on operator module that has been "
"removed in Python 3.",
),
"W1658": (
"Accessing a removed attribute on the urllib module",
"deprecated-urllib-function",
"Used when accessing a field on urllib module that has been "
"removed or moved in Python 3.",
),
"W1659": (
"Accessing a removed xreadlines attribute",
"xreadlines-attribute",
"Used when accessing the xreadlines() function on a file stream, "
"removed in Python 3.",
),
"W1660": (
"Accessing a removed attribute on the sys module",
"deprecated-sys-function",
"Used when accessing a field on sys module that has been "
"removed in Python 3.",
),
"W1661": (
"Using an exception object that was bound by an except handler",
"exception-escape",
"Emitted when using an exception, that was bound in an except "
"handler, outside of the except handler. On Python 3 these "
"exceptions will be deleted once they get out "
"of the except handler.",
),
"W1662": (
"Using a variable that was bound inside a comprehension",
"comprehension-escape",
"Emitted when using a variable, that was bound in a comprehension "
"handler, outside of the comprehension itself. On Python 3 these "
"variables will be deleted outside of the "
"comprehension.",
),
}
_bad_builtins = frozenset(
[
"apply",
"basestring",
"buffer",
"cmp",
"coerce",
"execfile",
"file",
"input", # Not missing, but incompatible semantics
"intern",
"long",
"raw_input",
"reduce",
"round", # Not missing, but incompatible semantics
"StandardError",
"unichr",
"unicode",
"xrange",
"reload",
]
)
_unused_magic_methods = frozenset(
[
"__coerce__",
"__delslice__",
"__getslice__",
"__setslice__",
"__oct__",
"__hex__",
"__nonzero__",
"__cmp__",
"__div__",
"__idiv__",
"__rdiv__",
]
)
_invalid_encodings = frozenset(
[
"base64_codec",
"base64",
"base_64",
"bz2_codec",
"bz2",
"hex_codec",
"hex",
"quopri_codec",
"quopri",
"quotedprintable",
"quoted_printable",
"uu_codec",
"uu",
"zlib_codec",
"zlib",
"zip",
"rot13",
"rot_13",
]
)
_bad_python3_module_map = {
"sys-max-int": {"sys": frozenset(["maxint"])},
"deprecated-itertools-function": {
"itertools": frozenset(
["izip", "ifilter", "imap", "izip_longest", "ifilterfalse"]
)
},
"deprecated-types-field": {
"types": frozenset(
[
"EllipsisType",
"XRangeType",
"ComplexType",
"StringType",
"TypeType",
"LongType",
"UnicodeType",
"ClassType",
"BufferType",
"StringTypes",
"NotImplementedType",
"NoneType",
"InstanceType",
"FloatType",
"SliceType",
"UnboundMethodType",
"ObjectType",
"IntType",
"TupleType",
"ListType",
"DictType",
"FileType",
"DictionaryType",
"BooleanType",
"DictProxyType",
]
)
},
"bad-python3-import": frozenset(
[
"anydbm",
"BaseHTTPServer",
"__builtin__",
"CGIHTTPServer",
"ConfigParser",
"copy_reg",
"cPickle",
"cStringIO",
"Cookie",
"cookielib",
"dbhash",
"dumbdbm",
"dumbdb",
"Dialog",
"DocXMLRPCServer",
"FileDialog",
"FixTk",
"gdbm",
"htmlentitydefs",
"HTMLParser",
"httplib",
"markupbase",
"Queue",
"repr",
"robotparser",
"ScrolledText",
"SimpleDialog",
"SimpleHTTPServer",
"SimpleXMLRPCServer",
"StringIO",
"dummy_thread",
"SocketServer",
"test.test_support",
"Tkinter",
"Tix",
"Tkconstants",
"tkColorChooser",
"tkCommonDialog",
"Tkdnd",
"tkFileDialog",
"tkFont",
"tkMessageBox",
"tkSimpleDialog",
"UserList",
"UserString",
"whichdb",
"_winreg",
"xmlrpclib",
"audiodev",
"Bastion",
"bsddb185",
"bsddb3",
"Canvas",
"cfmfile",
"cl",
"commands",
"compiler",
"dircache",
"dl",
"exception",
"fpformat",
"htmllib",
"ihooks",
"imageop",
"imputil",
"linuxaudiodev",
"md5",
"mhlib",
"mimetools",
"MimeWriter",
"mimify",
"multifile",
"mutex",
"new",
"popen2",
"posixfile",
"pure",
"rexec",
"rfc822",
"sets",
"sha",
"sgmllib",
"sre",
"stringold",
"sunaudio",
"sv",
"test.testall",
"thread",
"timing",
"toaiff",
"user",
"urllib2",
"urlparse",
]
),
"deprecated-string-function": {
"string": frozenset(
[
"maketrans",
"atof",
"atoi",
"atol",
"capitalize",
"expandtabs",
"find",
"rfind",
"index",
"rindex",
"count",
"lower",
"letters",
"split",
"rsplit",
"splitfields",
"join",
"joinfields",
"lstrip",
"rstrip",
"strip",
"swapcase",
"translate",
"upper",
"ljust",
"rjust",
"center",
"zfill",
"replace",
"lowercase",
"letters",
"uppercase",
"atol_error",
"atof_error",
"atoi_error",
"index_error",
]
)
},
"deprecated-operator-function": {"operator": frozenset({"div"})},
"deprecated-urllib-function": {
"urllib": frozenset(
{
"addbase",
"addclosehook",
"addinfo",
"addinfourl",
"always_safe",
"basejoin",
"ftpcache",
"ftperrors",
"ftpwrapper",
"getproxies",
"getproxies_environment",
"getproxies_macosx_sysconf",
"main",
"noheaders",
"pathname2url",
"proxy_bypass",
"proxy_bypass_environment",
"proxy_bypass_macosx_sysconf",
"quote",
"quote_plus",
"reporthook",
"splitattr",
"splithost",
"splitnport",
"splitpasswd",
"splitport",
"splitquery",
"splittag",
"splittype",
"splituser",
"splitvalue",
"unquote",
"unquote_plus",
"unwrap",
"url2pathname",
"urlcleanup",
"urlencode",
"urlopen",
"urlretrieve",
}
)
},
"deprecated-sys-function": {"sys": frozenset({"exc_clear"})},
}
_deprecated_attrs = frozenset(
itertools.chain.from_iterable(
attr
for module_map in _bad_python3_module_map.values()
if isinstance(module_map, dict)
for attr in module_map.values()
)
)
_relevant_call_attrs = (
DICT_METHODS | _deprecated_attrs | {"encode", "decode", "translate"}
)
_python_2_tests = frozenset(
astroid.extract_node(x).repr_tree()
for x in (
"sys.version_info[0] == 2",
"sys.version_info[0] < 3",
"sys.version_info == (2, 7)",
"sys.version_info <= (2, 7)",
"sys.version_info < (3, 0)",
)
)
def __init__(self, *args, **kwargs):
self._future_division = False
self._future_absolute_import = False
self._modules_warned_about = set()
self._branch_stack = []
super().__init__(*args, **kwargs)
# pylint: disable=keyword-arg-before-vararg, arguments-differ
def add_message(self, msg_id, always_warn=False, *args, **kwargs):
if always_warn or not (
self._branch_stack and self._branch_stack[-1].is_py2_only
):
super().add_message(msg_id, *args, **kwargs)
def _is_py2_test(self, node):
if isinstance(node.test, nodes.Attribute) and isinstance(
node.test.expr, nodes.Name
):
if node.test.expr.name == "six" and node.test.attrname == "PY2":
return True
elif (
isinstance(node.test, nodes.Compare)
and node.test.repr_tree() in self._python_2_tests
):
return True
return False
def visit_if(self, node):
self._branch_stack.append(Branch(node, self._is_py2_test(node)))
def leave_if(self, node):
new_node = self._branch_stack.pop().node
assert new_node == node
def visit_ifexp(self, node):
self._branch_stack.append(Branch(node, self._is_py2_test(node)))
def leave_ifexp(self, node):
new_node = self._branch_stack.pop()
assert new_node.node == node
def visit_module(self, node): # pylint: disable=unused-argument
"""Clear checker state after previous module."""
self._future_division = False
self._future_absolute_import = False
def visit_functiondef(self, node):
if node.is_method():
if node.name in self._unused_magic_methods:
method_name = node.name
if node.name.startswith("__"):
method_name = node.name[2:-2]
self.add_message(method_name + "-method", node=node)
elif node.name == "next":
# If there is a method named `next` declared, if it is invokable
# with zero arguments then it implements the Iterator protocol.
# This means if the method is an instance method or a
# classmethod 1 argument should cause a failure, if it is a
# staticmethod 0 arguments should cause a failure.
failing_arg_count = 1
if utils.decorated_with(node, ["builtins.staticmethod"]):
failing_arg_count = 0
if len(node.args.args) == failing_arg_count:
self.add_message("next-method-defined", node=node)
@utils.check_messages("parameter-unpacking")
def visit_arguments(self, node):
for arg in node.args:
if isinstance(arg, nodes.Tuple):
self.add_message("parameter-unpacking", node=arg)
    @utils.check_messages("comprehension-escape")
    def visit_listcomp(self, node):
        """Flag comprehension loop variables that are read after the
        comprehension: Python 2 leaks them into the enclosing scope,
        Python 3 does not, so such reads break under Python 3.
        """
        # Simple loop-variable names bound by this comprehension.
        names = {
            generator.target.name
            for generator in node.generators
            if isinstance(generator.target, nodes.AssignName)
        }
        scope = node.parent.scope()
        scope_names = scope.nodes_of_class(nodes.Name, skip_klass=nodes.FunctionDef)
        # If any of those names is reassigned later in the enclosing scope,
        # later reads refer to that assignment, not the leaked loop variable.
        has_redefined_assign_name = any(
            assign_name
            for assign_name in scope.nodes_of_class(
                nodes.AssignName, skip_klass=nodes.FunctionDef
            )
            if assign_name.name in names and assign_name.lineno > node.lineno
        )
        if has_redefined_assign_name:
            return
        emitted_for_names = set()
        scope_names = list(scope_names)
        for scope_name in scope_names:
            # Warn at most once per name, only for reads after the
            # comprehension that are not inside the comprehension itself.
            if (
                scope_name.name not in names
                or scope_name.lineno <= node.lineno
                or scope_name.name in emitted_for_names
                or scope_name.scope() == node
            ):
                continue
            emitted_for_names.add(scope_name.name)
            self.add_message("comprehension-escape", node=scope_name)
def visit_name(self, node):
"""Detect when a "bad" built-in is referenced."""
found_node, _ = node.lookup(node.name)
if not _is_builtin(found_node):
return
if node.name not in self._bad_builtins:
return
if node_ignores_exception(node) or isinstance(
find_try_except_wrapper_node(node), nodes.ExceptHandler
):
return
message = node.name.lower() + "-builtin"
self.add_message(message, node=node)
@utils.check_messages("print-statement")
def visit_print(self, node):
self.add_message("print-statement", node=node, always_warn=True)
    def _warn_if_deprecated(self, node, module, attributes, report_on_modules=True):
        """Emit removal/move warnings for *module* and/or its *attributes*.

        ``_bad_python3_module_map`` values come in two shapes: a frozenset of
        module names (the whole module is gone) or a dict mapping a module
        name to the frozenset of its removed attributes.
        """
        for message, module_map in self._bad_python3_module_map.items():
            # A module already reported this run is never reported again.
            if module in module_map and module not in self._modules_warned_about:
                if isinstance(module_map, frozenset):
                    # Whole-module removal: only report at import sites.
                    if report_on_modules:
                        self._modules_warned_about.add(module)
                        self.add_message(message, node=node)
                elif attributes and module_map[module].intersection(attributes):
                    # One of the referenced attributes was removed.
                    self.add_message(message, node=node)
    def visit_importfrom(self, node):
        """Track ``__future__`` imports and check ``from x import y`` forms."""
        if node.modname == "__future__":
            # Record which future features are active for this module.
            for name, _ in node.names:
                if name == "division":
                    self._future_division = True
                elif name == "absolute_import":
                    self._future_absolute_import = True
        else:
            if not self._future_absolute_import:
                if self.linter.is_message_enabled("no-absolute-import"):
                    self.add_message("no-absolute-import", node=node)
                    # Warn only once per module.
                    self._future_absolute_import = True
            if not _is_conditional_import(node) and not node.level:
                # Absolute, unconditional import: check the module and the
                # imported names against the removed/moved tables.
                self._warn_if_deprecated(node, node.modname, {x[0] for x in node.names})
        if node.names[0][0] == "*":
            if self.linter.is_message_enabled("import-star-module-level"):
                # ``from x import *`` is a syntax error below module level
                # in Python 3.
                if not isinstance(node.scope(), nodes.Module):
                    self.add_message("import-star-module-level", node=node)
def visit_import(self, node):
if not self._future_absolute_import:
if self.linter.is_message_enabled("no-absolute-import"):
self.add_message("no-absolute-import", node=node)
self._future_absolute_import = True
if not _is_conditional_import(node):
for name, _ in node.names:
self._warn_if_deprecated(node, name, None)
@utils.check_messages("metaclass-assignment")
def visit_classdef(self, node):
if "__metaclass__" in node.locals:
self.add_message("metaclass-assignment", node=node)
locals_and_methods = set(node.locals).union(x.name for x in node.mymethods())
if "__eq__" in locals_and_methods and "__hash__" not in locals_and_methods:
self.add_message("eq-without-hash", node=node)
    @utils.check_messages("old-division")
    def visit_binop(self, node):
        """Flag ``/`` that may be integer division without future division."""
        if not self._future_division and node.op == "/":
            for arg in (node.left, node.right):
                inferred = utils.safe_infer(arg)
                # If we can infer the object and that object is not an int, bail out.
                if inferred and not (
                    (
                        isinstance(inferred, nodes.Const)
                        and isinstance(inferred.value, int)
                    )
                    or (
                        isinstance(inferred, astroid.Instance)
                        and inferred.name == "int"
                    )
                ):
                    break
            else:
                # for/else: no break occurred, so both operands look like
                # ints (or could not be inferred) -- old division applies.
                self.add_message("old-division", node=node)
    def _check_cmp_argument(self, node):
        # Check that the `cmp` argument is used: it was removed from
        # list.sort() and sorted() in Python 3 (use key= instead).
        kwargs = []
        if isinstance(node.func, nodes.Attribute) and node.func.attrname == "sort":
            # Only consider .sort() on something we can infer to be a list.
            inferred = utils.safe_infer(node.func.expr)
            if not inferred:
                return
            builtins_list = "builtins.list"
            if isinstance(inferred, nodes.List) or inferred.qname() == builtins_list:
                kwargs = node.keywords
        elif isinstance(node.func, nodes.Name) and node.func.name == "sorted":
            # Only consider calls that resolve to the sorted built-in.
            inferred = utils.safe_infer(node.func)
            if not inferred:
                return
            builtins_sorted = "builtins.sorted"
            if inferred.qname() == builtins_sorted:
                kwargs = node.keywords
        for kwarg in kwargs or []:
            if kwarg.arg == "cmp":
                self.add_message("using-cmp-argument", node=node)
                return
@staticmethod
def _is_constant_string_or_name(node):
if isinstance(node, nodes.Const):
return isinstance(node.value, str)
return isinstance(node, nodes.Name)
@staticmethod
def _is_none(node):
return isinstance(node, nodes.Const) and node.value is None
@staticmethod
def _has_only_n_positional_args(node, number_of_args):
return len(node.args) == number_of_args and all(node.args) and not node.keywords
    @staticmethod
    def _could_be_string(inferred_types):
        """Return a confidence that every inferred type is a str, else None.

        INFERENCE when all inferred values are string constants,
        INFERENCE_FAILURE when inference was empty or hit Uninferable, and
        None as soon as a definitely-non-string value is seen.
        """
        confidence = INFERENCE if inferred_types else INFERENCE_FAILURE
        for inferred_type in inferred_types:
            if inferred_type is astroid.Uninferable:
                # Downgrade but keep going: a later value may rule out str.
                confidence = INFERENCE_FAILURE
            elif not (
                isinstance(inferred_type, nodes.Const)
                and isinstance(inferred_type.value, str)
            ):
                return None
        return confidence
    def visit_call(self, node):
        """Run the call-site checks: removed ``cmp=`` argument, dict/builtin
        methods used outside an iterating context, removed dict methods,
        non-text codecs, and the two-argument ``str.translate`` form.
        """
        self._check_cmp_argument(node)
        if isinstance(node.func, nodes.Attribute):
            inferred_types = set()
            try:
                # Infer the receiver only for attribute names we care about.
                for inferred_receiver in _infer_if_relevant_attr(
                    node.func, self._relevant_call_attrs
                ):
                    if inferred_receiver is astroid.Uninferable:
                        continue
                    inferred_types.add(inferred_receiver)
                    if isinstance(inferred_receiver, nodes.Module):
                        # module.attr(): check for removed module attributes.
                        self._warn_if_deprecated(
                            node,
                            inferred_receiver.name,
                            {node.func.attrname},
                            report_on_modules=False,
                        )
                    if (
                        _inferred_value_is_dict(inferred_receiver)
                        and node.func.attrname in DICT_METHODS
                    ):
                        # dict.keys()/values()/items() return views in
                        # Python 3; fine only when immediately iterated.
                        if not _in_iterating_context(node):
                            checker = "dict-{0}-not-iterating".format(
                                node.func.attrname
                            )
                            self.add_message(checker, node=node)
            except astroid.InferenceError:
                pass
            if node.args:
                is_str_confidence = self._could_be_string(inferred_types)
                if is_str_confidence:
                    if (
                        node.func.attrname in ("encode", "decode")
                        and len(node.args) >= 1
                        and node.args[0]
                    ):
                        first_arg = node.args[0]
                        self._validate_encoding(first_arg, node)
                    if (
                        node.func.attrname == "translate"
                        and self._has_only_n_positional_args(node, 2)
                        and self._is_none(node.args[0])
                        and self._is_constant_string_or_name(node.args[1])
                    ):
                        # The above statement looking for calls of the form:
                        #
                        # foo.translate(None, 'abc123')
                        #
                        # or
                        #
                        # foo.translate(None, some_variable)
                        #
                        # This check is somewhat broad and _may_ have some false positives, but
                        # after checking several large codebases it did not have any false
                        # positives while finding several real issues. This call pattern seems
                        # rare enough that the trade off is worth it.
                        self.add_message(
                            "deprecated-str-translate-call",
                            node=node,
                            confidence=is_str_confidence,
                        )
                # Calls with positional args cannot be the zero-argument
                # dict/iterator methods checked below.
                return
            if node.keywords:
                return
            if node.func.attrname == "next":
                self.add_message("next-method-called", node=node)
            elif node.func.attrname in ("iterkeys", "itervalues", "iteritems"):
                self.add_message("dict-iter-method", node=node)
            elif node.func.attrname in ("viewkeys", "viewvalues", "viewitems"):
                self.add_message("dict-view-method", node=node)
        elif isinstance(node.func, nodes.Name):
            found_node = node.func.lookup(node.func.name)[0]
            if _is_builtin(found_node):
                if node.func.name in ("filter", "map", "range", "zip"):
                    # These return iterators in Python 3; only safe when
                    # the result is consumed by iteration.
                    if not _in_iterating_context(node):
                        checker = "{0}-builtin-not-iterating".format(
                            node.func.name
                        )
                        self.add_message(checker, node=node)
                elif node.func.name == "open" and node.keywords:
                    kwargs = node.keywords
                    for kwarg in kwargs or []:
                        if kwarg.arg == "encoding":
                            self._validate_encoding(kwarg.value, node)
                            break
def _validate_encoding(self, encoding, node):
if isinstance(encoding, nodes.Const):
value = encoding.value
if value in self._invalid_encodings:
self.add_message("invalid-str-codec", node=node)
@utils.check_messages("indexing-exception")
def visit_subscript(self, node):
"""Look for indexing exceptions."""
try:
for inferred in node.value.infer():
if not isinstance(inferred, astroid.Instance):
continue
if utils.inherit_from_std_ex(inferred):
self.add_message("indexing-exception", node=node)
except astroid.InferenceError:
return
def visit_assignattr(self, node):
if isinstance(node.assign_type(), nodes.AugAssign):
self.visit_attribute(node)
    def visit_delattr(self, node):
        # Deleting an attribute references it, so reuse the read checks.
        self.visit_attribute(node)
    @utils.check_messages("exception-message-attribute", "xreadlines-attribute")
    def visit_attribute(self, node):
        """Look for removed attributes"""
        if node.attrname == "xreadlines":
            # file.xreadlines() was removed in Python 3.
            self.add_message("xreadlines-attribute", node=node)
            return
        exception_message = "message"
        try:
            # Infer the receiver only for attribute names we care about.
            for inferred in _infer_if_relevant_attr(
                node, self._deprecated_attrs | {exception_message}
            ):
                if isinstance(inferred, astroid.Instance) and utils.inherit_from_std_ex(
                    inferred
                ):
                    if node.attrname == exception_message:
                        # Exceptions with .message clearly defined are an exception
                        if exception_message in inferred.instance_attrs:
                            continue
                        self.add_message("exception-message-attribute", node=node)
                if isinstance(inferred, nodes.Module):
                    # module.attr: check for attributes removed in Python 3.
                    self._warn_if_deprecated(
                        node, inferred.name, {node.attrname}, report_on_modules=False
                    )
        except astroid.InferenceError:
            return
    @utils.check_messages("unpacking-in-except", "comprehension-escape")
    def visit_excepthandler(self, node):
        """Visit an except handler block and check for exception unpacking.

        Also flags the bound exception name being read after the handler:
        Python 3 deletes it when the handler exits ("exception-escape").
        """
        def _is_used_in_except_block(node, block):
            # True when *node* is lexically inside *block* (walk parents).
            current = node
            while current and current is not block:
                current = current.parent
            return current is not None
        if isinstance(node.name, (nodes.Tuple, nodes.List)):
            # ``except E, (a, b):`` -- tuple unpacking removed in Python 3.
            self.add_message("unpacking-in-except", node=node)
            return
        if not node.name:
            # Anonymous handler: nothing can leak.
            return
        # Find any names
        scope = node.parent.scope()
        scope_names = scope.nodes_of_class(nodes.Name, skip_klass=nodes.FunctionDef)
        scope_names = list(scope_names)
        # Reads of the bound name after the handler, outside its body.
        potential_leaked_names = [
            scope_name
            for scope_name in scope_names
            if scope_name.name == node.name.name
            and scope_name.lineno > node.lineno
            and not _is_used_in_except_block(scope_name, node)
        ]
        # Lines where the same name is (re)assigned anywhere in the scope.
        reassignments_for_same_name = {
            assign_name.lineno
            for assign_name in scope.nodes_of_class(
                nodes.AssignName, skip_klass=nodes.FunctionDef
            )
            if assign_name.name == node.name.name
        }
        for leaked_name in potential_leaked_names:
            # A reassignment between the handler and the read means the read
            # no longer refers to the (deleted) exception binding.
            if any(
                node.lineno < elem < leaked_name.lineno
                for elem in reassignments_for_same_name
            ):
                continue
            self.add_message("exception-escape", node=leaked_name)
    @utils.check_messages("backtick")
    def visit_repr(self, node):
        # Backtick repr syntax (`x`) was removed in Python 3; use repr(x).
        self.add_message("backtick", node=node)
@utils.check_messages("raising-string", "old-raise-syntax")
def visit_raise(self, node):
"""Visit a raise statement and check for raising
strings or old-raise-syntax.
"""
# Ignore empty raise.
if node.exc is None:
return
expr = node.exc
if self._check_raise_value(node, expr):
return
try:
value = next(astroid.unpack_infer(expr))
except astroid.InferenceError:
return
self._check_raise_value(node, value)
def _check_raise_value(self, node, expr):
if isinstance(expr, nodes.Const):
value = expr.value
if isinstance(value, str):
self.add_message("raising-string", node=node)
return True
return None
class Python3TokenChecker(checkers.BaseTokenChecker):
    """Token-level Python 3 porting checks: long-integer suffixes, the
    ``<>`` operator, old octal literals, and non-ASCII bytes literals."""

    __implements__ = interfaces.ITokenChecker
    name = "python3"
    enabled = False
    msgs = {
        "E1606": (
            "Use of long suffix",
            "long-suffix",
            'Used when "l" or "L" is used to mark a long integer. '
            "This will not work in Python 3, since `int` and `long` "
            "types have merged.",
            {"maxversion": (3, 0)},
        ),
        "E1607": (
            "Use of the <> operator",
            "old-ne-operator",
            'Used when the deprecated "<>" operator is used instead '
            'of "!=". This is removed in Python 3.',
            {"maxversion": (3, 0), "old_names": [("W0331", "old-old-ne-operator")]},
        ),
        "E1608": (
            "Use of old octal literal",
            "old-octal-literal",
            "Used when encountering the old octal syntax, "
            "removed in Python 3. To use the new syntax, "
            "prepend 0o on the number.",
            {"maxversion": (3, 0)},
        ),
        "E1610": (
            "Non-ascii bytes literals not supported in 3.x",
            "non-ascii-bytes-literal",
            "Used when non-ascii bytes literals are found in a program. "
            "They are no longer supported in Python 3.",
            {"maxversion": (3, 0)},
        ),
    }

    def process_tokens(self, tokens):
        """Scan the raw token stream for Python-2-only literal syntax."""
        for tok_type, token, start, _, _ in tokens:
            line_no = start[0]
            if tok_type == tokenize.NUMBER:
                if token.lower().endswith("l"):
                    # This has a different semantic than lowercase-l-suffix.
                    self.add_message("long-suffix", line=line_no)
                elif _is_old_octal(token):
                    self.add_message("old-octal-literal", line=line_no)
            if token == "<>":
                self.add_message("old-ne-operator", line=line_no)
            if tok_type == tokenize.STRING and token.startswith("b"):
                if any(ord(char) > 127 for char in token):
                    self.add_message("non-ascii-bytes-literal", line=line_no)
def register(linter):
    """Required plugin entry point: register both Python 3 checkers."""
    for checker_class in (Python3Checker, Python3TokenChecker):
        linter.register_checker(checker_class(linter))
| 53,422 | Python | .py | 1,347 | 26.956199 | 95 | 0.52401 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
671 | py2_compat.py | ranger_ranger/tests/pylint/py2_compat.py | from __future__ import absolute_import
import astroid
from pylint.checkers import BaseChecker
from pylint.interfaces import HIGH
from pylint.checkers import utils
class Py2CompatibilityChecker(BaseChecker):
    """Verify some simple properties of code compatible with both 2 and 3"""
    # The name defines a custom section of the config for this checker.
    name = "py2-compat"
    # The priority indicates the order that pylint will run the checkers.
    priority = -1
    # This class variable declares the messages (ie the warnings and errors)
    # that the checker can emit.
    msgs = {
        # Each message has a code, a message that the user will see,
        # a unique symbol that identifies the message,
        # and a detailed help message
        # that will be included in the documentation.
        "E4200": ('Use explicit inheritance from "object"',
                  "old-style-class",
                  'Python 2 requires explicit inheritance from "object"'
                  ' for new-style classes.'),
        # old-division
        # "E4210": ('Use "//" for integer division or import from "__future__"',
        #           "division-without-import",
        #           'Python 2 might perform integer division unless you import'
        #           ' "division" from "__future__".'),
        # no-absolute-import
        # "E4211": ('Always import "absolute_import" from "__future__"',
        #           "old-no-absolute-import",
        #           'Python 2 allows relative imports unless you import'
        #           ' "absolute_import" from "__future__".'),
        "E4212": ('Import "print_function" from "__future__"',
                  "print-without-import",
                  'Python 2 requires importing "print_function" from'
                  ' "__future__" to use the "print()" function syntax.'),
        "E4220": ('Use explicit format spec numbering',
                  "implicit-format-spec",
                  'Python 2.6 does not support implicit format spec numbering'
                  ' "{}", use explicit numbering "{0}" or keywords "{key}".'),
        # BUG FIX: the implicitly concatenated literals below lacked
        # separating spaces, producing "...contextmanagers,popen23..." and
        # "...use withwith-statements." in the rendered help text.
        "E4230": ("Use popen23.Popen with with-statements",
                  "with-popen23",
                  "Python 2 subprocess.Popen objects were not contextmanagers, "
                  "popen23.Popen wraps them to enable use with "
                  "with-statements."),
        "E4240": ("Use format method",
                  "use-format-method",
                  "Python 2 (and <3.6) does not support f-strings."),
    }
    # This class variable declares the options
    # that are configurable by the user.
    options = ()

    def visit_classdef(self, node):
        """Make sure classes explicitly inherit from object"""
        if not node.bases and node.type == 'class' and not node.metaclass():
            # We use confidence HIGH here because this message should only ever
            # be emitted for classes at the root of the inheritance hierarchy
            self.add_message('old-style-class', node=node, confidence=HIGH)

    def visit_call(self, node):
        """Make sure "print_function" is imported if necessary, and that
        str.format calls on constant strings number their format specs
        explicitly."""
        if (isinstance(node.func, astroid.nodes.Name)
                and node.func.name == "print"):
            if "print_function" in node.root().future_imports:
                # The future import exists somewhere in the module; walk
                # backwards from the call to ensure it occurs *before* it.
                def previous(node):
                    # Previous sibling, or the parent when node is first.
                    if node is not None:
                        parent = node.parent
                        previous = node.previous_sibling()
                        if previous is None:
                            return parent
                        return previous
                prev = previous(node)
                while prev is not None:
                    if (isinstance(prev, astroid.nodes.ImportFrom)
                            and prev.modname == "__future__"
                            and "print_function" in (name_alias[0] for name_alias in
                                                     prev.names)):
                        return
                    prev = previous(prev)
                self.add_message("print-without-import", node=node,
                                 confidence=HIGH)
            else:
                self.add_message("print-without-import", node=node,
                                 confidence=HIGH)
        func = utils.safe_infer(node.func)
        if (
            isinstance(func, astroid.BoundMethod)
            and isinstance(func.bound, astroid.Instance)
            and func.bound.name in ("str", "unicode", "bytes")
        ):
            if func.name == "format":
                # Only check .format() on string constants with plain
                # positional/keyword arguments.
                if isinstance(node.func, astroid.Attribute) and not isinstance(
                    node.func.expr, astroid.Const
                ):
                    return
                if node.starargs or node.kwargs:
                    return
                try:
                    strnode = next(func.bound.infer())
                except astroid.InferenceError:
                    return
                if not (isinstance(strnode, astroid.Const) and isinstance(
                        strnode.value, str)):
                    return
                try:
                    _fields, num_args, _manual_pos = (
                        utils.parse_format_method_string(strnode.value)
                    )
                except utils.IncompleteFormatString:
                    # NOTE(review): "bad-format-string" is not declared in this
                    # checker's msgs table -- confirm pylint accepts emitting a
                    # message id owned by another checker here.
                    self.add_message("bad-format-string", node=node)
                    # BUG FIX: return here. Previously execution fell through
                    # to the num_args test below with num_args unbound,
                    # raising NameError on malformed format strings.
                    return
                # num_args counts implicit "{}" fields; any of them is an
                # error on Python 2.6.
                if num_args != 0:
                    self.add_message("implicit-format-spec", node=node,
                                     confidence=HIGH)

    def visit_joinedstr(self, node):
        """Make sure we don't use f-strings"""
        if isinstance(node, astroid.nodes.JoinedStr):
            self.add_message("use-format-method", node=node, confidence=HIGH)

    def visit_with(self, node):
        """Make sure subprocess.Popen objects aren't used in with-statements"""
        for (cm, _) in node.items:
            if isinstance(cm, astroid.nodes.Call):
                # NOTE(review): the second branch compares the astroid node
                # `cm.func.expr` to the string "subprocess", which can never
                # be true, so `with subprocess.Popen(...)` is not flagged.
                # Left unchanged because test_with_Popen encodes the current
                # behaviour; the fix is to compare `cm.func.expr.name` for
                # Name nodes and update that test in the same change.
                if ((isinstance(cm.func, astroid.nodes.Name)
                        and cm.func.name.endswith("Popen")
                        and (node.root().scope_lookup(node.root(), "Popen")[1][0]
                             ).modname == "subprocess")
                        or (isinstance(cm.func, astroid.nodes.Attribute)
                            and cm.func.expr == "subprocess"
                            and cm.func.attrname == "Popen")):
                    self.add_message("with-popen23", node=node, confidence=HIGH)
def register(linter):
    """This required method auto registers the checker.

    :param linter: The linter to register the checker to.
    :type linter: pylint.lint.PyLinter
    """
    checker = Py2CompatibilityChecker(linter)
    linter.register_checker(checker)
| 6,700 | Python | .py | 134 | 35.634328 | 80 | 0.553435 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
672 | test_py2_compat.py | ranger_ranger/tests/pylint/test_py2_compat.py | from __future__ import absolute_import
import py2_compat
import astroid
import pylint.testutils
from pylint.interfaces import HIGH
from sys import version_info
# True when running under Python 2; used to skip Python-3-only test cases.
PY2 = version_info.major < 3
class TestPy2CompatibilityChecker(pylint.testutils.CheckerTestCase):
CHECKER_CLASS = py2_compat.Py2CompatibilityChecker
    def test_oldstyle_class(self):
        """old-style-class fires only at the root of the hierarchy."""
        oldstyle_class, from_old = astroid.extract_node("""
        class OldStyle(): #@
            pass
        class FromOld(OldStyle): #@
            pass
        """)
        with self.assertAddsMessages(
            pylint.testutils.MessageTest(
                msg_id='old-style-class',
                node=oldstyle_class,
                confidence=HIGH,
            ),
            ignore_position=True,
        ):
            self.checker.visit_classdef(oldstyle_class)
        # Subclasses have a base, so they must not be reported.
        with self.assertNoMessages():
            self.checker.visit_classdef(from_old)
def test_newstyle_class(self):
newstyle_class, from_new = astroid.extract_node("""
class NewStyle(object): #@
pass
class FromNew(NewStyle): #@
pass
""")
with self.assertNoMessages():
self.checker.visit_classdef(newstyle_class)
self.checker.visit_classdef(from_new)
    def test_print_without_import(self):
        """print() with no future import anywhere must be reported."""
        # On Python 2 a bare print() cannot even be parsed this way.
        if PY2:
            return
        print_function_call = astroid.extract_node("""
        print("Print function call without importing print_function")
        """)
        with self.assertAddsMessages(
            pylint.testutils.MessageTest(
                msg_id='print-without-import',
                node=print_function_call,
                confidence=HIGH,
            ),
            ignore_position=True,
        ):
            self.checker.visit_call(print_function_call)
    def test_print_with_import(self):
        """print() preceded by the future import (even in an enclosing
        scope) must not be reported."""
        print_function_call = astroid.extract_node("""
        from __future__ import print_function
        print("Print function call with importing print_function") #@
        """)
        nested_print_function_call = astroid.extract_node("""
        def f():
            from __future__ import print_function
            class A():
                def m(self):
                    print("Nested print with import in scope") #@
        """)
        with self.assertNoMessages():
            self.checker.visit_call(print_function_call)
            self.checker.visit_call(nested_print_function_call)
    def test_print_late_import(self):
        """A future import that only occurs *after* the print() call must
        still be reported."""
        if PY2:
            return
        early_print_function_call = astroid.extract_node("""
        print("Nested print with import in scope") #@
        def f():
            from __future__ import print_function
            class A():
                def m(self):
                    pass
        """)
        with self.assertAddsMessages(
            pylint.testutils.MessageTest(
                msg_id='print-without-import',
                node=early_print_function_call,
                confidence=HIGH,
            ),
            ignore_position=True,
        ):
            self.checker.visit_call(early_print_function_call)
def test_implicit_format_spec(self):
if PY2:
return
implicit_format_spec = astroid.extract_node("""
"{}".format("implicit") #@
""")
with self.assertAddsMessages(
pylint.testutils.MessageTest(
msg_id='implicit-format-spec',
node=implicit_format_spec,
confidence=HIGH,
),
ignore_position=True,
):
self.checker.visit_call(implicit_format_spec)
def test_with_Popen(self):
with_subprocess_Popen, with_Popen, with_Popen23 = astroid.extract_node("""
import subprocess
with subprocess.Popen(): #@
pass
from subprocess import Popen
with Popen(): #@
pass
from ranger.ext.popen23 import Popen23
with Popen23(): #@
pass
""")
with self.assertAddsMessages(
pylint.testutils.MessageTest(
msg_id='with-popen23',
node=with_Popen,
confidence=HIGH,
),
ignore_position=True,
):
self.checker.visit_with(with_subprocess_Popen)
self.checker.visit_with(with_Popen)
with self.assertNoMessages():
self.checker.visit_with(with_Popen23)
def test_use_format(self):
old_format, new_format, f_string = astroid.extract_node("""
"2 + 2 is %s" % (2+2) #@
"2 + 2 is {0}".format(2+2) #@
f"2 + 2 is {2+2}" #@
""")
with self.assertAddsMessages(
pylint.testutils.MessageTest(
msg_id='use-format-method',
node=f_string,
confidence=HIGH,
),
ignore_position=True,
):
self.checker.visit_joinedstr(f_string)
with self.assertNoMessages():
self.checker.visit_joinedstr(old_format)
self.checker.visit_joinedstr(new_format)
# # These checks still exist as old-division and no-absolute-import
# def test_division_without_import(self):
# division = astroid.extract_node("""
# 5/2
# """)
# with self.assertAddsMessages(
# pylint.testutils.MessageTest(
# msg_id='division-without-import',
# node=division,
# ),
# ):
# self.checker.visit_XXX(division)
# def test_division_with_import(self):
# division = astroid.extract_node("""
# from __future__ import division
# 5/2 #@
# """)
# with self.assertNoMessages():
# self.checker.visit_XXX(division)
# def test_absolute_import(self):
# no_import = astroid.extract_node("""
# import sys
# """)
# with self.assertAddsMessages(
# pylint.testutils.MessageTest(
# msg_id='old-no-absolute-import',
# node=no_import,
# ),
# ):
# self.checker.visit_XXX(no_import)
# only_import = astroid.extract_node("""
# from __future__ import absolute_import
# """)
# first_import = astroid.extract_node("""
# from __future__ import absolute_import, print_function
# """)
# second_import = astroid.extract_node("""
# from __future__ import print_function, absolute_import
# """)
# with self.assertNoMessages():
# self.checker.visit_XXX(only_import)
# self.checker.visit_XXX(first_import)
# self.checker.visit_XXX(second_import)
| 6,767 | Python | .py | 187 | 26.379679 | 82 | 0.556184 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
673 | test_container.py | ranger_ranger/tests/ranger/container/test_container.py | from __future__ import (absolute_import, division, print_function)
from ranger.container import history
HISTORY_TEST_ENTRIES = [str(k) for k in range(20)]
OTHER_TEST_ENTRIES = [str(k) for k in range(40, 45)]
def testhistorybasic():
    """Exercise the core History API: add, navigation, search, rebase, modify."""
    # A history is a buffer of limited size that stores the last `maxlen`
    # item added to it. It has a `current` index that serves as a cursor.
    # A history has a limited size, check that only `maxlen` items are stored
    hist = history.History(maxlen=10)
    for entry in HISTORY_TEST_ENTRIES:
        hist.add(entry)
    # 10 items are stored
    assert len(hist) == 10
    assert hist.current() == "19"
    assert hist.top() == "19"
    assert hist.bottom() == "10"
    # going back in time affects only changes current item
    hist.back()
    assert len(hist) == 10
    assert hist.current() == "18"
    assert hist.top() == "19"
    assert hist.bottom() == "10"
    # __iter__ is actually an iterator and we can iterate through the list
    iterator = iter(hist)
    assert iter(iterator) == iterator
    assert list(iterator) == HISTORY_TEST_ENTRIES[10:]
    # search allows to go back in time as long as a pattern matches and we don't
    # go over a step limit
    assert hist.search("45", -9) == "18"
    assert hist.search("1", -5) == "13"
    # fast forward selects the last item
    hist.fast_forward()
    assert hist.current() == "19"
    # back followed by forward is a noop
    hist.back()
    hist.forward()
    assert hist.current() == "19"
    # move can be expressed as multiple calls to back and forward
    hist.move(-3)
    hist.forward()
    hist.forward()
    hist.forward()
    assert hist.current() == "19"
    # back, forward, move play well with boundaries
    for _ in range(30):
        hist.back()
    for _ in range(30):
        hist.forward()
    for _ in range(30):
        hist.move(-2)
    for _ in range(30):
        hist.move(2)
    assert hist.current() == "19"
    # we can create an history from another history
    hist = history.History(maxlen=10)
    for entry in HISTORY_TEST_ENTRIES:
        hist.add(entry)
    # XXX maxlen should not be used to refer to something that isn't a length
    otherh = history.History(maxlen=hist)
    assert list(hist) == list(otherh)
    # Rebase replaces the past of the history with that of another
    otherh = history.History(maxlen=hist)
    old_current_item = hist.current()
    for entry in OTHER_TEST_ENTRIES:
        otherh.add(entry)
    assert list(otherh)[-3:] == ["42", "43", "44"]
    hist.rebase(otherh)
    assert hist.current() == old_current_item
    assert list(hist)[-3:] == ['43', '44', old_current_item]
    # modify, modifies the top of the stack
    hist.modify("23")
    assert hist.current() == "23"
def testhistoryunique():
    """A history created with unique=True keeps at most one copy of an entry.

    Re-adding an existing entry moves it to the top instead of duplicating it.
    """
    unique_hist = history.History(maxlen=10, unique=True)
    for item in HISTORY_TEST_ENTRIES:
        unique_hist.add(item)
    assert unique_hist.current() == "19"
    # Re-adding "17" must not create a second copy; it becomes current instead.
    unique_hist.add("17")
    assert list(unique_hist).count("17") == 1
    assert unique_hist.current() == "17"
| 3,101 | Python | .py | 81 | 32.91358 | 80 | 0.656771 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
674 | test_fsobject.py | ranger_ranger/tests/ranger/container/test_fsobject.py | from __future__ import (absolute_import, division, print_function)
import operator
from ranger.container.fsobject import FileSystemObject
class MockFM(object): # pylint: disable=too-few-public-methods
    """Used to fulfill the dependency by FileSystemObject."""
    # An empty list means no user-defined linemode overrides; FileSystemObject
    # reads this attribute from its fm object — TODO confirm against
    # FileSystemObject's implementation.
    default_linemodes = []
def create_filesystem_object(path):
    """Build a FileSystemObject for *path*, wiring in a stub fm instance."""
    stub_fm = MockFM()
    filesystem_object = FileSystemObject(path)
    filesystem_object.fm_set(stub_fm)
    return filesystem_object
def test_basename_natural1():
    """Test filenames without extensions."""
    names = (
        "0", "1", "2", "3",
        "10", "11", "12", "13",
        "100", "101", "102", "103",
        "110", "111", "112", "113",
        "hello",
        "hello1", "hello2",
        "hello11", "hello12",
        "hello100", "hello101", "hello111", "hello112",
    )
    fsos = [create_filesystem_object(name) for name in names]
    shuffled = fsos[::-1]
    # Natural sorting must restore the original (humanly sorted) order.
    assert fsos == sorted(shuffled, key=operator.attrgetter("basename_natural"))
    assert fsos == sorted(shuffled, key=operator.attrgetter("basename_natural_lower"))
def test_basename_natural2():
    """Test filenames with extensions.

    Bug fix: the original tuple was missing commas at the end of several
    lines, so adjacent string literals were implicitly concatenated into
    nonsense names like "hello3.txthello10.txt".  The commas below restore
    the intended one-filename-per-literal list.
    """
    fsos = [
        create_filesystem_object(path)
        for path in (
            "hello", "hello.txt",
            "hello0.txt", "hello1.txt", "hello2.txt", "hello3.txt",
            "hello10.txt", "hello11.txt", "hello12.txt", "hello13.txt",
            "hello100.txt", "hello101.txt", "hello102.txt", "hello103.txt",
            "hello110.txt", "hello111.txt", "hello112.txt", "hello113.txt",
        )
    ]
    assert fsos == sorted(fsos[::-1], key=operator.attrgetter("basename_natural"))
    assert fsos == sorted(fsos[::-1], key=operator.attrgetter("basename_natural_lower"))
| 1,757 | Python | .py | 42 | 34.190476 | 88 | 0.606221 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
675 | test_bookmarks.py | ranger_ranger/tests/ranger/container/test_bookmarks.py | from __future__ import (absolute_import, division, print_function)
import os
import time
import pytest
from ranger.container.bookmarks import Bookmarks
class NotValidatedBookmarks(Bookmarks):
    """Bookmarks subclass whose value validation always succeeds."""
    def _validate(self, value):
        # Tests store plain strings instead of real directory objects, so
        # accept any value unconditionally.
        return True
def testbookmarks(tmpdir):
    """End-to-end check of the Bookmarks store: add/remove, persist, reload."""
    # Bookmarks point to directory location and allow fast access to
    # 'favorite' directories. They are persisted to a bookmark file, plain text.
    bookmarkfile = tmpdir.join("bookmarkfile")
    bmstore = NotValidatedBookmarks(str(bookmarkfile))
    # loading an empty bookmark file doesn't crash
    bmstore.load()
    # One can add / remove and check existing of bookmark
    bmstore["h"] = "world"
    assert "h" in bmstore
    del bmstore["h"]
    # Only one letter/digit bookmarks are valid, adding something else fails
    # silently
    bmstore["hello"] = "world"
    assert "hello" not in bmstore
    # The default bookmark is ', remember allows to set it
    bmstore.remember("the milk")
    assert bmstore["'"] == "the milk"
    # We can persist bookmarks to disk and restore them from disk
    bmstore.save()
    secondstore = NotValidatedBookmarks(str(bookmarkfile))
    secondstore.load()
    assert "'" in secondstore
    assert secondstore["'"] == "the milk"
    # We don't unnecessary update when the file on disk does not change
    origupdate = secondstore.update
    class OutOfDateException(Exception):
        pass
    def crash():
        raise OutOfDateException("Don't access me")
    # Replace update() with a sentinel: it must NOT be called while the
    # file's mtime is unchanged.
    secondstore.update = crash
    secondstore.update_if_outdated()
    # If the modification time change, we try to read the file
    newtime = time.time() - 5
    os.utime(str(bookmarkfile), (newtime, newtime))
    with pytest.raises(OutOfDateException):
        secondstore.update_if_outdated()
    # Restore the real update method; the refresh must now succeed.
    secondstore.update = origupdate
    secondstore.update_if_outdated()
def test_bookmark_symlink(tmpdir):
    """Saving through a symlinked bookmark file must not replace the link."""
    # The bookmark file is a symlink pointing at a plain file.
    link_path = str(tmpdir.join("bookmarkfile"))
    target_path = str(tmpdir.join("bookmarkfile.orig"))
    os.symlink(target_path, link_path)
    # Create a bookmark store on the symlink and persist it.
    store = Bookmarks(link_path)
    store.save()
    # After saving, the link is still a symlink and the target is a plain file.
    assert os.path.islink(link_path)
    assert not os.path.islink(target_path)
| 2,510 | Python | .py | 59 | 37.457627 | 94 | 0.730453 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
676 | test_main.py | ranger_ranger/tests/ranger/core/test_main.py | from __future__ import absolute_import
import collections
import os
from ranger.core import main
def test_get_paths():
    """Every path returned by main.get_paths(args) must exist on disk."""
    FakeArgs = collections.namedtuple('args', 'paths')
    parsed_args = FakeArgs(paths=None)
    for returned_path in main.get_paths(parsed_args):
        assert os.path.exists(returned_path)
# Allow running this test file directly as a plain script.
if __name__ == '__main__':
    test_get_paths()
| 357 | Python | .py | 12 | 25.833333 | 56 | 0.692308 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
677 | convert_papermode_to_metadata.py | ranger_ranger/doc/tools/convert_papermode_to_metadata.py | #!/usr/bin/env python
"""
usage: ./convert_papermode_to_metadata.py
This script converts the .paperinfo CSV file in the current directory to an
equivalent .metadata.json file.
ranger used to store metadata in .paperinfo files, but that format was rather
limited, so .metadata.json files were introduced.
"""
from __future__ import (absolute_import, division, print_function)
import csv
import json
import os
import sys
if sys.version_info[0] < 3:
input = raw_input # NOQA pylint: disable=undefined-variable,redefined-builtin,invalid-name
# Column layout of a .paperinfo CSV row; row[0] is the key, the rest become
# entries of the per-file metadata dict.
FIELDS = ["name", "year", "title", "authors", "url"]


def replace(source, target):
    """Convert the CSV metadata file *source* into the JSON file *target*.

    Reads rows of the form (name, year, title, authors, url) from *source*
    and writes a ``{name: {field: value, ...}}`` mapping to *target*,
    asking for confirmation before overwriting an existing *target*.
    Rows with the wrong number of columns are skipped with a warning.
    """
    if not os.path.exists(source):
        print("Source file `%s' doesn't exist, skipping." % source)
        return
    # Ask for user confirmation if the target file already exists
    if os.path.exists(target):
        # Bug fix: the original wrote a literal "%s" placeholder without
        # interpolating the target path into the prompt.
        sys.stdout.write(
            "Warning: target file `%s' exists! Overwrite? [y/N]" % target)
        userinput = input()
        if not (userinput.startswith("y") or userinput.startswith("Y")):
            print("Skipping file `%s'" % source)
            return
    result = {}
    # Read the input file and convert it to a dictionary
    # pylint: disable=unspecified-encoding
    # Bug fix: read from *source* instead of the hard-coded ".paperinfo".
    with open(source, "r") as infile:
        reader = csv.reader(infile, skipinitialspace=True)
        for lineno, row in enumerate(reader):
            if len(row) != len(FIELDS):
                print("skipping invalid row `%s' on line %d" % (row, lineno))
                continue
            name = row[0]
            entry = {}
            # Fill the entry dict with the non-empty columns after the name.
            for i, column in enumerate(row[1:]):
                if column:
                    entry[FIELDS[i + 1]] = column
            # Only keep rows that carried at least one non-empty field.
            if entry:
                result[name] = entry
    # Write the obtained dictionary into the target file
    if result:
        # There's no way to specify encoding in 2.x even though in this case we
        # could choose to write in UTF-8.
        # pylint: disable=unspecified-encoding
        # Bug fix: write to *target* instead of the hard-coded ".metadata.json".
        with open(target, "w") as outfile:
            json.dump(result, outfile, indent=2)
    else:
        print("Skipping writing `%s' due to a lack of data" % target)
if __name__ == "__main__":
    # Print the module usage text when asked for help; otherwise convert the
    # historical default filenames in the current working directory.
    if set(['--help', '-h']) & set(sys.argv[1:]):
        print(__doc__.strip())
    else:
        replace(".paperinfo", ".metadata.json")
| 2,434 | Python | .py | 59 | 33.728814 | 95 | 0.625106 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
678 | print_colors.py | ranger_ranger/doc/tools/print_colors.py | #!/usr/bin/env python
"""
You can use this tool to display all supported colors and their color number.
It will exit after a keypress.
"""
from __future__ import (absolute_import, division, print_function)
import curses
# NOTE: using curses.wrapper as a decorator runs main() at import time —
# this file is a standalone interactive tool, not an importable module.
@curses.wrapper
def main(win):
    """Display every supported color pair (normal and bold) until a keypress."""
    def print_all_colors(attr):
        # Try every color number; init_pair can fail for out-of-range or
        # reserved pairs, in which case the color is simply skipped.
        for color in range(-1, curses.COLORS):
            try:
                curses.init_pair(color, color, 0)
            except curses.error:
                pass
            else:
                win.addstr(str(color) + ' ', curses.color_pair(color) | attr)
    curses.start_color()
    try:
        curses.use_default_colors()
    except curses.error:
        # Terminals without default-color support still work; -1 just fails.
        pass
    win.addstr("available colors: %d\n\n" % curses.COLORS)
    print_all_colors(0)
    win.addstr("\n\n")
    print_all_colors(curses.A_BOLD)
    win.refresh()
    # Wait for a keypress so the user can read the output before exiting.
    win.getch()
| 835 | Python | .py | 28 | 23.392857 | 77 | 0.62391 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
679 | performance_test.py | ranger_ranger/doc/tools/performance_test.py | #!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
import sys
import time
sys.path.insert(0, '../..')
sys.path.insert(0, '.')
def main():
    """Time how long ranger takes to load /usr and its direct subdirectories."""
    # Imports are local so the module-level sys.path tweaks above take
    # effect before ranger is imported.
    import ranger.container.directory
    import ranger.core.shared
    import ranger.container.settings
    import ranger.core.fm
    from ranger.ext.openstruct import OpenStruct
    # Minimal fake command-line arguments expected by ranger's core.
    ranger.args = OpenStruct()
    ranger.args.clean = True
    ranger.args.debug = False
    settings = ranger.container.settings.Settings()
    ranger.core.shared.SettingsAware.settings_set(settings)
    fm = ranger.core.fm.FM()
    ranger.core.shared.FileManagerAware.fm_set(fm)
    time1 = time.time()
    fm.initialize()
    try:
        # Load /usr synchronously (schedule=False), then every subdirectory.
        usr = ranger.container.directory.Directory('/usr')
        usr.load_content(schedule=False)
        for fileobj in usr.files:
            if fileobj.is_directory:
                fileobj.load_content(schedule=False)
    finally:
        # Always tear down the fm so the terminal state is restored.
        fm.destroy()
    time2 = time.time()
    print("%dms" % ((time2 - time1) * 1000))
# Run the benchmark only when executed as a script.
if __name__ == '__main__':
    main()
| 1,065 | Python | .py | 33 | 26.878788 | 66 | 0.673509 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
680 | print_keys.py | ranger_ranger/doc/tools/print_keys.py | #!/usr/bin/env python
"""
You can use this tool to find out values of keypresses
"""
from __future__ import (absolute_import, division, print_function)
import curses
SEPARATOR = '; '
# NOTE: using curses.wrapper as a decorator runs main() at import time —
# this file is a standalone interactive tool, not an importable module.
@curses.wrapper
def main(window):
    """Echo raw getch() key codes (and mouse events) until interrupted."""
    # Report all mouse events immediately (no click-interval merging).
    curses.mousemask(curses.ALL_MOUSE_EVENTS)
    curses.mouseinterval(0)
    while True:
        char = window.getch()
        if char == curses.KEY_MOUSE:
            # Mouse events carry their details in getmouse(), not in the code.
            window.addstr(repr(curses.getmouse()) + SEPARATOR)
        else:
            window.addstr(str(char) + SEPARATOR)
| 506 | Python | .py | 17 | 24.882353 | 66 | 0.677019 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
681 | commands.py | ranger_ranger/doc/config/commands.py | # -*- coding: utf-8 -*-
# This file is part of ranger, the console file manager.
# This configuration file is licensed under the same terms as ranger.
# ===================================================================
#
# NOTE: If you copied this file to /etc/ranger/commands_full.py or
# ~/.config/ranger/commands_full.py, then it will NOT be loaded by ranger,
# and only serve as a reference.
#
# ===================================================================
# This file contains ranger's commands.
# It's all in python; lines beginning with # are comments.
#
# Note that additional commands are automatically generated from the methods
# of the class ranger.core.actions.Actions.
#
# You can customize commands in the files /etc/ranger/commands.py (system-wide)
# and ~/.config/ranger/commands.py (per user).
# They have the same syntax as this file. In fact, you can just copy this
# file to ~/.config/ranger/commands_full.py with
# `ranger --copy-config=commands_full' and make your modifications, don't
# forget to rename it to commands.py. You can also use
# `ranger --copy-config=commands' to copy a short sample commands.py that
# has everything you need to get started.
# But make sure you update your configs when you update ranger.
#
# ===================================================================
# Every class defined here which is a subclass of `Command' will be used as a
# command in ranger. Several methods are defined to interface with ranger:
# execute(): called when the command is executed.
# cancel(): called when closing the console.
# tab(tabnum): called when <TAB> is pressed.
# quick(): called after each keypress.
#
# tab() argument tabnum is 1 for <TAB> and -1 for <S-TAB> by default
#
# The return values for tab() can be either:
# None: There is no tab completion
# A string: Change the console to this string
# A list/tuple/generator: cycle through every item in it
#
# The return value for quick() can be:
# False: Nothing happens
# True: Execute the command afterwards
#
# The return value for execute() and cancel() doesn't matter.
#
# ===================================================================
# Commands have certain attributes and methods that facilitate parsing of
# the arguments:
#
# self.line: The whole line that was written in the console.
# self.args: A list of all (space-separated) arguments to the command.
# self.quantifier: If this command was mapped to the key "X" and
# the user pressed 6X, self.quantifier will be 6.
# self.arg(n): The n-th argument, or an empty string if it doesn't exist.
# self.rest(n): The n-th argument plus everything that followed. For example,
# if the command was "search foo bar a b c", rest(2) will be "bar a b c"
# self.start(n): Anything before the n-th argument. For example, if the
# command was "search foo bar a b c", start(2) will be "search foo"
#
# ===================================================================
# And this is a little reference for common ranger functions and objects:
#
# self.fm: A reference to the "fm" object which contains most information
# about ranger.
# self.fm.notify(string): Print the given string on the screen.
# self.fm.notify(string, bad=True): Print the given string in RED.
# self.fm.reload_cwd(): Reload the current working directory.
# self.fm.thisdir: The current working directory. (A File object.)
# self.fm.thisfile: The current file. (A File object too.)
# self.fm.thistab.get_selection(): A list of all selected files.
# self.fm.execute_console(string): Execute the string as a ranger command.
# self.fm.open_console(string): Open the console with the given string
# already typed in for you.
# self.fm.move(direction): Moves the cursor in the given direction, which
# can be something like down=3, up=5, right=1, left=1, to=6, ...
#
# File objects (for example self.fm.thisfile) have these useful attributes and
# methods:
#
# tfile.path: The path to the file.
# tfile.basename: The base name only.
# tfile.load_content(): Force a loading of the directories content (which
# obviously works with directories only)
# tfile.is_directory: True/False depending on whether it's a directory.
#
# For advanced commands it is unavoidable to dive a bit into the source code
# of ranger.
# ===================================================================
from __future__ import (absolute_import, division, print_function)
from collections import deque
import os
import re
from io import open
from ranger import PY3
from ranger.api.commands import Command
class alias(Command):
    """:alias <newcommand> <oldcommand>
    Copies the oldcommand as newcommand.
    """
    context = 'browser'
    resolve_macros = False

    def execute(self):
        new_name = self.arg(1)
        # Both the new name and at least one word of the old command are
        # required; otherwise show the usage string.
        if not (new_name and self.arg(2)):
            self.fm.notify('Syntax: alias <newcommand> <oldcommand>', bad=True)
        else:
            self.fm.commands.alias(new_name, self.rest(2))
class echo(Command):
    """:echo <text>
    Display the text in the statusbar.
    """
    def execute(self):
        # rest(1) is everything after the command name, so the whole argument
        # string is shown verbatim.
        self.fm.notify(self.rest(1))
class cd(Command):
    """:cd [-r] <path>
    The cd command changes the directory.
    If the path is a file, selects that file.
    The command 'cd -' is equivalent to typing ``.
    Using the option "-r" will get you to the real path.
    """
    def execute(self):
        if self.arg(1) == '-r':
            # -r: resolve symlinks to the real path first.
            self.shift()
            destination = os.path.realpath(self.rest(1))
            if os.path.isfile(destination):
                self.fm.select_file(destination)
                return
        elif self.arg(1) == '-e':
            # -e: expand environment variables in the path before resolving.
            self.shift()
            destination = os.path.realpath(os.path.expandvars(self.rest(1)))
            if os.path.isfile(destination):
                self.fm.select_file(destination)
                return
        else:
            destination = self.rest(1)
        if not destination:
            # Bare ":cd" goes home, mirroring the shell builtin.
            destination = '~'
        if destination == '-':
            # ":cd -" returns to the previous directory (the ` bookmark).
            self.fm.enter_bookmark('`')
        else:
            self.fm.cd(destination)
    def _tab_args(self):
        """Split the typed line into (start, dest, dest_abs, ends_with_sep)."""
        # dest must be rest because path could contain spaces
        if self.arg(1) == '-r':
            start = self.start(2)
            dest = self.rest(2)
        else:
            start = self.start(1)
            dest = self.rest(1)
        if dest:
            head, tail = os.path.split(os.path.expanduser(dest))
            if head:
                dest_exp = os.path.join(os.path.normpath(head), tail)
            else:
                dest_exp = tail
        else:
            dest_exp = ''
        return (start, dest_exp, os.path.join(self.fm.thisdir.path, dest_exp),
                dest.endswith(os.path.sep))
    @staticmethod
    def _tab_paths(dest, dest_abs, ends_with_sep):
        """Handle the trivial completion cases (empty dest, trailing slash).
        Returns (None, None) to signal "fall through to normal/fuzzy matching".
        """
        if not dest:
            try:
                return next(os.walk(dest_abs))[1], dest_abs
            except (OSError, StopIteration):
                return [], ''
        if ends_with_sep:
            try:
                return [os.path.join(dest, path) for path in next(os.walk(dest_abs))[1]], ''
            except (OSError, StopIteration):
                return [], ''
        return None, None
    def _tab_match(self, path_user, path_file):
        """Prefix-match a candidate, honoring the cd_tab_case setting."""
        if self.fm.settings.cd_tab_case == 'insensitive':
            path_user = path_user.lower()
            path_file = path_file.lower()
        elif self.fm.settings.cd_tab_case == 'smart' and path_user.islower():
            # Smart case: a fully lowercase query matches case-insensitively.
            path_file = path_file.lower()
        return path_file.startswith(path_user)
    def _tab_normal(self, dest, dest_abs):
        """Complete the last path component against its parent's subdirectories."""
        dest_dir = os.path.dirname(dest)
        dest_base = os.path.basename(dest)
        try:
            dirnames = next(os.walk(os.path.dirname(dest_abs)))[1]
        except (OSError, StopIteration):
            return [], ''
        return [os.path.join(dest_dir, d) for d in dirnames if self._tab_match(dest_base, d)], ''
    def _tab_fuzzy_match(self, basepath, tokens):
        """ Find directories matching tokens recursively """
        if not tokens:
            tokens = ['']
        paths = [basepath]
        while True:
            # tokens were collected leaf-first, so pop() walks root-to-leaf.
            token = tokens.pop()
            matches = []
            for path in paths:
                try:
                    directories = next(os.walk(path))[1]
                except (OSError, StopIteration):
                    continue
                matches += [os.path.join(path, d) for d in directories
                            if self._tab_match(token, d)]
            if not tokens or not matches:
                return matches
            paths = matches
        # unreachable: the loop above always returns
        return None
    def _tab_fuzzy(self, dest, dest_abs):
        """Fuzzy completion: match each path component independently."""
        tokens = []
        basepath = dest_abs
        # Walk up from the typed path, collecting the non-existing components
        # as match tokens and stopping at the deepest existing directory.
        while True:
            basepath_old = basepath
            basepath, token = os.path.split(basepath)
            if basepath == basepath_old:
                break
            if os.path.isdir(basepath_old) and not token.startswith('.'):
                basepath = basepath_old
                break
            tokens.append(token)
        paths = self._tab_fuzzy_match(basepath, tokens)
        if not os.path.isabs(dest):
            # Present relative results when the user typed a relative path.
            paths_rel = self.fm.thisdir.path
            paths = [os.path.relpath(os.path.join(basepath, path), paths_rel)
                     for path in paths]
        else:
            paths_rel = ''
        return paths, paths_rel
    def tab(self, tabnum):
        """Tab completion entry point: trivial cases, then normal or fuzzy."""
        from os.path import sep
        start, dest, dest_abs, ends_with_sep = self._tab_args()
        paths, paths_rel = self._tab_paths(dest, dest_abs, ends_with_sep)
        if paths is None:
            if self.fm.settings.cd_tab_fuzzy:
                paths, paths_rel = self._tab_fuzzy(dest, dest_abs)
            else:
                paths, paths_rel = self._tab_normal(dest, dest_abs)
        paths.sort()
        if self.fm.settings.cd_bookmarks:
            # Prepend bookmarked directories that live under any candidate.
            paths[0:0] = [
                os.path.relpath(v.path, paths_rel) if paths_rel else v.path
                for v in self.fm.bookmarks.dct.values() for path in paths
                if v.path.startswith(os.path.join(paths_rel, path) + sep)
            ]
        if not paths:
            return None
        if len(paths) == 1:
            return start + paths[0] + sep
        return [start + dirname + sep for dirname in paths]
class chain(Command):
    """:chain <command1>; <command2>; ...
    Calls multiple commands at once, separated by semicolons.
    """
    resolve_macros = False

    def execute(self):
        command_line = self.rest(1)
        if not command_line.strip():
            self.fm.notify('Syntax: chain <command1>; <command2>; ...', bad=True)
            return
        # Run each semicolon-separated command in order, ignoring the
        # whitespace around the separators.
        for single_command in command_line.split(";"):
            self.fm.execute_console(single_command.strip())
class shell(Command):
    # Run a shell command; a leading "-<flags>" word becomes execution flags.
    escape_macros_for_shell = True
    def execute(self):
        # A first argument starting with "-" carries the flags, the rest is
        # the actual command line.
        if self.arg(1) and self.arg(1)[0] == '-':
            flags = self.arg(1)[1:]
            command = self.rest(2)
        else:
            flags = ''
            command = self.rest(1)
        if command:
            self.fm.execute_command(command, flags=flags)
    def tab(self, tabnum):
        """Complete executables for the first word, filenames afterwards."""
        from ranger.ext.get_executables import get_executables
        if self.arg(1) and self.arg(1)[0] == '-':
            command = self.rest(2)
        else:
            command = self.rest(1)
        start = self.line[0:len(self.line) - len(command)]
        try:
            position_of_last_space = command.rindex(" ")
        except ValueError:
            # Still typing the first word: complete against $PATH executables.
            return (start + program + ' ' for program
                    in get_executables() if program.startswith(command))
        if position_of_last_space == len(command) - 1:
            # Cursor right after a space: offer the current selection.
            selection = self.fm.thistab.get_selection()
            if len(selection) == 1:
                return self.line + selection[0].shell_escaped_basename + ' '
            # NOTE(review): '%s' appears to be left for ranger's macro
            # expansion of the multi-file selection — confirm.
            return self.line + '%s '
        # Complete the partially typed last word against files in this dir.
        before_word, start_of_word = self.line.rsplit(' ', 1)
        return (before_word + ' ' + file.shell_escaped_basename
                for file in self.fm.thisdir.files or []
                if file.shell_escaped_basename.startswith(start_of_word))
class open_with(Command):
    # :open_with [<application>] [<flags>] [<mode>]
    def execute(self):
        # Parse "app flags mode" (each part optional) and open the selection.
        app, flags, mode = self._get_app_flags_mode(self.rest(1))
        self.fm.execute_file(
            files=self.fm.thistab.get_selection(),
            app=app,
            flags=flags,
            mode=mode)
    def tab(self, tabnum):
        return self._tab_through_executables()
    def _get_app_flags_mode(self, string): # pylint: disable=too-many-branches,too-many-statements
        """Extracts the application, flags and mode from a string.
        examples:
        "mplayer f 1" => ("mplayer", "f", 1)
        "atool 4" => ("atool", "", 4)
        "p" => ("", "p", 0)
        "" => None
        """
        app = ''
        flags = ''
        mode = 0
        split = string.split()
        # Each word is classified as app, flags, or mode.  An app may only
        # appear first; flags and mode may appear in either order after it.
        if len(split) == 1:
            part = split[0]
            if self._is_app(part):
                app = part
            elif self._is_flags(part):
                flags = part
            elif self._is_mode(part):
                mode = part
        elif len(split) == 2:
            part0 = split[0]
            part1 = split[1]
            if self._is_app(part0):
                app = part0
                if self._is_flags(part1):
                    flags = part1
                elif self._is_mode(part1):
                    mode = part1
            elif self._is_flags(part0):
                flags = part0
                if self._is_mode(part1):
                    mode = part1
            elif self._is_mode(part0):
                mode = part0
                if self._is_flags(part1):
                    flags = part1
        elif len(split) >= 3:
            # With three or more words only the first three are considered.
            part0 = split[0]
            part1 = split[1]
            part2 = split[2]
            if self._is_app(part0):
                app = part0
                if self._is_flags(part1):
                    flags = part1
                    if self._is_mode(part2):
                        mode = part2
                elif self._is_mode(part1):
                    mode = part1
                    if self._is_flags(part2):
                        flags = part2
            elif self._is_flags(part0):
                flags = part0
                if self._is_mode(part1):
                    mode = part1
            elif self._is_mode(part0):
                mode = part0
                if self._is_flags(part1):
                    flags = part1
        return app, flags, int(mode)
    def _is_app(self, arg):
        # An app is any word that is neither a flags word nor a pure number.
        return not self._is_flags(arg) and not arg.isdigit()
    @staticmethod
    def _is_flags(arg):
        from ranger.core.runner import ALLOWED_FLAGS
        return all(x in ALLOWED_FLAGS for x in arg)
    @staticmethod
    def _is_mode(arg):
        # A mode is a purely numeric word.
        return all(x in '0123456789' for x in arg)
class set_(Command):
    """:set <option name>=<python expression>
    Gives an option a new value.
    Use `:set <option>!` to toggle or cycle it, e.g. `:set flush_input!`
    """
    name = 'set' # don't override the builtin set class

    def execute(self):
        # parse_setting_line_v2 yields the option name, its value, and a flag
        # telling us whether the trailing "!" toggle syntax was used.
        # (Removed a dead `name = self.arg(1)` assignment that was
        # immediately overwritten by the line below.)
        name, value, _, toggle = self.parse_setting_line_v2()
        if toggle:
            self.fm.toggle_option(name)
        else:
            self.fm.set_option_from_string(name, value)

    def tab(self, tabnum):  # pylint: disable=too-many-return-statements
        """Tab-complete option names and, for some options, their values."""
        from ranger.gui.colorscheme import get_all_colorschemes
        name, value, name_done = self.parse_setting_line()
        settings = self.fm.settings
        if not name:
            # Nothing typed yet: offer every known setting.
            return sorted(self.firstpart + setting for setting in settings)
        if not value and not name_done:
            # Partial option name: offer the settings it prefixes.
            return sorted(self.firstpart + setting for setting in settings
                          if setting.startswith(name))
        if not value:
            value_completers = {
                "colorscheme":
                # Cycle through colorschemes when name, but no value is specified
                lambda: sorted(self.firstpart + colorscheme for colorscheme
                               in get_all_colorschemes(self.fm)),
                "column_ratios":
                lambda: self.firstpart + ",".join(map(str, settings[name])),
            }

            def default_value_completer():
                # Fall back to echoing the option's current value.
                return self.firstpart + str(settings[name])

            return value_completers.get(name, default_value_completer)()
        if bool in settings.types_of(name):
            if 'true'.startswith(value.lower()):
                return self.firstpart + 'True'
            if 'false'.startswith(value.lower()):
                return self.firstpart + 'False'
        # Tab complete colorscheme values if incomplete value is present
        if name == "colorscheme":
            return sorted(self.firstpart + colorscheme for colorscheme
                          in get_all_colorschemes(self.fm) if colorscheme.startswith(value))
        return None
class setlocal_(set_):
    """Shared class for setinpath and setinregex
    By implementing the _arg abstract property you can affect what the name of
    the pattern/path/regex argument can be, this should be a regular expression
    without match groups.
    By implementing the _format_arg abstract method you can affect how the
    argument is formatted as a regular expression.
    """
    # Cleanup: dropped the unused (and deprecated) `abstractproperty` import;
    # the abstract property below is expressed with @property+@abstractmethod.
    from abc import (ABCMeta, abstractmethod)
    __metaclass__ = ABCMeta

    @property
    @abstractmethod
    def _arg(self):
        """The name of the option for the path/regex"""
        raise NotImplementedError

    def __init__(self, *args, **kwargs):
        super(setlocal_, self).__init__(*args, **kwargs)
        # We require quoting of paths with whitespace so we have to take care
        # not to match escaped quotes.
        self.path_re_dquoted = re.compile(
            r'^set.+?\s+{arg}="(.*?[^\\])"'.format(arg=self._arg)
        )
        self.path_re_squoted = re.compile(
            r"^set.+?\s+{arg}='(.*?[^\\])'".format(arg=self._arg)
        )
        self.path_re_unquoted = re.compile(
            r'^{arg}=(.+?)$'.format(arg=self._arg)
        )

    def _re_shift(self, match):
        """Consume the matched "arg=value" words from the command line and
        return the extracted value (user-expanded), or None on no match."""
        if not match:
            return None
        path = match.group(1)
        # Prepend something that behaves like "path=" in case path starts with
        # whitespace
        for _ in "={0}".format(path).split():
            self.shift()
        return os.path.expanduser(path)

    @abstractmethod
    def _format_arg(self, arg):
        """How to format the argument as a regular expression"""
        raise NotImplementedError

    def execute(self):
        # Try the double-quoted, single-quoted, then unquoted argument forms.
        arg = self._re_shift(self.path_re_dquoted.match(self.line))
        if arg is None:
            arg = self._re_shift(self.path_re_squoted.match(self.line))
        if arg is None:
            arg = self._re_shift(self.path_re_unquoted.match(self.arg(1)))
        if arg is None and self.fm.thisdir:
            # No explicit path/regex given: default to the current directory.
            arg = self.fm.thisdir.path
        if arg is None:
            return
        else:
            arg = self._format_arg(arg)
        name, value, _ = self.parse_setting_line()
        self.fm.set_option_from_string(name, value, localpath=arg)
class setinpath(setlocal_):
    """:setinpath path=<path> <option name>=<python expression>
    Sets an option when in a directory that matches <path>, relative paths can
    match multiple directories, for example, ``path=build`` would match a build
    directory in any directory. If the <path> contains whitespace it needs to
    be quoted and nested quotes need to be backslash-escaped. The "path"
    argument can also be named "pattern" to allow for easier switching with
    ``setinregex``.
    """
    _arg = "(?:path|pattern)"

    def _format_arg(self, arg):
        # Escape the literal path and anchor it at the end so only the
        # directory itself matches, not arbitrary sub-paths.
        return re.escape(arg) + "$"
# Alias class: inherits all behavior from setinpath unchanged; only the
# command name differs.
class setlocal(setinpath):
    """:setlocal is an alias for :setinpath"""
class setinregex(setlocal_):
    """:setinregex re=<regex> <option name>=<python expression>
    Sets an option when in a specific directory. If the <regex> contains
    whitespace it needs to be quoted and nested quotes need to be
    backslash-escaped. Special characters need to be escaped if they are
    intended to match literally as documented in the ``re`` library
    documentation. The "re" argument can also be named "regex" or "pattern,"
    which allows for easier switching with ``setinpath``.
    """
    # Accept "re", "regex" or "pattern" as the argument name.
    _arg = "(?:re(?:gex)?|pattern)"
    def _format_arg(self, arg):
        # The argument already is a regular expression: use it verbatim.
        return arg
class setintag(set_):
    """:setintag <tag or tags> <option name>=<option value>
    Sets an option for directories that are tagged with a specific tag.
    """
    def execute(self):
        # The first argument names the tag(s); everything after it is a
        # regular "option=value" setting line.
        tag_spec = self.arg(1)
        self.shift()
        option_name, option_value, _rest = self.parse_setting_line()
        self.fm.set_option_from_string(option_name, option_value, tags=tag_spec)
class default_linemode(Command):
    """:default_linemode [path=<regexp> | tag=<tag(s)>] <linemode>
    Sets the default linemode, optionally restricted to paths matching a
    regular expression or to files carrying one of the given tags.
    """
    def execute(self):
        from ranger.container.fsobject import FileSystemObject
        if len(self.args) < 2:
            self.fm.notify(
                "Usage: default_linemode [path=<regexp> | tag=<tag(s)>] <linemode>", bad=True)
            # Bug fix: bail out -- there is no linemode argument to apply.
            return
        # Extract options like "path=..." or "tag=..." from the command line
        arg1 = self.arg(1)
        method = "always"
        argument = None
        if arg1.startswith("path="):
            method = "path"
            argument = re.compile(arg1[5:])
            self.shift()
        elif arg1.startswith("tag="):
            method = "tag"
            argument = arg1[4:]
            self.shift()
        # Extract and validate the line mode from the command line
        lmode = self.rest(1)
        if lmode not in FileSystemObject.linemode_dict:
            self.fm.notify(
                "Invalid linemode: %s; should be %s" % (
                    lmode, "/".join(FileSystemObject.linemode_dict)),
                bad=True,
            )
            # Bug fix: don't register an invalid linemode entry.
            return
        # Add the prepared entry to the fm.default_linemodes
        entry = [method, argument, lmode]
        self.fm.default_linemodes.appendleft(entry)
        # Redraw the columns
        if self.fm.ui.browser:
            for col in self.fm.ui.browser.columns:
                col.need_redraw = True
    def tab(self, tabnum):
        return (self.arg(0) + " " + lmode
                for lmode in self.fm.thisfile.linemode_dict.keys()
                if lmode.startswith(self.arg(1)))
class quit(Command):  # pylint: disable=redefined-builtin
    """:quit
    Closes the current tab, if there's more than one tab.
    Otherwise quits if there are no tasks in progress.
    """
    def _exit_no_work(self):
        # Refuse to quit while the loader still has queued tasks.
        if not self.fm.loader.has_work():
            self.fm.exit()
        else:
            self.fm.notify('Not quitting: Tasks in progress: Use `quit!` to force quit')
    def execute(self):
        # With several tabs open, closing the current tab is enough.
        if len(self.fm.tabs) < 2:
            self._exit_no_work()
        else:
            self.fm.tab_close()
class quit_bang(Command):
    """:quit!
    Closes the current tab, if there's more than one tab.
    Otherwise force quits immediately.
    """
    name = 'quit!'
    allow_abbrev = False
    def execute(self):
        # Unlike :quit, this skips the running-task check entirely.
        if len(self.fm.tabs) < 2:
            self.fm.exit()
        else:
            self.fm.tab_close()
class quitall(Command):
    """:quitall
    Quits if there are no tasks in progress.
    """
    def _exit_no_work(self):
        # Same guard as :quit, but the hint advertises `quitall!`.
        if not self.fm.loader.has_work():
            self.fm.exit()
        else:
            self.fm.notify('Not quitting: Tasks in progress: Use `quitall!` to force quit')
    def execute(self):
        self._exit_no_work()
class quitall_bang(Command):
    """:quitall!
    Force quits immediately.
    """
    name = 'quitall!'
    allow_abbrev = False
    def execute(self):
        # Unconditional exit: skips the running-task check of :quitall.
        self.fm.exit()
class terminal(Command):
    """:terminal
    Spawns an "x-terminal-emulator" starting in the current directory.
    """
    def execute(self):
        from ranger.ext.get_executables import get_term
        # NOTE(review): flags='f' appears to run the terminal detached
        # from ranger -- confirm against fm.run's flag handling.
        self.fm.run(get_term(), flags='f')
class delete(Command):
    """:delete
    Tries to delete the selection or the files passed in arguments (if any).
    The arguments use a shell-like escaping.
    "Selection" is defined as all the "marked files" (by default, you
    can mark files with space or v). If there are no marked files,
    use the "current file" (where the cursor is)
    When attempting to delete non-empty directories or multiple
    marked files, it will require a confirmation.
    """
    allow_abbrev = False
    escape_macros_for_shell = True
    def execute(self):
        import shlex
        from functools import partial
        def has_entries(path):
            # True for a real (non-symlink) directory that isn't empty.
            if not os.path.isdir(path) or os.path.islink(path):
                return False
            return len(os.listdir(path)) > 0
        if self.rest(1):
            # Explicit arguments: parse them with shell-style escaping.
            files = shlex.split(self.rest(1))
            many_files = len(files) > 1 or has_entries(files[0])
        else:
            cwd = self.fm.thisdir
            tfile = self.fm.thisfile
            if not cwd or not tfile:
                self.fm.notify("Error: no file selected for deletion!", bad=True)
                return
            # relative_path used for a user-friendly output in the confirmation.
            files = [fobj.relative_path for fobj in self.fm.thistab.get_selection()]
            many_files = bool(cwd.marked_items) or has_entries(tfile.path)
        confirm = self.fm.settings.confirm_on_delete
        skip_confirmation = (confirm == 'never'
                             or (confirm == 'multiple' and not many_files))
        if skip_confirmation:
            # no need for a confirmation, just delete
            self.fm.delete(files)
        else:
            self.fm.ui.console.ask(
                "Confirm deletion of: %s (y/N)" % ', '.join(files),
                partial(self._question_callback, files),
                ('n', 'N', 'y', 'Y'),
            )
    def tab(self, tabnum):
        return self._tab_directory_content()
    def _question_callback(self, files, answer):
        # Only the lowercase/uppercase "y" confirms.
        if answer in ('y', 'Y'):
            self.fm.delete(files)
class trash(Command):
    """:trash
    Tries to move the selection or the files passed in arguments (if any) to
    the trash, using rifle rules with label "trash".
    The arguments use a shell-like escaping.
    "Selection" is defined as all the "marked files" (by default, you
    can mark files with space or v). If there are no marked files,
    use the "current file" (where the cursor is)
    When attempting to trash non-empty directories or multiple
    marked files, it will require a confirmation.
    """
    allow_abbrev = False
    escape_macros_for_shell = True
    def execute(self):
        import shlex
        from functools import partial
        def is_directory_with_files(path):
            # True for a real (non-symlink) directory containing entries.
            return os.path.isdir(path) and not os.path.islink(path) and len(os.listdir(path)) > 0
        if self.rest(1):
            file_names = shlex.split(self.rest(1))
            files = self.fm.get_filesystem_objects(file_names)
            if files is None:
                # NOTE(review): presumably get_filesystem_objects already
                # reported the lookup failure -- nothing to do here.
                return
            many_files = (len(files) > 1 or is_directory_with_files(files[0].path))
        else:
            cwd = self.fm.thisdir
            tfile = self.fm.thisfile
            if not cwd or not tfile:
                self.fm.notify("Error: no file selected for deletion!", bad=True)
                return
            files = self.fm.thistab.get_selection()
            # relative_path used for a user-friendly output in the confirmation.
            file_names = [f.relative_path for f in files]
            many_files = (cwd.marked_items or is_directory_with_files(tfile.path))
        confirm = self.fm.settings.confirm_on_delete
        # Ask unless confirmation is disabled, or it is "multiple"-only
        # and only a single plain file is affected.
        if confirm != 'never' and (confirm != 'multiple' or many_files):
            self.fm.ui.console.ask(
                "Confirm deletion of: %s (y/N)" % ', '.join(file_names),
                partial(self._question_callback, files),
                ('n', 'N', 'y', 'Y'),
            )
        else:
            # no need for a confirmation, just delete
            self._trash_files_catch_arg_list_error(files)
    def tab(self, tabnum):
        return self._tab_directory_content()
    def _question_callback(self, files, answer):
        # Only an explicit "y"/"Y" proceeds with trashing.
        if answer.lower() == 'y':
            self._trash_files_catch_arg_list_error(files)
    def _trash_files_catch_arg_list_error(self, files):
        """
        Executes the fm.execute_file method but catches the OSError ("Argument list too long")
        that occurs when moving too many files to trash (and would otherwise crash ranger).
        """
        try:
            self.fm.execute_file(files, label='trash')
        except OSError as err:
            # errno 7 is E2BIG: the spawned command line exceeded the
            # OS argument-size limit.
            if err.errno == 7:
                self.fm.notify("Error: Command too long (try passing less files at once)",
                               bad=True)
            else:
                raise
class jump_non(Command):
    """:jump_non [-FLAGS...]
    Jumps to first non-directory if highlighted file is a directory and vice versa.
    Flags:
        -r Jump in reverse order
        -w Wrap around if reaching end of filelist
    """
    def __init__(self, *args, **kwargs):
        super(jump_non, self).__init__(*args, **kwargs)
        parsed_flags, _ = self.parse_flags()
        self._flag_reverse = 'r' in parsed_flags
        self._flag_wrap = 'w' in parsed_flags
    @staticmethod
    def _non(fobj, is_directory):
        # A "non" match is a file when the current entry is a directory,
        # and a directory when the current entry is a file.
        if is_directory:
            return not fobj.is_directory
        return fobj.is_directory
    def execute(self):
        tfile = self.fm.thisfile
        passed = False
        found_before = None
        found_after = None
        entries = self.fm.thisdir.files
        if self._flag_reverse:
            entries = entries[::-1]
        for fobj in entries:
            if fobj.path == tfile.path:
                passed = True
                continue
            if passed:
                if self._non(fobj, tfile.is_directory):
                    found_after = fobj.path
                    break
            elif not found_before and self._non(fobj, tfile.is_directory):
                found_before = fobj.path
        # Prefer a match after the cursor; wrap to one before it only
        # when -w was given.
        if found_after:
            self.fm.select_file(found_after)
        elif self._flag_wrap and found_before:
            self.fm.select_file(found_before)
class mark_tag(Command):
    """:mark_tag [<tags>]
    Mark all tags that are tagged with either of the given tags.
    When leaving out the tag argument, all tagged files are marked.
    """
    do_mark = True
    def execute(self):
        cwd = self.fm.thisdir
        wanted = self.rest(1).replace(" ", "")
        if not self.fm.tags or not cwd.files:
            return
        tag_map = self.fm.tags.tags
        for fobj in cwd.files:
            # Skip files that carry no tag at all.
            if fobj.realpath not in tag_map:
                continue
            tag = tag_map[fobj.realpath]
            # An empty argument means "any tagged file".
            if not wanted or tag in wanted:
                cwd.mark_item(fobj, val=self.do_mark)
        self.fm.ui.status.need_redraw = True
        self.fm.ui.need_redraw = True
class console(Command):
    """:console [-p N | -s sep] <command>
    Flags:
        -p N Set position at N index
        -s sep Set position at separator(any char[s] sequence), example '#'
    Open the console with the given command.
    """
    def execute(self):
        position = None
        command = self.rest(1)
        flag = self.arg(1)[0:2]
        if flag == '-p':
            # "-pN": cursor position is glued to the flag itself.
            command = self.rest(2)
            try:
                position = int(self.arg(1)[2:])
            except ValueError:
                pass
        elif flag == '-s':
            # "-s sep": the cursor goes where the first separator was.
            command = self.rest(3)
            sentinel = self.arg(2)
            found_at = command.find(sentinel)
            if found_at != -1:
                command = command.replace(sentinel, '', 1)
                position = found_at
        self.fm.open_console(command, position=position)
class load_copy_buffer(Command):
    """:load_copy_buffer
    Load the copy buffer from datadir/copy_buffer
    """
    copy_buffer_filename = 'copy_buffer'
    def execute(self):
        from os.path import exists
        from ranger.container.file import File
        fname = self.fm.datapath(self.copy_buffer_filename)
        # Python 2 raises IOError for unreadable files.
        unreadable = OSError if PY3 else IOError
        try:
            with open(fname, "r", encoding="utf-8") as fobj:
                # One path per line; silently drop paths that vanished.
                self.fm.copy_buffer = {
                    File(path) for path in fobj.read().split("\n") if exists(path)
                }
        except unreadable:
            return self.fm.notify(
                "Cannot open %s" % (fname or self.copy_buffer_filename), bad=True)
        self.fm.ui.redraw_main_column()
        return None
class save_copy_buffer(Command):
    """:save_copy_buffer
    Save the copy buffer to datadir/copy_buffer
    """
    copy_buffer_filename = 'copy_buffer'
    def execute(self):
        # Removed a dead "fname = None" store that was immediately
        # overwritten.
        fname = self.fm.datapath(self.copy_buffer_filename)
        # Python 2 raises IOError for unwritable files.
        unwritable = OSError if PY3 else IOError
        try:
            with open(fname, "w", encoding="utf-8") as fobj:
                # Renamed the generator variable: it previously shadowed
                # the open file handle "fobj".
                fobj.write("\n".join(entry.path for entry in self.fm.copy_buffer))
        except unwritable:
            return self.fm.notify("Cannot open %s" %
                                  (fname or self.copy_buffer_filename), bad=True)
        return None
class unmark_tag(mark_tag):
    """:unmark_tag [<tags>]
    Unmark all tags that are tagged with either of the given tags.
    When leaving out the tag argument, all tagged files are unmarked.
    """
    # Reuses mark_tag.execute; only the marking direction differs.
    do_mark = False
class mkdir(Command):
    """:mkdir <dirname>
    Creates a directory with the name <dirname>.
    """
    def execute(self):
        from os.path import join, expanduser, lexists
        from os import makedirs
        # Expand "~" and resolve relative to the current directory;
        # intermediate directories are created as needed.
        target = join(self.fm.thisdir.path, expanduser(self.rest(1)))
        if lexists(target):
            self.fm.notify("file/directory exists!", bad=True)
        else:
            makedirs(target)
    def tab(self, tabnum):
        return self._tab_directory_content()
class touch(Command):
    """:touch <fname>
    Creates a file with the name <fname>.
    """
    def execute(self):
        from os.path import join, expanduser, lexists, dirname
        from os import makedirs
        # Expand "~" and resolve the name relative to the current directory.
        fname = join(self.fm.thisdir.path, expanduser(self.rest(1)))
        # Fixed: the result no longer shadows the imported os.path.dirname.
        parent = dirname(fname)
        if not lexists(fname):
            # Create missing parent directories first.
            if not lexists(parent):
                makedirs(parent)
            with open(fname, 'a', encoding="utf-8"):
                pass  # Just create the file
        else:
            self.fm.notify("file/directory exists!", bad=True)
    def tab(self, tabnum):
        return self._tab_directory_content()
class edit(Command):
    """:edit <filename>
    Opens the specified file in vim
    """
    def execute(self):
        # Without an argument, edit the file under the cursor.
        target = self.rest(1) or self.fm.thisfile.path
        self.fm.edit_file(target)
    def tab(self, tabnum):
        return self._tab_directory_content()
class eval_(Command):
    """:eval [-q] <python code>
    Evaluates the python code.
    `fm' is a reference to the FM instance.
    To display text, use the function `p'.
    Examples:
    :eval fm
    :eval len(fm.directories)
    :eval p("Hello World!")
    """
    name = 'eval'
    resolve_macros = False
    def execute(self):
        # The import is needed so eval() can access the ranger module
        import ranger # NOQA pylint: disable=unused-import,unused-variable
        if self.arg(1) == '-q':
            code = self.rest(2)
            quiet = True
        else:
            code = self.rest(1)
            quiet = False
        # Expose convenience names as module-level globals so both
        # eval() and exec() below can see them.
        global cmd, fm, p, quantifier # pylint: disable=invalid-name,global-variable-undefined
        fm = self.fm
        cmd = self.fm.execute_console
        p = fm.notify
        quantifier = self.quantifier
        try:
            try:
                # Try expression evaluation first ...
                result = eval(code) # pylint: disable=eval-used
            except SyntaxError:
                # ... and fall back to statement execution.
                exec(code) # pylint: disable=exec-used
            else:
                if result and not quiet:
                    p(result)
        except Exception as err: # pylint: disable=broad-except
            fm.notify("The error `%s` was caused by evaluating the "
                      "following code: `%s`" % (err, code), bad=True)
class rename(Command):
    """:rename <newname>
    Changes the name of the currently highlighted file to <newname>
    """
    def execute(self):
        from ranger.container.file import File
        from os import access
        new_name = self.rest(1)
        if not new_name:
            return self.fm.notify('Syntax: rename <newname>', bad=True)
        # Renaming onto itself is a no-op.
        if new_name == self.fm.thisfile.relative_path:
            return None
        # Never clobber an existing file.
        if access(new_name, os.F_OK):
            return self.fm.notify("Can't rename: file already exists!", bad=True)
        if self.fm.rename(self.fm.thisfile, new_name):
            renamed = File(new_name)
            # Keep bookmarks and tags pointing at the new location
            # (self.fm.thisfile still holds the old path here).
            self.fm.bookmarks.update_path(self.fm.thisfile.path, renamed)
            self.fm.tags.update_path(self.fm.thisfile.path, renamed.path)
            self.fm.thisdir.pointed_obj = renamed
            self.fm.thisfile = renamed
        return None
    def tab(self, tabnum):
        return self._tab_directory_content()
class rename_append(Command):
    """:rename_append [-FLAGS...]
    Opens the console with ":rename <current file>" with the cursor positioned
    before the file extension.
    Flags:
        -a Position before all extensions
        -r Remove everything before extensions
    """
    def __init__(self, *args, **kwargs):
        super(rename_append, self).__init__(*args, **kwargs)
        flags, _ = self.parse_flags()
        self._flag_ext_all = 'a' in flags
        self._flag_remove = 'r' in flags
    def execute(self):
        from ranger import MACRO_DELIMITER, MACRO_DELIMITER_ESC
        tfile = self.fm.thisfile
        # Escape macro delimiters so the path survives console expansion.
        relpath = tfile.relative_path.replace(MACRO_DELIMITER, MACRO_DELIMITER_ESC)
        basename = tfile.basename.replace(MACRO_DELIMITER, MACRO_DELIMITER_ESC)
        # No extension (or a leading-dot name) and directories: put the
        # cursor at the end of the line.
        if basename.find('.') <= 0 or os.path.isdir(relpath):
            self.fm.open_console('rename ' + relpath)
            return
        if self._flag_ext_all:
            # Cursor before the first dot (before all extensions).
            pos_ext = re.search(r'[^.]+', basename).end(0)
        else:
            # Cursor before the last dot (before the final extension).
            pos_ext = basename.rindex('.')
        pos = len(relpath) - len(basename) + pos_ext
        if self._flag_remove:
            # Keep only the extension part of the basename.
            relpath = relpath[:-len(basename)] + basename[pos_ext:]
            pos -= pos_ext
        # 7 == len('rename '), the prefix typed into the console.
        self.fm.open_console('rename ' + relpath, position=(7 + pos))
class chmod(Command):
    """:chmod <octal number>
    Sets the permissions of the selection to the octal number.
    The octal number is between 0 and 777. The digits specify the
    permissions for the user, the group and others.
    A 1 permits execution, a 2 permits writing, a 4 permits reading.
    Add those numbers to combine them. So a 7 permits everything.
    """
    def execute(self):
        mode_str = self.rest(1)
        if not mode_str:
            # Fall back to the numeric quantifier (e.g. "777" before the key).
            if self.quantifier is None:
                self.fm.notify("Syntax: chmod <octal number> "
                               "or specify a quantifier", bad=True)
                return
            mode_str = str(self.quantifier)
        try:
            mode = int(mode_str, 8)
        except ValueError:
            mode = -1
        if not 0 <= mode <= 0o777:
            self.fm.notify("Need an octal number between 0 and 777!", bad=True)
            return
        for fobj in self.fm.thistab.get_selection():
            try:
                os.chmod(fobj.path, mode)
            except OSError as ex:
                self.fm.notify(ex)
        # reloading directory. maybe its better to reload the selected
        # files only.
        self.fm.thisdir.content_outdated = True
class bulkrename(Command):
    """:bulkrename
    This command opens a list of selected files in an external editor.
    After you edit and save the file, it will generate a shell script
    which does bulk renaming according to the changes you did in the file.
    This shell script is opened in an editor for you to review.
    After you close it, it will be executed.
    """
    def __init__(self, *args, **kwargs):
        super(bulkrename, self).__init__(*args, **kwargs)
        self.flags, _ = self.parse_flags()
        if not self.flags:
            self.flags = "w"
    def execute(self):
        # pylint: disable=too-many-locals,too-many-statements,too-many-branches
        import tempfile
        from ranger.container.file import File
        from ranger.ext.shell_escape import shell_escape as esc
        # Create and edit the file list
        filenames = [f.relative_path for f in self.fm.thistab.get_selection()]
        with tempfile.NamedTemporaryFile(delete=False) as listfile:
            listpath = listfile.name
            if PY3:
                listfile.write("\n".join(filenames).encode(
                    encoding="utf-8", errors="surrogateescape"))
            else:
                listfile.write("\n".join(filenames))
        self.fm.execute_file([File(listpath)], app='editor')
        with open(
            listpath, "r", encoding="utf-8", errors="surrogateescape"
        ) as listfile:
            new_filenames = listfile.read().split("\n")
        os.unlink(listpath)
        if all(a == b for a, b in zip(filenames, new_filenames)):
            self.fm.notify("No renaming to be done!")
            return
        # Generate script
        with tempfile.NamedTemporaryFile() as cmdfile:
            script_lines = []
            script_lines.append("# This file will be executed when you close"
                                " the editor.")
            script_lines.append("# Please double-check everything, clear the"
                                " file to abort.")
            new_dirs = []
            for old, new in zip(filenames, new_filenames):
                if old != new:
                    basepath, _ = os.path.split(new)
                    if (basepath and basepath not in new_dirs
                            and not os.path.isdir(basepath)):
                        script_lines.append("mkdir -vp -- {dir}".format(
                            dir=esc(basepath)))
                        new_dirs.append(basepath)
                    script_lines.append("mv -vi -- {old} {new}".format(
                        old=esc(old), new=esc(new)))
            # Make sure not to forget the ending newline
            script_content = "\n".join(script_lines) + "\n"
            # Bug fix: remember exactly the bytes written so the edit
            # detection below compares like with like.  The previous code
            # compared the str script_content against cmdfile.read(),
            # which returns bytes on Python 3 and was therefore never
            # equal, so files were never retagged.
            if PY3:
                script_bytes = script_content.encode(encoding="utf-8",
                                                     errors="surrogateescape")
            else:
                script_bytes = script_content
            cmdfile.write(script_bytes)
            cmdfile.flush()
            # Open the script and let the user review it, then check if the
            # script was modified by the user
            self.fm.execute_file([File(cmdfile.name)], app='editor')
            cmdfile.seek(0)
            script_was_edited = (script_bytes != cmdfile.read())
            # Do the renaming
            self.fm.run(['/bin/sh', cmdfile.name], flags=self.flags)
        # Retag the files, but only if the script wasn't changed during review,
        # because only then we know which are the source and destination files.
        if not script_was_edited:
            tags_changed = False
            for old, new in zip(filenames, new_filenames):
                if old != new:
                    oldpath = self.fm.thisdir.path + '/' + old
                    newpath = self.fm.thisdir.path + '/' + new
                    if oldpath in self.fm.tags:
                        old_tag = self.fm.tags.tags[oldpath]
                        self.fm.tags.remove(oldpath)
                        self.fm.tags.tags[newpath] = old_tag
                        tags_changed = True
            if tags_changed:
                self.fm.tags.dump()
        else:
            # Bug fix: was "fm.notify(...)", which referenced an undefined
            # global and crashed instead of showing the message.
            self.fm.notify("files have not been retagged")
class relink(Command):
    """:relink <newpath>
    Changes the linked path of the currently highlighted symlink to <newpath>
    """
    def execute(self):
        target = self.rest(1)
        tfile = self.fm.thisfile
        if not target:
            return self.fm.notify('Syntax: relink <newpath>', bad=True)
        if not tfile.is_link:
            return self.fm.notify('%s is not a symlink!' % tfile.relative_path, bad=True)
        # Nothing to do when the link already points there.
        if os.readlink(tfile.path) == target:
            return None
        try:
            # Replace the link: remove the old one, then recreate it.
            os.remove(tfile.path)
            os.symlink(target, tfile.path)
        except OSError as err:
            self.fm.notify(err)
        self.fm.reset()
        self.fm.thisdir.pointed_obj = tfile
        self.fm.thisfile = tfile
        return None
    def tab(self, tabnum):
        if self.rest(1):
            return self._tab_directory_content()
        # Pre-fill the console with the link's current target.
        return self.line + os.readlink(self.fm.thisfile.path)
class help_(Command):
    """:help
    Display ranger's manual page.
    """
    name = 'help'
    def execute(self):
        # Dispatch table for the single-key answers; "q" and anything
        # unknown simply abort.
        actions = {
            "m": self.fm.display_help,
            "c": self.fm.dump_commands,
            "k": self.fm.dump_keybindings,
            "s": self.fm.dump_settings,
        }
        def callback(answer):
            action = actions.get(answer)
            if action is not None:
                action()
        self.fm.ui.console.ask(
            "View [m]an page, [k]ey bindings, [c]ommands or [s]ettings? (press q to abort)",
            callback,
            list("mqkcs")
        )
class copymap(Command):
    """:copymap <keys> <newkeys1> [<newkeys2>...]
    Copies a "browser" keybinding from <keys> to <newkeys>
    """
    context = 'browser'
    def execute(self):
        if not self.arg(1) or not self.arg(2):
            return self.fm.notify("Not enough arguments", bad=True)
        source_keys = self.arg(1)
        # Every remaining argument becomes another copy of the binding.
        for destination in self.args[2:]:
            self.fm.ui.keymaps.copy(self.context, source_keys, destination)
        return None
# Same behavior as copymap, only the key context differs.
class copypmap(copymap):
    """:copypmap <keys> <newkeys1> [<newkeys2>...]
    Copies a "pager" keybinding from <keys> to <newkeys>
    """
    context = 'pager'
# Same behavior as copymap, only the key context differs.
class copycmap(copymap):
    """:copycmap <keys> <newkeys1> [<newkeys2>...]
    Copies a "console" keybinding from <keys> to <newkeys>
    """
    context = 'console'
# Same behavior as copymap, only the key context differs.
class copytmap(copymap):
    """:copytmap <keys> <newkeys1> [<newkeys2>...]
    Copies a "taskview" keybinding from <keys> to <newkeys>
    """
    context = 'taskview'
class unmap(Command):
    """:unmap <keys> [<keys2>, ...]
    Remove the given "browser" mappings
    """
    context = 'browser'
    def execute(self):
        # Unbind every key sequence given on the command line.
        for keys in self.args[1:]:
            self.fm.ui.keymaps.unbind(self.context, keys)
# Same behavior as unmap, only the key context differs.
class uncmap(unmap):
    """:uncmap <keys> [<keys2>, ...]
    Remove the given "console" mappings
    """
    context = 'console'
class cunmap(uncmap):
    """:cunmap <keys> [<keys2>, ...]
    Remove the given "console" mappings
    DEPRECATED in favor of uncmap.
    """
    def execute(self):
        # Warn about the deprecated name, then delegate to uncmap.
        self.fm.notify("cunmap is deprecated in favor of uncmap!")
        super(cunmap, self).execute()
# Same behavior as unmap, only the key context differs.
class unpmap(unmap):
    """:unpmap <keys> [<keys2>, ...]
    Remove the given "pager" mappings
    """
    context = 'pager'
class punmap(unpmap):
    """:punmap <keys> [<keys2>, ...]
    Remove the given "pager" mappings
    DEPRECATED in favor of unpmap.
    """
    def execute(self):
        # Warn about the deprecated name, then delegate to unpmap.
        self.fm.notify("punmap is deprecated in favor of unpmap!")
        super(punmap, self).execute()
# Same behavior as unmap, only the key context differs.
class untmap(unmap):
    """:untmap <keys> [<keys2>, ...]
    Remove the given "taskview" mappings
    """
    context = 'taskview'
class tunmap(untmap):
    """:tunmap <keys> [<keys2>, ...]
    Remove the given "taskview" mappings
    DEPRECATED in favor of untmap.
    """
    def execute(self):
        # Warn about the deprecated name, then delegate to untmap.
        self.fm.notify("tunmap is deprecated in favor of untmap!")
        super(tunmap, self).execute()
class map_(Command):
    """:map <keysequence> <command>
    Maps a command to a keysequence in the "browser" context.
    Example:
    map j move down
    map J move down 10
    """
    name = 'map'
    context = 'browser'
    resolve_macros = False
    def execute(self):
        keys = self.arg(1)
        command = self.rest(2)
        # Both the key sequence and a command are mandatory.
        if not keys or not self.arg(2):
            self.fm.notify("Syntax: {0} <keysequence> <command>".format(self.get_name()), bad=True)
            return
        self.fm.ui.keymaps.bind(self.context, keys, command)
# Same behavior as map_, only the key context differs.
class cmap(map_):
    """:cmap <keysequence> <command>
    Maps a command to a keysequence in the "console" context.
    Example:
    cmap <ESC> console_close
    cmap <C-x> console_type test
    """
    context = 'console'
# Same behavior as map_, only the key context differs.
class tmap(map_):
    """:tmap <keysequence> <command>
    Maps a command to a keysequence in the "taskview" context.
    """
    context = 'taskview'
# Same behavior as map_, only the key context differs.
class pmap(map_):
    """:pmap <keysequence> <command>
    Maps a command to a keysequence in the "pager" context.
    """
    context = 'pager'
class scout(Command):
    """:scout [-FLAGS...] <pattern>
    Swiss army knife command for searching, traveling and filtering files.
    Flags:
        -a Automatically open a file on unambiguous match
        -e Open the selected file when pressing enter
        -f Filter files that match the current search pattern
        -g Interpret pattern as a glob pattern
        -i Ignore the letter case of the files
        -k Keep the console open when changing a directory with the command
        -l Letter skipping; e.g. allow "rdme" to match the file "readme"
        -m Mark the matching files after pressing enter
        -M Unmark the matching files after pressing enter
        -p Permanent filter: hide non-matching files after pressing enter
        -r Interpret pattern as a regular expression pattern
        -s Smart case; like -i unless pattern contains upper case letters
        -t Apply filter and search pattern as you type
        -v Inverts the match
    Multiple flags can be combined. For example, ":scout -gpt" would create
    a :filter-like command using globbing.
    """
    # Single-letter flag constants (see the docstring above).
    AUTO_OPEN = "a"
    OPEN_ON_ENTER = "e"
    FILTER = "f"
    SM_GLOB = "g"
    IGNORE_CASE = "i"
    KEEP_OPEN = "k"
    SM_LETTERSKIP = "l"
    MARK = "m"
    UNMARK = "M"
    PERM_FILTER = "p"
    SM_REGEX = "r"
    SMART_CASE = "s"
    AS_YOU_TYPE = "t"
    INVERT = "v"
    def __init__(self, *args, **kwargs):
        super(scout, self).__init__(*args, **kwargs)
        # Cache for the compiled pattern, built lazily by _build_regex().
        self._regex = None
        self.flags, self.pattern = self.parse_flags()
    def execute(self): # pylint: disable=too-many-branches
        """Run the search/filter/mark actions selected by the flags."""
        thisdir = self.fm.thisdir
        flags = self.flags
        pattern = self.pattern
        regex = self._build_regex()
        count = self._count(move=True)
        self.fm.thistab.last_search = regex
        self.fm.set_search_method(order="search")
        if (self.MARK in flags or self.UNMARK in flags) and thisdir.files:
            # When both -m and -M appear, the one given later wins.
            value = flags.find(self.MARK) > flags.find(self.UNMARK)
            if self.FILTER in flags:
                for fobj in thisdir.files:
                    thisdir.mark_item(fobj, value)
            else:
                for fobj in thisdir.files:
                    if regex.search(fobj.relative_path):
                        thisdir.mark_item(fobj, value)
        if self.PERM_FILTER in flags:
            thisdir.filter = regex if pattern else None
        # clean up:
        self.cancel()
        if self.OPEN_ON_ENTER in flags or \
                (self.AUTO_OPEN in flags and count == 1):
            if pattern == '..':
                self.fm.cd(pattern)
            else:
                self.fm.move(right=1)
                if self.quickly_executed:
                    self.fm.block_input(0.5)
        if self.KEEP_OPEN in flags and thisdir != self.fm.thisdir:
            # reopen the console:
            if not pattern:
                self.fm.open_console(self.line)
            else:
                self.fm.open_console(self.line[0:-len(pattern)])
        if self.quickly_executed and thisdir != self.fm.thisdir and pattern != "..":
            self.fm.block_input(0.5)
    def cancel(self):
        """Drop the temporary as-you-type filter."""
        self.fm.thisdir.temporary_filter = None
        self.fm.thisdir.refilter()
    def quick(self):
        """Called on every keystroke; applies -t behavior live."""
        asyoutype = self.AS_YOU_TYPE in self.flags
        if self.FILTER in self.flags:
            self.fm.thisdir.temporary_filter = self._build_regex()
        if self.PERM_FILTER in self.flags and asyoutype:
            self.fm.thisdir.filter = self._build_regex()
        if self.FILTER in self.flags or self.PERM_FILTER in self.flags:
            self.fm.thisdir.refilter()
        # A single unambiguous match with -a closes the console.
        if self._count(move=asyoutype) == 1 and self.AUTO_OPEN in self.flags:
            return True
        return False
    def tab(self, tabnum):
        # Tab cycles through the matches.
        self._count(move=True, offset=tabnum)
    def _build_regex(self):
        """Compile (and cache) the search pattern according to the flags."""
        if self._regex is not None:
            return self._regex
        frmat = "%s"
        flags = self.flags
        pattern = self.pattern
        if pattern == ".":
            return re.compile("")
        # Handle carets at start and dollar signs at end separately
        if pattern.startswith('^'):
            pattern = pattern[1:]
            frmat = "^" + frmat
        if pattern.endswith('$'):
            pattern = pattern[:-1]
            frmat += "$"
        # Apply one of the search methods
        if self.SM_REGEX in flags:
            regex = pattern
        elif self.SM_GLOB in flags:
            regex = re.escape(pattern).replace("\\*", ".*").replace("\\?", ".")
        elif self.SM_LETTERSKIP in flags:
            regex = ".*".join(re.escape(c) for c in pattern)
        else:
            regex = re.escape(pattern)
        regex = frmat % regex
        # Invert regular expression if necessary
        if self.INVERT in flags:
            regex = "^(?:(?!%s).)*$" % regex
        # Compile Regular Expression
        # pylint: disable=no-member
        options = re.UNICODE
        if self.IGNORE_CASE in flags or self.SMART_CASE in flags and \
                pattern.islower():
            options |= re.IGNORECASE
        # pylint: enable=no-member
        try:
            self._regex = re.compile(regex, options)
        except re.error:
            # Fall back to a match-everything pattern on invalid input.
            self._regex = re.compile("")
        return self._regex
    def _count(self, move=False, offset=0):
        """Count matches; optionally move the cursor to the first one.

        Returns early with the count once more than one match is found;
        with exactly one match the (truthy) boolean True is returned.
        """
        count = 0
        cwd = self.fm.thisdir
        pattern = self.pattern
        if not pattern or not cwd.files:
            return 0
        if pattern == '.':
            return 0
        if pattern == '..':
            return 1
        # Rotate so iteration starts at the file after the cursor.
        deq = deque(cwd.files)
        deq.rotate(-cwd.pointer - offset)
        i = offset
        regex = self._build_regex()
        for fsobj in deq:
            if regex.search(fsobj.relative_path):
                count += 1
                if move and count == 1:
                    cwd.move(to=(cwd.pointer + i) % len(cwd.files))
                    self.fm.thisfile = cwd.pointed_obj
                if count > 1:
                    return count
            i += 1
        return count == 1
class narrow(Command):
    """
    :narrow
    Show only the files selected right now. If no files are selected,
    disable narrowing.
    """
    def execute(self):
        thisdir = self.fm.thisdir
        if thisdir.marked_items:
            # Restrict the view to the basenames of the selection.
            thisdir.narrow_filter = [f.basename for f in self.fm.thistab.get_selection()]
        else:
            thisdir.narrow_filter = None
        thisdir.refilter()
class filter_inode_type(Command):
    """
    :filter_inode_type [dfl]
    Displays only the files of specified inode type. Parameters
    can be combined.
        d display directories
        f display files
        l display links
    """
    def execute(self):
        # An empty argument clears the inode-type filter.
        self.fm.thisdir.inode_type_filter = self.arg(1) or ""
        self.fm.thisdir.refilter()
class filter_stack(Command):
    """
    :filter_stack ...
    Manages the filter stack.
        filter_stack add FILTER_TYPE ARGS...
        filter_stack pop
        filter_stack decompose
        filter_stack rotate [N=1]
        filter_stack clear
        filter_stack show
    """
    def execute(self):
        from ranger.core.filter_stack import SIMPLE_FILTERS, FILTER_COMBINATORS
        subcommand = self.arg(1)
        if subcommand == "add":
            try:
                self.fm.thisdir.filter_stack.append(
                    SIMPLE_FILTERS[self.arg(2)](self.rest(3))
                )
            except KeyError:
                # Not a simple filter: treat the name as a combinator
                # that rewrites the stack in place.
                FILTER_COMBINATORS[self.arg(2)](self.fm.thisdir.filter_stack)
        elif subcommand == "pop":
            self.fm.thisdir.filter_stack.pop()
        elif subcommand == "decompose":
            # Replace the top filter with its components, if any.
            inner_filters = self.fm.thisdir.filter_stack.pop().decompose()
            if inner_filters:
                self.fm.thisdir.filter_stack.extend(inner_filters)
        elif subcommand == "clear":
            self.fm.thisdir.filter_stack = []
        elif subcommand == "rotate":
            rotate_by = int(self.arg(2) or self.quantifier or 1)
            self.fm.thisdir.filter_stack = (
                self.fm.thisdir.filter_stack[-rotate_by:]
                + self.fm.thisdir.filter_stack[:-rotate_by]
            )
        elif subcommand == "show":
            stack = list(map(str, self.fm.thisdir.filter_stack))
            pager = self.fm.ui.open_pager()
            pager.set_source(["Filter stack: "] + stack)
            pager.move(to=100, percentage=True)
            return
        else:
            self.fm.notify(
                "Unknown subcommand: {sub}".format(sub=subcommand),
                bad=True
            )
            return
        # Cleanup.
        self.cancel()
    def quick(self):
        # Live preview while typing an "add name <regex>" filter.
        if self.rest(1).startswith("add name "):
            try:
                regex = re.compile(self.rest(3))
            except re.error:
                regex = re.compile("")
            self.fm.thisdir.temporary_filter = regex
            self.fm.thisdir.refilter()
            return False
    def cancel(self):
        # Drop the live-preview filter from quick().
        self.fm.thisdir.temporary_filter = None
        self.fm.thisdir.refilter()
class grep(Command):
    """:grep <string>
    Looks for a string in all marked files or directories
    """
    def execute(self):
        needle = self.rest(1)
        if not needle:
            return
        action = ['grep', '-n', '-e', needle, '-r']
        action.extend(f.path for f in self.fm.thistab.get_selection())
        # NOTE(review): flags='p' appears to page the command's output --
        # confirm against fm.execute_command's flag handling.
        self.fm.execute_command(action, flags='p')
class flat(Command):
    """
    :flat <level>
    Flattens the directory view up to the specified level.
        -1 fully flattened
         0 remove flattened view
    """
    def execute(self):
        try:
            level_str = self.rest(1)
            level = int(level_str)
        except ValueError:
            # No (or non-numeric) argument: fall back to the quantifier.
            level = self.quantifier
        if level is None:
            self.fm.notify("Syntax: flat <level>", bad=True)
            return
        if level < -1:
            self.fm.notify("Need an integer number (-1, 0, 1, ...)", bad=True)
            # Bug fix: don't apply an out-of-range level after notifying.
            return
        self.fm.thisdir.unload()
        self.fm.thisdir.flat = level
        self.fm.thisdir.load_content()
class reset_previews(Command):
    """:reset_previews
    Reset the file previews.
    """
    def execute(self):
        # Drop the preview cache and force a redraw.
        self.fm.previews = {}
        self.fm.ui.need_redraw = True
# Version control commands
# --------------------------------
class stage(Command):
    """
    :stage
    Stage selected files for the corresponding version control system
    """
    def execute(self):
        from ranger.ext.vcs import VcsError
        vcs = self.fm.thisdir.vcs
        if not (vcs and vcs.track):
            self.fm.notify('Unable to stage files: Not in repository')
            return
        filelist = [fobj.path for fobj in self.fm.thistab.get_selection()]
        try:
            vcs.action_add(filelist)
        except VcsError as ex:
            self.fm.notify('Unable to stage files: {0}'.format(ex))
        # Refresh the VCS state even after a failed add.
        self.fm.ui.vcsthread.process(self.fm.thisdir)
class unstage(Command):
    """
    :unstage
    Unstage selected files for the corresponding version control system
    """
    def execute(self):
        from ranger.ext.vcs import VcsError
        # Only act when the current directory is a tracked repository.
        if self.fm.thisdir.vcs and self.fm.thisdir.vcs.track:
            filelist = [f.path for f in self.fm.thistab.get_selection()]
            try:
                self.fm.thisdir.vcs.action_reset(filelist)
            except VcsError as ex:
                self.fm.notify('Unable to unstage files: {0}'.format(ex))
            # Refresh the VCS state even after a failed reset.
            self.fm.ui.vcsthread.process(self.fm.thisdir)
        else:
            self.fm.notify('Unable to unstage files: Not in repository')
# Metadata commands
# --------------------------------
class prompt_metadata(Command):
    """
    :prompt_metadata <key1> [<key2> [<key3> ...]]

    Prompt the user to input metadata for multiple keys in a row.
    """

    # Console command name that each prompt is prefilled with.
    _command_name = "meta"
    # Class-level queue of keys still to prompt for; shared so that the
    # follow-up :meta invocation can continue the chain.
    _console_chain = None

    def execute(self):
        prompt_metadata._console_chain = self.args[1:]
        self._process_command_stack()

    def _process_command_stack(self):
        # Open a console for the next pending key, or redraw once done.
        # Note: list.pop() takes from the end, so keys are prompted in
        # reverse argument order.
        if prompt_metadata._console_chain:
            key = prompt_metadata._console_chain.pop()
            self._fill_console(key)
        else:
            for col in self.fm.ui.browser.columns:
                col.need_redraw = True

    def _fill_console(self, key):
        # Prefill the console with "meta <key> <current value>" so editing
        # an existing value is a matter of tweaking it.
        metadata = self.fm.metadata.get_metadata(self.fm.thisfile.path)
        if key in metadata and metadata[key]:
            existing_value = metadata[key]
        else:
            existing_value = ""
        text = "%s %s %s" % (self._command_name, key, existing_value)
        self.fm.open_console(text, position=len(text))
class meta(prompt_metadata):
    """
    :meta <key> [<value>]

    Change metadata of a file. Deletes the key if value is empty.
    """

    def execute(self):
        # One shared update mapping, applied to every selected file.
        changes = {self.arg(1): self.rest(2)}
        for fobj in self.fm.thistab.get_selection():
            self.fm.metadata.set_metadata(fobj.path, changes)
        # Continue any pending :prompt_metadata chain.
        self._process_command_stack()

    def tab(self, tabnum):
        key = self.arg(1)
        command = self.arg(0)
        metadata = self.fm.metadata.get_metadata(self.fm.thisfile.path)
        if key in metadata and metadata[key]:
            # Exact key with a value: offer the full "meta key value" line.
            return [" ".join((command, key, metadata[key]))]
        # Otherwise complete among the known keys.
        return [command + " " + known for known in sorted(metadata)
                if known.startswith(key)]
class linemode(default_linemode):
    """
    :linemode <mode>

    Change what is displayed as a filename.

    - "mode" may be any of the defined linemodes (see: ranger.core.linemode).
      "normal" is mapped to "filename".
    """

    def execute(self):
        mode = self.arg(1)

        # "normal" is a user-facing alias for the default linemode.
        if mode == "normal":
            from ranger.core.linemode import DEFAULT_LINEMODE
            mode = DEFAULT_LINEMODE

        if mode not in self.fm.thisfile.linemode_dict:
            self.fm.notify("Unhandled linemode: `%s'" % mode, bad=True)
            return

        self.fm.thisdir.set_linemode_of_children(mode)

        # Ask the browsercolumns to redraw
        for col in self.fm.ui.browser.columns:
            col.need_redraw = True
class yank(Command):
    """:yank [name|dir|path|name_without_extension]

    Copies the file's name (default), directory or path into both the primary X
    selection and the clipboard.
    """

    # Maps the command argument to the FileSystemObject attribute to copy.
    modes = {
        '': 'basename',
        'name_without_extension': 'basename_without_extension',
        'name': 'basename',
        'dir': 'dirname',
        'path': 'path',
    }

    def execute(self):
        import subprocess

        def clipboards():
            # Pick the first available clipboard tool and return the list of
            # command lines to run (one per selection/clipboard buffer).
            from ranger.ext.get_executables import get_executables
            clipboard_managers = {
                'xclip': [
                    ['xclip'],
                    ['xclip', '-selection', 'clipboard'],
                ],
                'xsel': [
                    ['xsel'],
                    ['xsel', '-b'],
                ],
                'wl-copy': [
                    ['wl-copy'],
                ],
                'pbcopy': [
                    ['pbcopy'],
                ],
            }
            ordered_managers = ['pbcopy', 'xclip', 'xsel', 'wl-copy']
            executables = get_executables()
            for manager in ordered_managers:
                if manager in executables:
                    return clipboard_managers[manager]
            # No clipboard tool found: silently copy nothing.
            return []

        clipboard_commands = clipboards()
        # KeyError on an unknown mode argument propagates to the console.
        mode = self.modes[self.arg(1)]
        selection = self.get_selection_attr(mode)
        new_clipboard_contents = "\n".join(selection)
        for command in clipboard_commands:
            with subprocess.Popen(
                command, universal_newlines=True, stdin=subprocess.PIPE
            ) as process:
                process.communicate(input=new_clipboard_contents)

    def get_selection_attr(self, attr):
        # Collect the requested attribute from every selected file object.
        return [getattr(item, attr) for item in
                self.fm.thistab.get_selection()]

    def tab(self, tabnum):
        # Complete the non-default mode names.
        return (
            self.start(1) + mode for mode
            in sorted(self.modes.keys())
            if mode
        )
class paste_ext(Command):
    """
    :paste_ext

    Like paste but tries to rename conflicting files so that the
    file extension stays intact (e.g. file_.ext).
    """

    @staticmethod
    def make_safe_path(dst):
        """Return *dst* or a non-existing variant keeping its extension."""
        if not os.path.exists(dst):
            return dst

        stem, ext = os.path.splitext(dst)
        # Separate the numeric suffix from the name with an underscore.
        if not stem.endswith("_"):
            stem += "_"
        candidate = stem + ext
        if not os.path.exists(candidate):
            return candidate

        # Append an increasing counter until the name is free.
        counter = 0
        while True:
            candidate = stem + str(counter) + ext
            if not os.path.exists(candidate):
                return candidate
            counter += 1

    def execute(self):
        return self.fm.paste(make_safe_path=paste_ext.make_safe_path)
| 66,598 | Python | .py | 1,686 | 29.956702 | 99 | 0.581115 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
682 | default.py | ranger_ranger/doc/config/colorschemes/default.py | # This file is part of ranger, the console file manager.
# License: GNU GPL version 3, see the file "AUTHORS" for details.
from __future__ import (absolute_import, division, print_function)
from ranger.gui.colorscheme import ColorScheme
from ranger.gui.color import (
black, blue, cyan, green, magenta, red, white, yellow, default,
normal, bold, reverse, dim, BRIGHT,
default_colors,
)
class Default(ColorScheme):
    """Ranger's default colorscheme.

    `use` maps a drawing context to an (fg, bg, attr) triple.  The checks
    are order-sensitive: later `if` blocks deliberately override colors set
    by earlier ones, so do not reorder them.
    """

    # Color used for loader/progress indication in the status bar/taskview.
    progress_bar_color = blue

    def use(self, context):  # pylint: disable=too-many-branches,too-many-statements
        fg, bg, attr = default_colors

        if context.reset:
            return default_colors

        elif context.in_browser:
            if context.selected:
                attr = reverse
            else:
                attr = normal
            if context.empty or context.error:
                bg = red
            if context.border:
                fg = default
            if context.media:
                if context.image:
                    fg = yellow
                else:
                    fg = magenta
            if context.container:
                fg = red
            if context.directory:
                attr |= bold
                fg = blue
                fg += BRIGHT
            elif context.executable and not \
                    any((context.media, context.container,
                         context.fifo, context.socket)):
                attr |= bold
                fg = green
                fg += BRIGHT
            if context.socket:
                attr |= bold
                fg = magenta
                fg += BRIGHT
            if context.fifo or context.device:
                fg = yellow
                if context.device:
                    attr |= bold
                    fg += BRIGHT
            if context.link:
                # Good (resolvable) links are cyan, broken ones magenta.
                fg = cyan if context.good else magenta
            if context.tag_marker and not context.selected:
                attr |= bold
                if fg in (red, magenta):
                    fg = white
                else:
                    fg = red
                fg += BRIGHT
            if context.line_number and not context.selected:
                fg = default
                attr &= ~bold
            if not context.selected and (context.cut or context.copied):
                attr |= bold
                fg = black
                fg += BRIGHT
                # If the terminal doesn't support bright colors, use dim white
                # instead of black.
                if BRIGHT == 0:
                    attr |= dim
                    fg = white
            if context.main_column:
                # Doubling up with BRIGHT here causes issues because it's
                # additive not idempotent.
                if context.selected:
                    attr |= bold
                if context.marked:
                    attr |= bold
                    fg = yellow
            if context.badinfo:
                if attr & reverse:
                    bg = magenta
                else:
                    fg = magenta
            if context.inactive_pane:
                fg = cyan

        elif context.in_titlebar:
            if context.hostname:
                fg = red if context.bad else green
            elif context.directory:
                fg = blue
            elif context.tab:
                if context.good:
                    bg = green
            elif context.link:
                fg = cyan
            # The whole titlebar is drawn bold.
            attr |= bold

        elif context.in_statusbar:
            if context.permissions:
                if context.good:
                    fg = cyan
                elif context.bad:
                    fg = magenta
            if context.marked:
                attr |= bold | reverse
                fg = yellow
                fg += BRIGHT
            if context.frozen:
                attr |= bold | reverse
                fg = cyan
                fg += BRIGHT
            if context.message:
                if context.bad:
                    attr |= bold
                    fg = red
                    fg += BRIGHT
            if context.loaded:
                bg = self.progress_bar_color
            if context.vcsinfo:
                fg = blue
                attr &= ~bold
            if context.vcscommit:
                fg = yellow
                attr &= ~bold
            if context.vcsdate:
                fg = cyan
                attr &= ~bold

        if context.text:
            if context.highlight:
                attr |= reverse

        if context.in_taskview:
            if context.title:
                fg = blue

            if context.selected:
                attr |= reverse

            if context.loaded:
                if context.selected:
                    fg = self.progress_bar_color
                else:
                    bg = self.progress_bar_color

        # Per-file and per-remote VCS status markers.
        if context.vcsfile and not context.selected:
            attr &= ~bold
            if context.vcsconflict:
                fg = magenta
            elif context.vcsuntracked:
                fg = cyan
            elif context.vcschanged:
                fg = red
            elif context.vcsunknown:
                fg = red
            elif context.vcsstaged:
                fg = green
            elif context.vcssync:
                fg = green
            elif context.vcsignored:
                fg = default
        elif context.vcsremote and not context.selected:
            attr &= ~bold
            if context.vcssync or context.vcsnone:
                fg = green
            elif context.vcsbehind:
                fg = red
            elif context.vcsahead:
                fg = blue
            elif context.vcsdiverged:
                fg = magenta
            elif context.vcsunknown:
                fg = red

        return fg, bg, attr
| 5,864 | Python | .py | 169 | 19.674556 | 84 | 0.457908 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
683 | jungle.py | ranger_ranger/doc/config/colorschemes/jungle.py | # This file is part of ranger, the console file manager.
# License: GNU GPL version 3, see the file "AUTHORS" for details.
from __future__ import (absolute_import, division, print_function)
from ranger.colorschemes.default import Default
from ranger.gui.color import green, red, blue, bold
class Scheme(Default):
    """Jungle: the default scheme with green directories and progress bars."""

    progress_bar_color = green

    def use(self, context):
        # Start from the default scheme's decision and recolor a few cases.
        fg, bg, attr = Default.use(self, context)

        if (context.directory and not context.marked and not context.link
                and not context.inactive_pane):
            fg = self.progress_bar_color

        if context.line_number and not context.selected:
            fg = self.progress_bar_color
            attr &= ~bold

        if context.in_titlebar and context.hostname:
            fg = blue if not context.bad else red

        return fg, bg, attr
| 845 | Python | .py | 18 | 39.222222 | 74 | 0.679707 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
684 | snow.py | ranger_ranger/doc/config/colorschemes/snow.py | # This file is part of ranger, the console file manager.
# License: GNU GPL version 3, see the file "AUTHORS" for details.
from __future__ import (absolute_import, division, print_function)
from ranger.gui.colorscheme import ColorScheme
from ranger.gui.color import default_colors, reverse, bold, BRIGHT
class Snow(ColorScheme):
    """A monochrome colorscheme: only bold/reverse attributes, no colors."""

    def use(self, context):
        fg, bg, attr = default_colors

        if context.reset:
            pass

        elif context.in_browser:
            if context.selected:
                attr = reverse
            if context.directory:
                attr |= bold
                fg += BRIGHT
            if context.line_number and not context.selected:
                attr |= bold
                fg += BRIGHT

        elif context.highlight:
            attr |= reverse

        elif context.in_titlebar and context.tab and context.good:
            attr |= reverse

        elif context.in_statusbar:
            if context.loaded:
                attr |= reverse
            if context.marked:
                attr |= reverse

        elif context.in_taskview:
            if context.selected:
                attr |= bold
                fg += BRIGHT
            if context.loaded:
                attr |= reverse

        return fg, bg, attr
| 1,290 | Python | .py | 35 | 25.571429 | 66 | 0.570394 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
685 | __init__.py | ranger_ranger/ranger/__init__.py | # This file is part of ranger, the console file manager.
# License: GNU GPL version 3, see the file "AUTHORS" for details.
"""A console file manager with VI key bindings.
It provides a minimalistic and nice curses interface with a view on the
directory hierarchy. The secondary task of ranger is to figure out which
program you want to use to open your files with.
"""
from __future__ import (absolute_import, division, print_function)
import os
from sys import version_info
# Version helper
def version_helper():
    """Return a human-readable version string for ranger.

    Releases yield 'ranger <version>'.  On the master branch the output of
    `git describe` is preferred, falling back to the static version number
    when git is unavailable or Popen cannot be used as a context manager.
    """
    if __release__:
        version_string = 'ranger {0}'.format(__version__)
    else:
        import subprocess
        version_string = 'ranger-master {0}'
        try:
            with subprocess.Popen(
                ["git", "describe"],
                universal_newlines=True,
                cwd=RANGERDIR,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
            ) as git_describe:
                (git_description, _) = git_describe.communicate()
            version_string = version_string.format(git_description.strip('\n'))
        except (OSError, subprocess.CalledProcessError, AttributeError):
            # AttributeError is presumably raised when Popen lacks context
            # manager support (older Pythons) — TODO confirm.
            version_string = version_string.format(__version__)
    return version_string
# Information
__license__ = 'GPL3'
__version__ = '1.9.3'
__release__ = False  # False on master; version_helper() then asks `git describe`
__author__ = __maintainer__ = 'Roman Zimbelmann'
__email__ = '[email protected]'

# Constants
RANGERDIR = os.path.dirname(__file__)
TICKS_BEFORE_COLLECTING_GARBAGE = 100
TIME_BEFORE_FILE_BECOMES_GARBAGE = 1200
MAX_RESTORABLE_TABS = 3
# Macros in commands are delimited by '%'; '%%' is the literal escape.
MACRO_DELIMITER = '%'
MACRO_DELIMITER_ESC = '%%'
DEFAULT_PAGER = 'less'
USAGE = '%prog [options] [path]'
VERSION = version_helper()
PY3 = version_info[0] >= 3

# These variables are ignored if the corresponding
# XDG environment variable is non-empty and absolute
CACHEDIR = os.path.expanduser('~/.cache/ranger')
CONFDIR = os.path.expanduser('~/.config/ranger')
DATADIR = os.path.expanduser('~/.local/share/ranger')

# Populated with the parsed command-line arguments by main().
args = None  # pylint: disable=invalid-name

from ranger.core.main import main  # NOQA pylint: disable=wrong-import-position
| 2,105 | Python | .py | 54 | 34.12963 | 79 | 0.687102 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
686 | commands.py | ranger_ranger/ranger/config/commands.py | # -*- coding: utf-8 -*-
# This file is part of ranger, the console file manager.
# This configuration file is licensed under the same terms as ranger.
# ===================================================================
#
# NOTE: If you copied this file to /etc/ranger/commands_full.py or
# ~/.config/ranger/commands_full.py, then it will NOT be loaded by ranger,
# and only serve as a reference.
#
# ===================================================================
# This file contains ranger's commands.
# It's all in python; lines beginning with # are comments.
#
# Note that additional commands are automatically generated from the methods
# of the class ranger.core.actions.Actions.
#
# You can customize commands in the files /etc/ranger/commands.py (system-wide)
# and ~/.config/ranger/commands.py (per user).
# They have the same syntax as this file. In fact, you can just copy this
# file to ~/.config/ranger/commands_full.py with
# `ranger --copy-config=commands_full' and make your modifications, don't
# forget to rename it to commands.py. You can also use
# `ranger --copy-config=commands' to copy a short sample commands.py that
# has everything you need to get started.
# But make sure you update your configs when you update ranger.
#
# ===================================================================
# Every class defined here which is a subclass of `Command' will be used as a
# command in ranger. Several methods are defined to interface with ranger:
# execute(): called when the command is executed.
# cancel(): called when closing the console.
# tab(tabnum): called when <TAB> is pressed.
# quick(): called after each keypress.
#
# tab() argument tabnum is 1 for <TAB> and -1 for <S-TAB> by default
#
# The return values for tab() can be either:
# None: There is no tab completion
# A string: Change the console to this string
# A list/tuple/generator: cycle through every item in it
#
# The return value for quick() can be:
# False: Nothing happens
# True: Execute the command afterwards
#
# The return value for execute() and cancel() doesn't matter.
#
# ===================================================================
# Commands have certain attributes and methods that facilitate parsing of
# the arguments:
#
# self.line: The whole line that was written in the console.
# self.args: A list of all (space-separated) arguments to the command.
# self.quantifier: If this command was mapped to the key "X" and
# the user pressed 6X, self.quantifier will be 6.
# self.arg(n): The n-th argument, or an empty string if it doesn't exist.
# self.rest(n): The n-th argument plus everything that followed. For example,
# if the command was "search foo bar a b c", rest(2) will be "bar a b c"
# self.start(n): Anything before the n-th argument. For example, if the
# command was "search foo bar a b c", start(2) will be "search foo"
#
# ===================================================================
# And this is a little reference for common ranger functions and objects:
#
# self.fm: A reference to the "fm" object which contains most information
# about ranger.
# self.fm.notify(string): Print the given string on the screen.
# self.fm.notify(string, bad=True): Print the given string in RED.
# self.fm.reload_cwd(): Reload the current working directory.
# self.fm.thisdir: The current working directory. (A File object.)
# self.fm.thisfile: The current file. (A File object too.)
# self.fm.thistab.get_selection(): A list of all selected files.
# self.fm.execute_console(string): Execute the string as a ranger command.
# self.fm.open_console(string): Open the console with the given string
# already typed in for you.
# self.fm.move(direction): Moves the cursor in the given direction, which
# can be something like down=3, up=5, right=1, left=1, to=6, ...
#
# File objects (for example self.fm.thisfile) have these useful attributes and
# methods:
#
# tfile.path: The path to the file.
# tfile.basename: The base name only.
# tfile.load_content(): Force a loading of the directories content (which
# obviously works with directories only)
# tfile.is_directory: True/False depending on whether it's a directory.
#
# For advanced commands it is unavoidable to dive a bit into the source code
# of ranger.
# ===================================================================
from __future__ import (absolute_import, division, print_function)
from collections import deque
import os
import re
from io import open
from ranger import PY3
from ranger.api.commands import Command
class alias(Command):
    """:alias <newcommand> <oldcommand>

    Copies the oldcommand as newcommand.
    """

    context = 'browser'
    resolve_macros = False

    def execute(self):
        new_name = self.arg(1)
        old_command = self.rest(2)
        # Both the new name and the aliased command line are required.
        if not new_name or not old_command:
            self.fm.notify('Syntax: alias <newcommand> <oldcommand>', bad=True)
            return
        self.fm.commands.alias(new_name, old_command)
class echo(Command):
    """:echo <text>

    Display the text in the statusbar.
    """

    def execute(self):
        # Everything after the command name is shown verbatim.
        self.fm.notify(self.rest(1))
class cd(Command):
    """:cd [-r] <path>

    The cd command changes the directory.
    If the path is a file, selects that file.
    The command 'cd -' is equivalent to typing ``.
    Using the option "-r" will get you to the real path.
    Using the option "-e" expands environment variables in the path first.
    """

    def execute(self):
        if self.arg(1) == '-r':
            self.shift()
            destination = os.path.realpath(self.rest(1))
            if os.path.isfile(destination):
                self.fm.select_file(destination)
                return
        elif self.arg(1) == '-e':
            self.shift()
            destination = os.path.realpath(os.path.expandvars(self.rest(1)))
            if os.path.isfile(destination):
                self.fm.select_file(destination)
                return
        else:
            destination = self.rest(1)

        if not destination:
            destination = '~'

        if destination == '-':
            self.fm.enter_bookmark('`')
        else:
            self.fm.cd(destination)

    def _tab_args(self):
        """Split the console line into (start, dest, dest_abs, ends_with_sep)."""
        # dest must be rest because path could contain spaces
        if self.arg(1) == '-r':
            start = self.start(2)
            dest = self.rest(2)
        else:
            start = self.start(1)
            dest = self.rest(1)

        if dest:
            head, tail = os.path.split(os.path.expanduser(dest))
            if head:
                dest_exp = os.path.join(os.path.normpath(head), tail)
            else:
                dest_exp = tail
        else:
            dest_exp = ''
        return (start, dest_exp, os.path.join(self.fm.thisdir.path, dest_exp),
                dest.endswith(os.path.sep))

    @staticmethod
    def _tab_paths(dest, dest_abs, ends_with_sep):
        """Handle the trivial completion cases; (None, None) means 'not handled'."""
        if not dest:
            try:
                return next(os.walk(dest_abs))[1], dest_abs
            except (OSError, StopIteration):
                return [], ''

        if ends_with_sep:
            try:
                return [os.path.join(dest, path) for path in next(os.walk(dest_abs))[1]], ''
            except (OSError, StopIteration):
                return [], ''

        return None, None

    def _tab_match(self, path_user, path_file):
        """Prefix-match honoring the cd_tab_case setting."""
        if self.fm.settings.cd_tab_case == 'insensitive':
            path_user = path_user.lower()
            path_file = path_file.lower()
        elif self.fm.settings.cd_tab_case == 'smart' and path_user.islower():
            path_file = path_file.lower()
        return path_file.startswith(path_user)

    def _tab_normal(self, dest, dest_abs):
        """Complete the final path component against its parent directory."""
        dest_dir = os.path.dirname(dest)
        dest_base = os.path.basename(dest)

        try:
            dirnames = next(os.walk(os.path.dirname(dest_abs)))[1]
        except (OSError, StopIteration):
            return [], ''

        return [os.path.join(dest_dir, d) for d in dirnames if self._tab_match(dest_base, d)], ''

    def _tab_fuzzy_match(self, basepath, tokens):
        """ Find directories matching tokens recursively """
        if not tokens:
            tokens = ['']
        paths = [basepath]
        while True:
            token = tokens.pop()
            matches = []
            for path in paths:
                try:
                    directories = next(os.walk(path))[1]
                except (OSError, StopIteration):
                    continue
                matches += [os.path.join(path, d) for d in directories
                            if self._tab_match(token, d)]
            # tokens shrinks every iteration, so the loop always terminates
            # through this return.  (A dead `return None` after the loop was
            # removed — it was unreachable.)
            if not tokens or not matches:
                return matches
            paths = matches

    def _tab_fuzzy(self, dest, dest_abs):
        """Fuzzy completion: match each path component anywhere below the base."""
        tokens = []
        basepath = dest_abs
        while True:
            basepath_old = basepath
            basepath, token = os.path.split(basepath)
            if basepath == basepath_old:
                break
            if os.path.isdir(basepath_old) and not token.startswith('.'):
                basepath = basepath_old
                break
            tokens.append(token)

        paths = self._tab_fuzzy_match(basepath, tokens)
        if not os.path.isabs(dest):
            paths_rel = self.fm.thisdir.path
            paths = [os.path.relpath(os.path.join(basepath, path), paths_rel)
                     for path in paths]
        else:
            paths_rel = ''
        return paths, paths_rel

    def tab(self, tabnum):
        from os.path import sep

        start, dest, dest_abs, ends_with_sep = self._tab_args()

        paths, paths_rel = self._tab_paths(dest, dest_abs, ends_with_sep)
        if paths is None:
            if self.fm.settings.cd_tab_fuzzy:
                paths, paths_rel = self._tab_fuzzy(dest, dest_abs)
            else:
                paths, paths_rel = self._tab_normal(dest, dest_abs)

        paths.sort()

        # Optionally prepend bookmarked directories that lie below a match.
        if self.fm.settings.cd_bookmarks:
            paths[0:0] = [
                os.path.relpath(v.path, paths_rel) if paths_rel else v.path
                for v in self.fm.bookmarks.dct.values() for path in paths
                if v.path.startswith(os.path.join(paths_rel, path) + sep)
            ]

        if not paths:
            return None

        if len(paths) == 1:
            return start + paths[0] + sep

        return [start + dirname + sep for dirname in paths]
class chain(Command):
    """:chain <command1>; <command2>; ...

    Calls multiple commands at once, separated by semicolons.
    """

    resolve_macros = False

    def execute(self):
        line = self.rest(1)
        if not line.strip():
            self.fm.notify('Syntax: chain <command1>; <command2>; ...', bad=True)
            return
        # Run each semicolon-separated command in order.
        for piece in line.split(";"):
            self.fm.execute_console(piece.strip())
class shell(Command):
    """:shell [-<flags>] <command>

    Run a shell command, optionally prefixed with runner flags
    (e.g. ``:shell -p cat file`` pipes the output into the pager).
    """

    escape_macros_for_shell = True

    def execute(self):
        # A leading "-xyz" token is interpreted as runner flags.
        if self.arg(1) and self.arg(1)[0] == '-':
            flags = self.arg(1)[1:]
            command = self.rest(2)
        else:
            flags = ''
            command = self.rest(1)

        if command:
            self.fm.execute_command(command, flags=flags)

    def tab(self, tabnum):
        from ranger.ext.get_executables import get_executables
        if self.arg(1) and self.arg(1)[0] == '-':
            command = self.rest(2)
        else:
            command = self.rest(1)
        # Everything on the line before the command proper (":shell -p ").
        start = self.line[0:len(self.line) - len(command)]

        try:
            position_of_last_space = command.rindex(" ")
        except ValueError:
            # Still typing the program name: complete against $PATH.
            return (start + program + ' ' for program
                    in get_executables() if program.startswith(command))
        if position_of_last_space == len(command) - 1:
            # Cursor right after a space: insert the selection (or %s macro).
            selection = self.fm.thistab.get_selection()
            if len(selection) == 1:
                return self.line + selection[0].shell_escaped_basename + ' '
            return self.line + '%s '

        # Complete the last word against files in the current directory.
        before_word, start_of_word = self.line.rsplit(' ', 1)
        return (before_word + ' ' + file.shell_escaped_basename
                for file in self.fm.thisdir.files or []
                if file.shell_escaped_basename.startswith(start_of_word))
class open_with(Command):
    """:open_with [<application>] [<flags>] [<mode>]

    Open the selection with the given application, runner flags and mode;
    each part is optional and recognized by its shape.
    """

    def execute(self):
        app, flags, mode = self._get_app_flags_mode(self.rest(1))
        self.fm.execute_file(
            files=self.fm.thistab.get_selection(),
            app=app,
            flags=flags,
            mode=mode)

    def tab(self, tabnum):
        return self._tab_through_executables()

    def _get_app_flags_mode(self, string):  # pylint: disable=too-many-branches,too-many-statements
        """Extracts the application, flags and mode from a string.

        examples:
        "mplayer f 1" => ("mplayer", "f", 1)
        "atool 4" => ("atool", "", 4)
        "p" => ("", "p", 0)
        "" => None
        """
        app = ''
        flags = ''
        mode = 0
        split = string.split()

        # Each token is classified as app/flags/mode by its shape; the
        # classification checks are ordered so that an app name wins.
        if len(split) == 1:
            part = split[0]
            if self._is_app(part):
                app = part
            elif self._is_flags(part):
                flags = part
            elif self._is_mode(part):
                mode = part

        elif len(split) == 2:
            part0 = split[0]
            part1 = split[1]

            if self._is_app(part0):
                app = part0
                if self._is_flags(part1):
                    flags = part1
                elif self._is_mode(part1):
                    mode = part1
            elif self._is_flags(part0):
                flags = part0
                if self._is_mode(part1):
                    mode = part1
            elif self._is_mode(part0):
                mode = part0
                if self._is_flags(part1):
                    flags = part1

        elif len(split) >= 3:
            part0 = split[0]
            part1 = split[1]
            part2 = split[2]

            if self._is_app(part0):
                app = part0
                if self._is_flags(part1):
                    flags = part1
                    if self._is_mode(part2):
                        mode = part2
                elif self._is_mode(part1):
                    mode = part1
                    if self._is_flags(part2):
                        flags = part2
            elif self._is_flags(part0):
                flags = part0
                if self._is_mode(part1):
                    mode = part1
            elif self._is_mode(part0):
                mode = part0
                if self._is_flags(part1):
                    flags = part1

        return app, flags, int(mode)

    def _is_app(self, arg):
        # Anything that is neither pure flags nor pure digits is an app name.
        return not self._is_flags(arg) and not arg.isdigit()

    @staticmethod
    def _is_flags(arg):
        from ranger.core.runner import ALLOWED_FLAGS
        return all(x in ALLOWED_FLAGS for x in arg)

    @staticmethod
    def _is_mode(arg):
        return all(x in '0123456789' for x in arg)
class set_(Command):
    """:set <option name>=<python expression>

    Gives an option a new value.

    Use `:set <option>!` to toggle or cycle it, e.g. `:set flush_input!`
    """

    name = 'set'  # don't override the builtin set class

    def execute(self):
        # parse_setting_line_v2() already yields the option name; the previous
        # `name = self.arg(1)` assignment was dead and has been removed.
        name, value, _, toggle = self.parse_setting_line_v2()
        if toggle:
            self.fm.toggle_option(name)
        else:
            self.fm.set_option_from_string(name, value)

    def tab(self, tabnum):  # pylint: disable=too-many-return-statements
        from ranger.gui.colorscheme import get_all_colorschemes
        name, value, name_done = self.parse_setting_line()
        settings = self.fm.settings
        if not name:
            return sorted(self.firstpart + setting for setting in settings)
        if not value and not name_done:
            return sorted(self.firstpart + setting for setting in settings
                          if setting.startswith(name))
        if not value:
            value_completers = {
                "colorscheme":
                # Cycle through colorschemes when name, but no value is specified
                lambda: sorted(self.firstpart + colorscheme for colorscheme
                               in get_all_colorschemes(self.fm)),

                "column_ratios":
                lambda: self.firstpart + ",".join(map(str, settings[name])),
            }

            def default_value_completer():
                return self.firstpart + str(settings[name])

            return value_completers.get(name, default_value_completer)()
        if bool in settings.types_of(name):
            if 'true'.startswith(value.lower()):
                return self.firstpart + 'True'
            if 'false'.startswith(value.lower()):
                return self.firstpart + 'False'
        # Tab complete colorscheme values if incomplete value is present
        if name == "colorscheme":
            return sorted(self.firstpart + colorscheme for colorscheme
                          in get_all_colorschemes(self.fm) if colorscheme.startswith(value))
        return None
class setlocal_(set_):
    """Shared class for setinpath and setinregex

    By implementing the _arg abstract properly you can affect what the name of
    the pattern/path/regex argument can be, this should be a regular expression
    without match groups.

    By implementing the _format_arg abstract method you can affect how the
    argument is formatted as a regular expression.
    """

    from abc import (ABCMeta, abstractmethod, abstractproperty)

    __metaclass__ = ABCMeta

    @property
    @abstractmethod
    def _arg(self):
        """The name of the option for the path/regex"""
        raise NotImplementedError

    def __init__(self, *args, **kwargs):
        super(setlocal_, self).__init__(*args, **kwargs)
        # We require quoting of paths with whitespace so we have to take care
        # not to match escaped quotes.
        self.path_re_dquoted = re.compile(
            r'^set.+?\s+{arg}="(.*?[^\\])"'.format(arg=self._arg)
        )
        self.path_re_squoted = re.compile(
            r"^set.+?\s+{arg}='(.*?[^\\])'".format(arg=self._arg)
        )
        self.path_re_unquoted = re.compile(
            r'^{arg}=(.+?)$'.format(arg=self._arg)
        )

    def _re_shift(self, match):
        """Consume the matched arg=... tokens and return the expanded path."""
        if not match:
            return None
        path = match.group(1)
        # Prepend something that behaves like "path=" in case path starts with
        # whitespace
        for _ in "={0}".format(path).split():
            self.shift()
        return os.path.expanduser(path)

    @abstractmethod
    def _format_arg(self, arg):
        """How to format the argument as a regular expression"""
        raise NotImplementedError

    def execute(self):
        # Try double-quoted, single-quoted, then unquoted forms; fall back
        # to the current directory when no argument was given.
        arg = self._re_shift(self.path_re_dquoted.match(self.line))
        if arg is None:
            arg = self._re_shift(self.path_re_squoted.match(self.line))
        if arg is None:
            arg = self._re_shift(self.path_re_unquoted.match(self.arg(1)))
        if arg is None and self.fm.thisdir:
            arg = self.fm.thisdir.path
        if arg is None:
            return
        else:
            arg = self._format_arg(arg)

        name, value, _ = self.parse_setting_line()
        self.fm.set_option_from_string(name, value, localpath=arg)
class setinpath(setlocal_):
    """:setinpath path=<path> <option name>=<python expression>

    Sets an option when in a directory that matches <path>, relative paths can
    match multiple directories, for example, ``path=build`` would match a build
    directory in any directory. If the <path> contains whitespace it needs to
    be quoted and nested quotes need to be backslash-escaped. The "path"
    argument can also be named "pattern" to allow for easier switching with
    ``setinregex``.
    """

    _arg = "(?:path|pattern)"

    def _format_arg(self, arg):
        # Treat the path literally and anchor it at the end of the directory.
        return re.escape(arg) + "$"
class setlocal(setinpath):
    """:setlocal is an alias for :setinpath"""
    # Kept for backwards compatibility with older configs.
class setinregex(setlocal_):
    """:setinregex re=<regex> <option name>=<python expression>

    Sets an option when in a specific directory. If the <regex> contains
    whitespace it needs to be quoted and nested quotes need to be
    backslash-escaped. Special characters need to be escaped if they are
    intended to match literally as documented in the ``re`` library
    documentation. The "re" argument can also be named "regex" or "pattern,"
    which allows for easier switching with ``setinpath``.
    """

    _arg = "(?:re(?:gex)?|pattern)"

    def _format_arg(self, arg):
        # The user supplies a ready-made regular expression; use it verbatim.
        return arg
class setintag(set_):
    """:setintag <tag or tags> <option name>=<option value>

    Sets an option for directories that are tagged with a specific tag.
    """

    def execute(self):
        # First argument is the tag character(s); consume it, then parse the
        # remaining "name=value" pair like a regular :set.
        tags = self.arg(1)
        self.shift()
        name, value, _ = self.parse_setting_line()
        self.fm.set_option_from_string(name, value, tags=tags)
class default_linemode(Command):
    """:default_linemode [path=<regexp> | tag=<tag(s)>] <linemode>

    Sets the default linemode, optionally restricted to files whose path
    matches a regular expression or whose directory carries given tags.
    """

    def execute(self):
        from ranger.container.fsobject import FileSystemObject

        if len(self.args) < 2:
            self.fm.notify(
                "Usage: default_linemode [path=<regexp> | tag=<tag(s)>] <linemode>", bad=True)
            # BUGFIX: previously fell through and registered a bogus
            # ['always', None, ''] entry in fm.default_linemodes.
            return

        # Extract options like "path=..." or "tag=..." from the command line
        arg1 = self.arg(1)
        method = "always"
        argument = None
        if arg1.startswith("path="):
            method = "path"
            argument = re.compile(arg1[5:])
            self.shift()
        elif arg1.startswith("tag="):
            method = "tag"
            argument = arg1[4:]
            self.shift()

        # Extract and validate the line mode from the command line
        lmode = self.rest(1)
        if lmode not in FileSystemObject.linemode_dict:
            self.fm.notify(
                "Invalid linemode: %s; should be %s" % (
                    lmode, "/".join(FileSystemObject.linemode_dict)),
                bad=True,
            )
            # BUGFIX: don't register an invalid linemode.
            return

        # Add the prepared entry to the fm.default_linemodes
        entry = [method, argument, lmode]
        self.fm.default_linemodes.appendleft(entry)

        # Redraw the columns
        if self.fm.ui.browser:
            for col in self.fm.ui.browser.columns:
                col.need_redraw = True

    def tab(self, tabnum):
        return (self.arg(0) + " " + lmode
                for lmode in self.fm.thisfile.linemode_dict.keys()
                if lmode.startswith(self.arg(1)))
class quit(Command):  # pylint: disable=redefined-builtin
    """:quit

    Closes the current tab, if there's more than one tab.
    Otherwise quits if there are no tasks in progress.
    """

    def _exit_no_work(self):
        # Refuse to exit while the loader still has queued or running tasks.
        if not self.fm.loader.has_work():
            self.fm.exit()
        else:
            self.fm.notify('Not quitting: Tasks in progress: Use `quit!` to force quit')

    def execute(self):
        if len(self.fm.tabs) < 2:
            self._exit_no_work()
        else:
            self.fm.tab_close()
class quit_bang(Command):
    """:quit!

    Closes the current tab, if there's more than one tab.
    Otherwise force quits immediately.
    """

    name = 'quit!'
    allow_abbrev = False

    def execute(self):
        # Last tab standing: force-quit regardless of running tasks.
        if len(self.fm.tabs) < 2:
            self.fm.exit()
        else:
            self.fm.tab_close()
class quitall(Command):
    """:quitall

    Quits if there are no tasks in progress.
    """

    def _exit_no_work(self):
        # Refuse to exit while the loader still has queued or running tasks.
        if not self.fm.loader.has_work():
            self.fm.exit()
        else:
            self.fm.notify('Not quitting: Tasks in progress: Use `quitall!` to force quit')

    def execute(self):
        self._exit_no_work()
class quitall_bang(Command):
    """:quitall!

    Force quits immediately.
    """

    name = 'quitall!'
    allow_abbrev = False  # too destructive to allow an abbreviated form

    def execute(self):
        # Unconditional exit, ignoring any tasks in progress.
        self.fm.exit()
class terminal(Command):
    """:terminal

    Spawns an "x-terminal-emulator" starting in the current directory.
    """

    def execute(self):
        from ranger.ext.get_executables import get_term
        # 'f' flag: fork the terminal so ranger keeps running in the foreground.
        self.fm.run(get_term(), flags='f')
class delete(Command):
    """:delete

    Tries to delete the selection or the files passed in arguments (if any).
    The arguments use a shell-like escaping.

    "Selection" is defined as all the "marked files" (by default, you
    can mark files with space or v). If there are no marked files,
    use the "current file" (where the cursor is)

    When attempting to delete non-empty directories or multiple
    marked files, it will require a confirmation.
    """

    allow_abbrev = False
    escape_macros_for_shell = True

    def execute(self):
        import shlex
        from functools import partial

        def is_directory_with_files(path):
            # Non-empty real directory (symlinks are deleted as links).
            return os.path.isdir(path) and not os.path.islink(path) and len(os.listdir(path)) > 0

        if self.rest(1):
            # Explicit arguments: shell-style splitting of the rest of the line.
            files = shlex.split(self.rest(1))
            many_files = (len(files) > 1 or is_directory_with_files(files[0]))
        else:
            cwd = self.fm.thisdir
            tfile = self.fm.thisfile
            if not cwd or not tfile:
                self.fm.notify("Error: no file selected for deletion!", bad=True)
                return

            # relative_path used for a user-friendly output in the confirmation.
            files = [f.relative_path for f in self.fm.thistab.get_selection()]
            many_files = (cwd.marked_items or is_directory_with_files(tfile.path))

        confirm = self.fm.settings.confirm_on_delete
        if confirm != 'never' and (confirm != 'multiple' or many_files):
            # Deletion happens asynchronously in _question_callback.
            self.fm.ui.console.ask(
                "Confirm deletion of: %s (y/N)" % ', '.join(files),
                partial(self._question_callback, files),
                ('n', 'N', 'y', 'Y'),
            )
        else:
            # no need for a confirmation, just delete
            self.fm.delete(files)

    def tab(self, tabnum):
        return self._tab_directory_content()

    def _question_callback(self, files, answer):
        # Only an explicit 'y'/'Y' confirms; anything else aborts.
        if answer.lower() == 'y':
            self.fm.delete(files)
class trash(Command):
    """:trash

    Tries to move the selection or the files passed in arguments (if any) to
    the trash, using rifle rules with label "trash".
    The arguments use a shell-like escaping.

    "Selection" is defined as all the "marked files" (by default, you
    can mark files with space or v). If there are no marked files,
    use the "current file" (where the cursor is)

    When attempting to trash non-empty directories or multiple
    marked files, it will require a confirmation.
    """

    allow_abbrev = False
    escape_macros_for_shell = True

    def execute(self):
        import shlex
        from functools import partial

        def is_directory_with_files(path):
            # True for a real (non-symlink) directory containing at least
            # one entry -- trashing it is considered "risky".
            return os.path.isdir(path) and not os.path.islink(path) and len(os.listdir(path)) > 0

        if self.rest(1):
            # Explicit arguments: resolve names to filesystem objects first.
            file_names = shlex.split(self.rest(1))
            files = self.fm.get_filesystem_objects(file_names)
            if files is None:
                return
            many_files = (len(files) > 1 or is_directory_with_files(files[0].path))
        else:
            cwd = self.fm.thisdir
            tfile = self.fm.thisfile
            if not cwd or not tfile:
                self.fm.notify("Error: no file selected for deletion!", bad=True)
                return

            files = self.fm.thistab.get_selection()
            # relative_path used for a user-friendly output in the confirmation.
            file_names = [f.relative_path for f in files]
            many_files = (cwd.marked_items or is_directory_with_files(tfile.path))

        # Ask unless confirm_on_delete is 'never', or it is 'multiple'
        # and this is not a risky (multi-file / non-empty dir) operation.
        confirm = self.fm.settings.confirm_on_delete
        if confirm != 'never' and (confirm != 'multiple' or many_files):
            self.fm.ui.console.ask(
                "Confirm deletion of: %s (y/N)" % ', '.join(file_names),
                partial(self._question_callback, files),
                ('n', 'N', 'y', 'Y'),
            )
        else:
            # no need for a confirmation, just delete
            self._trash_files_catch_arg_list_error(files)

    def tab(self, tabnum):
        return self._tab_directory_content()

    def _question_callback(self, files, answer):
        # Invoked by the console prompt; only an explicit 'y'/'Y' trashes.
        if answer.lower() == 'y':
            self._trash_files_catch_arg_list_error(files)

    def _trash_files_catch_arg_list_error(self, files):
        """
        Executes the fm.execute_file method but catches the OSError ("Argument list too long")
        that occurs when moving too many files to trash (and would otherwise crash ranger).
        """
        try:
            self.fm.execute_file(files, label='trash')
        except OSError as err:
            # errno 7 (E2BIG): the spawned command line exceeded the OS limit.
            if err.errno == 7:
                self.fm.notify("Error: Command too long (try passing less files at once)",
                               bad=True)
            else:
                raise
class jump_non(Command):
    """:jump_non [-FLAGS...]

    Jumps to first non-directory if highlighted file is a directory and vice versa.

    Flags:
     -r    Jump in reverse order
     -w    Wrap around if reaching end of filelist
    """

    def __init__(self, *args, **kwargs):
        super(jump_non, self).__init__(*args, **kwargs)

        flags, _ = self.parse_flags()
        self._flag_reverse = 'r' in flags
        self._flag_wrap = 'w' in flags

    @staticmethod
    def _non(fobj, is_directory):
        # True when fobj is of the *opposite* kind of the current file:
        # a directory if the current file is not one, and vice versa.
        return fobj.is_directory if not is_directory else not fobj.is_directory

    def execute(self):
        tfile = self.fm.thisfile
        passed = False           # becomes True once we scan past the cursor
        found_before = None      # first opposite-kind file before the cursor
        found_after = None       # first opposite-kind file after the cursor
        for fobj in self.fm.thisdir.files[::-1] if self._flag_reverse else self.fm.thisdir.files:
            if fobj.path == tfile.path:
                passed = True
                continue

            if passed:
                if self._non(fobj, tfile.is_directory):
                    found_after = fobj.path
                    break
            elif not found_before and self._non(fobj, tfile.is_directory):
                found_before = fobj.path

        # Prefer a match after the cursor; fall back to one before it
        # only when wrapping (-w) was requested.
        if found_after:
            self.fm.select_file(found_after)
        elif self._flag_wrap and found_before:
            self.fm.select_file(found_before)
class mark_tag(Command):
    """:mark_tag [<tags>]

    Mark all files that are tagged with either of the given tags.
    When leaving out the tag argument, all tagged files are marked.
    """
    # Subclasses (unmark_tag) flip this to reuse execute() for unmarking.
    do_mark = True

    def execute(self):
        cwd = self.fm.thisdir
        # The argument is a string of single-character tags; spaces are
        # stripped so ":mark_tag a b" behaves like ":mark_tag ab".
        tags = self.rest(1).replace(" ", "")
        if not self.fm.tags or not cwd.files:
            return
        for fileobj in cwd.files:
            try:
                tag = self.fm.tags.tags[fileobj.realpath]
            except KeyError:
                # File is not tagged at all.
                continue
            # `tag in tags` is a character-membership test on the string.
            if not tags or tag in tags:
                cwd.mark_item(fileobj, val=self.do_mark)
        self.fm.ui.status.need_redraw = True
        self.fm.ui.need_redraw = True
class console(Command):
    """:console [-p N | -s sep] <command>

    Flags:
     -p N   Set position at N index
     -s sep Set position at separator(any char[s] sequence), example '#'
    Open the console with the given command.
    """

    def execute(self):
        position = None
        command = self.rest(1)
        # "-pN" is a single token (e.g. "-p7"), so the command starts at arg 2.
        if self.arg(1)[0:2] == '-p':
            command = self.rest(2)
            try:
                position = int(self.arg(1)[2:])
            except ValueError:
                # Malformed number: open the console without a preset cursor.
                pass
        # "-s sep" takes the separator as its own argument, so the command
        # starts at arg 3.
        elif self.arg(1)[0:2] == '-s':
            command = self.rest(3)
            sentinel = self.arg(2)
            pos = command.find(sentinel)
            if pos != -1:
                # Remove the first occurrence of the sentinel and place the
                # cursor where it used to be.
                command = command.replace(sentinel, '', 1)
                position = pos
        self.fm.open_console(command, position=position)
class load_copy_buffer(Command):
    """:load_copy_buffer

    Restore the copy buffer from datadir/copy_buffer, keeping only
    paths that still exist on disk.
    """
    copy_buffer_filename = 'copy_buffer'

    def execute(self):
        from os.path import exists
        from ranger.container.file import File

        fname = self.fm.datapath(self.copy_buffer_filename)
        unreadable = OSError if PY3 else IOError
        try:
            with open(fname, "r", encoding="utf-8") as fobj:
                paths = fobj.read().split("\n")
        except unreadable:
            return self.fm.notify(
                "Cannot open %s" % (fname or self.copy_buffer_filename), bad=True)
        # One entry per line; silently drop paths that vanished meanwhile.
        self.fm.copy_buffer = set(File(path) for path in paths if exists(path))
        self.fm.ui.redraw_main_column()
        return None
class save_copy_buffer(Command):
    """:save_copy_buffer

    Save the copy buffer to datadir/copy_buffer, one path per line.
    """
    copy_buffer_filename = 'copy_buffer'

    def execute(self):
        # Fix: removed a dead `fname = None` assignment that was
        # immediately overwritten.
        fname = self.fm.datapath(self.copy_buffer_filename)
        unwritable = OSError if PY3 else IOError
        try:
            with open(fname, "w", encoding="utf-8") as fobj:
                # Fix: the generator variable used to be named `fobj` too,
                # shadowing the file handle; use a distinct name.
                fobj.write("\n".join(
                    fsobj.path for fsobj in self.fm.copy_buffer))
        except unwritable:
            return self.fm.notify("Cannot open %s" %
                                  (fname or self.copy_buffer_filename), bad=True)
        return None
class unmark_tag(mark_tag):
    """:unmark_tag [<tags>]

    Unmark all files that are tagged with either of the given tags.
    When leaving out the tag argument, all tagged files are unmarked.
    """
    # Reuses mark_tag.execute(); do_mark=False clears marks instead.
    do_mark = False
class mkdir(Command):
    """:mkdir <dirname>

    Create a directory named <dirname>, including any missing
    intermediate directories.
    """

    def execute(self):
        from os.path import join, expanduser, lexists
        from os import makedirs

        target = join(self.fm.thisdir.path, expanduser(self.rest(1)))
        if lexists(target):
            self.fm.notify("file/directory exists!", bad=True)
        else:
            makedirs(target)

    def tab(self, tabnum):
        return self._tab_directory_content()
class touch(Command):
    """:touch <fname>

    Creates a file with the name <fname>, creating any missing parent
    directories first.
    """

    def execute(self):
        from os.path import join, expanduser, lexists, dirname
        from os import makedirs

        fname = join(self.fm.thisdir.path, expanduser(self.rest(1)))
        # Fix: the result used to be stored back into `dirname`, shadowing
        # the imported os.path.dirname function.
        parent = dirname(fname)
        if not lexists(fname):
            if not lexists(parent):
                makedirs(parent)
            with open(fname, 'a', encoding="utf-8"):
                pass  # Just create the file
        else:
            self.fm.notify("file/directory exists!", bad=True)

    def tab(self, tabnum):
        return self._tab_directory_content()
class edit(Command):
    """:edit <filename>

    Open the given file -- or, without an argument, the file under the
    cursor -- in the configured editor.
    """

    def execute(self):
        target = self.rest(1)
        if target:
            self.fm.edit_file(target)
        else:
            self.fm.edit_file(self.fm.thisfile.path)

    def tab(self, tabnum):
        return self._tab_directory_content()
class eval_(Command):
    """:eval [-q] <python code>

    Evaluates the python code.
    `fm' is a reference to the FM instance.
    To display text, use the function `p'.

    Examples:
    :eval fm
    :eval len(fm.directories)
    :eval p("Hello World!")

    NOTE: this deliberately eval()/exec()s user input; it is a power-user
    feature, not an injection bug.
    """
    name = 'eval'
    resolve_macros = False

    def execute(self):
        # The import is needed so eval() can access the ranger module
        import ranger  # NOQA pylint: disable=unused-import,unused-variable
        # "-q" suppresses printing the expression's result.
        if self.arg(1) == '-q':
            code = self.rest(2)
            quiet = True
        else:
            code = self.rest(1)
            quiet = False
        # Intentionally leak convenience names into the module globals so
        # the evaluated code (and later :eval calls) can use them.
        global cmd, fm, p, quantifier  # pylint: disable=invalid-name,global-variable-undefined
        fm = self.fm
        cmd = self.fm.execute_console
        p = fm.notify
        quantifier = self.quantifier
        try:
            try:
                # Try as an expression first so the result can be shown.
                result = eval(code)  # pylint: disable=eval-used
            except SyntaxError:
                # Statements (assignments, loops, ...) need exec() instead.
                exec(code)  # pylint: disable=exec-used
            else:
                if result and not quiet:
                    p(result)
        except Exception as err:  # pylint: disable=broad-except
            fm.notify("The error `%s` was caused by evaluating the "
                      "following code: `%s`" % (err, code), bad=True)
class rename(Command):
    """:rename <newname>

    Changes the name of the currently highlighted file to <newname>
    """

    def execute(self):
        from ranger.container.file import File
        from os import access

        new_name = self.rest(1)

        if not new_name:
            return self.fm.notify('Syntax: rename <newname>', bad=True)

        # Renaming to the same name is a no-op.
        if new_name == self.fm.thisfile.relative_path:
            return None

        # NOTE(review): access() resolves new_name relative to the process
        # cwd -- presumably kept in sync with thisdir; verify against fm.
        if access(new_name, os.F_OK):
            return self.fm.notify("Can't rename: file already exists!", bad=True)

        if self.fm.rename(self.fm.thisfile, new_name):
            file_new = File(new_name)
            # Keep bookmarks and tags pointing at the renamed file.
            self.fm.bookmarks.update_path(self.fm.thisfile.path, file_new)
            self.fm.tags.update_path(self.fm.thisfile.path, file_new.path)
            self.fm.thisdir.pointed_obj = file_new
            self.fm.thisfile = file_new

        return None

    def tab(self, tabnum):
        return self._tab_directory_content()
class rename_append(Command):
    """:rename_append [-FLAGS...]

    Opens the console with ":rename <current file>" with the cursor positioned
    before the file extension.

    Flags:
     -a    Position before all extensions
     -r    Remove everything before extensions
    """

    def __init__(self, *args, **kwargs):
        super(rename_append, self).__init__(*args, **kwargs)

        flags, _ = self.parse_flags()
        self._flag_ext_all = 'a' in flags
        self._flag_remove = 'r' in flags

    def execute(self):
        from ranger import MACRO_DELIMITER, MACRO_DELIMITER_ESC

        tfile = self.fm.thisfile
        # Escape macro delimiters so the name survives console macro expansion.
        relpath = tfile.relative_path.replace(MACRO_DELIMITER, MACRO_DELIMITER_ESC)
        basename = tfile.basename.replace(MACRO_DELIMITER, MACRO_DELIMITER_ESC)

        # No extension (or a leading-dot name) or a directory: just open the
        # console with the cursor at the end.
        if basename.find('.') <= 0 or os.path.isdir(relpath):
            self.fm.open_console('rename ' + relpath)
            return

        if self._flag_ext_all:
            # -a: cursor before the *first* dot (before all extensions).
            pos_ext = re.search(r'[^.]+', basename).end(0)
        else:
            # Default: cursor before the last extension.
            pos_ext = basename.rindex('.')
        # Translate the position within basename to one within relpath.
        pos = len(relpath) - len(basename) + pos_ext

        if self._flag_remove:
            # -r: keep only the directory part plus the extension(s).
            relpath = relpath[:-len(basename)] + basename[pos_ext:]
            pos -= pos_ext

        # 7 == len('rename ') -- offset of the filename in the console line.
        self.fm.open_console('rename ' + relpath, position=(7 + pos))
class chmod(Command):
    """:chmod <octal number>

    Set the permissions of the selection to the given octal number
    (0-777).  The digits are the permissions for user, group and
    others; 4 = read, 2 = write, 1 = execute, summed to combine.
    Without an argument, the quantifier is used as the mode.
    """

    def execute(self):
        mode_str = self.rest(1)
        if not mode_str:
            if self.quantifier is None:
                self.fm.notify("Syntax: chmod <octal number> "
                               "or specify a quantifier", bad=True)
                return
            mode_str = str(self.quantifier)

        try:
            mode = int(mode_str, 8)
        except ValueError:
            mode = -1  # force the range check below to fail
        if not 0 <= mode <= 0o777:
            self.fm.notify("Need an octal number between 0 and 777!", bad=True)
            return

        for fobj in self.fm.thistab.get_selection():
            try:
                os.chmod(fobj.path, mode)
            except OSError as ex:
                self.fm.notify(ex)

        # reloading directory. maybe its better to reload the selected
        # files only.
        self.fm.thisdir.content_outdated = True
class bulkrename(Command):
    """:bulkrename

    This command opens a list of selected files in an external editor.
    After you edit and save the file, it will generate a shell script
    which does bulk renaming according to the changes you did in the file.

    This shell script is opened in an editor for you to review.
    After you close it, it will be executed.
    """

    def __init__(self, *args, **kwargs):
        super(bulkrename, self).__init__(*args, **kwargs)
        self.flags, _ = self.parse_flags()
        if not self.flags:
            self.flags = "w"

    def execute(self):
        # pylint: disable=too-many-locals,too-many-statements,too-many-branches
        import tempfile
        from ranger.container.file import File
        from ranger.ext.shell_escape import shell_escape as esc

        # Create and edit the file list
        filenames = [f.relative_path for f in self.fm.thistab.get_selection()]
        # delete=False: the file must survive closing so the editor and the
        # re-open below can access it; we unlink it manually afterwards.
        with tempfile.NamedTemporaryFile(delete=False) as listfile:
            listpath = listfile.name
            if PY3:
                listfile.write("\n".join(filenames).encode(
                    encoding="utf-8", errors="surrogateescape"))
            else:
                listfile.write("\n".join(filenames))
        self.fm.execute_file([File(listpath)], app='editor')
        with open(
            listpath, "r", encoding="utf-8", errors="surrogateescape"
        ) as listfile:
            new_filenames = listfile.read().split("\n")
        os.unlink(listpath)
        if all(a == b for a, b in zip(filenames, new_filenames)):
            self.fm.notify("No renaming to be done!")
            return

        # Generate script
        with tempfile.NamedTemporaryFile() as cmdfile:
            script_lines = []
            script_lines.append("# This file will be executed when you close"
                                " the editor.")
            script_lines.append("# Please double-check everything, clear the"
                                " file to abort.")
            new_dirs = []
            for old, new in zip(filenames, new_filenames):
                if old != new:
                    basepath, _ = os.path.split(new)
                    # Create destination directories once, before any mv
                    # that needs them.
                    if (basepath and basepath not in new_dirs
                            and not os.path.isdir(basepath)):
                        script_lines.append("mkdir -vp -- {dir}".format(
                            dir=esc(basepath)))
                        new_dirs.append(basepath)
                    script_lines.append("mv -vi -- {old} {new}".format(
                        old=esc(old), new=esc(new)))
            # Make sure not to forget the ending newline
            script_content = "\n".join(script_lines) + "\n"
            if PY3:
                cmdfile.write(script_content.encode(encoding="utf-8",
                                                    errors="surrogateescape"))
            else:
                cmdfile.write(script_content)
            cmdfile.flush()
            # Open the script and let the user review it, then check if the
            # script was modified by the user
            self.fm.execute_file([File(cmdfile.name)], app='editor')
            cmdfile.seek(0)
            script_was_edited = (script_content != cmdfile.read())
            # Do the renaming
            self.fm.run(['/bin/sh', cmdfile.name], flags=self.flags)

        # Retag the files, but only if the script wasn't changed during review,
        # because only then we know which are the source and destination files.
        if not script_was_edited:
            tags_changed = False
            for old, new in zip(filenames, new_filenames):
                if old != new:
                    oldpath = self.fm.thisdir.path + '/' + old
                    newpath = self.fm.thisdir.path + '/' + new
                    if oldpath in self.fm.tags:
                        old_tag = self.fm.tags.tags[oldpath]
                        self.fm.tags.remove(oldpath)
                        self.fm.tags.tags[newpath] = old_tag
                        tags_changed = True
            if tags_changed:
                self.fm.tags.dump()
        else:
            # Fix: this used the bare name `fm`, which is undefined here (it
            # only exists if :eval previously leaked a global) -> NameError.
            self.fm.notify("files have not been retagged")
class relink(Command):
    """:relink <newpath>

    Changes the linked path of the currently highlighted symlink to <newpath>
    """

    def execute(self):
        new_path = self.rest(1)
        tfile = self.fm.thisfile

        if not new_path:
            return self.fm.notify('Syntax: relink <newpath>', bad=True)

        if not tfile.is_link:
            return self.fm.notify('%s is not a symlink!' % tfile.relative_path, bad=True)

        # No-op if the link already points at the requested target.
        if new_path == os.readlink(tfile.path):
            return None

        # Symlink targets cannot be rewritten in place: remove and recreate.
        try:
            os.remove(tfile.path)
            os.symlink(new_path, tfile.path)
        except OSError as err:
            self.fm.notify(err)

        # Reload and restore the cursor on the relinked file.
        self.fm.reset()
        self.fm.thisdir.pointed_obj = tfile
        self.fm.thisfile = tfile

        return None

    def tab(self, tabnum):
        # Pre-fill the console with the current link target for editing.
        if not self.rest(1):
            return self.line + os.readlink(self.fm.thisfile.path)
        return self._tab_directory_content()
class help_(Command):
    """:help

    Ask which kind of help to display: man page, key bindings,
    commands or settings.
    """
    name = 'help'

    def execute(self):
        def callback(answer):
            # Any answer other than m/c/k/s (notably "q") simply aborts.
            if answer == "m":
                self.fm.display_help()
            elif answer == "c":
                self.fm.dump_commands()
            elif answer == "k":
                self.fm.dump_keybindings()
            elif answer == "s":
                self.fm.dump_settings()

        self.fm.ui.console.ask(
            "View [m]an page, [k]ey bindings, [c]ommands or [s]ettings? (press q to abort)",
            callback,
            list("mqkcs")
        )
class copymap(Command):
    """:copymap <keys> <newkeys1> [<newkeys2>...]

    Copy a "browser" keybinding from <keys> to each of the <newkeys>.
    """
    context = 'browser'

    def execute(self):
        if not (self.arg(1) and self.arg(2)):
            return self.fm.notify("Not enough arguments", bad=True)

        source = self.arg(1)
        for target in self.args[2:]:
            self.fm.ui.keymaps.copy(self.context, source, target)
        return None
class copypmap(copymap):
    """:copypmap <keys> <newkeys1> [<newkeys2>...]

    Copies a "pager" keybinding from <keys> to <newkeys>
    """
    # Same behavior as copymap, but on the "pager" keymap context.
    context = 'pager'
class copycmap(copymap):
    """:copycmap <keys> <newkeys1> [<newkeys2>...]

    Copies a "console" keybinding from <keys> to <newkeys>
    """
    # Same behavior as copymap, but on the "console" keymap context.
    context = 'console'
class copytmap(copymap):
    """:copytmap <keys> <newkeys1> [<newkeys2>...]

    Copies a "taskview" keybinding from <keys> to <newkeys>
    """
    # Same behavior as copymap, but on the "taskview" keymap context.
    context = 'taskview'
class unmap(Command):
    """:unmap <keys> [<keys2>, ...]

    Remove each of the given "browser" mappings.
    """
    context = 'browser'

    def execute(self):
        for keys in self.args[1:]:
            self.fm.ui.keymaps.unbind(self.context, keys)
class uncmap(unmap):
    """:uncmap <keys> [<keys2>, ...]

    Remove the given "console" mappings
    """
    # Same behavior as unmap, but on the "console" keymap context.
    context = 'console'
class cunmap(uncmap):
    """:cunmap <keys> [<keys2>, ...]

    Remove the given "console" mappings

    DEPRECATED in favor of uncmap.
    """

    def execute(self):
        # Warn, then delegate to uncmap for the actual unbinding.
        self.fm.notify("cunmap is deprecated in favor of uncmap!")
        super(cunmap, self).execute()
class unpmap(unmap):
    """:unpmap <keys> [<keys2>, ...]

    Remove the given "pager" mappings
    """
    # Same behavior as unmap, but on the "pager" keymap context.
    context = 'pager'
class punmap(unpmap):
    """:punmap <keys> [<keys2>, ...]

    Remove the given "pager" mappings

    DEPRECATED in favor of unpmap.
    """

    def execute(self):
        # Warn, then delegate to unpmap for the actual unbinding.
        self.fm.notify("punmap is deprecated in favor of unpmap!")
        super(punmap, self).execute()
class untmap(unmap):
    """:untmap <keys> [<keys2>, ...]

    Remove the given "taskview" mappings
    """
    # Same behavior as unmap, but on the "taskview" keymap context.
    context = 'taskview'
class tunmap(untmap):
    """:tunmap <keys> [<keys2>, ...]

    Remove the given "taskview" mappings

    DEPRECATED in favor of untmap.
    """

    def execute(self):
        # Warn, then delegate to untmap for the actual unbinding.
        self.fm.notify("tunmap is deprecated in favor of untmap!")
        super(tunmap, self).execute()
class map_(Command):
    """:map <keysequence> <command>

    Map a command to a keysequence in the "browser" context.

    Example:
    map j move down
    map J move down 10
    """
    name = 'map'
    context = 'browser'
    resolve_macros = False

    def execute(self):
        if not (self.arg(1) and self.arg(2)):
            self.fm.notify("Syntax: {0} <keysequence> <command>".format(self.get_name()), bad=True)
            return

        self.fm.ui.keymaps.bind(self.context, self.arg(1), self.rest(2))
class cmap(map_):
    """:cmap <keysequence> <command>

    Maps a command to a keysequence in the "console" context.

    Example:
    cmap <ESC> console_close
    cmap <C-x> console_type test
    """
    # Same behavior as map, but on the "console" keymap context.
    context = 'console'
class tmap(map_):
    """:tmap <keysequence> <command>

    Maps a command to a keysequence in the "taskview" context.
    """
    # Same behavior as map, but on the "taskview" keymap context.
    context = 'taskview'
class pmap(map_):
    """:pmap <keysequence> <command>

    Maps a command to a keysequence in the "pager" context.
    """
    # Same behavior as map, but on the "pager" keymap context.
    context = 'pager'
class scout(Command):
    """:scout [-FLAGS...] <pattern>

    Swiss army knife command for searching, traveling and filtering files.

    Flags:
     -a    Automatically open a file on unambiguous match
     -e    Open the selected file when pressing enter
     -f    Filter files that match the current search pattern
     -g    Interpret pattern as a glob pattern
     -i    Ignore the letter case of the files
     -k    Keep the console open when changing a directory with the command
     -l    Letter skipping; e.g. allow "rdme" to match the file "readme"
     -m    Mark the matching files after pressing enter
     -M    Unmark the matching files after pressing enter
     -p    Permanent filter: hide non-matching files after pressing enter
     -r    Interpret pattern as a regular expression pattern
     -s    Smart case; like -i unless pattern contains upper case letters
     -t    Apply filter and search pattern as you type
     -v    Inverts the match

    Multiple flags can be combined.  For example, ":scout -gpt" would create
    a :filter-like command using globbing.
    """
    # One-character flag constants, matching the docstring above.
    AUTO_OPEN = "a"
    OPEN_ON_ENTER = "e"
    FILTER = "f"
    SM_GLOB = "g"
    IGNORE_CASE = "i"
    KEEP_OPEN = "k"
    SM_LETTERSKIP = "l"
    MARK = "m"
    UNMARK = "M"
    PERM_FILTER = "p"
    SM_REGEX = "r"
    SMART_CASE = "s"
    AS_YOU_TYPE = "t"
    INVERT = "v"

    def __init__(self, *args, **kwargs):
        super(scout, self).__init__(*args, **kwargs)
        # Compiled-pattern cache; filled lazily by _build_regex().
        self._regex = None
        self.flags, self.pattern = self.parse_flags()

    def execute(self):  # pylint: disable=too-many-branches
        thisdir = self.fm.thisdir
        flags = self.flags
        pattern = self.pattern
        regex = self._build_regex()
        # Also moves the cursor to the first match (move=True).
        count = self._count(move=True)

        self.fm.thistab.last_search = regex
        self.fm.set_search_method(order="search")

        if (self.MARK in flags or self.UNMARK in flags) and thisdir.files:
            # If both -m and -M are given, the one appearing later wins.
            value = flags.find(self.MARK) > flags.find(self.UNMARK)
            if self.FILTER in flags:
                # With -f the view already only shows matches, so (un)mark all.
                for fobj in thisdir.files:
                    thisdir.mark_item(fobj, value)
            else:
                for fobj in thisdir.files:
                    if regex.search(fobj.relative_path):
                        thisdir.mark_item(fobj, value)

        if self.PERM_FILTER in flags:
            # Empty pattern clears the permanent filter.
            thisdir.filter = regex if pattern else None

        # clean up:
        self.cancel()

        if self.OPEN_ON_ENTER in flags or \
                (self.AUTO_OPEN in flags and count == 1):
            if pattern == '..':
                self.fm.cd(pattern)
            else:
                self.fm.move(right=1)
                if self.quickly_executed:
                    self.fm.block_input(0.5)

        if self.KEEP_OPEN in flags and thisdir != self.fm.thisdir:
            # reopen the console:
            if not pattern:
                self.fm.open_console(self.line)
            else:
                # Strip the typed pattern so the next directory starts fresh.
                self.fm.open_console(self.line[0:-len(pattern)])

        if self.quickly_executed and thisdir != self.fm.thisdir and pattern != "..":
            self.fm.block_input(0.5)

    def cancel(self):
        # Drop any as-you-type temporary filter and restore the view.
        self.fm.thisdir.temporary_filter = None
        self.fm.thisdir.refilter()

    def quick(self):
        # Called on every keystroke while the console is open.
        asyoutype = self.AS_YOU_TYPE in self.flags
        if self.FILTER in self.flags:
            self.fm.thisdir.temporary_filter = self._build_regex()
        if self.PERM_FILTER in self.flags and asyoutype:
            self.fm.thisdir.filter = self._build_regex()
        if self.FILTER in self.flags or self.PERM_FILTER in self.flags:
            self.fm.thisdir.refilter()
        # Returning True closes the console (unambiguous match with -a).
        if self._count(move=asyoutype) == 1 and self.AUTO_OPEN in self.flags:
            return True
        return False

    def tab(self, tabnum):
        # Tab cycles the cursor through the matches.
        self._count(move=True, offset=tabnum)

    def _build_regex(self):
        """Compile self.pattern into a regex according to the flags (cached)."""
        if self._regex is not None:
            return self._regex

        frmat = "%s"
        flags = self.flags
        pattern = self.pattern

        if pattern == ".":
            return re.compile("")

        # Handle carets at start and dollar signs at end separately
        if pattern.startswith('^'):
            pattern = pattern[1:]
            frmat = "^" + frmat
        if pattern.endswith('$'):
            pattern = pattern[:-1]
            frmat += "$"

        # Apply one of the search methods
        if self.SM_REGEX in flags:
            regex = pattern
        elif self.SM_GLOB in flags:
            regex = re.escape(pattern).replace("\\*", ".*").replace("\\?", ".")
        elif self.SM_LETTERSKIP in flags:
            regex = ".*".join(re.escape(c) for c in pattern)
        else:
            regex = re.escape(pattern)

        regex = frmat % regex

        # Invert regular expression if necessary
        if self.INVERT in flags:
            regex = "^(?:(?!%s).)*$" % regex

        # Compile Regular Expression
        # pylint: disable=no-member
        options = re.UNICODE
        if self.IGNORE_CASE in flags or self.SMART_CASE in flags and \
                pattern.islower():
            options |= re.IGNORECASE
        # pylint: enable=no-member
        try:
            self._regex = re.compile(regex, options)
        except re.error:
            # Invalid user input: fall back to a match-everything regex.
            self._regex = re.compile("")
        return self._regex

    def _count(self, move=False, offset=0):
        """Count matches; optionally move the cursor to the first one.

        Returns 0, 1, a count > 1, or a bool (True == 1 for callers that
        compare with ``count == 1``).
        """
        count = 0
        cwd = self.fm.thisdir
        pattern = self.pattern

        if not pattern or not cwd.files:
            return 0
        if pattern == '.':
            return 0
        if pattern == '..':
            return 1

        # Rotate the file list so scanning starts at the cursor (+ offset).
        deq = deque(cwd.files)
        deq.rotate(-cwd.pointer - offset)
        i = offset
        regex = self._build_regex()
        for fsobj in deq:
            if regex.search(fsobj.relative_path):
                count += 1
                if move and count == 1:
                    cwd.move(to=(cwd.pointer + i) % len(cwd.files))
                    self.fm.thisfile = cwd.pointed_obj
                if count > 1:
                    # Two matches are enough to know it is ambiguous.
                    return count
            i += 1

        return count == 1
class narrow(Command):
    """
    :narrow

    Show only the files that are currently selected.  With no files
    selected, narrowing is switched off again.
    """

    def execute(self):
        thisdir = self.fm.thisdir
        if thisdir.marked_items:
            thisdir.narrow_filter = [
                fobj.basename for fobj in self.fm.thistab.get_selection()]
        else:
            thisdir.narrow_filter = None
        thisdir.refilter()
class filter_inode_type(Command):
    """
    :filter_inode_type [dfl]

    Display only files of the specified inode types.  The parameters
    can be combined.

        d display directories
        f display files
        l display links
    """

    def execute(self):
        # An empty argument clears the filter.
        self.fm.thisdir.inode_type_filter = self.arg(1) or ""
        self.fm.thisdir.refilter()
class filter_stack(Command):
    """
    :filter_stack ...

    Manages the filter stack.

        filter_stack add FILTER_TYPE ARGS...
        filter_stack pop
        filter_stack decompose
        filter_stack rotate [N=1]
        filter_stack clear
        filter_stack show
    """

    def execute(self):
        from ranger.core.filter_stack import SIMPLE_FILTERS, FILTER_COMBINATORS

        subcommand = self.arg(1)

        if subcommand == "add":
            try:
                # Simple filters take the rest of the line as their argument.
                self.fm.thisdir.filter_stack.append(
                    SIMPLE_FILTERS[self.arg(2)](self.rest(3))
                )
            except KeyError:
                # Not a simple filter: treat arg 2 as a combinator (and/or/not)
                # that operates on the existing stack.
                FILTER_COMBINATORS[self.arg(2)](self.fm.thisdir.filter_stack)
        elif subcommand == "pop":
            self.fm.thisdir.filter_stack.pop()
        elif subcommand == "decompose":
            # Replace the top filter by its component filters, if any.
            inner_filters = self.fm.thisdir.filter_stack.pop().decompose()
            if inner_filters:
                self.fm.thisdir.filter_stack.extend(inner_filters)
        elif subcommand == "clear":
            self.fm.thisdir.filter_stack = []
        elif subcommand == "rotate":
            rotate_by = int(self.arg(2) or self.quantifier or 1)
            self.fm.thisdir.filter_stack = (
                self.fm.thisdir.filter_stack[-rotate_by:]
                + self.fm.thisdir.filter_stack[:-rotate_by]
            )
        elif subcommand == "show":
            stack = list(map(str, self.fm.thisdir.filter_stack))
            pager = self.fm.ui.open_pager()
            pager.set_source(["Filter stack: "] + stack)
            pager.move(to=100, percentage=True)
            # "show" does not modify the stack, so skip the cleanup below.
            return
        else:
            self.fm.notify(
                "Unknown subcommand: {sub}".format(sub=subcommand),
                bad=True
            )
            return

        # Cleanup.
        self.cancel()

    def quick(self):
        # Live preview while typing an "add name ..." filter.
        if self.rest(1).startswith("add name "):
            try:
                regex = re.compile(self.rest(3))
            except re.error:
                # Incomplete/invalid regex while typing: match everything.
                regex = re.compile("")
            self.fm.thisdir.temporary_filter = regex
            self.fm.thisdir.refilter()

        return False

    def cancel(self):
        self.fm.thisdir.temporary_filter = None
        self.fm.thisdir.refilter()
class grep(Command):
    """:grep <string>

    Look for a string in all marked files or directories.
    """

    def execute(self):
        pattern = self.rest(1)
        if not pattern:
            return
        action = ['grep', '-n', '-e', pattern, '-r']
        action.extend(f.path for f in self.fm.thistab.get_selection())
        self.fm.execute_command(action, flags='p')
class flat(Command):
    """
    :flat <level>

    Flattens the directory view up to the specified level.

        -1 fully flattened
         0 remove flattened view
    """

    def execute(self):
        try:
            level_str = self.rest(1)
            level = int(level_str)
        except ValueError:
            # Fall back to the numeric quantifier (e.g. "2:flat").
            level = self.quantifier
            if level is None:
                self.fm.notify("Syntax: flat <level>", bad=True)
                return
        if level < -1:
            self.fm.notify("Need an integer number (-1, 0, 1, ...)", bad=True)
            # Fix: previously fell through and applied the invalid level.
            return
        self.fm.thisdir.unload()
        self.fm.thisdir.flat = level
        self.fm.thisdir.load_content()
class reset_previews(Command):
    """:reset_previews

    Drop all cached file previews so that they get regenerated.
    """

    def execute(self):
        # Clear the preview cache and repaint the UI.
        self.fm.previews = {}
        self.fm.ui.need_redraw = True
# Version control commands
# --------------------------------
class stage(Command):
    """
    :stage

    Stage the selected files in the corresponding version control system.
    """

    def execute(self):
        from ranger.ext.vcs import VcsError

        vcs = self.fm.thisdir.vcs
        if not (vcs and vcs.track):
            self.fm.notify('Unable to stage files: Not in repository')
            return
        filelist = [f.path for f in self.fm.thistab.get_selection()]
        try:
            vcs.action_add(filelist)
        except VcsError as ex:
            self.fm.notify('Unable to stage files: {0}'.format(ex))
        # Refresh the VCS state display either way.
        self.fm.ui.vcsthread.process(self.fm.thisdir)
class unstage(Command):
    """
    :unstage

    Unstage the selected files in the corresponding version control system.
    """

    def execute(self):
        from ranger.ext.vcs import VcsError

        vcs = self.fm.thisdir.vcs
        if not (vcs and vcs.track):
            self.fm.notify('Unable to unstage files: Not in repository')
            return
        filelist = [f.path for f in self.fm.thistab.get_selection()]
        try:
            vcs.action_reset(filelist)
        except VcsError as ex:
            self.fm.notify('Unable to unstage files: {0}'.format(ex))
        # Refresh the VCS state display either way.
        self.fm.ui.vcsthread.process(self.fm.thisdir)
# Metadata commands
# --------------------------------
class prompt_metadata(Command):
    """
    :prompt_metadata <key1> [<key2> [<key3> ...]]

    Prompt the user to input metadata for multiple keys in a row.
    """

    _command_name = "meta"
    # Class-level (shared) queue of keys still to be prompted for; the chain
    # survives across console invocations until it is exhausted.
    _console_chain = None

    def execute(self):
        prompt_metadata._console_chain = self.args[1:]
        self._process_command_stack()

    def _process_command_stack(self):
        if prompt_metadata._console_chain:
            # Prompt for the next pending key.
            key = prompt_metadata._console_chain.pop()
            self._fill_console(key)
        else:
            # Chain finished: repaint the browser columns.
            for col in self.fm.ui.browser.columns:
                col.need_redraw = True

    def _fill_console(self, key):
        # Pre-fill the console with ":meta <key> <existing value>" so the
        # user can edit the current value in place.
        metadata = self.fm.metadata.get_metadata(self.fm.thisfile.path)
        if key in metadata and metadata[key]:
            existing_value = metadata[key]
        else:
            existing_value = ""
        text = "%s %s %s" % (self._command_name, key, existing_value)
        self.fm.open_console(text, position=len(text))
class meta(prompt_metadata):
    """
    :meta <key> [<value>]

    Change metadata of a file. Deletes the key if value is empty.
    """

    def execute(self):
        key = self.arg(1)
        update_dict = {}
        update_dict[key] = self.rest(2)
        selection = self.fm.thistab.get_selection()
        for fobj in selection:
            self.fm.metadata.set_metadata(fobj.path, update_dict)
        # Continue any pending :prompt_metadata chain.
        self._process_command_stack()

    def tab(self, tabnum):
        key = self.arg(1)
        metadata = self.fm.metadata.get_metadata(self.fm.thisfile.path)
        # Exact key with a value: complete to the current value for editing.
        if key in metadata and metadata[key]:
            return [" ".join([self.arg(0), self.arg(1), metadata[key]])]
        # Otherwise complete among the known keys matching the typed prefix.
        return [self.arg(0) + " " + k for k in sorted(metadata)
                if k.startswith(self.arg(1))]
class linemode(default_linemode):
    """
    :linemode <mode>

    Change what is displayed as a filename.

    - "mode" may be any of the defined linemodes (see: ranger.core.linemode).
      "normal" is mapped to "filename".
    """

    def execute(self):
        mode = self.arg(1)
        if mode == "normal":
            from ranger.core.linemode import DEFAULT_LINEMODE
            mode = DEFAULT_LINEMODE

        if mode in self.fm.thisfile.linemode_dict:
            self.fm.thisdir.set_linemode_of_children(mode)
            # Force the browser columns to repaint with the new linemode.
            for column in self.fm.ui.browser.columns:
                column.need_redraw = True
        else:
            self.fm.notify("Unhandled linemode: `%s'" % mode, bad=True)
class yank(Command):
    """:yank [name|dir|path|name_without_extension]

    Copies the file's name (default), directory or path into both the primary X
    selection and the clipboard.
    """

    # Maps the command argument to the file attribute that gets copied.
    modes = {
        '': 'basename',
        'name_without_extension': 'basename_without_extension',
        'name': 'basename',
        'dir': 'dirname',
        'path': 'path',
    }

    def execute(self):
        import subprocess

        def clipboards():
            # Return the command lines for the first available clipboard
            # manager; two entries fill both primary selection and clipboard.
            from ranger.ext.get_executables import get_executables
            clipboard_managers = {
                'xclip': [
                    ['xclip'],
                    ['xclip', '-selection', 'clipboard'],
                ],
                'xsel': [
                    ['xsel'],
                    ['xsel', '-b'],
                ],
                'wl-copy': [
                    ['wl-copy'],
                ],
                'pbcopy': [
                    ['pbcopy'],
                ],
            }
            ordered_managers = ['pbcopy', 'xclip', 'xsel', 'wl-copy']
            executables = get_executables()
            for manager in ordered_managers:
                if manager in executables:
                    return clipboard_managers[manager]
            # No clipboard tool available: yanking is a silent no-op.
            return []

        clipboard_commands = clipboards()

        # KeyError on an unknown argument is surfaced by the console.
        mode = self.modes[self.arg(1)]
        selection = self.get_selection_attr(mode)

        new_clipboard_contents = "\n".join(selection)
        for command in clipboard_commands:
            with subprocess.Popen(
                command, universal_newlines=True, stdin=subprocess.PIPE
            ) as process:
                process.communicate(input=new_clipboard_contents)

    def get_selection_attr(self, attr):
        return [getattr(item, attr) for item in
                self.fm.thistab.get_selection()]

    def tab(self, tabnum):
        # Complete the non-default mode names.
        return (
            self.start(1) + mode for mode
            in sorted(self.modes.keys())
            if mode
        )
class paste_ext(Command):
    """
    :paste_ext

    Like paste but tries to rename conflicting files so that the
    file extension stays intact (e.g. file_.ext).
    """

    @staticmethod
    def make_safe_path(dst):
        # Fast path: no conflict at all.
        if not os.path.exists(dst):
            return dst

        stem, ext = os.path.splitext(dst)
        if not stem.endswith("_"):
            stem += "_"
        if not os.path.exists(stem + ext):
            return stem + ext

        # Append an increasing counter before the extension until the
        # resulting name is free.
        counter = 0
        while os.path.exists(stem + str(counter) + ext):
            counter += 1
        return stem + str(counter) + ext

    def execute(self):
        return self.fm.paste(make_safe_path=paste_ext.make_safe_path)
| 66,598 | Python | .py | 1,686 | 29.956702 | 99 | 0.581115 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
687 | commands_sample.py | ranger_ranger/ranger/config/commands_sample.py | # This is a sample commands.py. You can add your own commands here.
#
# Please refer to commands_full.py for all the default commands and a complete
# documentation. Do NOT add them all here, or you may end up with defunct
# commands when upgrading ranger.
# A simple command for demonstration purposes follows.
# -----------------------------------------------------------------------------
from __future__ import (absolute_import, division, print_function)
# You can import any python module as needed.
import os
# You always need to import ranger.api.commands here to get the Command class:
from ranger.api.commands import Command
# Any class that is a subclass of "Command" will be integrated into ranger as a
# command. Try typing ":my_edit<ENTER>" in ranger!
class my_edit(Command):
    # The docstring below is what the built-in help ("?c" inside ranger)
    # displays for this command.
    """:my_edit <filename>
    A sample command for demonstration purposes that opens a file in an editor.
    """
    def execute(self):
        # Called when the command is run.  ":my_edit foo<ENTER>" makes
        # self.arg(1) truthy and self.rest(1) == "foo"; without an
        # argument we fall back to the file under the cursor
        # (self.fm.thisfile, a ranger.container.file.File).
        target_filename = (
            self.rest(1) if self.arg(1) else self.fm.thisfile.path
        )
        # fm.notify prints a message in ranger's status area.
        self.fm.notify("Let's edit the file " + target_filename + "!")
        if not os.path.exists(target_filename):
            # bad=True renders the message as an error.
            self.fm.notify("The given file does not exist!", bad=True)
            return
        # Delegate the actual editing to ranger.core.actions; run
        # "pydoc ranger.core.actions" for the full list of helpers.
        self.fm.edit_file(target_filename)
    def tab(self, tabnum):
        # Called on <TAB> (tabnum == 1) / <S-TAB> (tabnum == -1);
        # returns the completion candidates the user cycles through.
        # Here: a generic completion over the current directory.
        return self._tab_directory_content()
| 2,763 | Python | .py | 50 | 48.58 | 79 | 0.674935 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
688 | bookmarks.py | ranger_ranger/ranger/container/bookmarks.py | # This file is part of ranger, the console file manager.
# License: GNU GPL version 3, see the file "AUTHORS" for details.
from __future__ import (absolute_import, division, print_function)
import string
import re
import os
from io import open
from ranger import PY3
from ranger.core.shared import FileManagerAware
ALLOWED_KEYS = string.ascii_letters + string.digits + "`'"
class Bookmarks(FileManagerAware):
    """Bookmarks is a container which associates keys with bookmarks.
    A key is a string with: len(key) == 1 and key in ALLOWED_KEYS.
    A bookmark is an object with: bookmark == bookmarktype(str(instance))
    Which is true for str or FileSystemObject. This condition is required
    so bookmark-objects can be saved to and loaded from a file.
    Optionally, a bookmark.go() method is used for entering a bookmark.
    """
    # mtime of the bookmark file when last read/written; used to detect
    # modifications made by other ranger instances.
    last_mtime = None
    autosave = True
    # A persisted bookmark line looks like "<key>:<path>".
    load_pattern = re.compile(r"^[\d\w']:.")
    def __init__(self, bookmarkfile, bookmarktype=str, autosave=False,
                 nonpersistent_bookmarks=()):
        """Initializes Bookmarks.
        <bookmarkfile> specifies the path to the file where
        bookmarks are saved in.
        Keys in <nonpersistent_bookmarks> are kept in memory only and are
        never written to that file.
        """
        self.autosave = autosave
        self.dct = {}  # key -> bookmark; the live table
        self.original_dict = {}  # snapshot of dct as last synced with disk
        self.path = bookmarkfile
        self.bookmarktype = bookmarktype
        self.nonpersistent_bookmarks = set(nonpersistent_bookmarks)
    def load(self):
        """Load the bookmarks from path/bookmarks"""
        new_dict = self._load_dict()
        if new_dict is None:
            # The file could not be read; keep the current state.
            return
        self._set_dict(new_dict, original=new_dict)
    def enter(self, key):
        """Enter the bookmark with the given key.
        Requires the bookmark instance to have a go() method.
        Returns False when the key is unknown or the bookmark is invalid.
        """
        try:
            return self[key].go()
        except (IndexError, KeyError, AttributeError):
            return False
    def update_if_outdated(self):
        # Re-read the bookmark file if another process modified it.
        if self.last_mtime != self._get_mtime():
            self.update()
    def remember(self, value):
        """Bookmarks <value> to the key '"""
        self["'"] = value
        if self.autosave:
            self.save()
    def __delitem__(self, key):
        """Delete the bookmark with the given key"""
        # ` is an alias for '
        if key == '`':
            key = "'"
        if key in self.dct:
            del self.dct[key]
            if self.autosave:
                self.save()
    def __iter__(self):
        # Iterates over (key, bookmark) pairs.
        return iter(self.dct.items())
    def __getitem__(self, key):
        """Get the bookmark associated with the key"""
        # ` is an alias for '
        if key == '`':
            key = "'"
        if key in self.dct:
            value = self.dct[key]
            if self._validate(value):
                return value
            else:
                raise KeyError("Cannot open bookmark: `%s'!" % key)
        else:
            raise KeyError("Nonexistent Bookmark: `%s'!" % key)
    def __setitem__(self, key, value):
        """Bookmark <value> to the key <key>.
        key is expected to be a 1-character string and element of ALLOWED_KEYS.
        value is expected to be a filesystemobject.
        """
        # ` is an alias for '
        if key == '`':
            key = "'"
        if key in ALLOWED_KEYS:
            self.dct[key] = value
            if self.autosave:
                self.save()
    def __contains__(self, key):
        """Test whether a bookmark-key is defined"""
        return key in self.dct
    def update_path(self, path_old, file_new):
        """Update bookmarks containing path"""
        self.update_if_outdated()
        changed = False
        # NOTE: only values of existing keys are replaced here, which is
        # safe while iterating the dict.
        for key, bfile in self:
            if bfile.path == path_old:
                # The bookmarked path itself was moved/renamed.
                self.dct[key] = file_new
                changed = True
            elif bfile.path.startswith(path_old + os.path.sep):
                # A parent directory of the bookmark was moved/renamed;
                # splice the new prefix onto the old relative remainder.
                self.dct[key] = self.bookmarktype(file_new.path + bfile.path[len(path_old):])
                changed = True
        if changed:
            self.save()
    def update(self):
        """Update the bookmarks from the bookmark file.
        Useful if two instances are running which define different bookmarks.
        """
        real_dict = self._load_dict()
        if real_dict is None:
            return
        real_dict_copy = real_dict.copy()
        # Three-way merge between the state originally loaded
        # (self.original_dict), our current state (self.dct) and what is
        # on disk right now (real_dict).
        for key in set(self.dct) | set(real_dict):
            # set some variables
            if key in self.dct:
                current = self.dct[key]
            else:
                current = None
            if key in self.original_dict:
                original = self.original_dict[key]
            else:
                original = None
            if key in real_dict:
                real = real_dict[key]
            else:
                real = None
            # determine if there have been changes
            if current == original and current != real:
                continue  # another ranger instance has changed the bookmark
            if key not in self.dct:
                del real_dict[key]  # the user has deleted it
            else:
                real_dict[key] = current  # the user has changed it
        self._set_dict(real_dict, original=real_dict_copy)
    def save(self):
        """Save the bookmarks to the bookmarkfile.
        This is done automatically after every modification if autosave is True.
        """
        self.update()
        if self.path is None:
            return
        # Write to a temporary file and rename it over the original so a
        # crash in the middle cannot truncate the bookmark file.
        path_new = self.path + '.new'
        try:
            with open(path_new, 'w', encoding="utf-8") as fobj:
                for key, value in self.dct.items():
                    if key in ALLOWED_KEYS \
                            and key not in self.nonpersistent_bookmarks:
                        key_value = "{0}:{1}\n".format(key, value)
                        if not PY3 and isinstance(key_value, str):
                            key_value = key_value.decode("utf-8")
                        fobj.write(key_value)
        except OSError as ex:
            self.fm.notify('Bookmarks error: {0}'.format(str(ex)), bad=True)
            return
        try:
            # Carry ownership and permissions of the old file over to the
            # new one before the rename.
            old_perms = os.stat(self.path)
            os.chown(path_new, old_perms.st_uid, old_perms.st_gid)
            os.chmod(path_new, old_perms.st_mode)
            if os.path.islink(self.path):
                # Don't replace a symlink; replace the file it points to.
                target_path = os.path.realpath(self.path)
                os.rename(path_new, target_path)
            else:
                os.rename(path_new, self.path)
        except OSError as ex:
            self.fm.notify('Bookmarks error: {0}'.format(str(ex)), bad=True)
            return
        self._update_mtime()
    def enable_saving_backtick_bookmark(self, boolean):
        """
        Adds or removes the ' from the list of nonpersitent bookmarks
        """
        if boolean:
            if "'" in self.nonpersistent_bookmarks:
                self.nonpersistent_bookmarks.remove("'")  # enable
        else:
            self.nonpersistent_bookmarks.add("'")  # disable
    def _load_dict(self):
        # Read the bookmark file into a fresh dict.  Returns None when
        # the file exists but cannot be read/created.
        if self.path is None:
            return {}
        if not os.path.exists(self.path):
            # Create an empty bookmark file on first use.
            try:
                with open(self.path, 'w', encoding="utf-8") as fobj:
                    pass
            except OSError as ex:
                self.fm.notify('Bookmarks error: {0}'.format(str(ex)), bad=True)
                return None
        try:
            with open(self.path, 'r', encoding="utf-8") as fobj:
                dct = {}
                for line in fobj:
                    if self.load_pattern.match(line):
                        # line is "<key>:<path>\n"
                        key, value = line[0], line[2:-1]
                        if key in ALLOWED_KEYS:
                            dct[key] = self.bookmarktype(value)
        except OSError as ex:
            self.fm.notify('Bookmarks error: {0}'.format(str(ex)), bad=True)
            return None
        return dct
    def _set_dict(self, dct, original):
        # Replace the live table and remember the on-disk snapshot.
        if original is None:
            original = {}
        self.dct.clear()
        self.dct.update(dct)
        self.original_dict = original
        self._update_mtime()
    def _get_mtime(self):
        # mtime of the bookmark file, or None if it is unavailable.
        if self.path is None:
            return None
        try:
            return os.stat(self.path).st_mtime
        except OSError:
            return None
    def _update_mtime(self):
        self.last_mtime = self._get_mtime()
    def _validate(self, value):
        # A bookmark is only usable while its target directory exists.
        return os.path.isdir(str(value))
| 8,408 | Python | .py | 216 | 27.777778 | 93 | 0.553372 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
689 | settings.py | ranger_ranger/ranger/container/settings.py | # This file is part of ranger, the console file manager.
# License: GNU GPL version 3, see the file "AUTHORS" for details.
from __future__ import (absolute_import, division, print_function)
import re
import os.path
from inspect import isfunction
import ranger
from ranger.ext.signals import SignalDispatcher
from ranger.core.shared import FileManagerAware
from ranger.gui.colorscheme import _colorscheme_name_to_class
# Use these priority constants to trigger events at specific points in time
# during processing of the signals "setopt" and "setopt.<some_setting_name>"
SIGNAL_PRIORITY_RAW = 2.0 # signal.value will be raw
SIGNAL_PRIORITY_SANITIZE = 1.0 # (Internal) post-processing signal.value
SIGNAL_PRIORITY_BETWEEN = 0.6 # sanitized signal.value, old fm.settings.XYZ
SIGNAL_PRIORITY_SYNC = 0.2 # (Internal) updating fm.settings.XYZ
SIGNAL_PRIORITY_AFTER_SYNC = 0.1 # after fm.settings.XYZ was updated
ALLOWED_SETTINGS = {
'automatically_count_files': bool,
'autosave_bookmarks': bool,
'autoupdate_cumulative_size': bool,
'bidi_support': bool,
'binary_size_prefix': bool,
'cd_bookmarks': bool,
'cd_tab_case': str,
'cd_tab_fuzzy': bool,
'clear_filters_on_dir_change': bool,
'collapse_preview': bool,
'colorscheme': str,
'column_ratios': (tuple, list),
'confirm_on_delete': str,
'dirname_in_tabs': bool,
'display_size_in_main_column': bool,
'display_size_in_status_bar': bool,
"display_free_space_in_status_bar": bool,
'display_tags_in_all_columns': bool,
'draw_borders': str,
'draw_borders_multipane': str,
'draw_progress_bar_in_status_bar': bool,
'filter_dead_tabs_on_startup': bool,
'flushinput': bool,
'freeze_files': bool,
'global_inode_type_filter': str,
'hidden_filter': str,
'hint_collapse_threshold': int,
'hostname_in_titlebar': bool,
'size_in_bytes': bool,
'idle_delay': int,
'iterm2_font_width': int,
'iterm2_font_height': int,
'line_numbers': str,
'max_console_history_size': (int, type(None)),
'max_history_size': (int, type(None)),
'metadata_deep_search': bool,
'mouse_enabled': bool,
'nested_ranger_warning': str,
'one_indexed': bool,
'open_all_images': bool,
'padding_right': bool,
'preview_directories': bool,
'preview_files': bool,
'preview_images': bool,
'preview_images_method': str,
'preview_max_size': int,
'preview_script': (str, type(None)),
'relative_current_zero': bool,
'save_backtick_bookmark': bool,
'save_console_history': bool,
'save_tabs_on_exit': bool,
'scroll_offset': int,
'shorten_title': int,
'show_cursor': bool, # TODO: not working?
'show_hidden_bookmarks': bool,
'show_hidden': bool,
'show_selection_in_titlebar': bool,
'sort_case_insensitive': bool,
'sort_directories_first': bool,
'sort_reverse': bool,
'sort': str,
'sort_unicode': bool,
'status_bar_on_top': bool,
'tilde_in_titlebar': bool,
'unicode_ellipsis': bool,
'update_title': bool,
'update_tmux_title': bool,
'use_preview_script': bool,
'vcs_aware': bool,
'vcs_backend_bzr': str,
'vcs_backend_git': str,
'vcs_backend_hg': str,
'vcs_backend_svn': str,
'vcs_msg_length': int,
'viewmode': str,
'w3m_delay': float,
'w3m_offset': int,
'wrap_plaintext_previews': bool,
'wrap_scroll': bool,
'xterm_alt_key': bool,
'sixel_dithering': str,
}
ALLOWED_VALUES = {
'cd_tab_case': ['sensitive', 'insensitive', 'smart'],
'confirm_on_delete': ['multiple', 'always', 'never'],
'draw_borders': ['none', 'both', 'outline', 'separators'],
'draw_borders_multipane': [None, 'none', 'both', 'outline',
'separators', 'active-pane'],
'line_numbers': ['false', 'absolute', 'relative'],
'nested_ranger_warning': ['true', 'false', 'error'],
'one_indexed': [False, True],
'preview_images_method': ['w3m', 'iterm2', 'terminology',
'sixel', 'urxvt', 'urxvt-full',
'kitty', 'ueberzug'],
'vcs_backend_bzr': ['disabled', 'local', 'enabled'],
'vcs_backend_git': ['enabled', 'disabled', 'local'],
'vcs_backend_hg': ['disabled', 'local', 'enabled'],
'vcs_backend_svn': ['disabled', 'local', 'enabled'],
'viewmode': ['miller', 'multipane'],
}
DEFAULT_VALUES = {
bool: False,
type(None): None,
str: "",
int: 0,
float: 0.0,
list: [],
tuple: tuple([]),
}
class Settings(SignalDispatcher, FileManagerAware):
    """Stores ranger's settings and dispatches "setopt" signals on change.
    Settings live in three scopes: global (_settings), path-local
    (_localsettings, keyed by a regex pattern) and tag-local
    (_tagsettings).
    """
    def __init__(self):
        SignalDispatcher.__init__(self)
        # Write through __dict__ to avoid triggering our own __setattr__.
        self.__dict__['_localsettings'] = {}
        self.__dict__['_localregexes'] = {}
        self.__dict__['_tagsettings'] = {}
        self.__dict__['_settings'] = {}
        for name in ALLOWED_SETTINGS:
            # Sanitize the incoming value first, then commit it to
            # _settings (see the SIGNAL_PRIORITY_* constants above).
            self.signal_bind('setopt.' + name, self._sanitize,
                             priority=SIGNAL_PRIORITY_SANITIZE)
            self.signal_bind('setopt.' + name, self._raw_set_with_signal,
                             priority=SIGNAL_PRIORITY_SYNC)
        for name, values in ALLOWED_VALUES.items():
            assert values
            assert name in ALLOWED_SETTINGS
            # The first entry of each ALLOWED_VALUES list is the default.
            self._raw_set(name, values[0])
    def _sanitize(self, signal):
        """Validate/normalize signal.value for a few special settings."""
        name, value = signal.setting, signal.value
        if name == 'column_ratios':
            # TODO: cover more cases here
            if isinstance(value, tuple):
                signal.value = list(value)
            if not isinstance(value, list) or len(value) < 2:
                signal.value = [1, 1]
            else:
                # Non-numeric entries degrade to a ratio of 1.
                signal.value = [int(i) if str(i).isdigit() else 1
                                for i in value]
        elif name == 'colorscheme':
            _colorscheme_name_to_class(signal)
        elif name == 'preview_script':
            if isinstance(value, str):
                result = os.path.expanduser(value)
                if os.path.exists(result):
                    signal.value = result
                else:
                    self.fm.notify("Preview script `{0}` doesn't exist!".format(result), bad=True)
                    signal.value = None
        elif name == 'use_preview_script':
            if self._settings.get('preview_script') is None and value and self.fm.ui.is_on:
                self.fm.notify("Preview script undefined or not found!",
                               bad=True)
    def set(self, name, value, path=None, tags=None):
        """Set the setting <name> to <value>, emitting setopt signals.
        <path> and <tags> are mutually exclusive and make the assignment
        local to a path regex or to tagged files respectively.  The
        actual write happens in _raw_set_with_signal, which __init__
        bound to the 'setopt.<name>' signal.
        """
        assert name in ALLOWED_SETTINGS, "No such setting: {0}!".format(name)
        if name not in self._settings:
            previous = None
        else:
            previous = self._settings[name]
        assert self._check_type(name, value)
        assert not (tags and path), "Can't set a setting for path and tag " \
            "at the same time!"
        kws = {
            "setting": name,
            "value": value,
            "previous": previous,
            "path": path,
            "tags": tags,
            "fm": self.fm,
        }
        self.signal_emit('setopt', **kws)
        self.signal_emit('setopt.' + name, **kws)
    def _get_default(self, name):
        # Default for a setting: preview_script resolves to the scope.sh
        # shipped with ranger (or the user's copy); every other setting
        # falls back to its type's zero value.
        if name == 'preview_script':
            if ranger.args.clean:
                value = self.fm.relpath('data/scope.sh')
            else:
                value = self.fm.confpath('scope.sh')
                if not os.path.exists(value):
                    value = self.fm.relpath('data/scope.sh')
        else:
            value = DEFAULT_VALUES[self.types_of(name)[0]]
        return value
    def get(self, name, path=None):
        """Return the effective value of setting <name>.
        Lookup order: path-local settings (regex matched against <path>
        or the current directory), then tag-local settings, then the
        global value, falling back to the default on first access.
        """
        assert name in ALLOWED_SETTINGS, "No such setting: {0}!".format(name)
        if path:
            localpath = path
        else:
            try:
                localpath = self.fm.thisdir.path
            except AttributeError:
                localpath = None
        if localpath:
            for pattern, regex in self._localregexes.items():
                if name in self._localsettings[pattern] and\
                        regex.search(localpath):
                    return self._localsettings[pattern][name]
        if self._tagsettings and path:
            realpath = os.path.realpath(path)
            if realpath in self.fm.tags:
                tag = self.fm.tags.marker(realpath)
                if tag in self._tagsettings and name in self._tagsettings[tag]:
                    return self._tagsettings[tag][name]
        if name not in self._settings:
            # First access: install the default.  setattr runs the full
            # signal machinery so observers see the value.
            value = self._get_default(name)
            self._raw_set(name, value)
            setattr(self, name, value)
        return self._settings[name]
    def __setattr__(self, name, value):
        # Underscored names are internal attributes; anything else is
        # treated as a setting.
        if name.startswith('_'):
            self.__dict__[name] = value
        else:
            self.set(name, value, None)
    def __getattr__(self, name):
        if name.startswith('_'):
            return self.__dict__[name]
        return self.get(name, None)
    def __iter__(self):
        # Iterates over the known setting names.
        for setting in self._settings:
            yield setting
    @staticmethod
    def types_of(name):
        """Return the tuple of allowed types for setting <name>."""
        try:
            typ = ALLOWED_SETTINGS[name]
        except KeyError:
            return tuple()
        else:
            if isinstance(typ, tuple):
                return typ
            return (typ,)
    def _check_type(self, name, value):
        # Assert that <value> matches the declared type (or predicate)
        # for <name>; always returns True so it can be used in asserts.
        typ = ALLOWED_SETTINGS[name]
        if isfunction(typ):
            assert typ(value), \
                "Warning: The option `" + name + "' has an incorrect type!"
        else:
            # NOTE(review): by operator precedence, the trailing
            # conditional selects between the whole concatenated message
            # and "" — presumably intended to drop the commands.py hint
            # once the UI is set up; confirm before reformatting.
            assert isinstance(value, typ), \
                "Warning: The option `" + name + "' has an incorrect type!"\
                " Got " + str(type(value)) + ", expected " + str(typ) + "!" +\
                " Please check if your commands.py is up to date." if not \
                self.fm.ui.is_set_up else ""
        return True
    __getitem__ = __getattr__
    __setitem__ = __setattr__
    def _raw_set(self, name, value, path=None, tags=None):
        # Store a value directly, without emitting signals or validating.
        if path:
            if path not in self._localsettings:
                try:
                    regex = re.compile(path)
                except re.error:  # Bad regular expression
                    return
                self._localregexes[path] = regex
                self._localsettings[path] = {}
            self._localsettings[path][name] = value
            # make sure name is in _settings, so __iter__ runs through
            # local settings too.
            if name not in self._settings:
                type_ = self.types_of(name)[0]
                value = DEFAULT_VALUES[type_]
                self._settings[name] = value
        elif tags:
            for tag in tags:
                if tag not in self._tagsettings:
                    self._tagsettings[tag] = {}
                self._tagsettings[tag][name] = value
        else:
            self._settings[name] = value
    def _raw_set_with_signal(self, signal):
        # Signal handler that commits a sanitized setopt value.
        self._raw_set(signal.setting, signal.value, signal.path, signal.tags)
class LocalSettings(object):  # pylint: disable=too-few-public-methods
    """A per-path proxy around a Settings object.

    Attribute reads and writes whose names do not start with an
    underscore are forwarded to the parent Settings object together with
    ``path``, so they behave as directory-local settings.
    """
    def __init__(self, path, parent):
        # Go through __dict__ to bootstrap the two private slots without
        # triggering our own __setattr__.
        self.__dict__['_parent'] = parent
        self.__dict__['_path'] = path
    def __setattr__(self, name, value):
        if not name.startswith('_'):
            self._parent.set(name, value, self._path)
            return
        self.__dict__[name] = value
    def __getattr__(self, name):
        if name.startswith('_'):
            return self.__dict__[name]
        if name.startswith('signal_'):
            # Signal handling is delegated to the parent unchanged.
            return getattr(self._parent, name)
        return self._parent.get(name, self._path)
    def __iter__(self):
        # Iterate the parent's known setting names.
        return iter(self._parent._settings)  # pylint: disable=protected-access
    __getitem__ = __getattr__
    __setitem__ = __setattr__
| 11,873 | Python | .py | 304 | 29.759868 | 98 | 0.57159 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
690 | tags.py | ranger_ranger/ranger/container/tags.py | # This file is part of ranger, the console file manager.
# License: GNU GPL version 3, see the file "AUTHORS" for details.
# TODO: add a __getitem__ method to get the tag of a file
from __future__ import (absolute_import, division, print_function)
import string
from io import open
from os.path import exists, abspath, realpath, expanduser, sep
from ranger.core.shared import FileManagerAware
ALLOWED_KEYS = string.ascii_letters + string.digits + string.punctuation
class Tags(FileManagerAware):
    """Persistent file tags, kept in a plain-text tags file.

    Each entry maps an absolute path to a one-character tag.  Every
    mutating operation re-reads the file first (sync) and writes it back
    afterwards (dump), so concurrent ranger instances stay consistent.
    """
    default_tag = '*'
    def __init__(self, filename):
        # COMPAT: The intent is to get abspath/normpath's behavior of
        # collapsing `symlink/..`, abspath is retained for historical reasons
        # because the documentation states its behavior isn't necessarily in
        # line with normpath's.
        self._filename = realpath(abspath(expanduser(filename)))
        self.sync()
    def __contains__(self, item):
        return item in self.tags
    def add(self, *items, **others):
        """Tag every path in *items* with others['tag'] (default '*')."""
        if len(items) == 0:
            return
        tag = others.get('tag', self.default_tag)
        self.sync()
        for item in items:
            self.tags[item] = tag
        self.dump()
    def remove(self, *items):
        """Remove the tag of every path in *items*, if present."""
        if len(items) == 0:
            return
        self.sync()
        for item in items:
            try:
                del self.tags[item]
            except KeyError:
                pass
        self.dump()
    def toggle(self, *items, **others):
        """Toggle the tag others['tag'] (default '*') on each item."""
        if len(items) == 0:
            return
        tag = others.get('tag', self.default_tag)
        tag = str(tag)
        if tag not in ALLOWED_KEYS:
            return
        self.sync()
        for item in items:
            try:
                # Untag when the item already carries this tag (or the
                # default tag); otherwise (re)tag it.
                if item in self and tag in (self.tags[item], self.default_tag):
                    del self.tags[item]
                else:
                    self.tags[item] = tag
            except KeyError:
                pass
        self.dump()
    def marker(self, item):
        """Return the tag character to display for *item*."""
        if item in self.tags:
            return self.tags[item]
        return self.default_tag
    def sync(self):
        """Re-read self.tags from the tags file."""
        try:
            with open(
                self._filename, "r", encoding="utf-8", errors="replace"
            ) as fobj:
                self.tags = self._parse(fobj)
        except (OSError, IOError) as err:
            if exists(self._filename):
                self.fm.notify(err, bad=True)
            else:
                # A missing file simply means "no tags yet".
                self.tags = {}
    def dump(self):
        """Write self.tags back to the tags file."""
        try:
            with open(self._filename, 'w', encoding="utf-8") as fobj:
                self._compile(fobj)
        except OSError as err:
            self.fm.notify(err, bad=True)
    def _compile(self, fobj):
        # Serialize: "path" for default-tagged entries, "t:path" for any
        # other allowed tag character t.
        for path, tag in self.tags.items():
            if tag == self.default_tag:
                # COMPAT: keep the old format if the default tag is used
                fobj.write(path + '\n')
            elif tag in ALLOWED_KEYS:
                fobj.write('{0}:{1}\n'.format(tag, path))
    def _parse(self, fobj):
        # Inverse of _compile: returns a dict mapping path -> tag.
        result = {}
        for line in fobj:
            line = line.rstrip('\n')
            if len(line) > 2 and line[1] == ':':
                tag, path = line[0], line[2:]
                if tag in ALLOWED_KEYS:
                    result[path] = tag
            else:
                result[line] = self.default_tag
        return result
    def update_path(self, path_old, path_new):
        """Rewrite tags of path_old (and of paths below it) to path_new."""
        self.sync()
        changed = False
        # Fix: iterate over a snapshot — the loop body deletes and
        # inserts keys, which is not safe while iterating the dict's
        # live items() view under Python 3.
        for path, tag in list(self.tags.items()):
            pnew = None
            if path == path_old:
                pnew = path_new
            elif path.startswith(path_old + sep):
                pnew = path_new + path[len(path_old):]
            if pnew:
                # pylint: disable=unnecessary-dict-index-lookup
                del self.tags[path]
                self.tags[pnew] = tag
                changed = True
        if changed:
            self.dump()
    def __nonzero__(self):
        # Tags containers are always truthy, even when empty.
        return True
    __bool__ = __nonzero__
class TagsDummy(Tags):
    """A dummy Tags class for use with `ranger --clean`.
    It acts like there are no tags and avoids writing any changes.
    """
    def __init__(self, filename):  # pylint: disable=super-init-not-called
        # No file access in clean mode; keep only an empty in-memory table.
        self.tags = {}
    def __contains__(self, item):
        # Nothing is ever considered tagged.
        return False
    def add(self, *items, **others):
        # Intentionally a no-op in clean mode.
        pass
    def remove(self, *items):
        # Intentionally a no-op in clean mode.
        pass
    def toggle(self, *items, **others):
        # Intentionally a no-op in clean mode.
        pass
    def marker(self, item):
        # Everything displays as the default tag marker.
        return self.default_tag
    def sync(self):
        # No backing file to read from.
        pass
    def dump(self):
        # No backing file to write to.
        pass
    def _compile(self, fobj):
        pass
    def _parse(self, fobj):
        pass
| 4,718 | Python | .py | 137 | 24.306569 | 79 | 0.543555 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
691 | history.py | ranger_ranger/ranger/container/history.py | # This file is part of ranger, the console file manager.
# License: GNU GPL version 3, see the file "AUTHORS" for details.
# TODO: rewrite to use deque instead of list
from __future__ import (absolute_import, division, print_function)
class HistoryEmptyException(Exception):
    """Raised when an item is requested from an empty History."""
    pass


class History(object):
    """A bounded navigation history with a movable cursor.

    ``history`` holds the items and ``index`` points at the current one.
    ``back``/``forward``/``move`` shift the cursor; ``add`` appends a new
    item, discards "future" entries behind the cursor and fast-forwards
    to the end.  With ``unique=True``, re-adding an existing item moves
    it to the end instead of duplicating it.
    """

    def __init__(self, maxlen=None, unique=True):
        """Create a History holding at most ``maxlen`` items.

        For copying, ``maxlen`` may also be another History instance.
        """
        assert maxlen is not None, "maxlen cannot be None"
        if isinstance(maxlen, History):
            # Copy constructor: duplicate the other history's state.
            self.history = list(maxlen.history)
            self.index = maxlen.index
            self.maxlen = maxlen.maxlen
            self.unique = maxlen.unique
        else:
            self.history = []
            self.index = 0
            self.maxlen = maxlen
            self.unique = unique

    def add(self, item):
        """Append ``item``, dropping the future, and fast-forward."""
        # Remove everything after index.  (Bug fix: this previously read
        # ``del self.history[:self.index + 1]``, which deleted the PAST
        # including the current item, contradicting the intent of
        # discarding the future when adding from mid-history.)
        if self.index < len(self.history) - 2:
            del self.history[self.index + 1:]
        # Remove duplicates (or a directly repeated last item).
        if self.unique:
            try:
                self.history.remove(item)
            except ValueError:
                pass
        else:
            if self.history and self.history[-1] == item:
                del self.history[-1]
        # Remove the first item if the list would exceed maxlen.
        if len(self.history) > max(self.maxlen - 1, 0):
            del self.history[0]
        # Append the item and fast forward.
        self.history.append(item)
        self.index = len(self.history) - 1

    def modify(self, item, unique=False):
        """Replace the current item with ``item``.

        With ``unique=True``, an existing equal entry is removed first
        and the cursor steps back to compensate.
        """
        if self.history and unique:
            try:
                self.history.remove(item)
            except ValueError:
                pass
            else:
                self.index -= 1
        try:
            self.history[self.index] = item
        except IndexError:
            self.add(item)

    def rebase(self, other_history):
        """
        Replace the past of this history by that of another.
        This is used when creating a new tab to seamlessly blend in the history
        of the old tab into the new one.
        Example: if self is [a,b,C], the current item is uppercase, and
        other_history is [x,Y,z], then self.merge(other_history) will result in
        [x, y, C].
        """
        assert isinstance(other_history, History)
        if not self.history:
            self.index = 0
            future_length = 0
        else:
            future_length = len(self.history) - self.index - 1
        # Splice the other history's past (up to and including its
        # current item) in place of our own past.
        self.history[:self.index] = list(
            other_history.history[:other_history.index + 1])
        if len(self.history) > self.maxlen:
            self.history = self.history[
                -self.maxlen:]  # pylint: disable=invalid-unary-operand-type
        self.index = len(self.history) - future_length - 1
        assert self.index < len(self.history)

    def __len__(self):
        return len(self.history)

    def current(self):
        """Return the current item; raises HistoryEmptyException if empty."""
        if self.history:
            return self.history[self.index]
        else:
            raise HistoryEmptyException

    def top(self):
        """Return the newest item; raises HistoryEmptyException if empty."""
        try:
            return self.history[-1]
        except IndexError:
            raise HistoryEmptyException

    def bottom(self):
        """Return the oldest item; raises HistoryEmptyException if empty."""
        try:
            return self.history[0]
        except IndexError:
            raise HistoryEmptyException

    def back(self):
        """Move the cursor one step into the past and return that item."""
        self.index = max(0, self.index - 1)
        return self.current()

    def move(self, n):
        """Move the cursor ``n`` steps (clamped) and return the item."""
        self.index = max(0, min(len(self.history) - 1, self.index + n))
        return self.current()

    def search(self, string, n):
        """Move ``n`` matches forward/backward to items starting with
        ``string`` and return the current item."""
        if n != 0 and string:
            step = 1 if n > 0 else -1
            i = self.index
            steps_left = steps_left_at_start = int(abs(n))
            while steps_left:
                i += step
                if i >= len(self.history) or i < 0:
                    break
                if self.history[i].startswith(string):
                    steps_left -= 1
            if steps_left != steps_left_at_start:
                self.index = i
        return self.current()

    def __iter__(self):
        return self.history.__iter__()

    def forward(self):
        """Move the cursor one step into the future and return that item."""
        if self.history:
            self.index += 1
            if self.index > len(self.history) - 1:
                self.index = len(self.history) - 1
        else:
            self.index = 0
        return self.current()

    def fast_forward(self):
        """Jump the cursor to the newest item."""
        if self.history:
            self.index = len(self.history) - 1
        else:
            self.index = 0
692 | file.py | ranger_ranger/ranger/container/file.py | # This file is part of ranger, the console file manager.
# License: GNU GPL version 3, see the file "AUTHORS" for details.
from __future__ import (absolute_import, division, print_function)
import re
from ranger import PY3
from ranger.container.fsobject import FileSystemObject
N_FIRST_BYTES = 256
CONTROL_CHARACTERS = set(list(range(0, 9)) + list(range(14, 32)))
if not PY3:
CONTROL_CHARACTERS = set(chr(n) for n in CONTROL_CHARACTERS)
# Don't even try to preview files which match this regular expression:
PREVIEW_BLACKLIST = re.compile(r"""
# look at the extension:
\.(
# one character extensions:
[oa]
# media formats:
| avi | mpe?g | mp\d | og[gmv] | wm[av] | mkv | flv
| vob | wav | mpc | flac | divx? | xcf | pdf
# binary files:
| torrent | class | so | img | py[co] | dmg
)
# ignore filetype-independent suffixes:
(\.part|\.bak|~)?
# ignore fully numerical file extensions:
(\.\d+)*?
$
""", re.VERBOSE | re.IGNORECASE) # pylint: disable=no-member
# Preview these files (almost) always:
PREVIEW_WHITELIST = re.compile(r"""
\.(
txt | py | c
)
# ignore filetype-independent suffixes:
(\.part|\.bak|~)?
$
""", re.VERBOSE | re.IGNORECASE) # pylint: disable=no-member
class File(FileSystemObject):
    """A FileSystemObject representing a regular file."""
    is_file = True
    # Preview state, filled in lazily by the preview machinery.
    preview_data = None
    preview_known = False
    preview_loading = False

    _firstbytes = None

    @property
    def firstbytes(self):
        """The set of the file's first N_FIRST_BYTES bytes, cached.

        Returns None when the file cannot be read.
        """
        if self._firstbytes is not None:
            return self._firstbytes
        try:
            with open(self.path, 'rb') as fobj:
                self._firstbytes = set(fobj.read(N_FIRST_BYTES))
        # IOError for Python2, OSError for Python3
        except (IOError, OSError):
            return None
        return self._firstbytes

    def is_binary(self):
        """Guess binariness from control characters in the file's head."""
        if self.firstbytes and CONTROL_CHARACTERS & self.firstbytes:
            return True
        return False

    def has_preview(self):  # pylint: disable=too-many-return-statements
        """Decide whether ranger should attempt to preview this file."""
        if not self.fm.settings.preview_files:
            return False
        if self.is_socket or self.is_fifo or self.is_device:
            return False
        if not self.accessible:
            return False
        if self.fm.settings.preview_max_size and \
                self.size > self.fm.settings.preview_max_size:
            return False
        # With a preview script in use, delegate the decision to it.
        if self.fm.settings.preview_script and \
                self.fm.settings.use_preview_script:
            return True
        if self.container:
            return False
        if PREVIEW_WHITELIST.search(self.basename):
            return True
        if PREVIEW_BLACKLIST.search(self.basename):
            return False
        # Never preview pseudo-files that would dump kernel memory.
        if self.path in ('/dev/core', '/proc/kcore'):
            return False
        if self.is_binary():
            return False
        return True

    def get_preview_source(self, width, height):
        return self.fm.get_preview(self, width, height)

    def is_image_preview(self):
        try:
            return self.fm.previews[self.realpath]['imagepreview']
        except KeyError:
            return False

    def __eq__(self, other):
        return isinstance(other, File) and self.path == other.path

    def __ne__(self, other):
        # Fix: this was defined as ``__neq__``, a name Python never
        # invokes; without a real ``__ne__``, ``!=`` did not mirror
        # ``__eq__`` under Python 2.
        return not self.__eq__(other)

    # Backwards-compatible alias for the old, misspelled name.
    __neq__ = __ne__

    def __hash__(self):
        return hash(self.path)
693 | fsobject.py | ranger_ranger/ranger/container/fsobject.py | # This file is part of ranger, the console file manager.
# License: GNU GPL version 3, see the file "AUTHORS" for details.
from __future__ import (absolute_import, division, print_function)
import re
from grp import getgrgid
from os import lstat, stat
from os.path import abspath, basename, dirname, realpath, relpath, splitext
from pwd import getpwuid
from time import time
from ranger.core.linemode import (
DEFAULT_LINEMODE, DefaultLinemode, TitleLinemode,
PermissionsLinemode, FileInfoLinemode, MtimeLinemode, SizeMtimeLinemode,
HumanReadableMtimeLinemode, SizeHumanReadableMtimeLinemode
)
from ranger.core.shared import FileManagerAware, SettingsAware
from ranger.ext.shell_escape import shell_escape
from ranger.ext import spawn
from ranger.ext.lazy_property import lazy_property
from ranger.ext.human_readable import human_readable
# Python 2 compatibility
try:
maketrans = str.maketrans # pylint: disable=invalid-name,no-member
except AttributeError:
from string import maketrans # pylint: disable=no-name-in-module
CONTAINER_EXTENSIONS = ('7z', 'ace', 'ar', 'arc', 'bz', 'bz2', 'cab', 'cpio',
'cpt', 'deb', 'dgc', 'dmg', 'gz', 'iso', 'jar', 'msi',
'pkg', 'rar', 'shar', 'tar', 'tbz', 'tgz', 'txz',
'xar', 'xpi', 'xz', 'zip')
DOCUMENT_EXTENSIONS = ('cbr', 'cbz', 'cfg', 'css', 'cvs', 'djvu', 'doc',
'docx', 'gnm', 'gnumeric', 'htm', 'html', 'md', 'odf',
'odg', 'odp', 'ods', 'odt', 'pdf', 'pod', 'ps', 'rtf',
'sxc', 'txt', 'xls', 'xlw', 'xml', 'xslx')
DOCUMENT_BASENAMES = ('bugs', 'bugs', 'changelog', 'copying', 'credits',
'hacking', 'help', 'install', 'license', 'readme', 'todo')
BAD_INFO = '?'
_UNSAFE_CHARS = '\n' + ''.join(map(chr, range(32))) + ''.join(map(chr, range(128, 256)))
_SAFE_STRING_TABLE = maketrans(_UNSAFE_CHARS, '?' * len(_UNSAFE_CHARS))
_EXTRACT_NUMBER_RE = re.compile(r'(\d+|\D)')
def safe_path(path):
    """Return *path* with unsafe characters replaced by '?'.
    Applies the module-level _SAFE_STRING_TABLE, which maps newline,
    control characters and bytes >= 128 to '?' — presumably so the
    result can be rendered without corrupting the terminal display.
    """
    return path.translate(_SAFE_STRING_TABLE)
class FileSystemObject( # pylint: disable=too-many-instance-attributes,too-many-public-methods
        FileManagerAware, SettingsAware):
    """Base class for everything ranger displays in the file listing.

    Caches the result of (l)stat() calls, derives display flags from the
    mimetype, and exposes many lazily-computed representations of the
    path (extension, natural-sort key, owner/group names, ...).
    Subclasses (e.g. Directory) extend the loading logic.
    """
    # --- path-derived strings, filled in by __init__/load() ---
    basename = None
    relative_path = None
    infostring = None
    path = None
    permissions = None
    stat = None
    # --- state flags ---
    content_loaded = False
    force_load = False
    # --- file-type flags, set by load() from the stat mode ---
    is_device = False
    is_directory = False
    is_file = False
    is_fifo = False
    is_link = False
    is_socket = False
    accessible = False
    exists = False # "exists" currently means "link_target_exists"
    loaded = False
    marked = False
    runnable = False
    stopped = False
    tagged = False
    # --- mimetype-derived flags, set by set_mimetype() ---
    audio = False
    container = False
    document = False
    image = False
    media = False
    video = False
    size = 0
    last_load_time = -1
    vcsstatus = None
    vcsremotestatus = None
    # Maps linemode names to linemode instances; shared by all objects.
    linemode_dict = dict(
        (linemode.name, linemode()) for linemode in
        [DefaultLinemode, TitleLinemode, PermissionsLinemode, FileInfoLinemode,
         MtimeLinemode, SizeMtimeLinemode, HumanReadableMtimeLinemode,
         SizeHumanReadableMtimeLinemode]
    )
    def __init__(self, path, preload=None, path_is_abs=False, basename_is_rel_to=None):
        # preload, if given, is a (stat, lstat) pair consumed by load().
        if not path_is_abs:
            path = abspath(path)
        self.path = path
        self.basename = basename(path)
        if basename_is_rel_to is None:
            self.relative_path = self.basename
        else:
            self.relative_path = relpath(path, basename_is_rel_to)
        self.preload = preload
        self.display_data = {}
    def __repr__(self):
        return "<{0} {1}>".format(self.__class__.__name__, self.path)
    @lazy_property
    def extension(self):
        """The lowercased extension of the basename, or None if it has none."""
        try:
            lastdot = self.basename.rindex('.') + 1
            return self.basename[lastdot:].lower()
        except ValueError:
            return None
    @lazy_property
    def relative_path_lower(self):
        return self.relative_path.lower()
    @lazy_property
    def linemode(self): # pylint: disable=method-hidden
        # Set the line mode from fm.default_linemodes
        for method, argument, linemode in self.fm.default_linemodes:
            if linemode in self.linemode_dict:
                if method == "always":
                    return linemode
                if method == "path" and argument.search(self.path):
                    return linemode
                if method == "tag" and self.realpath in self.fm.tags and \
                        self.fm.tags.marker(self.realpath) in argument:
                    return linemode
        return DEFAULT_LINEMODE
    @lazy_property
    def dirname(self):
        return dirname(self.path)
    @lazy_property
    def shell_escaped_basename(self):
        return shell_escape(self.basename)
    @lazy_property
    def filetype(self):
        """Mimetype as reported by the external file(1) utility ('' on error)."""
        try:
            return spawn.check_output(["file", '-Lb', '--mime-type', self.path])
        except OSError:
            return ""
    @lazy_property
    def basename_natural(self):
        # Natural sort key: digit runs become (str, int) pairs so that
        # "file2" sorts before "file10".
        basename_list = []
        for string in _EXTRACT_NUMBER_RE.split(self.relative_path):
            try:
                basename_list += [('0', int(string))]
            except ValueError:
                basename_list += [(string, 0)]
        return basename_list
    @lazy_property
    def basename_natural_lower(self):
        # Case-insensitive variant of basename_natural.
        basename_list = []
        for string in _EXTRACT_NUMBER_RE.split(self.relative_path_lower):
            try:
                basename_list += [('0', int(string))]
            except ValueError:
                basename_list += [(string, 0)]
        return basename_list
    @lazy_property
    def basename_without_extension(self):
        return splitext(self.basename)[0]
    @lazy_property
    def safe_basename(self):
        # Basename with unprintable characters replaced by '?'.
        return self.basename.translate(_SAFE_STRING_TABLE)
    @lazy_property
    def user(self):
        """Owner name, falling back to the numeric uid if unresolvable."""
        try:
            return getpwuid(self.stat.st_uid)[0]
        except KeyError:
            return str(self.stat.st_uid)
    @lazy_property
    def group(self):
        """Group name, falling back to the numeric gid if unresolvable."""
        try:
            return getgrgid(self.stat.st_gid)[0]
        except KeyError:
            return str(self.stat.st_gid)
    # Make each mimetype flag a lazy property that triggers set_mimetype()
    # on first access and then returns the freshly assigned instance value.
    for attr in ('video', 'audio', 'image', 'media', 'document', 'container'):
        exec( # pylint: disable=exec-used
            "%s = lazy_property(lambda self: self.set_mimetype() or self.%s)" % (attr, attr))
    def __str__(self):
        """returns a string containing the absolute path"""
        return str(self.path)
    def use(self):
        """Used in garbage-collecting. Override in Directory"""
    def look_up_cumulative_size(self):
        pass # normal files have no cumulative size
    def set_mimetype(self):
        """assign attributes such as self.video according to the mimetype"""
        bname = self.basename
        # Guess the type of partial downloads from the real extension.
        if self.extension == 'part':
            bname = bname[0:-5]
        # pylint: disable=attribute-defined-outside-init
        self._mimetype = self.fm.mimetypes.guess_type(bname, False)[0]
        if self._mimetype is None:
            self._mimetype = ''
        # pylint: enable=attribute-defined-outside-init
        self.video = self._mimetype.startswith('video')
        self.image = self._mimetype.startswith('image')
        self.audio = self._mimetype.startswith('audio')
        self.media = self.video or self.image or self.audio
        self.document = self._mimetype.startswith('text') \
            or self.extension in DOCUMENT_EXTENSIONS \
            or self.basename.lower() in DOCUMENT_BASENAMES
        self.container = self.extension in CONTAINER_EXTENSIONS
        # pylint: disable=attribute-defined-outside-init
        keys = ('video', 'audio', 'image', 'media', 'document', 'container')
        self._mimetype_tuple = tuple(key for key in keys if getattr(self, key))
        if self._mimetype == '':
            self._mimetype = None
        # pylint: enable=attribute-defined-outside-init
    @property
    def mimetype(self):
        try:
            return self._mimetype
        except AttributeError:
            self.set_mimetype()
            return self._mimetype
    @property
    def mimetype_tuple(self):
        try:
            return self._mimetype_tuple
        except AttributeError:
            self.set_mimetype()
            return self._mimetype_tuple
    def mark(self, _):
        # NOTE(review): Directory.mark_item takes (item, val), but this call
        # passes only the item and the argument is ignored -- verify whether
        # this should be directory.mark_item(self, _).
        directory = self.fm.get_directory(self.dirname)
        directory.mark_item(self)
    def mark_set(self, boolean):
        """Called by directory.mark_item() and similar functions"""
        self.marked = bool(boolean)
    @lazy_property
    def realpath(self):
        if self.is_link:
            try:
                return realpath(self.path)
            except OSError:
                return None # it is impossible to get the link destination
        return self.path
    def load(self): # pylint: disable=too-many-statements
        """Load and cache stat information about this filesystem object.

        Reads useful information about the filesystem-object from the
        filesystem and caches it for later use.
        """
        self.loaded = True
        if self.settings.freeze_files:
            return
        self.display_data = {}
        self.fm.update_preview(self.path)
        # Get the stat object, either from preload or from [l]stat
        self.permissions = None
        new_stat = None
        path = self.path
        self.is_link = False
        if self.preload:
            # preload is (stat, lstat); test the lstat mode for a symlink
            # (S_IFLNK == 0o120000) and dereference via the stat entry.
            new_stat = self.preload[1]
            self.is_link = new_stat.st_mode & 0o170000 == 0o120000
            if self.is_link:
                new_stat = self.preload[0]
            self.preload = None
            self.exists = bool(new_stat)
        else:
            try:
                new_stat = lstat(path)
                self.is_link = new_stat.st_mode & 0o170000 == 0o120000
                if self.is_link:
                    new_stat = stat(path)
                self.exists = True
            except OSError:
                self.exists = False
        # Set some attributes
        self.accessible = bool(new_stat)
        mode = new_stat.st_mode if new_stat else 0
        fmt = mode & 0o170000
        if fmt in (0o020000, 0o060000): # stat.S_IFCHR/BLK
            self.is_device = True
            self.size = 0
            self.infostring = 'dev'
        elif fmt == 0o010000: # stat.S_IFIFO
            self.is_fifo = True
            self.size = 0
            self.infostring = 'fifo'
        elif fmt == 0o140000: # stat.S_IFSOCK
            self.is_socket = True
            self.size = 0
            self.infostring = 'sock'
        elif self.is_file:
            if new_stat:
                self.size = new_stat.st_size
                self.infostring = ' ' + human_readable(self.size)
            else:
                self.size = 0
                self.infostring = '?'
        if self.is_link and not self.is_directory:
            self.infostring = '->' + self.infostring
        self.stat = new_stat
        self.last_load_time = time()
    def get_permission_string(self):
        """Return the ls-style permission string, e.g. 'drwxr-xr-x' (cached)."""
        if self.permissions is not None:
            return self.permissions
        if self.is_link:
            perms = ['l']
        elif self.is_directory:
            perms = ['d']
        else:
            perms = ['-']
        mode = self.stat.st_mode
        test = 0o0400
        while test: # will run 3 times because 0o400 >> 9 = 0
            for what in "rwx":
                if mode & test:
                    perms.append(what)
                else:
                    perms.append('-')
                test >>= 1
        self.permissions = ''.join(perms)
        return self.permissions
    def load_if_outdated(self):
        """Calls load() if the currently cached information is outdated"""
        if not self.loaded:
            self.load()
            return True
        try:
            real_ctime = stat(self.path).st_ctime
        except OSError:
            real_ctime = None
        if not self.stat or self.stat.st_ctime != real_ctime:
            self.load()
            return True
        return False
    def set_linemode(self, mode):
        # Assigning the instance attribute shadows the lazy_property above.
        self.linemode = mode
| 12,251 | Python | .py | 318 | 29.113208 | 95 | 0.592031 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
694 | directory.py | ranger_ranger/ranger/container/directory.py | # This file is part of ranger, the console file manager.
# License: GNU GPL version 3, see the file "AUTHORS" for details.
from __future__ import (absolute_import, division, print_function)
import locale
import os.path
from os import stat as os_stat, lstat as os_lstat
import random
import re
from collections import deque
from time import time
from ranger.container.fsobject import BAD_INFO, FileSystemObject
from ranger.core import filter_stack
from ranger.core.filter_stack import InodeFilterConstants, accept_file
from ranger.core.loader import Loadable
from ranger.ext.mount_path import mount_path
from ranger.container.file import File
from ranger.ext.accumulator import Accumulator
from ranger.ext.lazy_property import lazy_property
from ranger.ext.human_readable import human_readable
from ranger.container.settings import LocalSettings
from ranger.ext.vcs import Vcs
def sort_by_basename(path):
    """Sort key: the entry's relative path (case-sensitive)."""
    return path.relative_path
def sort_by_basename_icase(path):
    """Sort key: the entry's lowercased relative path."""
    return path.relative_path_lower
def sort_by_directory(path):
    """Sort key: 0 for directories, 1 for everything else."""
    return 0 if path.is_directory else 1
def sort_naturally(path):
    """Sort key: natural-sort list (digit runs compared numerically)."""
    return path.basename_natural
def sort_naturally_icase(path):
    """Sort key: case-insensitive natural-sort list."""
    return path.basename_natural_lower
def sort_unicode_wrapper_string(old_sort_func):
    """Wrap a string-returning key function to compare per the locale."""
    def locale_aware_key(path):
        return locale.strxfrm(old_sort_func(path))
    return locale_aware_key
def sort_unicode_wrapper_list(old_sort_func):
    """Wrap a list-returning key function so each element compares per the locale."""
    def locale_aware_key(path):
        return [locale.strxfrm(str(component)) for component in old_sort_func(path)]
    return locale_aware_key
def walklevel(some_dir, level):
    """Like os.walk() but limited to *level* levels below *some_dir*.

    A level of -1 means "unlimited"; symlinks are followed only when the
    level is positive.
    """
    some_dir = some_dir.rstrip(os.path.sep)
    followlinks = level > 0
    assert os.path.isdir(some_dir)
    base_depth = some_dir.count(os.path.sep)
    for root, dirs, files in os.walk(some_dir, followlinks=followlinks):
        yield root, dirs, files
        # Prune the walk in place once the depth limit has been reached.
        if level != -1 and base_depth + level <= root.count(os.path.sep):
            del dirs[:]
def mtimelevel(path, level):
    """Return the newest st_mtime of *path* and its subdirectories,
    descending at most *level* levels (-1 means unlimited)."""
    mtime = os.stat(path).st_mtime
    for dirpath, dirnames, _ in walklevel(path, level):
        dirlist = [os.path.join("/", dirpath, d) for d in dirnames
                   if level == -1 or dirpath.count(os.path.sep) - path.count(os.path.sep) <= level]
        # Fold each subdirectory's mtime into the running maximum.
        mtime = max([mtime] + [os.stat(d).st_mtime for d in dirlist])
    return mtime
class Directory( # pylint: disable=too-many-instance-attributes,too-many-public-methods
        FileSystemObject, Accumulator, Loadable):
    """A FileSystemObject representing a directory and its contents.

    Loads its entries incrementally through the generator returned by
    load_bit_by_bit() (scheduled via the Loadable interface), keeps both
    the unfiltered (files_all) and filtered (files) entry lists, manages
    marking, sorting and the filter stack, and tracks a cursor position
    through the Accumulator base class.
    """
    is_directory = True
    enterable = False
    load_generator = None
    cycle_list = None
    loading = False
    progressbar_supported = True
    # flat > 0 flattens that many levels of subdirectories into this
    # listing; -1 flattens everything; 0 disables flattening.
    flat = 0
    filenames = None
    files = None
    files_all = None
    temporary_filter = None
    narrow_filter = None
    inode_type_filter = None
    marked_items = None
    scroll_begin = 0
    mount_path = '/'
    disk_usage = 0
    last_update_time = -1
    load_content_mtime = -1
    order_outdated = False
    content_outdated = False
    content_loaded = False
    has_vcschild = False
    _vcs_signal_handler_installed = False
    cumulative_size_calculated = False
    # Maps the 'sort' setting to a key function for sorting entries.
    sort_dict = {
        'basename': sort_by_basename,
        'natural': sort_naturally,
        'size': lambda path: -(path.size or 1),
        'mtime': lambda path: -(path.stat and path.stat.st_mtime or 1),
        'ctime': lambda path: -(path.stat and path.stat.st_ctime or 1),
        'atime': lambda path: -(path.stat and path.stat.st_atime or 1),
        'random': lambda path: random.random(),
        'type': lambda path: path.mimetype or '',
        'extension': lambda path: path.extension or '',
    }
    def __init__(self, path, **kw):
        assert not os.path.isfile(path), "No directory given!"
        Loadable.__init__(self, None, None)
        Accumulator.__init__(self)
        FileSystemObject.__init__(self, path, **kw)
        self.marked_items = []
        self.filter_stack = []
        # Keep references so the weakly-bound signal handlers stay alive.
        self._signal_functions = []
        func = self.signal_function_factory(self.sort)
        self._signal_functions += [func]
        for opt in ('sort_directories_first', 'sort', 'sort_reverse', 'sort_case_insensitive'):
            self.settings.signal_bind('setopt.' + opt, func, weak=True, autosort=False)
        func = self.signal_function_factory(self.refilter)
        self._signal_functions += [func]
        for opt in ('hidden_filter', 'show_hidden'):
            self.settings.signal_bind('setopt.' + opt, func, weak=True, autosort=False)
        self.settings = LocalSettings(path, self.settings)
        self.use()
    @property
    def filter(self):
        """A compatibility layer between the legacy filters and the filter stack."""
        if not self.filter_stack:
            return None
        topmost_filter = self.filter_stack[-1]
        if isinstance(topmost_filter, filter_stack.NameFilter):
            return topmost_filter.regex
        return None
    @filter.setter
    def filter(self, new_filter):
        if not self.filter_stack:
            # No filters applied at all, a trivial case.
            if new_filter:
                self.filter_stack.append(filter_stack.NameFilter(new_filter))
            return
        topmost_filter = self.filter_stack[-1]
        if isinstance(topmost_filter, filter_stack.NameFilter):
            # The topmost filter is a simple name filter. Let's
            # replace it, or possibly remove it if the new one
            # is empty.
            if new_filter:
                # Consider the possibility of it being a filter
                # derived from NameFilter. Let's use the actual class
                # it belonged to.
                topmost_filter_class = type(topmost_filter)
                self.filter_stack[-1] = topmost_filter_class(new_filter)
            else:
                self.filter_stack.pop()
        else:
            # Append a new filter as the existing ones are non-trivial.
            if new_filter:
                self.filter_stack.append(filter_stack.NameFilter(new_filter))
    @lazy_property
    def vcs(self):
        """Lazily created Vcs handle, or None when vcs_aware is disabled."""
        if not self._vcs_signal_handler_installed:
            self.settings.signal_bind(
                'setopt.vcs_aware', self.vcs__reset, # pylint: disable=no-member
                weak=True, autosort=False,
            )
            self._vcs_signal_handler_installed = True
        if self.settings.vcs_aware:
            return Vcs(self)
        return None
    def signal_function_factory(self, function):
        """Wrap *function* so signals only run it on an existing directory."""
        def signal_function():
            self.load_if_outdated()
            if not self.exists:
                return
            function()
        return signal_function
    def request_resort(self):
        # Mark the ordering as stale; sort_if_outdated() does the work later.
        self.order_outdated = True
    def request_reload(self):
        # Mark the content as stale; load_content_if_outdated() reloads later.
        self.content_outdated = True
    def get_list(self):
        return self.files
    def mark_item(self, item, val):
        """Mark (val=True) or unmark (val=False) a single entry."""
        item.mark_set(val)
        if val:
            if item in self.files and item not in self.marked_items:
                self.marked_items.append(item)
        else:
            # Remove every occurrence of the item from the marked list.
            while True:
                try:
                    self.marked_items.remove(item)
                except ValueError:
                    break
    def toggle_mark(self, item):
        self.mark_item(item, not item.marked)
    def toggle_all_marks(self):
        for item in self.files:
            self.toggle_mark(item)
    def mark_all(self, val):
        for item in self.files:
            self.mark_item(item, val)
        if not val:
            del self.marked_items[:]
            self._clear_marked_items()
    # XXX: Is it really necessary to have the marked items in a list?
    # Can't we just recalculate them with [f for f in self.files if f.marked]?
    def _gc_marked_items(self):
        # Drop marked items whose paths no longer exist in this listing.
        for item in list(self.marked_items):
            if item.path not in self.filenames:
                self.marked_items.remove(item)
    def _clear_marked_items(self):
        for item in self.marked_items:
            item.mark_set(False)
        del self.marked_items[:]
    def get_selection(self):
        """READ ONLY"""
        self._gc_marked_items()
        if not self.files:
            return []
        if self.marked_items:
            return [item for item in self.files if item.marked]
        elif self.pointed_obj:
            return [self.pointed_obj]
        return []
    def refilter(self):
        """Rebuild self.files from self.files_all using all active filters."""
        if self.files_all is None:
            return # probably not loaded yet
        self.last_update_time = time()
        filters = []
        if not self.settings.show_hidden and self.settings.hidden_filter:
            hidden_filter = re.compile(self.settings.hidden_filter)
            hidden_filter_search = hidden_filter.search
            def hidden_filter_func(fobj):
                for comp in fobj.relative_path.split(os.path.sep):
                    if hidden_filter_search(comp):
                        return False
                return True
            filters.append(hidden_filter_func)
        if self.narrow_filter:
            # pylint: disable=unsupported-membership-test
            # Pylint complains that self.narrow_filter is by default
            # None but the execution won't reach this line if it is
            # still None.
            filters.append(lambda fobj: fobj.basename in self.narrow_filter)
        if self.settings.global_inode_type_filter or self.inode_type_filter:
            def inode_filter_func(obj):
                # Use local inode_type_filter if present, global otherwise
                inode_filter = self.inode_type_filter or self.settings.global_inode_type_filter
                # Apply filter
                if InodeFilterConstants.DIRS in inode_filter and \
                        obj.is_directory:
                    return True
                elif InodeFilterConstants.FILES in inode_filter and \
                        obj.is_file and not obj.is_link:
                    return True
                elif InodeFilterConstants.LINKS in inode_filter and \
                        obj.is_link:
                    return True
                return False
            filters.append(inode_filter_func)
        if self.temporary_filter:
            temporary_filter_search = self.temporary_filter.search
            filters.append(lambda fobj: temporary_filter_search(fobj.basename))
        filters.extend(self.filter_stack)
        self.files = [f for f in self.files_all if accept_file(f, filters)]
        # A fix for corner cases when the user invokes show_hidden on a
        # directory that contains only hidden directories and hidden files.
        if self.files and not self.pointed_obj:
            self.pointed_obj = self.files[0]
        elif not self.files:
            self.content_loaded = False
            self.pointed_obj = None
        self.move_to_obj(self.pointed_obj)
    # XXX: Check for possible race conditions
    # pylint: disable=too-many-locals,too-many-branches,too-many-statements
    def load_bit_by_bit(self):
        """An iterator that loads a part on every next() call.

        Returns a generator which loads a part of the directory
        in each iteration.
        """
        self.loading = True
        self.percent = 0
        self.load_if_outdated()
        basename_is_rel_to = self.path if self.flat else None
        try: # pylint: disable=too-many-nested-blocks
            if self.runnable:
                yield
                mypath = self.path
                self.mount_path = mount_path(mypath)
                if self.flat:
                    # Flat view: collect entries from all (level-limited)
                    # subdirectories into one list.
                    filelist = []
                    for dirpath, dirnames, filenames in walklevel(mypath, self.flat):
                        dirlist = [
                            os.path.join("/", dirpath, d)
                            for d in dirnames
                            if self.flat == -1
                            or (dirpath.count(os.path.sep)
                                - mypath.count(os.path.sep)) <= self.flat
                        ]
                        filelist += dirlist
                        filelist += [os.path.join("/", dirpath, f) for f in filenames]
                    filenames = filelist
                    self.load_content_mtime = mtimelevel(mypath, self.flat)
                else:
                    filelist = os.listdir(mypath)
                    filenames = [mypath + (mypath == '/' and fname or '/' + fname)
                                 for fname in filelist]
                    self.load_content_mtime = os.stat(mypath).st_mtime
                if self.cumulative_size_calculated:
                    # If self.content_loaded is true, this is not the first
                    # time loading. So I can't really be sure if the
                    # size has changed and I'll add a "?".
                    if self.content_loaded:
                        if self.fm.settings.autoupdate_cumulative_size:
                            self.look_up_cumulative_size()
                        else:
                            self.infostring = ' %s' % human_readable(
                                self.size, separator='? ')
                    else:
                        self.infostring = ' %s' % human_readable(self.size)
                else:
                    self.size = len(filelist)
                    self.infostring = ' %d' % self.size
                if self.is_link:
                    self.infostring = '->' + self.infostring
                yield
                marked_paths = [obj.path for obj in self.marked_items]
                files = []
                disk_usage = 0
                has_vcschild = False
                for name in filenames:
                    # Stat each entry; for symlinks keep both the link's
                    # lstat and the target's stat.
                    try:
                        file_lstat = os_lstat(name)
                        if file_lstat.st_mode & 0o170000 == 0o120000:
                            file_stat = os_stat(name)
                        else:
                            file_stat = file_lstat
                    except OSError:
                        file_lstat = None
                        file_stat = None
                    if file_lstat and file_stat:
                        stats = (file_stat, file_lstat)
                        is_a_dir = file_stat.st_mode & 0o170000 == 0o040000
                    else:
                        stats = None
                        is_a_dir = False
                    if is_a_dir:
                        item = self.fm.get_directory(name, preload=stats, path_is_abs=True,
                                                     basename_is_rel_to=basename_is_rel_to)
                        item.load_if_outdated()
                        if self.flat:
                            item.relative_path = os.path.relpath(item.path, self.path)
                        else:
                            item.relative_path = item.basename
                        item.relative_path_lower = item.relative_path.lower()
                        if item.vcs and item.vcs.track:
                            if item.vcs.is_root_pointer:
                                has_vcschild = True
                            else:
                                item.vcsstatus = \
                                    item.vcs.rootvcs.status_subpath( # pylint: disable=no-member
                                        os.path.join(self.realpath, item.basename),
                                        is_directory=True,
                                    )
                    else:
                        item = File(name, preload=stats, path_is_abs=True,
                                    basename_is_rel_to=basename_is_rel_to)
                        item.load()
                        disk_usage += item.size
                        if self.vcs and self.vcs.track:
                            item.vcsstatus = \
                                self.vcs.rootvcs.status_subpath( # pylint: disable=no-member
                                    os.path.join(self.realpath, item.basename))
                    files.append(item)
                    self.percent = 100 * len(files) // len(filenames)
                    yield
                self.has_vcschild = has_vcschild
                self.disk_usage = disk_usage
                self.filenames = filenames
                self.files_all = files
                # Re-apply marks that survived the reload.
                self._clear_marked_items()
                for item in self.files_all:
                    if item.path in marked_paths:
                        item.mark_set(True)
                        self.marked_items.append(item)
                    else:
                        item.mark_set(False)
                self.sort()
                if files:
                    if self.pointed_obj is not None:
                        self.sync_index()
                    else:
                        self.move(to=0)
            else:
                self.filenames = None
                self.files_all = None
                self.files = None
                self.cycle_list = None
            self.content_loaded = True
            self.last_update_time = time()
            self.correct_pointer()
        finally:
            self.loading = False
            self.fm.signal_emit("finished_loading_dir", directory=self)
            if self.vcs:
                self.fm.ui.vcsthread.process(self)
    # pylint: enable=too-many-locals,too-many-branches,too-many-statements
    def unload(self):
        """Discard a partially-run loader generator."""
        self.loading = False
        self.load_generator = None
    def load_content(self, schedule=None):
        """Loads the contents of the directory.

        Use this sparingly since it takes rather long.
        """
        self.content_outdated = False
        if self.settings.freeze_files:
            return
        if not self.loading:
            if not self.loaded:
                self.load()
            if not self.accessible:
                self.content_loaded = True
                return
            if schedule is None:
                schedule = True # was: self.size > 30
            if self.load_generator is None:
                self.load_generator = self.load_bit_by_bit()
                if schedule and self.fm:
                    self.fm.loader.add(self)
                else:
                    # Run synchronously when no loader is available.
                    for _ in self.load_generator:
                        pass
                    self.load_generator = None
            elif not schedule or not self.fm:
                # A load is already in progress; finish it synchronously.
                for _ in self.load_generator:
                    pass
                self.load_generator = None
    def sort(self):
        """Sort the contained files"""
        # pylint: disable=comparison-with-callable
        if self.files_all is None:
            return
        try:
            sort_func = self.sort_dict[self.settings.sort]
        except KeyError:
            sort_func = sort_by_basename
        if self.settings.sort_case_insensitive and \
                sort_func == sort_by_basename:
            sort_func = sort_by_basename_icase
        if self.settings.sort_case_insensitive and \
                sort_func == sort_naturally:
            sort_func = sort_naturally_icase
        # XXX Does not work with usermade sorting functions :S
        if self.settings.sort_unicode:
            if sort_func in (sort_naturally, sort_naturally_icase):
                sort_func = sort_unicode_wrapper_list(sort_func)
            elif sort_func in (sort_by_basename, sort_by_basename_icase):
                sort_func = sort_unicode_wrapper_string(sort_func)
        self.files_all.sort(key=sort_func)
        if self.settings.sort_reverse:
            self.files_all.reverse()
        if self.settings.sort_directories_first:
            # Stable sort keeps the previous ordering within each group.
            self.files_all.sort(key=sort_by_directory)
        self.refilter()
    def _get_cumulative_size(self):
        # Walk the whole subtree and sum up the sizes of all files.
        if self.size == 0:
            return 0
        cum = 0
        realpath = os.path.realpath
        for dirpath, _, filenames in os.walk(self.path, onerror=lambda _: None):
            for fname in filenames:
                try:
                    if dirpath == self.path:
                        stat = os_stat(realpath(dirpath + "/" + fname))
                    else:
                        stat = os_stat(dirpath + "/" + fname)
                except OSError:
                    continue
                cum += stat.st_size
        return cum
    def look_up_cumulative_size(self):
        self.cumulative_size_calculated = True
        self.size = self._get_cumulative_size()
        self.infostring = ('-> ' if self.is_link else ' ') + human_readable(self.size)
    @lazy_property
    def size(self): # pylint: disable=method-hidden
        # For directories, "size" is the number of entries (or None when
        # automatic counting is disabled).
        try:
            if self.fm.settings.automatically_count_files:
                size = len(os.listdir(self.path))
            else:
                size = None
        except OSError:
            self.infostring = BAD_INFO
            self.accessible = False
            self.runnable = False
            return 0
        else:
            if size is None:
                self.infostring = ''
            else:
                self.infostring = ' %d' % size
            self.accessible = True
            self.runnable = True
            return size
    @lazy_property
    def infostring(self): # pylint: disable=method-hidden
        self.size # trigger the lazy property initializer pylint: disable=pointless-statement
        if self.is_link:
            return '->' + self.infostring
        return self.infostring
    @lazy_property
    def runnable(self): # pylint: disable=method-hidden
        self.size # trigger the lazy property initializer pylint: disable=pointless-statement
        return self.runnable
    def sort_if_outdated(self):
        """Sort the containing files if they are outdated"""
        if self.order_outdated:
            self.order_outdated = False
            self.sort()
            return True
        return False
    def move_to_obj(self, arg, attr=None):
        """Move the cursor to the entry given by *arg* (object or path)."""
        try:
            arg = arg.path
        except AttributeError:
            pass
        self.load_content_once(schedule=False)
        if self.empty():
            return
        Accumulator.move_to_obj(self, arg, attr='path')
    def search_fnc(self, fnc, offset=1, forward=True):
        """Move the cursor to the next entry for which fnc(entry) is true.

        Searches cyclically starting *offset* entries away from the
        pointer; returns True when a match was found.
        """
        length = len(self)
        if forward:
            generator = ((self.pointer + (x + offset)) % length
                         for x in range(length - 1))
        else:
            generator = ((self.pointer - (x + offset)) % length
                         for x in range(length - 1))
        for i in generator:
            _file = self.files[i]
            if fnc(_file):
                self.pointer = i
                self.pointed_obj = _file
                self.correct_pointer()
                return True
        return False
    def set_cycle_list(self, lst):
        self.cycle_list = deque(lst)
    def cycle(self, forward=True):
        """Rotate through the cycle list and move to its new head."""
        if self.cycle_list:
            if forward is True:
                self.cycle_list.rotate(-1)
            elif forward is False:
                self.cycle_list.rotate(1)
            self.move_to_obj(self.cycle_list[0])
    def correct_pointer(self):
        """Make sure the pointer is in the valid range"""
        Accumulator.correct_pointer(self)
        if self == self.fm.thisdir:
            try:
                self.fm.thisfile = self.pointed_obj
            except AttributeError:
                pass
    def load_content_once(self, *a, **k):
        """Load the contents of the directory if not done yet"""
        if not self.content_loaded:
            self.load_content(*a, **k)
            return True
        return False
    def load_content_if_outdated(self, *a, **k):
        """Load the contents of the directory if outdated"""
        if self.load_content_once(*a, **k):
            return True
        if self.files_all is None or self.content_outdated:
            self.load_content(*a, **k)
            return True
        try:
            if self.flat:
                real_mtime = mtimelevel(self.path, self.flat)
            else:
                real_mtime = os.stat(self.path).st_mtime
        except OSError:
            # NOTE(review): this assignment is dead -- the immediate
            # return discards real_mtime.
            real_mtime = None
            return False
        if self.stat:
            cached_mtime = self.load_content_mtime
        else:
            cached_mtime = 0
        if real_mtime != cached_mtime:
            self.load_content(*a, **k)
            return True
        return False
    def get_description(self):
        return "Loading " + str(self)
    def use(self):
        """mark the filesystem-object as used at the current time"""
        self.last_used = time()
    def is_older_than(self, seconds):
        """returns whether this object wasn't use()d in the last n seconds"""
        if seconds < 0:
            return True
        return self.last_used + seconds < time()
    def go(self, history=True): # pylint: disable=invalid-name
        """enter the directory if the filemanager is running"""
        if self.fm:
            return self.fm.enter_dir(self.path, history=history)
        return False
    def empty(self):
        """Is the directory empty?"""
        return not self.files
    def set_linemode_of_children(self, mode):
        for fobj in self.files:
            fobj.set_linemode(mode)
    def __nonzero__(self):
        """Always True"""
        return True
    __bool__ = __nonzero__
    def __len__(self):
        """The number of containing files"""
        assert self.accessible
        assert self.content_loaded
        assert self.files is not None
        return len(self.files)
    def __eq__(self, other):
        """Check for equality of the directories paths"""
        return isinstance(other, Directory) and self.path == other.path
    def __neq__(self, other):
        """Check for inequality of the directories paths"""
        return not self.__eq__(other)
    def __hash__(self):
        return hash(self.path)
| 26,135 | Python | .py | 621 | 29.276973 | 99 | 0.555407 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
695 | ansi.py | ranger_ranger/ranger/gui/ansi.py | # This file is part of ranger, the console file manager.
# License: GNU GPL version 3, see the file "AUTHORS" for details.
# Author: David Barnett <[email protected]>, 2010
"""A library to help to convert ANSI codes to curses instructions."""
from __future__ import (absolute_import, division, print_function)
import re
from ranger.ext.widestring import WideString
from ranger.gui import color
# pylint: disable=invalid-name
# Matches one complete ANSI CSI escape sequence; the capturing group makes
# re.split() keep the sequences in its output.
ansi_re = re.compile('(\x1b' + r'\[\d*(?:;\d+)*?[a-zA-Z])')
# Splits an SGR argument list into (xterm256-fg, xterm256-bg, plain) codes.
codesplit_re = re.compile(r'38;5;(\d+);|48;5;(\d+);|(\d*);')
# The SGR sequence that resets all colors and attributes.
reset = '\x1b[0m'
# pylint: enable=invalid-name
def split_ansi_from_text(ansi_text):
    """Split *ansi_text* into alternating plain-text and escape chunks.

    Even indices of the result hold plain text, odd indices hold the
    ANSI escape sequences that separated them.
    """
    chunks = ansi_re.split(ansi_text)
    return chunks
# For information on the ANSI codes see
# githttp://en.wikipedia.org/wiki/ANSI_escape_code
def text_with_fg_bg_attr(ansi_text):  # pylint: disable=too-many-branches,too-many-statements
    """Parse *ansi_text*, yielding text chunks and (fg, bg, attr) tuples.

    Plain text is yielded unchanged; whenever an ANSI SGR sequence is
    encountered, the current (fg, bg, attr) triple is updated and yielded
    (-1 means "default color", attr is a bitmask of curses attributes).
    """
    fg, bg, attr = -1, -1, 0
    for chunk in split_ansi_from_text(ansi_text):
        if chunk and chunk[0] == '\x1b':
            # Only SGR sequences (terminated by 'm') affect colors/attrs.
            if chunk[-1] != 'm':
                continue
            match = re.match(r'^.\[(.*).$', chunk)
            if not match:
                # XXX I have no test case to determine what should happen here
                continue
            attr_args = match.group(1)
            # Convert arguments to attributes/colors
            for x256fg, x256bg, arg in codesplit_re.findall(attr_args + ';'):
                # first handle xterm256 codes
                try:
                    if x256fg:  # xterm256 foreground
                        fg = int(x256fg)
                        continue
                    elif x256bg:  # xterm256 background
                        bg = int(x256bg)
                        continue
                    elif arg:  # usual ansi code
                        n = int(arg)
                    else:  # empty code means reset
                        n = 0
                except ValueError:
                    continue
                if n == 0:  # reset colors and attributes
                    fg, bg, attr = -1, -1, 0
                elif n == 1:  # enable attribute
                    attr |= color.bold
                elif n == 4:
                    attr |= color.underline
                elif n == 5:
                    attr |= color.blink
                elif n == 7:
                    attr |= color.reverse
                elif n == 8:
                    attr |= color.invisible
                # BUGFIX: SGR 22/24/25/27/28 disable a *single* attribute.
                # The previous code used the logical not ("attr &= not
                # color.bold"), which is False (0) for any non-zero
                # attribute constant and therefore cleared ALL attributes
                # at once.  Bitwise inversion (~) masks out only the one
                # intended bit.
                elif n == 22:  # disable attribute
                    attr &= ~color.bold
                elif n == 24:
                    attr &= ~color.underline
                elif n == 25:
                    attr &= ~color.blink
                elif n == 27:
                    attr &= ~color.reverse
                elif n == 28:
                    attr &= ~color.invisible
                elif 30 <= n <= 37:  # 8 ansi foreground and background colors
                    fg = n - 30
                elif n == 39:
                    fg = -1
                elif 40 <= n <= 47:
                    bg = n - 40
                elif n == 49:
                    bg = -1
                # 8 aixterm high intensity colors (light but not bold)
                elif 90 <= n <= 97:
                    fg = n - 90 + 8
                elif n == 99:
                    fg = -1
                elif 100 <= n <= 107:
                    bg = n - 100 + 8
                elif n == 109:
                    bg = -1
            yield (fg, bg, attr)
        else:
            yield chunk
def char_len(ansi_text):
    """Count the number of visible characters.
    >>> char_len("\x1b[0;30;40mX\x1b[0m")
    1
    >>> char_len("\x1b[0;30;40mXY\x1b[0m")
    2
    >>> char_len("\x1b[0;30;40mX\x1b[0mY")
    2
    >>> char_len("hello")
    5
    >>> char_len("")
    0
    """
    # Strip every escape sequence, then measure the display width.
    visible_text = ansi_re.sub('', ansi_text)
    return len(WideString(visible_text))
def char_slice(ansi_text, start, length):
    """Slices a string with respect to ansi code sequences
    Acts as if the ansi codes aren't there, slices the text from the
    given start point to the given length and adds the codes back in.
    >>> test_string = "abcde\x1b[30mfoo\x1b[31mbar\x1b[0mnormal"
    >>> split_ansi_from_text(test_string)
    ['abcde', '\\x1b[30m', 'foo', '\\x1b[31m', 'bar', '\\x1b[0m', 'normal']
    >>> char_slice(test_string, 1, 3)
    'bcd'
    >>> char_slice(test_string, 5, 6)
    '\\x1b[30mfoo\\x1b[31mbar'
    >>> char_slice(test_string, 0, 8)
    'abcde\\x1b[30mfoo'
    >>> char_slice(test_string, 4, 4)
    'e\\x1b[30mfoo'
    >>> char_slice(test_string, 11, 100)
    '\\x1b[0mnormal'
    >>> char_slice(test_string, 9, 100)
    '\\x1b[31mar\\x1b[0mnormal'
    >>> char_slice(test_string, 9, 4)
    '\\x1b[31mar\\x1b[0mno'
    """
    chunks = []
    last_color = ""
    # pos/old_pos track the visible-character position after/before the
    # current text chunk.
    pos = old_pos = 0
    for i, chunk in enumerate(split_ansi_from_text(ansi_text)):
        if i % 2 == 1:
            # Odd indices are escape sequences: remember the most recent
            # one so it can be re-emitted before the next kept text chunk.
            last_color = chunk
            continue
        chunk = WideString(chunk)
        old_pos = pos
        pos += len(chunk)
        if pos <= start:
            pass # seek
        elif old_pos < start <= pos:
            # The slice begins inside this chunk.
            chunks.append(last_color)
            chunks.append(str(chunk[start - old_pos:start - old_pos + length]))
        elif pos > length + start:
            # The slice ends inside this chunk.
            chunks.append(last_color)
            chunks.append(str(chunk[:start - old_pos + length]))
        else:
            # The whole chunk lies within the slice.
            chunks.append(last_color)
            chunks.append(str(chunk))
        if pos - start >= length:
            break
    return ''.join(chunks)
if __name__ == '__main__':
    # Run the doctests embedded in this module when executed directly;
    # exit with the number of failures as the status code.
    import doctest
    import sys
    sys.exit(doctest.testmod()[0])
| 5,692 | Python | .py | 150 | 26.593333 | 93 | 0.497914 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
696 | context.py | ranger_ranger/ranger/gui/context.py | # This file is part of ranger, the console file manager.
# License: GNU GPL version 3, see the file "AUTHORS" for details.
from __future__ import (absolute_import, division, print_function)
# All known context keys; _context_init() installs each of them as a
# class-level False default on Context.
CONTEXT_KEYS = [
    'reset', 'error', 'badinfo',
    'in_browser', 'in_statusbar', 'in_titlebar', 'in_console',
    'in_pager', 'in_taskview',
    'active_pane', 'inactive_pane',
    'directory', 'file', 'hostname',
    'executable', 'media', 'link', 'fifo', 'socket', 'device',
    'video', 'audio', 'image', 'media', 'document', 'container',
    'selected', 'empty', 'main_column', 'message', 'background',
    'good', 'bad',
    'space', 'permissions', 'owner', 'group', 'mtime', 'nlink',
    'scroll', 'all', 'bot', 'top', 'percentage', 'filter',
    'flat', 'marked', 'tagged', 'tag_marker', 'line_number',
    'cut', 'copied', 'frozen',
    'help_markup', # COMPAT
    'seperator', 'key', 'special', 'border', # COMPAT
    'title', 'text', 'highlight', 'bars', 'quotes', 'tab', 'loaded',
    'keybuffer',
    'infostring',
    'vcsfile', 'vcsremote', 'vcsinfo', 'vcscommit', 'vcsdate',
    'vcsconflict', 'vcschanged', 'vcsunknown', 'vcsignored', 'vcsuntracked',
    'vcsstaged', 'vcssync', 'vcsnone', 'vcsbehind', 'vcsahead', 'vcsdiverged'
]
class Context(object): # pylint: disable=too-few-public-methods
    """A bag of boolean flags describing the current drawing context.

    Every key passed to the constructor is set to True on the instance.
    """
    def __init__(self, keys):
        # Flip all given keys to True in one shot.
        self.__dict__.update(dict.fromkeys(keys, True))
def _context_init():
    """Install every known context key as a class-level False default."""
    for key_name in CONTEXT_KEYS:
        setattr(Context, key_name, False)
_context_init()
| 1,610 | Python | .py | 37 | 38.459459 | 77 | 0.618682 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
697 | ui.py | ranger_ranger/ranger/gui/ui.py | # This file is part of ranger, the console file manager.
# License: GNU GPL version 3, see the file "AUTHORS" for details.
from __future__ import (absolute_import, division, print_function)
import os
import sys
import threading
import curses
from subprocess import CalledProcessError
from ranger.ext.get_executables import get_executables
from ranger.ext.keybinding_parser import KeyBuffer, KeyMaps, ALT_KEY
from ranger.ext.lazy_property import lazy_property
from ranger.ext.signals import Signal
from ranger.ext.spawn import check_output
from .displayable import DisplayableContainer
from .mouse_event import MouseEvent
MOUSEMASK = curses.ALL_MOUSE_EVENTS | curses.REPORT_MOUSE_POSITION
# This escape is not available with a capname from terminfo unlike
# tsl (to_status_line), so it's hardcoded here. It's used just like tsl,
# but it sets the icon title (WM_ICON_NAME) instead of the window title
# (WM_NAME).
ESCAPE_ICON_TITLE = '\033]1;'
_ASCII = ''.join(chr(c) for c in range(32, 127))
def ascii_only(string):
return ''.join(c if c in _ASCII else '?' for c in string)
def _setup_mouse(signal):
if signal['value']:
curses.mousemask(MOUSEMASK)
curses.mouseinterval(0)
# This line solves this problem:
# If a mouse click triggers an action that disables curses and
# starts curses again, (e.g. running a ## file by clicking on its
# preview) and the next key is another mouse click, the bstate of this
# mouse event will be invalid. (atm, invalid bstates are recognized
# as scroll-down, so this avoids an erroneous scroll-down action)
curses.ungetmouse(0, 0, 0, 0, 0)
else:
curses.mousemask(0)
def _in_tmux():
return (os.environ.get("TMUX", "")
and 'tmux' in get_executables())
def _in_screen():
return ('screen' in os.environ.get("TERM", "")
and 'screen' in get_executables())
class UI( # pylint: disable=too-many-instance-attributes,too-many-public-methods
DisplayableContainer):
ALLOWED_VIEWMODES = 'miller', 'multipane'
is_set_up = False
load_mode = False
is_on = False
termsize = None
def __init__(self, env=None, fm=None): # pylint: disable=super-init-not-called
self.keybuffer = KeyBuffer()
self.keymaps = KeyMaps(self.keybuffer)
self.redrawlock = threading.Event()
self.redrawlock.set()
self.titlebar = None
self._viewmode = None
self.taskview = None
self.status = None
self.console = None
self.pager = None
self.multiplexer = None
self._draw_title = None
self._tmux_automatic_rename = None
self._multiplexer_title = None
self.browser = None
if fm is not None:
self.fm = fm
def setup_curses(self):
os.environ['ESCDELAY'] = '25' # don't know a cleaner way
try:
self.win = curses.initscr()
except curses.error as ex:
if ex.args[0] == "setupterm: could not find terminal":
os.environ['TERM'] = 'linux'
self.win = curses.initscr()
self.keymaps.use_keymap('browser')
DisplayableContainer.__init__(self, None)
def initialize(self):
"""initialize curses, then call setup (at the first time) and resize."""
self.win.leaveok(0)
self.win.keypad(1)
self.load_mode = False
curses.cbreak()
curses.noecho()
curses.halfdelay(20)
try:
curses.curs_set(int(bool(self.settings.show_cursor)))
except curses.error:
pass
curses.start_color()
try:
curses.use_default_colors()
except curses.error:
pass
self.settings.signal_bind('setopt.mouse_enabled', _setup_mouse)
self.settings.signal_bind('setopt.freeze_files', self.redraw_statusbar)
_setup_mouse({"value": self.settings.mouse_enabled})
if not self.is_set_up:
self.is_set_up = True
self.setup()
self.win.addstr("loading...")
self.win.refresh()
self._draw_title = curses.tigetflag('hs') # has_status_line
self.update_size()
self.is_on = True
self.handle_multiplexer()
if 'vcsthread' in self.__dict__:
self.vcsthread.unpause()
def suspend(self):
"""Turn off curses"""
if 'vcsthread' in self.__dict__:
self.vcsthread.pause()
self.vcsthread.paused.wait()
if self.fm.image_displayer:
self.fm.image_displayer.quit()
self.win.keypad(0)
curses.nocbreak()
curses.echo()
try:
curses.curs_set(1)
except curses.error:
pass
if self.settings.mouse_enabled:
_setup_mouse({"value": False})
curses.endwin()
self.is_on = False
def set_load_mode(self, boolean):
boolean = bool(boolean)
if boolean != self.load_mode:
self.load_mode = boolean
if boolean:
# don't wait for key presses in the load mode
curses.cbreak()
self.win.nodelay(1)
else:
self.win.nodelay(0)
# Sanitize halfdelay setting
halfdelay = min(255, max(1, self.settings.idle_delay // 100))
curses.halfdelay(halfdelay)
def destroy(self):
"""Destroy all widgets and turn off curses"""
if 'vcsthread' in self.__dict__:
if not self.vcsthread.stop():
self.fm.notify('Failed to stop `UI.vcsthread`', bad=True)
del self.__dict__['vcsthread']
DisplayableContainer.destroy(self)
self.restore_multiplexer_name()
self.suspend()
def handle_mouse(self):
"""Handles mouse input"""
try:
event = MouseEvent(curses.getmouse())
except curses.error:
return
if not self.console.visible:
DisplayableContainer.click(self, event)
def handle_key(self, key):
"""Handles key input"""
self.hint()
if key < 0:
self.keybuffer.clear()
elif not DisplayableContainer.press(self, key):
self.keymaps.use_keymap('browser')
self.press(key)
def press(self, key):
keybuffer = self.keybuffer
self.status.clear_message()
keybuffer.add(key)
self.fm.hide_bookmarks()
self.browser.draw_hints = not keybuffer.finished_parsing \
and keybuffer.finished_parsing_quantifier
if keybuffer.result is not None:
try:
self.fm.execute_console(
keybuffer.result,
wildcards=keybuffer.wildcards,
quantifier=keybuffer.quantifier,
)
finally:
if keybuffer.finished_parsing:
keybuffer.clear()
elif keybuffer.finished_parsing:
keybuffer.clear()
return False
return True
def handle_keys(self, *keys):
for key in keys:
self.handle_key(key)
def handle_input(self): # pylint: disable=too-many-branches
key = self.win.getch()
if key == curses.KEY_ENTER:
key = ord('\n')
if key == 27 or (128 <= key < 256):
# Handle special keys like ALT+X or unicode here:
keys = [key]
previous_load_mode = self.load_mode
self.set_load_mode(True)
for _ in range(4):
getkey = self.win.getch()
if getkey != -1:
keys.append(getkey)
if len(keys) == 1:
keys.append(-1)
elif keys[0] == 27:
keys[0] = ALT_KEY
if self.settings.xterm_alt_key:
if len(keys) == 2 and keys[1] in range(127, 256):
if keys[0] == 195:
keys = [ALT_KEY, keys[1] - 64]
elif keys[0] == 194:
keys = [ALT_KEY, keys[1] - 128]
self.handle_keys(*keys)
self.set_load_mode(previous_load_mode)
if self.settings.flushinput and not self.console.visible:
curses.flushinp()
else:
# Handle simple key presses, CTRL+X, etc here:
if key >= 0:
if self.settings.flushinput and not self.console.visible:
curses.flushinp()
if key == curses.KEY_MOUSE:
self.handle_mouse()
elif key == curses.KEY_RESIZE:
self.update_size()
else:
if not self.fm.input_is_blocked():
self.handle_key(key)
elif key == -1 and not os.isatty(sys.stdin.fileno()):
# STDIN has been closed
self.fm.exit()
def setup(self):
"""Build up the UI by initializing widgets."""
from ranger.gui.widgets.titlebar import TitleBar
from ranger.gui.widgets.console import Console
from ranger.gui.widgets.statusbar import StatusBar
from ranger.gui.widgets.taskview import TaskView
from ranger.gui.widgets.pager import Pager
# Create a titlebar
self.titlebar = TitleBar(self.win)
self.add_child(self.titlebar)
# Create the browser view
self.settings.signal_bind('setopt.viewmode', self._set_viewmode)
self._viewmode = None
# The following line sets self.browser implicitly through the signal
self.viewmode = self.settings.viewmode
self.add_child(self.browser)
# Create the process manager
self.taskview = TaskView(self.win)
self.taskview.visible = False
self.add_child(self.taskview)
# Create the status bar
self.status = StatusBar(self.win, self.browser.main_column)
self.add_child(self.status)
# Create the console
self.console = Console(self.win)
self.add_child(self.console)
self.console.visible = False
# Create the pager
self.pager = Pager(self.win)
self.pager.visible = False
self.add_child(self.pager)
@lazy_property
def vcsthread(self):
"""VCS thread"""
from ranger.ext.vcs import VcsThread
thread = VcsThread(self)
thread.start()
return thread
def redraw(self):
"""Redraw all widgets"""
self.redrawlock.wait()
self.redrawlock.clear()
self.poke()
# determine which widgets are shown
if self.console.wait_for_command_input or self.console.question_queue:
self.console.focused = True
self.console.visible = True
self.status.visible = False
else:
self.console.focused = False
self.console.visible = False
self.status.visible = True
self.draw()
self.finalize()
self.redrawlock.set()
def redraw_window(self):
"""Redraw the window. This only calls self.win.redrawwin()."""
self.win.erase()
self.win.redrawwin()
self.win.refresh()
self.win.redrawwin()
self.need_redraw = True
def update_size(self):
"""resize all widgets"""
self.termsize = self.win.getmaxyx()
y, x = self.termsize
self.browser.resize(self.settings.status_bar_on_top and 2 or 1, 0, y - 2, x)
self.taskview.resize(1, 0, y - 2, x)
self.pager.resize(1, 0, y - 2, x)
self.titlebar.resize(0, 0, 1, x)
self.status.resize(self.settings.status_bar_on_top and 1 or y - 1, 0, 1, x)
self.console.resize(y - 1, 0, 1, x)
def draw(self):
"""Draw all objects in the container"""
self.win.touchwin()
DisplayableContainer.draw(self)
if self._draw_title and self.settings.update_title:
if self.fm.thisdir:
cwd = self.fm.thisdir.path
if self.settings.tilde_in_titlebar \
and (cwd == self.fm.home_path
or cwd.startswith(self.fm.home_path + "/")):
cwd = '~' + cwd[len(self.fm.home_path):]
if self.settings.shorten_title:
split = cwd.rsplit(os.sep, self.settings.shorten_title)
if os.sep in split[0]:
cwd = os.sep.join(split[1:])
else:
cwd = "not accessible"
try:
fixed_cwd = cwd.encode('utf-8', 'surrogateescape'). \
decode('utf-8', 'replace')
titlecap = curses.tigetstr('tsl')
escapes = (
[titlecap.decode("latin-1")]
if titlecap is not None
else [] + [ESCAPE_ICON_TITLE]
)
belcap = curses.tigetstr('fsl')
bel = belcap.decode('latin-1') if belcap is not None else ""
fmt_tups = [(e, fixed_cwd, bel) for e in escapes]
except UnicodeError:
pass
else:
for fmt_tup in fmt_tups:
sys.stdout.write("%sranger:%s%s" % fmt_tup)
sys.stdout.flush()
self.win.refresh()
def finalize(self):
"""Finalize every object in container and refresh the window"""
DisplayableContainer.finalize(self)
self.win.refresh()
def draw_images(self):
if self.pager.visible:
self.pager.draw_image()
elif self.browser.pager:
if self.browser.pager.visible:
self.browser.pager.draw_image()
else:
self.browser.columns[-1].draw_image()
def close_pager(self):
if self.console.visible:
self.console.focused = True
self.pager.close()
self.pager.visible = False
self.pager.focused = False
self.browser.visible = True
def open_pager(self):
self.browser.columns[-1].clear_image(force=True)
if self.console.focused:
self.console.focused = False
self.pager.open()
self.pager.visible = True
self.pager.focused = True
self.browser.visible = False
return self.pager
def open_embedded_pager(self):
self.browser.open_pager()
for column in self.browser.columns:
if column == self.browser.main_column:
break
column.level_shift(amount=1)
return self.browser.pager
def close_embedded_pager(self):
self.browser.close_pager()
for column in self.browser.columns:
column.level_restore()
def open_console(self, string='', prompt=None, position=None):
if self.console.open(string, prompt=prompt, position=position):
self.status.msg = None
def close_console(self):
self.console.close()
self.close_pager()
def open_taskview(self):
self.browser.columns[-1].clear_image(force=True)
self.pager.close()
self.pager.visible = False
self.pager.focused = False
self.console.visible = False
self.browser.visible = False
self.taskview.visible = True
self.taskview.focused = True
def redraw_main_column(self):
self.browser.main_column.need_redraw = True
def redraw_statusbar(self):
self.status.need_redraw = True
def close_taskview(self):
self.taskview.visible = False
self.browser.visible = True
self.taskview.focused = False
def throbber(self, string='.', remove=False):
if remove:
self.titlebar.throbber = type(self.titlebar).throbber
else:
self.titlebar.throbber = string
# Handles window renaming behaviour of the terminal multiplexers
# GNU Screen and Tmux
def handle_multiplexer(self):
if (self.settings.update_tmux_title and not self._multiplexer_title):
try:
if _in_tmux():
# Stores the automatic-rename setting
# prints out a warning if allow-rename isn't set in tmux
try:
tmux_allow_rename = check_output(
['tmux', 'show-window-options', '-v',
'allow-rename']).strip()
except CalledProcessError:
tmux_allow_rename = 'off'
if tmux_allow_rename == 'off':
self.fm.notify('Warning: allow-rename not set in Tmux!',
bad=True)
else:
self._multiplexer_title = check_output(
['tmux', 'display-message', '-p', '#W']).strip()
self._tmux_automatic_rename = check_output(
['tmux', 'show-window-options', '-v',
'automatic-rename']).strip()
if self._tmux_automatic_rename == 'on':
check_output(['tmux', 'set-window-option',
'automatic-rename', 'off'])
elif _in_screen():
# Stores the screen window name before renaming it
# gives out a warning if $TERM is not "screen"
self._multiplexer_title = check_output(
['screen', '-Q', 'title']).strip()
except CalledProcessError:
self.fm.notify("Couldn't access previous multiplexer window"
" name, won't be able to restore.",
bad=False)
if not self._multiplexer_title:
self._multiplexer_title = os.path.basename(
os.environ.get("SHELL", "shell"))
sys.stdout.write("\033kranger\033\\")
sys.stdout.flush()
# Restore window name
def restore_multiplexer_name(self):
if self._multiplexer_title:
try:
if _in_tmux():
if self._tmux_automatic_rename:
check_output(['tmux', 'set-window-option',
'automatic-rename',
self._tmux_automatic_rename])
else:
check_output(['tmux', 'set-window-option', '-u',
'automatic-rename'])
except CalledProcessError:
self.fm.notify("Could not restore multiplexer window name!",
bad=True)
sys.stdout.write("\033k{0}\033\\".format(self._multiplexer_title))
sys.stdout.flush()
def hint(self, text=None):
self.status.hint = text
def get_pager(self):
if self.browser.pager and self.browser.pager.visible:
return self.browser.pager
return self.pager
def _get_viewmode(self):
return self._viewmode
def _set_viewmode(self, value):
if isinstance(value, Signal):
value = value.value
if value == '':
value = self.ALLOWED_VIEWMODES[0]
if value in self.ALLOWED_VIEWMODES:
if self._viewmode != value:
self._viewmode = value
new_browser = self._viewmode_to_class(value)(self.win)
if self.browser is None:
self.add_child(new_browser)
else:
old_size = self.browser.y, self.browser.x, self.browser.hei, self.browser.wid
self.replace_child(self.browser, new_browser)
self.browser.destroy()
new_browser.resize(*old_size)
self.browser = new_browser
self.redraw_window()
else:
raise ValueError("Attempting to set invalid viewmode `%s`, should "
"be one of `%s`." % (value, "`, `".join(self.ALLOWED_VIEWMODES)))
viewmode = property(_get_viewmode, _set_viewmode)
@staticmethod
def _viewmode_to_class(viewmode):
if viewmode == 'miller':
from ranger.gui.widgets.view_miller import ViewMiller
return ViewMiller
elif viewmode == 'multipane':
from ranger.gui.widgets.view_multipane import ViewMultipane
return ViewMultipane
return None
| 20,665 | Python | .py | 502 | 29.171315 | 97 | 0.562768 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
698 | bar.py | ranger_ranger/ranger/gui/bar.py | # This file is part of ranger, the console file manager.
# License: GNU GPL version 3, see the file "AUTHORS" for details.
from __future__ import (absolute_import, division, print_function)
from ranger import PY3
from ranger.ext.widestring import WideString, utf_char_width
class Bar(object):
left = None
right = None
gap = None
def __init__(self, base_color_tag):
self.left = BarSide(base_color_tag)
self.right = BarSide(base_color_tag)
self.gap = BarSide(base_color_tag)
def add(self, *a, **kw):
self.left.add(*a, **kw)
def addright(self, *a, **kw):
self.right.add(*a, **kw)
def sumsize(self):
return self.left.sumsize() + self.right.sumsize()
def fixedsize(self):
return self.left.fixedsize() + self.right.fixedsize()
def shrink_by_removing(self, wid):
leftsize = self.left.sumsize()
rightsize = self.right.sumsize()
sumsize = leftsize + rightsize
# remove elements from the left until it fits
if sumsize > wid:
while self.left:
leftsize -= len(self.left.pop(-1))
if leftsize + rightsize <= wid:
break
sumsize = leftsize + rightsize
# remove elements from the right until it fits
if sumsize > wid:
while self.right:
rightsize -= len(self.right.pop(0))
if leftsize + rightsize <= wid:
break
sumsize = leftsize + rightsize
if sumsize < wid:
self.fill_gap(' ', (wid - sumsize), gapwidth=True)
def shrink_from_the_left(self, wid):
fixedsize = self.fixedsize()
if wid < fixedsize:
raise ValueError("Cannot shrink down to that size by cutting")
leftsize = self.left.sumsize()
rightsize = self.right.sumsize()
oversize = leftsize + rightsize - wid
if oversize <= 0:
return self.fill_gap(' ', wid, gapwidth=False)
# Shrink items to a minimum size until there is enough room.
for item in self.left:
if not item.fixed:
itemlen = len(item)
if oversize > itemlen - item.min_size:
item.cut_off_to(item.min_size)
oversize -= (itemlen - item.min_size)
else:
item.cut_off(oversize)
break
return None
def fill_gap(self, char, wid, gapwidth=False):
del self.gap[:]
if not gapwidth:
wid = wid - self.sumsize()
if wid > 0:
self.gap.add(char * wid, 'space')
def combine(self):
return self.left + self.gap + self.right
class BarSide(list):
def __init__(self, base_color_tag): # pylint: disable=super-init-not-called
self.base_color_tag = base_color_tag
def add(self, string, *lst, **kw):
colorstr = ColoredString(string, self.base_color_tag, *lst)
colorstr.__dict__.update(kw)
self.append(colorstr)
def add_space(self, n=1):
self.add(' ' * n, 'space')
def sumsize(self):
return sum(len(item) for item in self)
def fixedsize(self):
n = 0
for item in self:
if item.fixed:
n += len(item)
else:
n += item.min_size
return n
class ColoredString(object):
def __init__(self, string, *lst):
self.string = WideString(string)
self.lst = lst
self.fixed = False
if not string or not self.string.chars:
self.min_size = 0
elif PY3:
self.min_size = utf_char_width(string[0])
else:
self.min_size = utf_char_width(self.string.chars[0].decode('utf-8'))
def cut_off(self, n):
if n >= 1:
self.string = self.string[:-n]
def cut_off_to(self, n):
if n < self.min_size:
self.string = self.string[:self.min_size]
elif n < len(self.string):
self.string = self.string[:n]
def __len__(self):
return len(self.string)
def __str__(self):
return str(self.string)
| 4,225 | Python | .py | 111 | 27.936937 | 80 | 0.562852 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
699 | mouse_event.py | ranger_ranger/ranger/gui/mouse_event.py | # This file is part of ranger, the console file manager.
# License: GNU GPL version 3, see the file "AUTHORS" for details.
from __future__ import (absolute_import, division, print_function)
import curses
class MouseEvent(object):
PRESSED = [
0,
curses.BUTTON1_PRESSED,
curses.BUTTON2_PRESSED,
curses.BUTTON3_PRESSED,
curses.BUTTON4_PRESSED,
]
CTRL_SCROLLWHEEL_MULTIPLIER = 5
def __init__(self, getmouse):
"""Creates a MouseEvent object from the result of win.getmouse()"""
_, self.x, self.y, _, self.bstate = getmouse
# x-values above ~220 suddenly became negative, apparently
# it's sufficient to add 0xFF to fix that error.
if self.x < 0:
self.x += 0xFF
if self.y < 0:
self.y += 0xFF
def pressed(self, n):
"""Returns whether the mouse key n is pressed"""
try:
return (self.bstate & MouseEvent.PRESSED[n]) != 0
except IndexError:
return False
def mouse_wheel_direction(self):
"""Returns the direction of the scroll action, 0 if there was none"""
# If the bstate > ALL_MOUSE_EVENTS, it's an invalid mouse button.
# I interpret invalid buttons as "scroll down" because all tested
# systems have a broken curses implementation and this is a workaround.
# Recently it seems to have been fixed, as 2**21 was introduced as
# the code for the "scroll down" button.
if self.bstate & curses.BUTTON4_PRESSED:
return -self.CTRL_SCROLLWHEEL_MULTIPLIER if self.ctrl() else -1
elif self.bstate & curses.BUTTON2_PRESSED \
or self.bstate & 2**21 \
or self.bstate > curses.ALL_MOUSE_EVENTS:
return self.CTRL_SCROLLWHEEL_MULTIPLIER if self.ctrl() else 1
return 0
def ctrl(self):
return self.bstate & curses.BUTTON_CTRL
def alt(self):
return self.bstate & curses.BUTTON_ALT
def shift(self):
return self.bstate & curses.BUTTON_SHIFT
def key_invalid(self):
return self.bstate > curses.ALL_MOUSE_EVENTS
| 2,155 | Python | .py | 50 | 34.64 | 79 | 0.637667 | ranger/ranger | 15,344 | 881 | 907 | GPL-3.0 | 9/5/2024, 5:08:02 PM (Europe/Amsterdam) |
Subsets and Splits